1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/x64/assembler-x64.h"
12 #include <intrin.h> // _xgetbv()
15 #include <sys/sysctl.h>
18 #include "src/base/bits.h"
19 #include "src/macro-assembler.h"
25 // -----------------------------------------------------------------------------
26 // Implementation of CpuFeatures
// Reads an extended control register (XCR) via the XGETBV instruction.
// Returns the 64-bit register value assembled from EDX:EAX.
// NOTE(review): this listing elides some original lines (numbering gaps),
// e.g. the declarations of eax/edx and the closing brace; do not edit
// mechanically.
32 V8_INLINE uint64_t _xgetbv(unsigned int xcr) {
34 // Check xgetbv; this uses a .byte sequence instead of the instruction
35 // directly because older assemblers do not include support for xgetbv and
36 // there is no easy way to conditionally compile based on the assembler
// 0F 01 D0 is the raw encoding of XGETBV; xcr selects the register via ECX.
38 __asm__ volatile(".byte 0x0f, 0x01, 0xd0" : "=a"(eax), "=d"(edx) : "c"(xcr));
39 return static_cast<uint64_t>(eax) | (static_cast<uint64_t>(edx) << 32);
// MSVC's <intrin.h> provides _xgetbv but not this mask constant; define it
// here so both toolchains can query XCR0 uniformly.
42 #define _XCR_XFEATURE_ENABLED_MASK 0
44 #endif  // !V8_LIBC_MSVCRT
// Returns true when the operating system preserves AVX state across context
// switches (XCR0 bits 1 and 2 — SSE and AVX — both enabled by the OS).
// On Mac OS X with kernel major version <= 13 (OS X 10.9 and earlier) AVX is
// disabled outright because of a known state-corruption bug.
// NOTE(review): the enclosing #if V8_OS_MACOSX guard and the buffer
// declaration are elided from this view (numbering gaps).
47 bool OSHasAVXSupport() {
49 // Mac OS X up to 10.9 has a bug where AVX transitions were indeed being
50 // caused by ISRs, so we detect that here and disable AVX in that case.
52 size_t buffer_size = arraysize(buffer);
53 int ctl_name[] = {CTL_KERN, KERN_OSRELEASE};
// Query the kernel release string (e.g. "13.4.0"); failure is fatal since
// the answer gates correctness of generated AVX code.
54 if (sysctl(ctl_name, 2, buffer, &buffer_size, nullptr, 0) != 0) {
55 V8_Fatal(__FILE__, __LINE__, "V8 failed to get kernel version");
57 // The buffer now contains a string of the form XX.YY.ZZ, where
58 // XX is the major kernel version component.
59 char* period_pos = strchr(buffer, '.');
60 DCHECK_NOT_NULL(period_pos);
// strtol stops at the first '.', yielding the major version only.
62 long kernel_version_major = strtol(buffer, nullptr, 10);  // NOLINT
63 if (kernel_version_major <= 13) return false;
64 #endif  // V8_OS_MACOSX
65 // Check whether OS claims to support AVX.
// Bit 1 (XMM) and bit 2 (YMM) of XCR0 must both be set: 0x6.
66 uint64_t feature_mask = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
67 return (feature_mask & 0x6) == 0x6;
// Probes the host CPU and populates the supported_ feature bitmask.
// cross_compile: when true, only statically-determined features are used so
// that snapshots stay portable.
// NOTE(review): the `base::CPU cpu;` declaration and some condition lines
// (e.g. the OSHasAVXSupport() conjuncts) are elided from this view.
73 void CpuFeatures::ProbeImpl(bool cross_compile) {
75 CHECK(cpu.has_sse2());  // SSE2 support is mandatory.
76 CHECK(cpu.has_cmov());  // CMOV support is mandatory.
78 // Only use statically determined features for cross compile (snapshot).
79 if (cross_compile) return;
// Each feature is enabled only if the CPU reports it AND the corresponding
// runtime flag allows it.
81 if (cpu.has_sse41() && FLAG_enable_sse4_1) supported_ |= 1u << SSE4_1;
82 if (cpu.has_sse3() && FLAG_enable_sse3) supported_ |= 1u << SSE3;
83 // SAHF is not generally available in long mode.
84 if (cpu.has_sahf() && FLAG_enable_sahf) supported_ |= 1u << SAHF;
// AVX/FMA3 additionally require OSXSAVE (OS saves extended state); the
// elided conjunct presumably also checks OSHasAVXSupport() — confirm against
// the full source.
85 if (cpu.has_avx() && FLAG_enable_avx && cpu.has_osxsave() &&
87 supported_ |= 1u << AVX;
89 if (cpu.has_fma3() && FLAG_enable_fma3 && cpu.has_osxsave() &&
91 supported_ |= 1u << FMA3;
// ATOM tuning: auto-detect, or force via --mcpu=atom.
93 if (strcmp(FLAG_mcpu, "auto") == 0) {
94 if (cpu.is_atom()) supported_ |= 1u << ATOM;
95 } else if (strcmp(FLAG_mcpu, "atom") == 0) {
96 supported_ |= 1u << ATOM;
// PrintTarget is a no-op on x64 (there is no target variant to report).
101 void CpuFeatures::PrintTarget() { }
// Prints the probed feature set as 0/1 flags, one line, for diagnostics.
102 void CpuFeatures::PrintFeatures() {
103 printf("SSE3=%d SSE4_1=%d SAHF=%d AVX=%d FMA3=%d ATOM=%d\n",
104 CpuFeatures::IsSupported(SSE3), CpuFeatures::IsSupported(SSE4_1),
105 CpuFeatures::IsSupported(SAHF), CpuFeatures::IsSupported(AVX),
106 CpuFeatures::IsSupported(FMA3), CpuFeatures::IsSupported(ATOM));
110 // -----------------------------------------------------------------------------
111 // Implementation of RelocInfo
113 // Patch the code at the current PC with a call to the target address.
114 // Additional guard int3 instructions can be added if required.
// Overwrites the code at pc_ with `movp kScratchRegister, target` followed by
// `call kScratchRegister`, then pads with guard_bytes int3 instructions.
// Used only when patching existing code, never when generating new code.
115 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
116 int code_size = Assembler::kCallSequenceLength + guard_bytes;
118 // Create a code patcher.
119 CodePatcher patcher(pc_, code_size);
121 // Add a label for checking the size of the code used for returning.
123 Label check_codesize;
124 patcher.masm()->bind(&check_codesize);
// Load the absolute target into the scratch register and call through it;
// an indirect call is used because a 32-bit relative displacement may not
// reach an arbitrary 64-bit target.
128 patcher.masm()->movp(kScratchRegister, reinterpret_cast<void*>(target),
129 Assembler::RelocInfoNone());
130 patcher.masm()->call(kScratchRegister);
132 // Check that the size of the code generated is as expected.
133 DCHECK_EQ(Assembler::kCallSequenceLength,
134 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
136 // Add the requested number of int3 instructions after the call.
137 for (int i = 0; i < guard_bytes; i++) {
138 patcher.masm()->int3();
// Copies instruction_count raw bytes over the code at pc_ and flushes the
// instruction cache so the CPU observes the new code.
143 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
144 // Patch the code at the current address with the supplied instructions.
145 for (int i = 0; i < instruction_count; i++) {
146 *(pc_ + i) = *(instructions + i);
149 // Indicate that code has changed.
150 CpuFeatures::FlushICache(pc_, instruction_count);
154 // -----------------------------------------------------------------------------
155 // Register constants.
// Forward map: allocation index -> hardware register code. rsp/rbp (stack
// management), r10 and r12/r13 are not allocatable, hence absent.
158 Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
159 // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
160 0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
// Reverse map: register code -> allocation index; -1 marks registers that
// are not available to the allocator. Must stay the exact inverse of the
// table above.
163 const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
164 0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
168 // -----------------------------------------------------------------------------
169 // Implementation of Operand
// Constructs [base + disp]. Chooses the shortest ModR/M encoding: no
// displacement when disp==0 (except rbp/r13, whose mod=0 form means
// RIP/absolute), 8-bit when it fits, otherwise 32-bit.
// NOTE(review): the set_modrm/set_disp* calls in the else-branches are
// elided from this view.
171 Operand::Operand(Register base, int32_t disp) : rex_(0) {
173 if (base.is(rsp) || base.is(r12)) {
174 // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
175 set_sib(times_1, rsp, base);
178 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
180 } else if (is_int8(disp)) {
// Constructs [base + index*scale + disp]; always needs a SIB byte.
190 Operand::Operand(Register base,
193 int32_t disp) : rex_(0) {
// rsp cannot be an index register (its index encoding means "no index").
194 DCHECK(!index.is(rsp));
196 set_sib(scale, index, base);
197 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
198 // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
199 // possibly set by set_sib.
201 } else if (is_int8(disp)) {
// Constructs [index*scale + disp32] with no base: SIB base = rbp with mod=0
// selects the "disp32 only" addressing form.
211 Operand::Operand(Register index,
213 int32_t disp) : rex_(0) {
214 DCHECK(!index.is(rsp));
217 set_sib(scale, index, rbp);
// Label-relative operand: stashes the Label pointer in the displacement
// slot; it is resolved to a RIP-relative disp32 in emit_operand.
222 Operand::Operand(Label* label) : rex_(0), len_(1) {
223 DCHECK_NOT_NULL(label);
225 set_disp64(reinterpret_cast<intptr_t>(label));
// Copy-constructs an operand equal to `operand` with `offset` added to its
// displacement, re-encoding to the shortest displacement form. Decodes the
// existing ModR/M (+ optional SIB) to find the current displacement first.
229 Operand::Operand(const Operand& operand, int32_t offset) {
230 DCHECK(operand.len_ >= 1);
231 // Operand encodes REX ModR/M [SIB] [Disp].
232 byte modrm = operand.buf_[0];
233 DCHECK(modrm < 0xC0);  // Disallow mode 3 (register target).
// r/m field of 0b100 signals a SIB byte follows.
234 bool has_sib = ((modrm & 0x07) == 0x04);
235 byte mode = modrm & 0xC0;
236 int disp_offset = has_sib ? 2 : 1;
237 int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
238 // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
240 bool is_baseless = (mode == 0) && (base_reg == 0x05);  // No base or RIP base.
241 int32_t disp_value = 0;
242 if (mode == 0x80 || is_baseless) {
243 // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
244 disp_value = *bit_cast<const int32_t*>(&operand.buf_[disp_offset]);
245 } else if (mode == 0x40) {
246 // Mode 1: Byte displacement.
247 disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
250 // Write new operand with same registers, but with modified displacement.
// Signed-overflow check: adding offset must move disp_value in offset's
// direction.
251 DCHECK(offset >= 0 ? disp_value + offset > disp_value
252 : disp_value + offset < disp_value);  // No overflow.
253 disp_value += offset;
255 if (!is_int8(disp_value) || is_baseless) {
256 // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
// Baseless form keeps mode 0 (its encoding already implies disp32).
257 buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
258 len_ = disp_offset + 4;
259 Memory::int32_at(&buf_[disp_offset]) = disp_value;
260 } else if (disp_value != 0 || (base_reg == 0x05)) {
261 // Need 8 bits of displacement.
262 buf_[0] = (modrm & 0x3f) | 0x40;  // Mode 1.
263 len_ = disp_offset + 1;
264 buf_[disp_offset] = static_cast<byte>(disp_value);
266 // Need no displacement.
267 buf_[0] = (modrm & 0x3f);  // Mode 0.
// Preserve the SIB byte when present (copied unconditionally here; only
// meaningful if has_sib — the elided lines presumably guard or set rex_).
271 buf_[1] = operand.buf_[1];
// Returns true if this memory operand's address computation reads `reg`
// (as base or index), decoding ModR/M, SIB and the REX.B/REX.X bits.
276 bool Operand::AddressUsesRegister(Register reg) const {
277 int code = reg.code();
278 DCHECK((buf_[0] & 0xC0) != 0xC0);  // Always a memory operand.
279 // Start with only low three bits of base register. Initial decoding doesn't
280 // distinguish on the REX.B bit.
281 int base_code = buf_[0] & 0x07;
282 if (base_code == rsp.code()) {
283 // SIB byte present in buf_[1].
284 // Check the index register from the SIB byte + REX.X prefix.
285 int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
286 // Index code (including REX.X) of 0x04 (rsp) means no index register.
287 if (index_code != rsp.code() && index_code == code) return true;
288 // Add REX.B to get the full base register code.
289 base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
290 // A base register of 0x05 (rbp) with mod = 0 means no base register.
291 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
292 return code == base_code;
294 // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
// ...no base (disp32/RIP-relative form): the address reads no register.
296 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
297 base_code |= ((rex_ & 0x01) << 3);
298 return code == base_code;
303 // -----------------------------------------------------------------------------
304 // Implementation of Assembler.
306 #ifdef GENERATED_CODE_COVERAGE
307 static void InitCoverageLog();
// Constructs an assembler over `buffer` (owned by the caller when non-null,
// otherwise allocated by AssemblerBase). Relocation info is written backwards
// from the end of the buffer toward pc_.
310 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
311 : AssemblerBase(isolate, buffer, buffer_size),
313 positions_recorder_(this) {
314 // Clear the buffer in debug mode unless it was provided by the
315 // caller in which case we can't be sure it's okay to overwrite
316 // existing code in it.
// 0xCC = int3: running uninitialized buffer memory traps immediately.
319 memset(buffer_, 0xCC, buffer_size_);  // int3
323 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
326 #ifdef GENERATED_CODE_COVERAGE
// Finalizes assembly and fills in `desc` with the buffer, instruction size
// and relocation info size. Instructions grow from the buffer start;
// relocation info grows backwards from the end; they must not overlap.
332 void Assembler::GetCode(CodeDesc* desc) {
333 // Finalize code (at this point overflow() may be true, but the gap ensures
334 // that we are still not overlapping instructions and relocation info).
335 reloc_info_writer.Finish();
336 DCHECK(pc_ <= reloc_info_writer.pos());  // No overlap.
337 // Set up code descriptor.
338 desc->buffer = buffer_;
339 desc->buffer_size = buffer_size_;
340 desc->instr_size = pc_offset();
341 DCHECK(desc->instr_size > 0);  // Zero-size code objects upset the system.
// reloc size = bytes between reloc writer position and end of buffer.
343 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
// Pads with nops until pc_offset() is a multiple of m (m must be a power
// of two).
348 void Assembler::Align(int m) {
349 DCHECK(base::bits::IsPowerOfTwo32(m));
350 int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
// Aligns jump targets to a 16-byte boundary for decoder efficiency.
355 void Assembler::CodeTargetAlign() {
356 Align(16);  // Preferred alignment of jump targets on x64.
// Recognizes nop encodings at addr: optional 0x66 prefixes then 0x90, or
// the multi-byte 0F 1F form.
360 bool Assembler::IsNop(Address addr) {
362 while (*a == 0x66) a++;
363 if (*a == 0x90) return true;
364 if (a[0] == 0xf && a[1] == 0x1f) return true;
// Binds label L to code position pos, fixing up every pending reference on
// both the far (32-bit) linked list and the near (8-bit) linked list.
// Far list: each link site holds the position of the next link; a site whose
// 4 preceding bytes are zero marks a 64-bit absolute internal reference,
// otherwise a 32-bit pc-relative displacement is patched in.
369 void Assembler::bind_to(Label* L, int pos) {
370 DCHECK(!L->is_bound());  // Label may only be bound once.
371 DCHECK(0 <= pos && pos <= pc_offset());  // Position must be valid.
372 if (L->is_linked()) {
373 int current = L->pos();
374 int next = long_at(current);
// The list terminates where a link points at itself (next == current).
375 while (next != current) {
376 if (current >= 4 && long_at(current - 4) == 0) {
// Absolute 64-bit internal reference: write the target address and
// remember the site so GrowBuffer can relocate it.
378 intptr_t imm64 = reinterpret_cast<intptr_t>(buffer_ + pos);
379 *reinterpret_cast<intptr_t*>(addr_at(current - 4)) = imm64;
380 internal_reference_positions_.push_back(current - 4);
382 // Relative address, relative to point after address.
383 int imm32 = pos - (current + sizeof(int32_t));
384 long_at_put(current, imm32);
387 next = long_at(next);
389 // Fix up last fixup on linked list.
390 if (current >= 4 && long_at(current - 4) == 0) {
392 intptr_t imm64 = reinterpret_cast<intptr_t>(buffer_ + pos);
393 *reinterpret_cast<intptr_t*>(addr_at(current - 4)) = imm64;
394 internal_reference_positions_.push_back(current - 4);
396 // Relative address, relative to point after address.
397 int imm32 = pos - (current + sizeof(int32_t));
398 long_at_put(current, imm32);
// Near list: each site holds a signed 8-bit offset to the next site (<= 0);
// 0 terminates the list.
401 while (L->is_near_linked()) {
402 int fixup_pos = L->near_link_pos();
404 static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
405 DCHECK(offset_to_next <= 0);
406 int disp = pos - (fixup_pos + sizeof(int8_t));
// CHECK (not DCHECK): a near jump that cannot reach is a hard error even in
// release builds.
407 CHECK(is_int8(disp));
408 set_byte_at(fixup_pos, disp);
409 if (offset_to_next < 0) {
410 L->link_to(fixup_pos + offset_to_next, Label::kNear);
// Binds L to the current assembly position.
419 void Assembler::bind(Label* L) {
420 bind_to(L, pc_offset());
// Doubles the code buffer, moving instructions (from the front) and
// relocation info (from the back) into the new buffer, then rewrites all
// recorded 64-bit internal references to point into the new buffer.
424 void Assembler::GrowBuffer() {
425 DCHECK(buffer_overflow());
426 if (!own_buffer_) FATAL("external code buffer is too small");
428 // Compute new buffer size.
429 CodeDesc desc;  // the new buffer
430 desc.buffer_size = 2 * buffer_size_;
432 // Some internal data structures overflow for very large buffers,
433 // they must ensure that kMaximalBufferSize is not too large.
434 if ((desc.buffer_size > kMaximalBufferSize) ||
435 (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
436 V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
439 // Set up new buffer.
440 desc.buffer = NewArray<byte>(desc.buffer_size);
441 desc.instr_size = pc_offset();
443 static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
445 // Clear the buffer in debug mode. Use 'int3' instructions to make
446 // sure to get into problems if we ever run uninitialized code.
448 memset(desc.buffer, 0xCC, desc.buffer_size);
// pc_delta: shift for instructions (anchored at buffer start);
// rc_delta: shift for reloc info (anchored at buffer end).
452 intptr_t pc_delta = desc.buffer - buffer_;
453 intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
454 (buffer_ + buffer_size_);
455 MemMove(desc.buffer, buffer_, desc.instr_size);
456 MemMove(rc_delta + reloc_info_writer.pos(), reloc_info_writer.pos(),
// Switch to the new buffer and free the old one.
460 DeleteArray(buffer_);
461 buffer_ = desc.buffer;
462 buffer_size_ = desc.buffer_size;
464 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
465 reloc_info_writer.last_pc() + pc_delta);
467 // Relocate internal references.
// Each recorded position holds an absolute pointer into the old buffer;
// the elided line presumably adds pc_delta to *p.
468 for (auto pos : internal_reference_positions_) {
469 intptr_t* p = reinterpret_cast<intptr_t*>(buffer_ + pos);
473 DCHECK(!buffer_overflow());
// Emits a memory operand: the ModR/M byte with `code` merged into its reg
// field, then the remaining SIB/displacement bytes. Label-relative operands
// (RIP-relative, buf_[0] == 5) get their Label pointer resolved into a
// 32-bit displacement or threaded onto the label's link chain.
477 void Assembler::emit_operand(int code, const Operand& adr) {
478 DCHECK(is_uint3(code));
479 const unsigned length = adr.len_;
482 // Emit updated ModR/M byte containing the given register.
// The operand's reg field must be empty so `code << 3` can be OR-ed in.
483 DCHECK((adr.buf_[0] & 0x38) == 0);
484 *pc_++ = adr.buf_[0] | code << 3;
486 // Recognize RIP relative addressing.
487 if (adr.buf_[0] == 5) {
// Length 9 = ModR/M byte + 8-byte stashed Label pointer.
488 DCHECK_EQ(9u, length);
489 Label* label = *bit_cast<Label* const*>(&adr.buf_[1]);
490 if (label->is_bound()) {
// Displacement is relative to the end of the 4-byte field.
491 int offset = label->pos() - pc_offset() - sizeof(int32_t);
492 DCHECK_GE(0, offset);
494 } else if (label->is_linked()) {
// Chain this site onto the label's pending-fixup list.
496 label->link_to(pc_offset() - sizeof(int32_t));
498 DCHECK(label->is_unused());
// First reference: emit a self-referential link to start the chain.
499 int32_t current = pc_offset();
501 label->link_to(current);
504 // Emit the rest of the encoded operand.
505 for (unsigned i = 1; i < length; i++) *pc_++ = adr.buf_[i];
510 // Assembler Instruction implementations.
// Generic binary ALU op (add/sub/and/or/...) with a memory operand:
// REX prefix, opcode, then ModR/M-encoded operand.
512 void Assembler::arithmetic_op(byte opcode,
516 EnsureSpace ensure_space(this);
517 emit_rex(reg, op, size);
519 emit_operand(reg, op);
// Register-register form. When rm_reg's low bits are 4 (rsp/r12) the
// operands are swapped and the direction bit of the opcode flipped to avoid
// a needless SIB byte; the result is architecturally identical.
523 void Assembler::arithmetic_op(byte opcode,
527 EnsureSpace ensure_space(this);
// Opcode pattern 00sw..d.: only opcodes with the expected fixed bits allowed.
528 DCHECK((opcode & 0xC6) == 2);
529 if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
530 // Swap reg and rm_reg and change opcode operand order.
531 emit_rex(rm_reg, reg, size);
533 emit_modrm(rm_reg, reg);
535 emit_rex(reg, rm_reg, size);
537 emit_modrm(reg, rm_reg);
// 16-bit register-register form: 0x66 operand-size prefix (elided line),
// optional REX, same swap trick as above.
542 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
543 EnsureSpace ensure_space(this);
544 DCHECK((opcode & 0xC6) == 2);
545 if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
546 // Swap reg and rm_reg and change opcode operand order.
548 emit_optional_rex_32(rm_reg, reg);
550 emit_modrm(rm_reg, reg);
553 emit_optional_rex_32(reg, rm_reg);
555 emit_modrm(reg, rm_reg);
// 16-bit register-memory form.
560 void Assembler::arithmetic_op_16(byte opcode,
562 const Operand& rm_reg) {
563 EnsureSpace ensure_space(this);
565 emit_optional_rex_32(reg, rm_reg);
567 emit_operand(reg, rm_reg);
// 8-bit register-memory form. Registers outside al/bl/cl/dl need a REX
// prefix to address their low byte (spl/bpl/sil/dil/r8b-r15b).
571 void Assembler::arithmetic_op_8(byte opcode, Register reg, const Operand& op) {
572 EnsureSpace ensure_space(this);
573 if (!reg.is_byte_register()) {
574 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
578 emit_operand(reg, op);
// 8-bit register-register form, with the same swap-on-SIB optimization.
582 void Assembler::arithmetic_op_8(byte opcode, Register reg, Register rm_reg) {
583 EnsureSpace ensure_space(this);
584 DCHECK((opcode & 0xC6) == 2);
585 if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
586 // Swap reg and rm_reg and change opcode operand order.
587 if (!rm_reg.is_byte_register() || !reg.is_byte_register()) {
588 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
589 emit_rex_32(rm_reg, reg);
592 emit_modrm(rm_reg, reg);
594 if (!reg.is_byte_register() || !rm_reg.is_byte_register()) {
595 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
596 emit_rex_32(reg, rm_reg);
599 emit_modrm(reg, rm_reg);
// ALU op with immediate, register destination. subcode selects the operation
// in the ModR/M reg field (0=add, 5=sub, 7=cmp, ...). Uses the short 0x83
// form for 8-bit immediates and the accumulator short form for rax.
604 void Assembler::immediate_arithmetic_op(byte subcode,
608 EnsureSpace ensure_space(this);
610 if (is_int8(src.value_)) {
612 emit_modrm(subcode, dst);
// rax with a 32-bit immediate has a dedicated 1-byte opcode (05/2D/3D...).
614 } else if (dst.is(rax)) {
615 emit(0x05 | (subcode << 3));
619 emit_modrm(subcode, dst);
// Same, memory destination.
624 void Assembler::immediate_arithmetic_op(byte subcode,
628 EnsureSpace ensure_space(this);
630 if (is_int8(src.value_)) {
632 emit_operand(subcode, dst);
636 emit_operand(subcode, dst);
// 16-bit variant: 0x66 prefix selects 16-bit operand size.
642 void Assembler::immediate_arithmetic_op_16(byte subcode,
645 EnsureSpace ensure_space(this);
646 emit(0x66);  // Operand size override prefix.
647 emit_optional_rex_32(dst);
648 if (is_int8(src.value_)) {
650 emit_modrm(subcode, dst);
652 } else if (dst.is(rax)) {
653 emit(0x05 | (subcode << 3));
657 emit_modrm(subcode, dst);
// 16-bit variant, memory destination.
663 void Assembler::immediate_arithmetic_op_16(byte subcode,
666 EnsureSpace ensure_space(this);
667 emit(0x66);  // Operand size override prefix.
668 emit_optional_rex_32(dst);
669 if (is_int8(src.value_)) {
671 emit_operand(subcode, dst);
675 emit_operand(subcode, dst);
// 8-bit variant, memory destination; immediate must fit in one byte.
681 void Assembler::immediate_arithmetic_op_8(byte subcode,
684 EnsureSpace ensure_space(this);
685 emit_optional_rex_32(dst);
686 DCHECK(is_int8(src.value_) || is_uint8(src.value_));
688 emit_operand(subcode, dst);
// 8-bit variant, register destination; REX needed for non-legacy byte regs.
693 void Assembler::immediate_arithmetic_op_8(byte subcode,
696 EnsureSpace ensure_space(this);
697 if (!dst.is_byte_register()) {
698 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
701 DCHECK(is_int8(src.value_) || is_uint8(src.value_));
703 emit_modrm(subcode, dst);
// Shift/rotate by immediate, register destination. subcode selects the
// operation (4=shl, 5=shr, 7=sar, ...). Shift count 1 uses the short D1
// form; otherwise the count byte follows the ModR/M byte.
708 void Assembler::shift(Register dst,
709 Immediate shift_amount,
712 EnsureSpace ensure_space(this);
// 64-bit shifts allow counts 0-63, 32-bit shifts 0-31.
713 DCHECK(size == kInt64Size ? is_uint6(shift_amount.value_)
714 : is_uint5(shift_amount.value_));
715 if (shift_amount.value_ == 1) {
718 emit_modrm(subcode, dst);
722 emit_modrm(subcode, dst);
723 emit(shift_amount.value_);
// Shift by immediate, memory destination.
728 void Assembler::shift(Operand dst, Immediate shift_amount, int subcode,
730 EnsureSpace ensure_space(this);
731 DCHECK(size == kInt64Size ? is_uint6(shift_amount.value_)
732 : is_uint5(shift_amount.value_));
733 if (shift_amount.value_ == 1) {
736 emit_operand(subcode, dst);
740 emit_operand(subcode, dst);
741 emit(shift_amount.value_);
// Shift by CL, register destination (D3 /subcode — opcode byte elided).
746 void Assembler::shift(Register dst, int subcode, int size) {
747 EnsureSpace ensure_space(this);
750 emit_modrm(subcode, dst);
// Shift by CL, memory destination.
754 void Assembler::shift(Operand dst, int subcode, int size) {
755 EnsureSpace ensure_space(this);
758 emit_operand(subcode, dst);
// bt: bit test — copies the selected bit of dst into CF (REX.W 0F A3).
762 void Assembler::bt(const Operand& dst, Register src) {
763 EnsureSpace ensure_space(this);
764 emit_rex_64(src, dst);
767 emit_operand(src, dst);
// bts: bit test and set (REX.W 0F AB).
771 void Assembler::bts(const Operand& dst, Register src) {
772 EnsureSpace ensure_space(this);
773 emit_rex_64(src, dst);
776 emit_operand(src, dst);
// bsrl: 32-bit bit scan reverse — index of highest set bit (0F BD).
780 void Assembler::bsrl(Register dst, Register src) {
781 EnsureSpace ensure_space(this);
782 emit_optional_rex_32(dst, src);
785 emit_modrm(dst, src);
// call to a label: E8 with a 32-bit relative displacement. Unbound labels
// get this site threaded onto their fixup chain (resolved in bind_to).
789 void Assembler::call(Label* L) {
790 positions_recorder()->WriteRecordedPositions();
791 EnsureSpace ensure_space(this);
792 // 1110 1000 #32-bit disp.
795 int offset = L->pos() - pc_offset() - sizeof(int32_t);
798 } else if (L->is_linked()) {
800 L->link_to(pc_offset() - sizeof(int32_t));
802 DCHECK(L->is_unused());
// First reference: start the chain with a self-referential link.
803 int32_t current = pc_offset();
// call to a runtime entry address, recorded via relocation info.
810 void Assembler::call(Address entry, RelocInfo::Mode rmode) {
811 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
812 positions_recorder()->WriteRecordedPositions();
813 EnsureSpace ensure_space(this);
814 // 1110 1000 #32-bit disp.
816 emit_runtime_entry(entry, rmode);
// call to a Code object, with optional type-feedback id for the IC system.
820 void Assembler::call(Handle<Code> target,
821 RelocInfo::Mode rmode,
822 TypeFeedbackId ast_id) {
823 positions_recorder()->WriteRecordedPositions();
824 EnsureSpace ensure_space(this);
825 // 1110 1000 #32-bit disp.
827 emit_code_target(target, rmode, ast_id);
// Indirect call through a register: FF /2.
831 void Assembler::call(Register adr) {
832 positions_recorder()->WriteRecordedPositions();
833 EnsureSpace ensure_space(this);
834 // Opcode: FF /2 r64.
835 emit_optional_rex_32(adr);
837 emit_modrm(0x2, adr);
// Indirect call through a memory operand: FF /2.
841 void Assembler::call(const Operand& op) {
842 positions_recorder()->WriteRecordedPositions();
843 EnsureSpace ensure_space(this);
844 // Opcode: FF /2 m64.
845 emit_optional_rex_32(op);
847 emit_operand(0x2, op);
851 // Calls directly to the given address using a relative offset.
852 // Should only ever be used in Code objects for calls within the
853 // same Code object. Should not be used when generating new code (use labels),
854 // but only when patching existing code.
855 void Assembler::call(Address target) {
856 positions_recorder()->WriteRecordedPositions();
857 EnsureSpace ensure_space(this);
858 // 1110 1000 #32-bit disp.
// Displacement is relative to the instruction end (pc_ + 4 after opcode).
860 Address source = pc_ + 4;
861 intptr_t displacement = target - source;
862 DCHECK(is_int32(displacement));
863 emitl(static_cast<int32_t>(displacement));
// clc: clear carry flag (opcode byte elided from this view).
867 void Assembler::clc() {
868 EnsureSpace ensure_space(this);
// cld: clear direction flag so string ops ascend.
873 void Assembler::cld() {
874 EnsureSpace ensure_space(this);
// cdq: sign-extend eax into edx:eax.
879 void Assembler::cdq() {
880 EnsureSpace ensure_space(this);
// cmovq: 64-bit conditional move, register source. The `always`/`never`
// special cases (branch bodies elided here) degenerate to mov / no-op.
885 void Assembler::cmovq(Condition cc, Register dst, Register src) {
888 } else if (cc == never) {
891 // No need to check CpuInfo for CMOV support, it's a required part of the
892 // 64-bit architecture.
893 DCHECK(cc >= 0);  // Use mov for unconditional moves.
894 EnsureSpace ensure_space(this);
895 // Opcode: REX.W 0f 40 + cc /r.
896 emit_rex_64(dst, src);
899 emit_modrm(dst, src);
// cmovq, memory source.
903 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
906 } else if (cc == never) {
910 EnsureSpace ensure_space(this);
911 // Opcode: REX.W 0f 40 + cc /r.
912 emit_rex_64(dst, src);
915 emit_operand(dst, src);
// cmovl: 32-bit conditional move, register source.
919 void Assembler::cmovl(Condition cc, Register dst, Register src) {
922 } else if (cc == never) {
926 EnsureSpace ensure_space(this);
927 // Opcode: 0f 40 + cc /r.
928 emit_optional_rex_32(dst, src);
931 emit_modrm(dst, src);
// cmovl, memory source.
935 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
938 } else if (cc == never) {
942 EnsureSpace ensure_space(this);
943 // Opcode: 0f 40 + cc /r.
944 emit_optional_rex_32(dst, src);
947 emit_operand(dst, src);
// cmpb_al: compare al with an 8-bit immediate (short accumulator form).
951 void Assembler::cmpb_al(Immediate imm8) {
952 DCHECK(is_int8(imm8.value_) || is_uint8(imm8.value_));
953 EnsureSpace ensure_space(this);
// cpuid: CPU identification (0F A2).
959 void Assembler::cpuid() {
960 EnsureSpace ensure_space(this);
// cqo: sign-extend rax into rdx:rax (REX.W 99).
966 void Assembler::cqo() {
967 EnsureSpace ensure_space(this);
// dec, register destination: FF /1, width chosen by `size`.
973 void Assembler::emit_dec(Register dst, int size) {
974 EnsureSpace ensure_space(this);
977 emit_modrm(0x1, dst);
// dec, memory destination: FF /1.
981 void Assembler::emit_dec(const Operand& dst, int size) {
982 EnsureSpace ensure_space(this);
985 emit_operand(1, dst);
// decb: 8-bit dec of a register (FE /1); REX needed for non-legacy byte regs.
989 void Assembler::decb(Register dst) {
990 EnsureSpace ensure_space(this);
991 if (!dst.is_byte_register()) {
992 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
996 emit_modrm(0x1, dst);
// decb, memory destination.
1000 void Assembler::decb(const Operand& dst) {
1001 EnsureSpace ensure_space(this);
1002 emit_optional_rex_32(dst);
1004 emit_operand(1, dst);
// enter: create stack frame with `size` bytes of locals (C8 iw 00).
1008 void Assembler::enter(Immediate size) {
1009 EnsureSpace ensure_space(this);
1011 emitw(size.value_);  // 16 bit operand, always.
// hlt: halt instruction (F4).
1016 void Assembler::hlt() {
1017 EnsureSpace ensure_space(this);
// idiv: signed divide rdx:rax (or edx:eax) by src — F7 /7.
1022 void Assembler::emit_idiv(Register src, int size) {
1023 EnsureSpace ensure_space(this);
1024 emit_rex(src, size);
1026 emit_modrm(0x7, src);
// div: unsigned divide — F7 /6.
1030 void Assembler::emit_div(Register src, int size) {
1031 EnsureSpace ensure_space(this);
1032 emit_rex(src, size);
1034 emit_modrm(0x6, src);
// One-operand imul: rdx:rax = rax * src — F7 /5.
1038 void Assembler::emit_imul(Register src, int size) {
1039 EnsureSpace ensure_space(this);
1040 emit_rex(src, size);
1042 emit_modrm(0x5, src);
// One-operand imul, memory source.
1046 void Assembler::emit_imul(const Operand& src, int size) {
1047 EnsureSpace ensure_space(this);
1048 emit_rex(src, size);
1050 emit_operand(0x5, src);
// Two-operand imul: dst *= src — 0F AF /r.
1054 void Assembler::emit_imul(Register dst, Register src, int size) {
1055 EnsureSpace ensure_space(this);
1056 emit_rex(dst, src, size);
1059 emit_modrm(dst, src);
// Two-operand imul, memory source.
1063 void Assembler::emit_imul(Register dst, const Operand& src, int size) {
1064 EnsureSpace ensure_space(this);
1065 emit_rex(dst, src, size);
1068 emit_operand(dst, src);
// Three-operand imul: dst = src * imm. Short 6B form for 8-bit immediates,
// 69 form with 32-bit immediate otherwise (opcode bytes elided here).
1072 void Assembler::emit_imul(Register dst, Register src, Immediate imm, int size) {
1073 EnsureSpace ensure_space(this);
1074 emit_rex(dst, src, size);
1075 if (is_int8(imm.value_)) {
1077 emit_modrm(dst, src);
1081 emit_modrm(dst, src);
// Three-operand imul, memory source.
1087 void Assembler::emit_imul(Register dst, const Operand& src, Immediate imm,
1089 EnsureSpace ensure_space(this);
1090 emit_rex(dst, src, size);
1091 if (is_int8(imm.value_)) {
1093 emit_operand(dst, src);
1097 emit_operand(dst, src);
// inc, register destination: FF /0.
1103 void Assembler::emit_inc(Register dst, int size) {
1104 EnsureSpace ensure_space(this);
1105 emit_rex(dst, size);
1107 emit_modrm(0x0, dst);
// inc, memory destination: FF /0.
1111 void Assembler::emit_inc(const Operand& dst, int size) {
1112 EnsureSpace ensure_space(this);
1113 emit_rex(dst, size);
1115 emit_operand(0, dst);
// int3: breakpoint trap (CC).
1119 void Assembler::int3() {
1120 EnsureSpace ensure_space(this);
// Conditional jump to a label. Bound labels emit the shortest encoding
// (2-byte 7x disp8 if it reaches, else 6-byte 0F 8x disp32); unbound labels
// are threaded onto the near (8-bit) or far (32-bit) fixup chains.
// `always`/`never` special cases (bodies elided here) degenerate to
// jmp / no-op.
1125 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1129 } else if (cc == never) {
1132 EnsureSpace ensure_space(this);
1133 DCHECK(is_uint4(cc));
1134 if (L->is_bound()) {
1135 const int short_size = 2;
1136 const int long_size = 6;
1137 int offs = L->pos() - pc_offset();
// offs <= 0 here: the label is behind us (backwards branch).
1139 // Determine whether we can use 1-byte offsets for backwards branches,
1140 // which have a max range of 128 bytes.
1142 // We also need to check predictable_code_size() flag here, because on x64,
1143 // when the full code generator recompiles code for debugging, some places
1144 // need to be padded out to a certain size. The debugger is keeping track of
1145 // how often it did this so that it can adjust return addresses on the
1146 // stack, but if the size of jump instructions can also change, that's not
1147 // enough and the calculated offsets would be incorrect.
1148 if (is_int8(offs - short_size) && !predictable_code_size()) {
1149 // 0111 tttn #8-bit disp.
1151 emit((offs - short_size) & 0xFF);
1153 // 0000 1111 1000 tttn #32-bit disp.
1156 emitl(offs - long_size);
1158 } else if (distance == Label::kNear) {
1159 // 0111 tttn #8-bit disp
// disp 0 terminates the near chain; a negative disp links to the previous
// fixup site.
1162 if (L->is_near_linked()) {
1163 int offset = L->near_link_pos() - pc_offset();
1164 DCHECK(is_int8(offset));
1165 disp = static_cast<byte>(offset & 0xFF);
1167 L->link_to(pc_offset(), Label::kNear);
1169 } else if (L->is_linked()) {
1170 // 0000 1111 1000 tttn #32-bit disp.
1174 L->link_to(pc_offset() - sizeof(int32_t));
1176 DCHECK(L->is_unused());
// First far reference: self-referential link starts the chain.
1179 int32_t current = pc_offset();
1181 L->link_to(current);
// Conditional jump to a runtime entry address (reloc-recorded).
1186 void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
1187 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
1188 EnsureSpace ensure_space(this);
1189 DCHECK(is_uint4(cc));
1192 emit_runtime_entry(entry, rmode);
// Conditional jump to a Code object target.
1196 void Assembler::j(Condition cc,
1197 Handle<Code> target,
1198 RelocInfo::Mode rmode) {
1199 EnsureSpace ensure_space(this);
1200 DCHECK(is_uint4(cc));
1201 // 0000 1111 1000 tttn #32-bit disp.
1204 emit_code_target(target, rmode);
// Unconditional jump to a label: EB disp8 when the bound target reaches,
// else E9 disp32; unbound labels use the same near/far fixup chains as j().
1208 void Assembler::jmp(Label* L, Label::Distance distance) {
1209 EnsureSpace ensure_space(this);
1210 const int short_size = sizeof(int8_t);
1211 const int long_size = sizeof(int32_t);
1212 if (L->is_bound()) {
// -1 accounts for the opcode byte already implied before the displacement.
1213 int offs = L->pos() - pc_offset() - 1;
1215 if (is_int8(offs - short_size) && !predictable_code_size()) {
1216 // 1110 1011 #8-bit disp.
1218 emit((offs - short_size) & 0xFF);
1220 // 1110 1001 #32-bit disp.
1222 emitl(offs - long_size);
1224 } else if (distance == Label::kNear) {
// Thread this site onto the near (8-bit) chain; disp 0 terminates it.
1227 if (L->is_near_linked()) {
1228 int offset = L->near_link_pos() - pc_offset();
1229 DCHECK(is_int8(offset));
1230 disp = static_cast<byte>(offset & 0xFF);
1232 L->link_to(pc_offset(), Label::kNear);
1234 } else if (L->is_linked()) {
1235 // 1110 1001 #32-bit disp.
1238 L->link_to(pc_offset() - long_size);
1240 // 1110 1001 #32-bit disp.
1241 DCHECK(L->is_unused());
// First far reference: self-referential link starts the chain.
1243 int32_t current = pc_offset();
1245 L->link_to(current);
// jmp to a Code object target (E9 + reloc-recorded displacement).
1250 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1251 EnsureSpace ensure_space(this);
1252 // 1110 1001 #32-bit disp.
1254 emit_code_target(target, rmode);
// jmp to a runtime entry address.
1258 void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
1259 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
1260 EnsureSpace ensure_space(this);
1261 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
1263 emit_runtime_entry(entry, rmode);
// Indirect jmp through a register: FF /4.
1267 void Assembler::jmp(Register target) {
1268 EnsureSpace ensure_space(this);
1270 emit_optional_rex_32(target);
1272 emit_modrm(0x4, target);
// Indirect jmp through a memory operand: FF /4.
1276 void Assembler::jmp(const Operand& src) {
1277 EnsureSpace ensure_space(this);
1279 emit_optional_rex_32(src);
1281 emit_operand(0x4, src);
// lea: load effective address of src into dst (8D /r).
1285 void Assembler::emit_lea(Register dst, const Operand& src, int size) {
1286 EnsureSpace ensure_space(this);
1287 emit_rex(dst, src, size);
1289 emit_operand(dst, src);
// load_rax: load rax from an absolute 64-bit address (REX.W A1 moffs64).
// With 32-bit pointers the 32-bit form is used and the high bits are
// zero-extended per the architecture.
1293 void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1294 EnsureSpace ensure_space(this);
1295 if (kPointerSize == kInt64Size) {
1296 emit(0x48);  // REX.W
1300 DCHECK(kPointerSize == kInt32Size);
1303 // In 64-bit mode, need to zero extend the operand to 8 bytes.
1304 // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1305 // Developer's Manual Volume 2.
// Convenience overload for external references.
1311 void Assembler::load_rax(ExternalReference ref) {
1312 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
// leave: tear down stack frame (C9).
1316 void Assembler::leave() {
1317 EnsureSpace ensure_space(this);
// movb: 8-bit load from memory. Registers outside al/bl/cl/dl need a REX
// prefix to access their low byte.
1322 void Assembler::movb(Register dst, const Operand& src) {
1323 EnsureSpace ensure_space(this);
1324 if (!dst.is_byte_register()) {
1325 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1326 emit_rex_32(dst, src);
1328 emit_optional_rex_32(dst, src);
1331 emit_operand(dst, src);
// movb: 8-bit immediate into register (B0+r ib).
1335 void Assembler::movb(Register dst, Immediate imm) {
1336 EnsureSpace ensure_space(this);
1337 if (!dst.is_byte_register()) {
1338 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1341 emit(0xB0 + dst.low_bits());
// movb: 8-bit store to memory.
1346 void Assembler::movb(const Operand& dst, Register src) {
1347 EnsureSpace ensure_space(this);
1348 if (!src.is_byte_register()) {
1349 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1350 emit_rex_32(src, dst);
1352 emit_optional_rex_32(src, dst);
1355 emit_operand(src, dst);
// movb: 8-bit immediate store to memory (C6 /0 ib).
1359 void Assembler::movb(const Operand& dst, Immediate imm) {
1360 EnsureSpace ensure_space(this);
1361 emit_optional_rex_32(dst);
1363 emit_operand(0x0, dst);
1364 emit(static_cast<byte>(imm.value_));
// movw: 16-bit load from memory (0x66 prefix elided in this view).
1368 void Assembler::movw(Register dst, const Operand& src) {
1369 EnsureSpace ensure_space(this);
1371 emit_optional_rex_32(dst, src);
1373 emit_operand(dst, src);
// movw: 16-bit store to memory.
1377 void Assembler::movw(const Operand& dst, Register src) {
1378 EnsureSpace ensure_space(this);
1380 emit_optional_rex_32(src, dst);
1382 emit_operand(src, dst);
// movw: 16-bit immediate store — the immediate is emitted little-endian,
// one byte at a time.
1386 void Assembler::movw(const Operand& dst, Immediate imm) {
1387 EnsureSpace ensure_space(this);
1389 emit_optional_rex_32(dst);
1391 emit_operand(0x0, dst);
1392 emit(static_cast<byte>(imm.value_ & 0xff));
1393 emit(static_cast<byte>(imm.value_ >> 8));
// mov, memory source (8B /r), width chosen by `size`.
1397 void Assembler::emit_mov(Register dst, const Operand& src, int size) {
1398 EnsureSpace ensure_space(this);
1399 emit_rex(dst, src, size);
1401 emit_operand(dst, src);
// mov, register-register. When src's low bits are 4 (rsp/r12) the store
// form (89 /r) is chosen with swapped operands — same architectural result,
// avoids a needless SIB byte.
1405 void Assembler::emit_mov(Register dst, Register src, int size) {
1406 EnsureSpace ensure_space(this);
1407 if (src.low_bits() == 4) {
1408 emit_rex(src, dst, size);
1410 emit_modrm(src, dst);
1412 emit_rex(dst, src, size);
1414 emit_modrm(dst, src);
// mov, memory destination (89 /r).
1419 void Assembler::emit_mov(const Operand& dst, Register src, int size) {
1420 EnsureSpace ensure_space(this);
1421 emit_rex(src, dst, size);
1423 emit_operand(src, dst);
// mov immediate into register. 64-bit uses C7 /0 with a sign-extended
// 32-bit immediate; 32-bit uses the short B8+r form.
1427 void Assembler::emit_mov(Register dst, Immediate value, int size) {
1428 EnsureSpace ensure_space(this);
1429 emit_rex(dst, size);
1430 if (size == kInt64Size) {
1432 emit_modrm(0x0, dst);
1434 DCHECK(size == kInt32Size);
1435 emit(0xB8 + dst.low_bits());
// mov immediate into memory (C7 /0).
1441 void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
1442 EnsureSpace ensure_space(this);
1443 emit_rex(dst, size);
1445 emit_operand(0x0, dst);
// movp: load a full pointer-width immediate with relocation info
// (B8+r with 64-bit immediate on x64).
1450 void Assembler::movp(Register dst, void* value, RelocInfo::Mode rmode) {
1451 EnsureSpace ensure_space(this);
1452 emit_rex(dst, kPointerSize);
1453 emit(0xB8 | dst.low_bits());
1454 emitp(value, rmode);
// movq: load a raw 64-bit immediate (REX.W B8+r io).
1458 void Assembler::movq(Register dst, int64_t value) {
1459 EnsureSpace ensure_space(this);
1461 emit(0xB8 | dst.low_bits());
// Unsigned overload forwards to the signed one; the bit pattern is
// preserved by the cast.
1466 void Assembler::movq(Register dst, uint64_t value) {
1467 movq(dst, static_cast<int64_t>(value));
1471 // Loads the ip-relative location of the src label into the target location
1472 // (as a 32-bit offset sign extended to 64-bit).
1473 void Assembler::movl(const Operand& dst, Label* src) {
1474 EnsureSpace ensure_space(this);
1475 emit_optional_rex_32(dst);
1477 emit_operand(0, dst);
1478 if (src->is_bound()) {
1479 int offset = src->pos() - pc_offset() - sizeof(int32_t);
1480 DCHECK(offset <= 0);
1482 } else if (src->is_linked()) {
1484 src->link_to(pc_offset() - sizeof(int32_t));
1486 DCHECK(src->is_unused());
1487 int32_t current = pc_offset();
1489 src->link_to(current);
1494 void Assembler::movsxbl(Register dst, Register src) {
1495 EnsureSpace ensure_space(this);
1496 if (!src.is_byte_register()) {
1497 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1498 emit_rex_32(dst, src);
1500 emit_optional_rex_32(dst, src);
1504 emit_modrm(dst, src);
1508 void Assembler::movsxbl(Register dst, const Operand& src) {
1509 EnsureSpace ensure_space(this);
1510 emit_optional_rex_32(dst, src);
1513 emit_operand(dst, src);
1517 void Assembler::movsxbq(Register dst, const Operand& src) {
1518 EnsureSpace ensure_space(this);
1519 emit_rex_64(dst, src);
1522 emit_operand(dst, src);
1526 void Assembler::movsxwl(Register dst, Register src) {
1527 EnsureSpace ensure_space(this);
1528 emit_optional_rex_32(dst, src);
1531 emit_modrm(dst, src);
1535 void Assembler::movsxwl(Register dst, const Operand& src) {
1536 EnsureSpace ensure_space(this);
1537 emit_optional_rex_32(dst, src);
1540 emit_operand(dst, src);
1544 void Assembler::movsxwq(Register dst, const Operand& src) {
1545 EnsureSpace ensure_space(this);
1546 emit_rex_64(dst, src);
1549 emit_operand(dst, src);
1553 void Assembler::movsxlq(Register dst, Register src) {
1554 EnsureSpace ensure_space(this);
1555 emit_rex_64(dst, src);
1557 emit_modrm(dst, src);
1561 void Assembler::movsxlq(Register dst, const Operand& src) {
1562 EnsureSpace ensure_space(this);
1563 emit_rex_64(dst, src);
1565 emit_operand(dst, src);
1569 void Assembler::emit_movzxb(Register dst, const Operand& src, int size) {
1570 EnsureSpace ensure_space(this);
1571 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1572 // there is no need to make this a 64 bit operation.
1573 emit_optional_rex_32(dst, src);
1576 emit_operand(dst, src);
1580 void Assembler::emit_movzxb(Register dst, Register src, int size) {
1581 EnsureSpace ensure_space(this);
1582 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1583 // there is no need to make this a 64 bit operation.
1584 if (!src.is_byte_register()) {
1585 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1586 emit_rex_32(dst, src);
1588 emit_optional_rex_32(dst, src);
1592 emit_modrm(dst, src);
1596 void Assembler::emit_movzxw(Register dst, const Operand& src, int size) {
1597 EnsureSpace ensure_space(this);
1598 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1599 // there is no need to make this a 64 bit operation.
1600 emit_optional_rex_32(dst, src);
1603 emit_operand(dst, src);
1607 void Assembler::emit_movzxw(Register dst, Register src, int size) {
1608 EnsureSpace ensure_space(this);
1609 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1610 // there is no need to make this a 64 bit operation.
1611 emit_optional_rex_32(dst, src);
1614 emit_modrm(dst, src);
1618 void Assembler::repmovsb() {
1619 EnsureSpace ensure_space(this);
1625 void Assembler::repmovsw() {
1626 EnsureSpace ensure_space(this);
1627 emit(0x66); // Operand size override.
1633 void Assembler::emit_repmovs(int size) {
1634 EnsureSpace ensure_space(this);
1641 void Assembler::mull(Register src) {
1642 EnsureSpace ensure_space(this);
1643 emit_optional_rex_32(src);
1645 emit_modrm(0x4, src);
1649 void Assembler::mull(const Operand& src) {
1650 EnsureSpace ensure_space(this);
1651 emit_optional_rex_32(src);
1653 emit_operand(0x4, src);
1657 void Assembler::mulq(Register src) {
1658 EnsureSpace ensure_space(this);
1661 emit_modrm(0x4, src);
1665 void Assembler::emit_neg(Register dst, int size) {
1666 EnsureSpace ensure_space(this);
1667 emit_rex(dst, size);
1669 emit_modrm(0x3, dst);
1673 void Assembler::emit_neg(const Operand& dst, int size) {
1674 EnsureSpace ensure_space(this);
1677 emit_operand(3, dst);
1681 void Assembler::nop() {
1682 EnsureSpace ensure_space(this);
1687 void Assembler::emit_not(Register dst, int size) {
1688 EnsureSpace ensure_space(this);
1689 emit_rex(dst, size);
1691 emit_modrm(0x2, dst);
1695 void Assembler::emit_not(const Operand& dst, int size) {
1696 EnsureSpace ensure_space(this);
1697 emit_rex(dst, size);
1699 emit_operand(2, dst);
1703 void Assembler::Nop(int n) {
1704 // The recommended muti-byte sequences of NOP instructions from the Intel 64
1705 // and IA-32 Architectures Software Developer's Manual.
1707 // Length Assembly Byte Sequence
1708 // 2 bytes 66 NOP 66 90H
1709 // 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1710 // 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1711 // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1712 // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1713 // 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1714 // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1715 // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1718 EnsureSpace ensure_space(this);
1780 void Assembler::popq(Register dst) {
1781 EnsureSpace ensure_space(this);
1782 emit_optional_rex_32(dst);
1783 emit(0x58 | dst.low_bits());
1787 void Assembler::popq(const Operand& dst) {
1788 EnsureSpace ensure_space(this);
1789 emit_optional_rex_32(dst);
1791 emit_operand(0, dst);
1795 void Assembler::popfq() {
1796 EnsureSpace ensure_space(this);
1801 void Assembler::pushq(Register src) {
1802 EnsureSpace ensure_space(this);
1803 emit_optional_rex_32(src);
1804 emit(0x50 | src.low_bits());
1808 void Assembler::pushq(const Operand& src) {
1809 EnsureSpace ensure_space(this);
1810 emit_optional_rex_32(src);
1812 emit_operand(6, src);
1816 void Assembler::pushq(Immediate value) {
1817 EnsureSpace ensure_space(this);
1818 if (is_int8(value.value_)) {
1820 emit(value.value_); // Emit low byte of value.
1823 emitl(value.value_);
1828 void Assembler::pushq_imm32(int32_t imm32) {
1829 EnsureSpace ensure_space(this);
1835 void Assembler::pushfq() {
1836 EnsureSpace ensure_space(this);
1841 void Assembler::ret(int imm16) {
1842 EnsureSpace ensure_space(this);
1843 DCHECK(is_uint16(imm16));
1849 emit((imm16 >> 8) & 0xFF);
1854 void Assembler::ud2() {
1855 EnsureSpace ensure_space(this);
1861 void Assembler::setcc(Condition cc, Register reg) {
1862 if (cc > last_condition) {
1863 movb(reg, Immediate(cc == always ? 1 : 0));
1866 EnsureSpace ensure_space(this);
1867 DCHECK(is_uint4(cc));
1868 if (!reg.is_byte_register()) {
1869 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1874 emit_modrm(0x0, reg);
1878 void Assembler::shld(Register dst, Register src) {
1879 EnsureSpace ensure_space(this);
1880 emit_rex_64(src, dst);
1883 emit_modrm(src, dst);
1887 void Assembler::shrd(Register dst, Register src) {
1888 EnsureSpace ensure_space(this);
1889 emit_rex_64(src, dst);
1892 emit_modrm(src, dst);
1896 void Assembler::emit_xchg(Register dst, Register src, int size) {
1897 EnsureSpace ensure_space(this);
1898 if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1899 Register other = src.is(rax) ? dst : src;
1900 emit_rex(other, size);
1901 emit(0x90 | other.low_bits());
1902 } else if (dst.low_bits() == 4) {
1903 emit_rex(dst, src, size);
1905 emit_modrm(dst, src);
1907 emit_rex(src, dst, size);
1909 emit_modrm(src, dst);
1914 void Assembler::emit_xchg(Register dst, const Operand& src, int size) {
1915 EnsureSpace ensure_space(this);
1916 emit_rex(dst, src, size);
1918 emit_operand(dst, src);
1922 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1923 EnsureSpace ensure_space(this);
1924 if (kPointerSize == kInt64Size) {
1925 emit(0x48); // REX.W
1929 DCHECK(kPointerSize == kInt32Size);
1932 // In 64-bit mode, need to zero extend the operand to 8 bytes.
1933 // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1934 // Developer's Manual Volume 2.
1940 void Assembler::store_rax(ExternalReference ref) {
1941 store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1945 void Assembler::testb(Register dst, Register src) {
1946 EnsureSpace ensure_space(this);
1947 if (src.low_bits() == 4) {
1948 emit_rex_32(src, dst);
1950 emit_modrm(src, dst);
1952 if (!dst.is_byte_register() || !src.is_byte_register()) {
1953 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1954 emit_rex_32(dst, src);
1957 emit_modrm(dst, src);
1962 void Assembler::testb(Register reg, Immediate mask) {
1963 DCHECK(is_int8(mask.value_) || is_uint8(mask.value_));
1964 EnsureSpace ensure_space(this);
1967 emit(mask.value_); // Low byte emitted.
1969 if (!reg.is_byte_register()) {
1970 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1974 emit_modrm(0x0, reg);
1975 emit(mask.value_); // Low byte emitted.
1980 void Assembler::testb(const Operand& op, Immediate mask) {
1981 DCHECK(is_int8(mask.value_) || is_uint8(mask.value_));
1982 EnsureSpace ensure_space(this);
1983 emit_optional_rex_32(rax, op);
1985 emit_operand(rax, op); // Operation code 0
1986 emit(mask.value_); // Low byte emitted.
1990 void Assembler::testb(const Operand& op, Register reg) {
1991 EnsureSpace ensure_space(this);
1992 if (!reg.is_byte_register()) {
1993 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1994 emit_rex_32(reg, op);
1996 emit_optional_rex_32(reg, op);
1999 emit_operand(reg, op);
2003 void Assembler::emit_test(Register dst, Register src, int size) {
2004 EnsureSpace ensure_space(this);
2005 if (src.low_bits() == 4) {
2006 emit_rex(src, dst, size);
2008 emit_modrm(src, dst);
2010 emit_rex(dst, src, size);
2012 emit_modrm(dst, src);
2017 void Assembler::emit_test(Register reg, Immediate mask, int size) {
2018 // testl with a mask that fits in the low byte is exactly testb.
2019 if (is_uint8(mask.value_)) {
2023 EnsureSpace ensure_space(this);
2025 emit_rex(rax, size);
2029 emit_rex(reg, size);
2031 emit_modrm(0x0, reg);
2037 void Assembler::emit_test(const Operand& op, Immediate mask, int size) {
2038 // testl with a mask that fits in the low byte is exactly testb.
2039 if (is_uint8(mask.value_)) {
2043 EnsureSpace ensure_space(this);
2044 emit_rex(rax, op, size);
2046 emit_operand(rax, op); // Operation code 0
2051 void Assembler::emit_test(const Operand& op, Register reg, int size) {
2052 EnsureSpace ensure_space(this);
2053 emit_rex(reg, op, size);
2055 emit_operand(reg, op);
2059 // FPU instructions.
2062 void Assembler::fld(int i) {
2063 EnsureSpace ensure_space(this);
2064 emit_farith(0xD9, 0xC0, i);
2068 void Assembler::fld1() {
2069 EnsureSpace ensure_space(this);
2075 void Assembler::fldz() {
2076 EnsureSpace ensure_space(this);
2082 void Assembler::fldpi() {
2083 EnsureSpace ensure_space(this);
2089 void Assembler::fldln2() {
2090 EnsureSpace ensure_space(this);
2096 void Assembler::fld_s(const Operand& adr) {
2097 EnsureSpace ensure_space(this);
2098 emit_optional_rex_32(adr);
2100 emit_operand(0, adr);
2104 void Assembler::fld_d(const Operand& adr) {
2105 EnsureSpace ensure_space(this);
2106 emit_optional_rex_32(adr);
2108 emit_operand(0, adr);
2112 void Assembler::fstp_s(const Operand& adr) {
2113 EnsureSpace ensure_space(this);
2114 emit_optional_rex_32(adr);
2116 emit_operand(3, adr);
2120 void Assembler::fstp_d(const Operand& adr) {
2121 EnsureSpace ensure_space(this);
2122 emit_optional_rex_32(adr);
2124 emit_operand(3, adr);
2128 void Assembler::fstp(int index) {
2129 DCHECK(is_uint3(index));
2130 EnsureSpace ensure_space(this);
2131 emit_farith(0xDD, 0xD8, index);
2135 void Assembler::fild_s(const Operand& adr) {
2136 EnsureSpace ensure_space(this);
2137 emit_optional_rex_32(adr);
2139 emit_operand(0, adr);
2143 void Assembler::fild_d(const Operand& adr) {
2144 EnsureSpace ensure_space(this);
2145 emit_optional_rex_32(adr);
2147 emit_operand(5, adr);
2151 void Assembler::fistp_s(const Operand& adr) {
2152 EnsureSpace ensure_space(this);
2153 emit_optional_rex_32(adr);
2155 emit_operand(3, adr);
2159 void Assembler::fisttp_s(const Operand& adr) {
2160 DCHECK(IsEnabled(SSE3));
2161 EnsureSpace ensure_space(this);
2162 emit_optional_rex_32(adr);
2164 emit_operand(1, adr);
2168 void Assembler::fisttp_d(const Operand& adr) {
2169 DCHECK(IsEnabled(SSE3));
2170 EnsureSpace ensure_space(this);
2171 emit_optional_rex_32(adr);
2173 emit_operand(1, adr);
2177 void Assembler::fist_s(const Operand& adr) {
2178 EnsureSpace ensure_space(this);
2179 emit_optional_rex_32(adr);
2181 emit_operand(2, adr);
2185 void Assembler::fistp_d(const Operand& adr) {
2186 EnsureSpace ensure_space(this);
2187 emit_optional_rex_32(adr);
2189 emit_operand(7, adr);
2193 void Assembler::fabs() {
2194 EnsureSpace ensure_space(this);
2200 void Assembler::fchs() {
2201 EnsureSpace ensure_space(this);
2207 void Assembler::fcos() {
2208 EnsureSpace ensure_space(this);
2214 void Assembler::fsin() {
2215 EnsureSpace ensure_space(this);
2221 void Assembler::fptan() {
2222 EnsureSpace ensure_space(this);
2228 void Assembler::fyl2x() {
2229 EnsureSpace ensure_space(this);
2235 void Assembler::f2xm1() {
2236 EnsureSpace ensure_space(this);
2242 void Assembler::fscale() {
2243 EnsureSpace ensure_space(this);
2249 void Assembler::fninit() {
2250 EnsureSpace ensure_space(this);
2256 void Assembler::fadd(int i) {
2257 EnsureSpace ensure_space(this);
2258 emit_farith(0xDC, 0xC0, i);
2262 void Assembler::fsub(int i) {
2263 EnsureSpace ensure_space(this);
2264 emit_farith(0xDC, 0xE8, i);
2268 void Assembler::fisub_s(const Operand& adr) {
2269 EnsureSpace ensure_space(this);
2270 emit_optional_rex_32(adr);
2272 emit_operand(4, adr);
2276 void Assembler::fmul(int i) {
2277 EnsureSpace ensure_space(this);
2278 emit_farith(0xDC, 0xC8, i);
2282 void Assembler::fdiv(int i) {
2283 EnsureSpace ensure_space(this);
2284 emit_farith(0xDC, 0xF8, i);
2288 void Assembler::faddp(int i) {
2289 EnsureSpace ensure_space(this);
2290 emit_farith(0xDE, 0xC0, i);
2294 void Assembler::fsubp(int i) {
2295 EnsureSpace ensure_space(this);
2296 emit_farith(0xDE, 0xE8, i);
2300 void Assembler::fsubrp(int i) {
2301 EnsureSpace ensure_space(this);
2302 emit_farith(0xDE, 0xE0, i);
2306 void Assembler::fmulp(int i) {
2307 EnsureSpace ensure_space(this);
2308 emit_farith(0xDE, 0xC8, i);
2312 void Assembler::fdivp(int i) {
2313 EnsureSpace ensure_space(this);
2314 emit_farith(0xDE, 0xF8, i);
2318 void Assembler::fprem() {
2319 EnsureSpace ensure_space(this);
2325 void Assembler::fprem1() {
2326 EnsureSpace ensure_space(this);
2332 void Assembler::fxch(int i) {
2333 EnsureSpace ensure_space(this);
2334 emit_farith(0xD9, 0xC8, i);
2338 void Assembler::fincstp() {
2339 EnsureSpace ensure_space(this);
2345 void Assembler::ffree(int i) {
2346 EnsureSpace ensure_space(this);
2347 emit_farith(0xDD, 0xC0, i);
2351 void Assembler::ftst() {
2352 EnsureSpace ensure_space(this);
2358 void Assembler::fucomp(int i) {
2359 EnsureSpace ensure_space(this);
2360 emit_farith(0xDD, 0xE8, i);
2364 void Assembler::fucompp() {
2365 EnsureSpace ensure_space(this);
2371 void Assembler::fucomi(int i) {
2372 EnsureSpace ensure_space(this);
2378 void Assembler::fucomip() {
2379 EnsureSpace ensure_space(this);
2385 void Assembler::fcompp() {
2386 EnsureSpace ensure_space(this);
2392 void Assembler::fnstsw_ax() {
2393 EnsureSpace ensure_space(this);
2399 void Assembler::fwait() {
2400 EnsureSpace ensure_space(this);
2405 void Assembler::frndint() {
2406 EnsureSpace ensure_space(this);
2412 void Assembler::fnclex() {
2413 EnsureSpace ensure_space(this);
2419 void Assembler::sahf() {
2420 // TODO(X64): Test for presence. Not all 64-bit intel CPU's have sahf
2421 // in 64-bit mode. Test CpuID.
2422 DCHECK(IsEnabled(SAHF));
2423 EnsureSpace ensure_space(this);
2428 void Assembler::emit_farith(int b1, int b2, int i) {
2429 DCHECK(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2430 DCHECK(is_uint3(i)); // illegal stack offset
2438 void Assembler::andps(XMMRegister dst, XMMRegister src) {
2439 EnsureSpace ensure_space(this);
2440 emit_optional_rex_32(dst, src);
2443 emit_sse_operand(dst, src);
2447 void Assembler::andps(XMMRegister dst, const Operand& src) {
2448 EnsureSpace ensure_space(this);
2449 emit_optional_rex_32(dst, src);
2452 emit_sse_operand(dst, src);
2456 void Assembler::orps(XMMRegister dst, XMMRegister src) {
2457 EnsureSpace ensure_space(this);
2458 emit_optional_rex_32(dst, src);
2461 emit_sse_operand(dst, src);
2465 void Assembler::orps(XMMRegister dst, const Operand& src) {
2466 EnsureSpace ensure_space(this);
2467 emit_optional_rex_32(dst, src);
2470 emit_sse_operand(dst, src);
2474 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2475 EnsureSpace ensure_space(this);
2476 emit_optional_rex_32(dst, src);
2479 emit_sse_operand(dst, src);
2483 void Assembler::xorps(XMMRegister dst, const Operand& src) {
2484 EnsureSpace ensure_space(this);
2485 emit_optional_rex_32(dst, src);
2488 emit_sse_operand(dst, src);
2492 void Assembler::addps(XMMRegister dst, XMMRegister src) {
2493 EnsureSpace ensure_space(this);
2494 emit_optional_rex_32(dst, src);
2497 emit_sse_operand(dst, src);
2501 void Assembler::addps(XMMRegister dst, const Operand& src) {
2502 EnsureSpace ensure_space(this);
2503 emit_optional_rex_32(dst, src);
2506 emit_sse_operand(dst, src);
2510 void Assembler::subps(XMMRegister dst, XMMRegister src) {
2511 EnsureSpace ensure_space(this);
2512 emit_optional_rex_32(dst, src);
2515 emit_sse_operand(dst, src);
2519 void Assembler::subps(XMMRegister dst, const Operand& src) {
2520 EnsureSpace ensure_space(this);
2521 emit_optional_rex_32(dst, src);
2524 emit_sse_operand(dst, src);
2528 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
2529 EnsureSpace ensure_space(this);
2530 emit_optional_rex_32(dst, src);
2533 emit_sse_operand(dst, src);
2537 void Assembler::mulps(XMMRegister dst, const Operand& src) {
2538 EnsureSpace ensure_space(this);
2539 emit_optional_rex_32(dst, src);
2542 emit_sse_operand(dst, src);
2546 void Assembler::divps(XMMRegister dst, XMMRegister src) {
2547 EnsureSpace ensure_space(this);
2548 emit_optional_rex_32(dst, src);
2551 emit_sse_operand(dst, src);
2555 void Assembler::divps(XMMRegister dst, const Operand& src) {
2556 EnsureSpace ensure_space(this);
2557 emit_optional_rex_32(dst, src);
2560 emit_sse_operand(dst, src);
2564 // SSE 2 operations.
2566 void Assembler::movd(XMMRegister dst, Register src) {
2567 EnsureSpace ensure_space(this);
2569 emit_optional_rex_32(dst, src);
2572 emit_sse_operand(dst, src);
2576 void Assembler::movd(Register dst, XMMRegister src) {
2577 EnsureSpace ensure_space(this);
2579 emit_optional_rex_32(src, dst);
2582 emit_sse_operand(src, dst);
2586 void Assembler::movq(XMMRegister dst, Register src) {
2587 EnsureSpace ensure_space(this);
2589 emit_rex_64(dst, src);
2592 emit_sse_operand(dst, src);
2596 void Assembler::movq(Register dst, XMMRegister src) {
2597 EnsureSpace ensure_space(this);
2599 emit_rex_64(src, dst);
2602 emit_sse_operand(src, dst);
2606 void Assembler::movq(XMMRegister dst, XMMRegister src) {
2607 EnsureSpace ensure_space(this);
2608 if (dst.low_bits() == 4) {
2609 // Avoid unnecessary SIB byte.
2611 emit_optional_rex_32(dst, src);
2614 emit_sse_operand(dst, src);
2617 emit_optional_rex_32(src, dst);
2620 emit_sse_operand(src, dst);
2625 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2626 EnsureSpace ensure_space(this);
2628 emit_rex_64(src, dst);
2631 emit_sse_operand(src, dst);
2635 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2636 EnsureSpace ensure_space(this);
2638 emit_rex_64(dst, src);
2641 emit_sse_operand(dst, src);
2645 void Assembler::movdqu(const Operand& dst, XMMRegister src) {
2646 EnsureSpace ensure_space(this);
2648 emit_rex_64(src, dst);
2651 emit_sse_operand(src, dst);
2655 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
2656 EnsureSpace ensure_space(this);
2658 emit_rex_64(dst, src);
2661 emit_sse_operand(dst, src);
2665 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2666 DCHECK(IsEnabled(SSE4_1));
2667 DCHECK(is_uint8(imm8));
2668 EnsureSpace ensure_space(this);
2670 emit_optional_rex_32(src, dst);
2674 emit_sse_operand(src, dst);
2679 void Assembler::movsd(const Operand& dst, XMMRegister src) {
2680 EnsureSpace ensure_space(this);
2681 emit(0xF2); // double
2682 emit_optional_rex_32(src, dst);
2684 emit(0x11); // store
2685 emit_sse_operand(src, dst);
2689 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2690 EnsureSpace ensure_space(this);
2691 emit(0xF2); // double
2692 emit_optional_rex_32(dst, src);
2695 emit_sse_operand(dst, src);
2699 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2700 EnsureSpace ensure_space(this);
2701 emit(0xF2); // double
2702 emit_optional_rex_32(dst, src);
2705 emit_sse_operand(dst, src);
2709 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2710 EnsureSpace ensure_space(this);
2711 if (src.low_bits() == 4) {
2712 // Try to avoid an unnecessary SIB byte.
2713 emit_optional_rex_32(src, dst);
2716 emit_sse_operand(src, dst);
2718 emit_optional_rex_32(dst, src);
2721 emit_sse_operand(dst, src);
2726 void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
2727 DCHECK(is_uint8(imm8));
2728 EnsureSpace ensure_space(this);
2729 emit_optional_rex_32(src, dst);
2732 emit_sse_operand(dst, src);
2737 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2738 EnsureSpace ensure_space(this);
2739 if (src.low_bits() == 4) {
2740 // Try to avoid an unnecessary SIB byte.
2742 emit_optional_rex_32(src, dst);
2745 emit_sse_operand(src, dst);
2748 emit_optional_rex_32(dst, src);
2751 emit_sse_operand(dst, src);
2756 void Assembler::addss(XMMRegister dst, XMMRegister src) {
2757 EnsureSpace ensure_space(this);
2759 emit_optional_rex_32(dst, src);
2762 emit_sse_operand(dst, src);
2766 void Assembler::addss(XMMRegister dst, const Operand& src) {
2767 EnsureSpace ensure_space(this);
2769 emit_optional_rex_32(dst, src);
2772 emit_sse_operand(dst, src);
2776 void Assembler::subss(XMMRegister dst, XMMRegister src) {
2777 EnsureSpace ensure_space(this);
2779 emit_optional_rex_32(dst, src);
2782 emit_sse_operand(dst, src);
2786 void Assembler::subss(XMMRegister dst, const Operand& src) {
2787 EnsureSpace ensure_space(this);
2789 emit_optional_rex_32(dst, src);
2792 emit_sse_operand(dst, src);
2796 void Assembler::mulss(XMMRegister dst, XMMRegister src) {
2797 EnsureSpace ensure_space(this);
2799 emit_optional_rex_32(dst, src);
2802 emit_sse_operand(dst, src);
2806 void Assembler::mulss(XMMRegister dst, const Operand& src) {
2807 EnsureSpace ensure_space(this);
2809 emit_optional_rex_32(dst, src);
2812 emit_sse_operand(dst, src);
2816 void Assembler::divss(XMMRegister dst, XMMRegister src) {
2817 EnsureSpace ensure_space(this);
2819 emit_optional_rex_32(dst, src);
2822 emit_sse_operand(dst, src);
2826 void Assembler::divss(XMMRegister dst, const Operand& src) {
2827 EnsureSpace ensure_space(this);
2829 emit_optional_rex_32(dst, src);
2832 emit_sse_operand(dst, src);
2836 void Assembler::ucomiss(XMMRegister dst, XMMRegister src) {
2837 EnsureSpace ensure_space(this);
2838 emit_optional_rex_32(dst, src);
2841 emit_sse_operand(dst, src);
2845 void Assembler::ucomiss(XMMRegister dst, const Operand& src) {
2846 EnsureSpace ensure_space(this);
2847 emit_optional_rex_32(dst, src);
2850 emit_sse_operand(dst, src);
2854 void Assembler::movss(XMMRegister dst, const Operand& src) {
2855 EnsureSpace ensure_space(this);
2856 emit(0xF3); // single
2857 emit_optional_rex_32(dst, src);
2860 emit_sse_operand(dst, src);
2864 void Assembler::movss(const Operand& src, XMMRegister dst) {
2865 EnsureSpace ensure_space(this);
2866 emit(0xF3); // single
2867 emit_optional_rex_32(dst, src);
2869 emit(0x11); // store
2870 emit_sse_operand(dst, src);
2874 void Assembler::psllq(XMMRegister reg, byte imm8) {
2875 EnsureSpace ensure_space(this);
2877 emit_optional_rex_32(reg);
2880 emit_sse_operand(rsi, reg); // rsi == 6
2885 void Assembler::psrlq(XMMRegister reg, byte imm8) {
2886 EnsureSpace ensure_space(this);
2888 emit_optional_rex_32(reg);
2891 emit_sse_operand(rdx, reg); // rdx == 2
2896 void Assembler::pslld(XMMRegister reg, byte imm8) {
2897 EnsureSpace ensure_space(this);
2899 emit_optional_rex_32(reg);
2902 emit_sse_operand(rsi, reg); // rsi == 6
2907 void Assembler::psrld(XMMRegister reg, byte imm8) {
2908 EnsureSpace ensure_space(this);
2910 emit_optional_rex_32(reg);
2913 emit_sse_operand(rdx, reg); // rdx == 2
2918 void Assembler::cvttss2si(Register dst, const Operand& src) {
2919 EnsureSpace ensure_space(this);
2921 emit_optional_rex_32(dst, src);
2924 emit_operand(dst, src);
2928 void Assembler::cvttss2si(Register dst, XMMRegister src) {
2929 EnsureSpace ensure_space(this);
2931 emit_optional_rex_32(dst, src);
2934 emit_sse_operand(dst, src);
2938 void Assembler::cvttsd2si(Register dst, const Operand& src) {
2939 EnsureSpace ensure_space(this);
2941 emit_optional_rex_32(dst, src);
2944 emit_operand(dst, src);
2948 void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2949 EnsureSpace ensure_space(this);
2951 emit_optional_rex_32(dst, src);
2954 emit_sse_operand(dst, src);
2958 void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2959 EnsureSpace ensure_space(this);
2961 emit_rex_64(dst, src);
2964 emit_sse_operand(dst, src);
2968 void Assembler::cvttsd2siq(Register dst, const Operand& src) {
2969 EnsureSpace ensure_space(this);
2971 emit_rex_64(dst, src);
2974 emit_sse_operand(dst, src);
2978 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2979 EnsureSpace ensure_space(this);
2981 emit_optional_rex_32(dst, src);
2984 emit_sse_operand(dst, src);
2988 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2989 EnsureSpace ensure_space(this);
2991 emit_optional_rex_32(dst, src);
2994 emit_sse_operand(dst, src);
2998 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2999 EnsureSpace ensure_space(this);
3001 emit_optional_rex_32(dst, src);
3004 emit_sse_operand(dst, src);
3008 void Assembler::cvtqsi2sd(XMMRegister dst, const Operand& src) {
3009 EnsureSpace ensure_space(this);
3011 emit_rex_64(dst, src);
3014 emit_sse_operand(dst, src);
3018 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
3019 EnsureSpace ensure_space(this);
3021 emit_rex_64(dst, src);
3024 emit_sse_operand(dst, src);
3028 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
3029 EnsureSpace ensure_space(this);
3031 emit_optional_rex_32(dst, src);
3034 emit_sse_operand(dst, src);
3038 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
3039 EnsureSpace ensure_space(this);
3041 emit_optional_rex_32(dst, src);
3044 emit_sse_operand(dst, src);
3048 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
3049 EnsureSpace ensure_space(this);
3051 emit_optional_rex_32(dst, src);
3054 emit_sse_operand(dst, src);
3058 void Assembler::cvtsd2ss(XMMRegister dst, const Operand& src) {
3059 EnsureSpace ensure_space(this);
3061 emit_optional_rex_32(dst, src);
3064 emit_sse_operand(dst, src);
3068 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
3069 EnsureSpace ensure_space(this);
3071 emit_optional_rex_32(dst, src);
3074 emit_sse_operand(dst, src);
3078 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
3079 EnsureSpace ensure_space(this);
3081 emit_rex_64(dst, src);
3084 emit_sse_operand(dst, src);
3088 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
3089 EnsureSpace ensure_space(this);
3091 emit_optional_rex_32(dst, src);
3094 emit_sse_operand(dst, src);
3098 void Assembler::addsd(XMMRegister dst, const Operand& src) {
3099 EnsureSpace ensure_space(this);
3101 emit_optional_rex_32(dst, src);
3104 emit_sse_operand(dst, src);
3108 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
3109 EnsureSpace ensure_space(this);
3111 emit_optional_rex_32(dst, src);
3114 emit_sse_operand(dst, src);
3118 void Assembler::mulsd(XMMRegister dst, const Operand& src) {
3119 EnsureSpace ensure_space(this);
3121 emit_optional_rex_32(dst, src);
3124 emit_sse_operand(dst, src);
3128 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
3129 EnsureSpace ensure_space(this);
3131 emit_optional_rex_32(dst, src);
3134 emit_sse_operand(dst, src);
3138 void Assembler::subsd(XMMRegister dst, const Operand& src) {
3139 EnsureSpace ensure_space(this);
3141 emit_optional_rex_32(dst, src);
3144 emit_sse_operand(dst, src);
3148 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
3149 EnsureSpace ensure_space(this);
3151 emit_optional_rex_32(dst, src);
3154 emit_sse_operand(dst, src);
3158 void Assembler::divsd(XMMRegister dst, const Operand& src) {
3159 EnsureSpace ensure_space(this);
3161 emit_optional_rex_32(dst, src);
3164 emit_sse_operand(dst, src);
3168 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
3169 EnsureSpace ensure_space(this);
3171 emit_optional_rex_32(dst, src);
3174 emit_sse_operand(dst, src);
3178 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
3179 EnsureSpace ensure_space(this);
3181 emit_optional_rex_32(dst, src);
3184 emit_sse_operand(dst, src);
3188 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
3189 EnsureSpace ensure_space(this);
3191 emit_optional_rex_32(dst, src);
3194 emit_sse_operand(dst, src);
3198 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
3199 EnsureSpace ensure_space(this);
3201 emit_optional_rex_32(dst, src);
3204 emit_sse_operand(dst, src);
3208 void Assembler::sqrtsd(XMMRegister dst, const Operand& src) {
3209 EnsureSpace ensure_space(this);
3211 emit_optional_rex_32(dst, src);
3214 emit_sse_operand(dst, src);
3218 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
3219 EnsureSpace ensure_space(this);
3221 emit_optional_rex_32(dst, src);
3224 emit_sse_operand(dst, src);
3228 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
3229 EnsureSpace ensure_space(this);
3231 emit_optional_rex_32(dst, src);
3234 emit_sse_operand(dst, src);
3238 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
3239 EnsureSpace ensure_space(this);
3241 emit_optional_rex_32(dst, src);
3244 emit_sse_operand(dst, src);
3245 emit(0x01); // LT == 1
3249 void Assembler::roundsd(XMMRegister dst, XMMRegister src,
3250 Assembler::RoundingMode mode) {
3251 DCHECK(IsEnabled(SSE4_1));
3252 EnsureSpace ensure_space(this);
3254 emit_optional_rex_32(dst, src);
3258 emit_sse_operand(dst, src);
3259 // Mask precision exeption.
3260 emit(static_cast<byte>(mode) | 0x8);
3264 void Assembler::movmskpd(Register dst, XMMRegister src) {
3265 EnsureSpace ensure_space(this);
3267 emit_optional_rex_32(dst, src);
3270 emit_sse_operand(dst, src);
3274 void Assembler::movmskps(Register dst, XMMRegister src) {
3275 EnsureSpace ensure_space(this);
3276 emit_optional_rex_32(dst, src);
3279 emit_sse_operand(dst, src);
3283 void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
  // Packed compare-equal of 32-bit lanes: each dst lane becomes all-ones
  // where dst == src, else zero.
  // NOTE(review): opcode emit() lines elided in this excerpt.
3284   EnsureSpace ensure_space(this);
3286   emit_optional_rex_32(dst, src);
3289   emit_sse_operand(dst, src);
3294 void Assembler::vfmasd(byte op, XMMRegister dst, XMMRegister src1,
  // Shared emitter for scalar-double FMA3 forms (vfmadd/vfmsub/... sd);
  // |op| selects the concrete opcode. VEX.66.0F38.W1 encoding.
  // Register-source variant; requires FMA3 to be enabled.
3296   DCHECK(IsEnabled(FMA3));
3297   EnsureSpace ensure_space(this);
3298   emit_vex_prefix(dst, src1, src2, kLIG, k66, k0F38, kW1);
3300   emit_sse_operand(dst, src2);
3304 void Assembler::vfmasd(byte op, XMMRegister dst, XMMRegister src1,
3305                        const Operand& src2) {
  // Shared emitter for scalar-double FMA3 forms, memory-operand variant.
  // |op| selects the concrete opcode. VEX.66.0F38.W1 encoding.
3306   DCHECK(IsEnabled(FMA3));
3307   EnsureSpace ensure_space(this);
3308   emit_vex_prefix(dst, src1, src2, kLIG, k66, k0F38, kW1);
3310   emit_sse_operand(dst, src2);
3314 void Assembler::vfmass(byte op, XMMRegister dst, XMMRegister src1,
  // Shared emitter for scalar-single FMA3 forms; differs from vfmasd only
  // in VEX.W (kW0 selects the single-precision encoding). Register variant.
3316   DCHECK(IsEnabled(FMA3));
3317   EnsureSpace ensure_space(this);
3318   emit_vex_prefix(dst, src1, src2, kLIG, k66, k0F38, kW0);
3320   emit_sse_operand(dst, src2);
3324 void Assembler::vfmass(byte op, XMMRegister dst, XMMRegister src1,
3325                        const Operand& src2) {
  // Shared emitter for scalar-single FMA3 forms, memory-operand variant.
  // VEX.66.0F38.W0 encoding.
3326   DCHECK(IsEnabled(FMA3));
3327   EnsureSpace ensure_space(this);
3328   emit_vex_prefix(dst, src1, src2, kLIG, k66, k0F38, kW0);
3330   emit_sse_operand(dst, src2);
3334 void Assembler::vsd(byte op, XMMRegister dst, XMMRegister src1,
  // Generic AVX scalar-double emitter (VEX.F2.0F.WIG + |op|), used for
  // vaddsd/vsubsd/etc. Register-source variant; requires AVX.
3336   DCHECK(IsEnabled(AVX));
3337   EnsureSpace ensure_space(this);
3338   emit_vex_prefix(dst, src1, src2, kLIG, kF2, k0F, kWIG);
3340   emit_sse_operand(dst, src2);
3344 void Assembler::vsd(byte op, XMMRegister dst, XMMRegister src1,
3345                     const Operand& src2) {
  // Generic AVX scalar-double emitter, memory-operand variant.
  // VEX.F2.0F.WIG encoding + |op|.
3346   DCHECK(IsEnabled(AVX));
3347   EnsureSpace ensure_space(this);
3348   emit_vex_prefix(dst, src1, src2, kLIG, kF2, k0F, kWIG);
3350   emit_sse_operand(dst, src2);
3354 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
  // Encode a ModR/M + SIB/displacement for an XMM register and a memory
  // operand by reusing the general-register operand encoder: XMM codes
  // occupy the same ModR/M reg field, so we wrap the code in a Register.
3355   Register ireg = { reg.code() };
3356   emit_operand(ireg, adr);
3360 void Assembler::emit_sse_operand(Register reg, const Operand& adr) {
  // General-register / memory-operand form; forwards directly to the
  // standard operand encoder (the copy keeps the overload set uniform).
3361   Register ireg = {reg.code()};
3362   emit_operand(ireg, adr);
3366 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
  // Register-register ModR/M byte: mod=11, reg=dst (low 3 bits), rm=src.
  // High register bits are carried by the REX/VEX prefix emitted earlier.
3367   emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3371 void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
  // ModR/M byte for XMM destination with general-register source
  // (e.g. movd/cvtsi2sd-style forms): mod=11, reg=dst, rm=src.
3372   emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3376 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
  // ModR/M byte for general-register destination with XMM source
  // (e.g. movmskpd-style forms): mod=11, reg=dst, rm=src.
3377   emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3381 void Assembler::db(uint8_t data) {
  // Emit a raw byte into the instruction stream ("define byte").
  // NOTE(review): the emit(data) line appears elided in this excerpt.
3382   EnsureSpace ensure_space(this);
3387 void Assembler::dd(uint32_t data) {
  // Emit a raw 32-bit value into the instruction stream ("define dword").
  // NOTE(review): the emitl(data) line appears elided in this excerpt.
3388   EnsureSpace ensure_space(this);
3393 void Assembler::dq(Label* label) {
  // Emit the 64-bit absolute address of |label| ("define qword").
  // If the label is already bound we can emit the final address now and
  // just remember the position for relocation on code moves. Otherwise we
  // emit a placeholder and thread this position onto the label's link
  // chain so it is patched when the label is bound.
3394   EnsureSpace ensure_space(this);
3395   if (label->is_bound()) {
3396     internal_reference_positions_.push_back(pc_offset());
3397     emitp(buffer_ + label->pos(), RelocInfo::INTERNAL_REFERENCE);
3399     RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    // Zero for the first 32bit marks it as 64bit absolute address.
3400     emitl(0);
3401     if (label->is_linked()) {
      // Append to the existing link chain: store the previous link target,
      // then point the label at this new fixup position.
3402       emitl(label->pos());
3403       label->link_to(pc_offset() - sizeof(int32_t));
      // Unused label: start a new link chain at the current position.
3405       DCHECK(label->is_unused());
3406       int32_t current = pc_offset();
3408       label->link_to(current);
3414 // Relocation information implementations.
3416 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  // Append a relocation record for the instruction at the current pc.
  // Some modes are filtered out below because they are either unneeded
  // outside serialization or handled specially elsewhere.
3417   DCHECK(!RelocInfo::IsNone(rmode));
3418   // Don't record external references unless the heap will be serialized.
3419   if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
3420       !serializer_enabled() && !emit_debug_code()) {
3422   } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) {
3423     // Don't record pseudo relocation info for code age sequence mode.
3426   RelocInfo rinfo(pc_, rmode, data, NULL);
3427   reloc_info_writer.Write(&rinfo);
3431 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
  // x64 embeds constants in the instruction stream, so the out-of-line
  // constant pool is always the shared empty array.
3432   // No out-of-line constant pool support.
3433   DCHECK(!FLAG_enable_ool_constant_pool);
3434   return isolate->factory()->empty_constant_pool_array();
3438 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
  // Intentionally a no-op on x64; see NewConstantPool.
3439   // No out-of-line constant pool support.
3440   DCHECK(!FLAG_enable_ool_constant_pool);
// Bitmask of relocation modes whose targets must be re-resolved ("applied")
// when generated code is moved in memory.
3445 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3446                                   1 << RelocInfo::RUNTIME_ENTRY |
3447                                   1 << RelocInfo::INTERNAL_REFERENCE |
3448                                   1 << RelocInfo::CODE_AGE_SEQUENCE;
3451 bool RelocInfo::IsCodedSpecially() {
3452   // The deserializer needs to know whether a pointer is specially coded. Being
3453   // specially coded on x64 means that it is a relative 32 bit address, as used
3454   // by branch instructions.
  // Equivalent to: is this mode in kApplyMask?
3455   return (1 << rmode_) & kApplyMask;
// x64 has no out-of-line constant pool (see NewConstantPool above), so this
// presumably always returns false -- the body is elided in this excerpt;
// TODO(review): confirm against the full file.
3459 bool RelocInfo::IsInConstantPool() {
3464 } } // namespace v8::internal
3466 #endif // V8_TARGET_ARCH_X64