1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/macro-assembler.h"
10 #include "src/serialize.h"
15 // -----------------------------------------------------------------------------
16 // Implementation of CpuFeatures
18 void CpuFeatures::ProbeImpl(bool cross_compile) {
// Probes the host CPU and ORs optional feature bits into supported_.
// NOTE(review): this listing elides interior lines (original numbering
// jumps), so statements/braces may be missing from view.
20 CHECK(cpu.has_sse2()); // SSE2 support is mandatory.
21 CHECK(cpu.has_cmov()); // CMOV support is mandatory.
23 // Only use statically determined features for cross compile (snapshot).
24 if (cross_compile) return;
// Each optional feature is enabled only if both the CPU reports it and
// the corresponding runtime flag allows it.
26 if (cpu.has_sse41() && FLAG_enable_sse4_1) supported_ |= 1u << SSE4_1;
27 if (cpu.has_sse3() && FLAG_enable_sse3) supported_ |= 1u << SSE3;
28 // SAHF is not generally available in long mode.
29 if (cpu.has_sahf() && FLAG_enable_sahf) supported_|= 1u << SAHF;
// Intentionally empty: this target prints no feature/target diagnostics.
33 void CpuFeatures::PrintTarget() { }
34 void CpuFeatures::PrintFeatures() { }
37 // -----------------------------------------------------------------------------
38 // Implementation of RelocInfo
40 // Patch the code at the current PC with a call to the target address.
41 // Additional guard int3 instructions can be added if required.
42 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
43 int code_size = Assembler::kCallSequenceLength + guard_bytes;
45 // Create a code patcher.
46 CodePatcher patcher(pc_, code_size);
48 // Add a label for checking the size of the code used for returning.
51 patcher.masm()->bind(&check_codesize);
// Load the absolute target into the scratch register, then call through
// it; this is the fixed-length call sequence checked below.
55 patcher.masm()->movp(kScratchRegister, reinterpret_cast<void*>(target),
56 Assembler::RelocInfoNone());
57 patcher.masm()->call(kScratchRegister);
59 // Check that the size of the code generated is as expected.
60 DCHECK_EQ(Assembler::kCallSequenceLength,
61 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
63 // Add the requested number of int3 instructions after the call.
64 for (int i = 0; i < guard_bytes; i++) {
65 patcher.masm()->int3();
// Copies instruction_count raw bytes over the code at pc_ and then
// flushes the instruction cache so the CPU sees the new code.
70 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
71 // Patch the code at the current address with the supplied instructions.
72 for (int i = 0; i < instruction_count; i++) {
73 *(pc_ + i) = *(instructions + i);
76 // Indicate that code has changed.
77 CpuFeatures::FlushICache(pc_, instruction_count);
81 // -----------------------------------------------------------------------------
82 // Register constants.
// Maps allocation index -> hardware register code (see the register list
// in the comment below). NOTE(review): the declaration's first line is
// elided from this listing.
85 Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
86 // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
87 0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
// Inverse table: hardware register code -> allocation index, with -1 for
// register codes that are not allocatable.
90 const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
91 0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
95 // -----------------------------------------------------------------------------
96 // Implementation of Operand
// [base + disp] operand. Encodes ModR/M (and SIB when the base register
// forces it) into buf_.
98 Operand::Operand(Register base, int32_t disp) : rex_(0) {
100 if (base.is(rsp) || base.is(r12)) {
101 // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
102 set_sib(times_1, rsp, base);
// Mode 0 (no displacement) is not usable with rbp/r13 as base — that
// encoding means RIP-relative/absolute, so those bases need a disp byte.
105 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
107 } else if (is_int8(disp)) {
// [base + index*scale + disp] operand. NOTE(review): the index/scale
// parameter lines are elided from this listing.
117 Operand::Operand(Register base,
120 int32_t disp) : rex_(0) {
121 DCHECK(!index.is(rsp));
123 set_sib(scale, index, base);
124 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
125 // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
126 // possibly set by set_sib.
128 } else if (is_int8(disp)) {
// [index*scale + disp] operand with no base: uses rbp (0x05) as the SIB
// base placeholder, which with mod 0 means "no base register".
138 Operand::Operand(Register index,
140 int32_t disp) : rex_(0) {
141 DCHECK(!index.is(rsp));
144 set_sib(scale, index, rbp);
// Builds a copy of |operand| with |offset| added to its displacement,
// re-encoding the ModR/M mode (0/1/2) to the smallest displacement size
// that still round-trips.
149 Operand::Operand(const Operand& operand, int32_t offset) {
150 DCHECK(operand.len_ >= 1);
151 // Operand encodes REX ModR/M [SIB] [Disp].
152 byte modrm = operand.buf_[0];
153 DCHECK(modrm < 0xC0); // Disallow mode 3 (register target).
154 bool has_sib = ((modrm & 0x07) == 0x04);
155 byte mode = modrm & 0xC0;
156 int disp_offset = has_sib ? 2 : 1;
157 int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
158 // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
160 bool is_baseless = (mode == 0) && (base_reg == 0x05); // No base or RIP base.
161 int32_t disp_value = 0;
162 if (mode == 0x80 || is_baseless) {
163 // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
164 disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
165 } else if (mode == 0x40) {
166 // Mode 1: Byte displacement.
167 disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
170 // Write new operand with same registers, but with modified displacement.
171 DCHECK(offset >= 0 ? disp_value + offset > disp_value
172 : disp_value + offset < disp_value); // No overflow.
173 disp_value += offset;
175 if (!is_int8(disp_value) || is_baseless) {
176 // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
177 buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
178 len_ = disp_offset + 4;
179 Memory::int32_at(&buf_[disp_offset]) = disp_value;
180 } else if (disp_value != 0 || (base_reg == 0x05)) {
181 // Need 8 bits of displacement.
182 buf_[0] = (modrm & 0x3f) | 0x40; // Mode 1.
183 len_ = disp_offset + 1;
184 buf_[disp_offset] = static_cast<byte>(disp_value);
186 // Need no displacement.
187 buf_[0] = (modrm & 0x3f); // Mode 0.
// Preserve the SIB byte from the source operand when present.
191 buf_[1] = operand.buf_[1];
// Returns true if |reg| participates in this memory operand's address
// (as base or index), decoding ModR/M, optional SIB, and REX bits.
196 bool Operand::AddressUsesRegister(Register reg) const {
197 int code = reg.code();
198 DCHECK((buf_[0] & 0xC0) != 0xC0); // Always a memory operand.
199 // Start with only low three bits of base register. Initial decoding doesn't
200 // distinguish on the REX.B bit.
201 int base_code = buf_[0] & 0x07;
202 if (base_code == rsp.code()) {
203 // SIB byte present in buf_[1].
204 // Check the index register from the SIB byte + REX.X prefix.
205 int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
206 // Index code (including REX.X) of 0x04 (rsp) means no index register.
207 if (index_code != rsp.code() && index_code == code) return true;
208 // Add REX.B to get the full base register code.
209 base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
210 // A base register of 0x05 (rbp) with mod = 0 means no base register.
211 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
212 return code == base_code;
214 // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
216 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
217 base_code |= ((rex_ & 0x01) << 3);
218 return code == base_code;
223 // -----------------------------------------------------------------------------
224 // Implementation of Assembler.
226 #ifdef GENERATED_CODE_COVERAGE
227 static void InitCoverageLog();
// Constructs an assembler writing into |buffer| (or an internally owned
// buffer when none is provided — handled by AssemblerBase).
230 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
231 : AssemblerBase(isolate, buffer, buffer_size),
233 positions_recorder_(this) {
234 // Clear the buffer in debug mode unless it was provided by the
235 // caller in which case we can't be sure it's okay to overwrite
236 // existing code in it.
239 memset(buffer_, 0xCC, buffer_size_); // int3
// Relocation info grows downward from the end of the buffer toward pc_.
243 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
246 #ifdef GENERATED_CODE_COVERAGE
// Fills in |desc| with the finished code: instructions from the buffer
// start, relocation info from the buffer end.
252 void Assembler::GetCode(CodeDesc* desc) {
253 // Finalize code (at this point overflow() may be true, but the gap ensures
254 // that we are still not overlapping instructions and relocation info).
255 DCHECK(pc_ <= reloc_info_writer.pos()); // No overlap.
256 // Set up code descriptor.
257 desc->buffer = buffer_;
258 desc->buffer_size = buffer_size_;
259 desc->instr_size = pc_offset();
260 DCHECK(desc->instr_size > 0); // Zero-size code objects upset the system.
262 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
// Pads the instruction stream so pc_offset() becomes a multiple of m
// (m must be a power of 2; padding emission is elided from this listing).
267 void Assembler::Align(int m) {
268 DCHECK(IsPowerOf2(m));
269 int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
274 void Assembler::CodeTargetAlign() {
275 Align(16); // Preferred alignment of jump targets on x64.
// Recognizes single-byte NOP (0x90, possibly 0x66-prefixed) and the
// multi-byte 0F 1F NOP family at |addr|.
279 bool Assembler::IsNop(Address addr) {
281 while (*a == 0x66) a++;
282 if (*a == 0x90) return true;
283 if (a[0] == 0xf && a[1] == 0x1f) return true;
// Binds label L to position |pos|, back-patching every pending use: the
// 32-bit ("far") uses form a linked list threaded through the code, and
// near uses form a separate byte-offset chain.
288 void Assembler::bind_to(Label* L, int pos) {
289 DCHECK(!L->is_bound()); // Label may only be bound once.
290 DCHECK(0 <= pos && pos <= pc_offset()); // Position must be valid.
291 if (L->is_linked()) {
292 int current = L->pos();
293 int next = long_at(current);
294 while (next != current) {
295 // Relative address, relative to point after address.
296 int imm32 = pos - (current + sizeof(int32_t));
297 long_at_put(current, imm32);
// Advance along the chain; NOTE(review): the line updating |current|
// is elided from this listing.
299 next = long_at(next);
301 // Fix up last fixup on linked list.
302 int last_imm32 = pos - (current + sizeof(int32_t));
303 long_at_put(current, last_imm32);
// Walk the near-jump chain: each entry stores the (non-positive) byte
// offset to the next entry; patch each with an 8-bit displacement.
305 while (L->is_near_linked()) {
306 int fixup_pos = L->near_link_pos();
308 static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
309 DCHECK(offset_to_next <= 0);
310 int disp = pos - (fixup_pos + sizeof(int8_t));
311 CHECK(is_int8(disp));
312 set_byte_at(fixup_pos, disp);
313 if (offset_to_next < 0) {
314 L->link_to(fixup_pos + offset_to_next, Label::kNear);
// Binds L to the current emission position.
323 void Assembler::bind(Label* L) {
324 bind_to(L, pc_offset());
// Doubles the code buffer, copies instructions (from the front) and
// relocation info (from the back), rebases internal pointers, and fixes
// up internal references.
328 void Assembler::GrowBuffer() {
329 DCHECK(buffer_overflow());
330 if (!own_buffer_) FATAL("external code buffer is too small");
332 // Compute new buffer size.
333 CodeDesc desc; // the new buffer
334 desc.buffer_size = 2 * buffer_size_;
336 // Some internal data structures overflow for very large buffers,
337 // they must ensure that kMaximalBufferSize is not too large.
338 if ((desc.buffer_size > kMaximalBufferSize) ||
339 (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
340 V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
343 // Set up new buffer.
344 desc.buffer = NewArray<byte>(desc.buffer_size);
345 desc.instr_size = pc_offset();
347 static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
349 // Clear the buffer in debug mode. Use 'int3' instructions to make
350 // sure to get into problems if we ever run uninitialized code.
352 memset(desc.buffer, 0xCC, desc.buffer_size);
// pc_delta rebases instruction pointers; rc_delta rebases the
// end-anchored relocation info region.
356 intptr_t pc_delta = desc.buffer - buffer_;
357 intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
358 (buffer_ + buffer_size_);
359 MemMove(desc.buffer, buffer_, desc.instr_size);
360 MemMove(rc_delta + reloc_info_writer.pos(), reloc_info_writer.pos(),
364 DeleteArray(buffer_);
365 buffer_ = desc.buffer;
366 buffer_size_ = desc.buffer_size;
368 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
369 reloc_info_writer.last_pc() + pc_delta);
371 // Relocate runtime entries.
372 for (RelocIterator it(desc); !it.done(); it.next()) {
373 RelocInfo::Mode rmode = it.rinfo()->rmode();
374 if (rmode == RelocInfo::INTERNAL_REFERENCE) {
375 intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
376 if (*p != 0) { // 0 means uninitialized.
382 DCHECK(!buffer_overflow());
// Emits a pre-encoded memory operand, ORing the 3-bit |code| (register
// or opcode extension) into the reg field of the ModR/M byte.
386 void Assembler::emit_operand(int code, const Operand& adr) {
387 DCHECK(is_uint3(code));
388 const unsigned length = adr.len_;
391 // Emit updated ModR/M byte containing the given register.
392 DCHECK((adr.buf_[0] & 0x38) == 0);
393 pc_[0] = adr.buf_[0] | code << 3;
395 // Emit the rest of the encoded operand.
396 for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
401 // Assembler Instruction implementations.
// Generic ALU op, register <- memory form: REX, opcode, operand.
// NOTE(review): parameter lines and emit(opcode) lines are elided from
// this listing throughout this family.
403 void Assembler::arithmetic_op(byte opcode,
407 EnsureSpace ensure_space(this);
408 emit_rex(reg, op, size);
410 emit_operand(reg, op);
// Register-register ALU op. When rm_reg's low bits are 4 (rsp/r12) the
// operands are swapped and the opcode direction bit flipped to avoid a
// SIB byte.
414 void Assembler::arithmetic_op(byte opcode,
418 EnsureSpace ensure_space(this);
419 DCHECK((opcode & 0xC6) == 2);
420 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
421 // Swap reg and rm_reg and change opcode operand order.
422 emit_rex(rm_reg, reg, size);
424 emit_modrm(rm_reg, reg);
426 emit_rex(reg, rm_reg, size);
428 emit_modrm(reg, rm_reg);
// 16-bit variant: same swap trick, operand-size override prefix elided
// from view; REX is optional (only for extended registers).
433 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
434 EnsureSpace ensure_space(this);
435 DCHECK((opcode & 0xC6) == 2);
436 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
437 // Swap reg and rm_reg and change opcode operand order.
439 emit_optional_rex_32(rm_reg, reg);
441 emit_modrm(rm_reg, reg);
444 emit_optional_rex_32(reg, rm_reg);
446 emit_modrm(reg, rm_reg);
// 16-bit register <- memory ALU op.
451 void Assembler::arithmetic_op_16(byte opcode,
453 const Operand& rm_reg) {
454 EnsureSpace ensure_space(this);
456 emit_optional_rex_32(reg, rm_reg);
458 emit_operand(reg, rm_reg);
// 8-bit register <- memory ALU op; high byte registers need REX.
462 void Assembler::arithmetic_op_8(byte opcode, Register reg, const Operand& op) {
463 EnsureSpace ensure_space(this);
464 if (!reg.is_byte_register()) {
465 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
469 emit_operand(reg, op);
// 8-bit register-register ALU op with the same rsp/r12 swap as above.
473 void Assembler::arithmetic_op_8(byte opcode, Register reg, Register rm_reg) {
474 EnsureSpace ensure_space(this);
475 DCHECK((opcode & 0xC6) == 2);
476 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
477 // Swap reg and rm_reg and change opcode operand order.
478 if (!rm_reg.is_byte_register() || !reg.is_byte_register()) {
479 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
480 emit_rex_32(rm_reg, reg);
483 emit_modrm(rm_reg, reg);
485 if (!reg.is_byte_register() || !rm_reg.is_byte_register()) {
486 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
487 emit_rex_32(reg, rm_reg);
490 emit_modrm(reg, rm_reg);
// ALU op with immediate, register destination. Uses the short sign-
// extended imm8 form when possible, the rax accumulator short form
// (0x05 | subcode<<3) when the destination is rax, otherwise imm32.
// NOTE(review): parameter and emit(opcode) lines are elided throughout.
495 void Assembler::immediate_arithmetic_op(byte subcode,
499 EnsureSpace ensure_space(this);
501 if (is_int8(src.value_)) {
503 emit_modrm(subcode, dst);
505 } else if (dst.is(rax)) {
506 emit(0x05 | (subcode << 3));
510 emit_modrm(subcode, dst);
// Same, with a memory destination (no accumulator short form).
515 void Assembler::immediate_arithmetic_op(byte subcode,
519 EnsureSpace ensure_space(this);
521 if (is_int8(src.value_)) {
523 emit_operand(subcode, dst);
527 emit_operand(subcode, dst);
// 16-bit immediate ALU op, register destination; 0x66 selects 16-bit
// operand size.
533 void Assembler::immediate_arithmetic_op_16(byte subcode,
536 EnsureSpace ensure_space(this);
537 emit(0x66); // Operand size override prefix.
538 emit_optional_rex_32(dst);
539 if (is_int8(src.value_)) {
541 emit_modrm(subcode, dst);
543 } else if (dst.is(rax)) {
544 emit(0x05 | (subcode << 3));
548 emit_modrm(subcode, dst);
// 16-bit immediate ALU op, memory destination.
554 void Assembler::immediate_arithmetic_op_16(byte subcode,
557 EnsureSpace ensure_space(this);
558 emit(0x66); // Operand size override prefix.
559 emit_optional_rex_32(dst);
560 if (is_int8(src.value_)) {
562 emit_operand(subcode, dst);
566 emit_operand(subcode, dst);
// 8-bit immediate ALU op, memory destination.
572 void Assembler::immediate_arithmetic_op_8(byte subcode,
575 EnsureSpace ensure_space(this);
576 emit_optional_rex_32(dst);
577 DCHECK(is_int8(src.value_) || is_uint8(src.value_));
579 emit_operand(subcode, dst);
// 8-bit immediate ALU op, register destination.
584 void Assembler::immediate_arithmetic_op_8(byte subcode,
587 EnsureSpace ensure_space(this);
588 if (!dst.is_byte_register()) {
589 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
592 DCHECK(is_int8(src.value_) || is_uint8(src.value_));
594 emit_modrm(subcode, dst);
// Shift/rotate by an immediate; the shift-by-1 form uses a separate
// opcode with no immediate byte. |subcode| selects the operation via
// the ModR/M reg field.
599 void Assembler::shift(Register dst,
600 Immediate shift_amount,
603 EnsureSpace ensure_space(this);
604 DCHECK(size == kInt64Size ? is_uint6(shift_amount.value_)
605 : is_uint5(shift_amount.value_));
606 if (shift_amount.value_ == 1) {
609 emit_modrm(subcode, dst);
613 emit_modrm(subcode, dst);
614 emit(shift_amount.value_);
// Shift/rotate dst by the cl register (variable count).
619 void Assembler::shift(Register dst, int subcode, int size) {
620 EnsureSpace ensure_space(this);
623 emit_modrm(subcode, dst);
// bit test: bt [dst], src (64-bit).
627 void Assembler::bt(const Operand& dst, Register src) {
628 EnsureSpace ensure_space(this);
629 emit_rex_64(src, dst);
632 emit_operand(src, dst);
// bit test and set: bts [dst], src (64-bit).
636 void Assembler::bts(const Operand& dst, Register src) {
637 EnsureSpace ensure_space(this);
638 emit_rex_64(src, dst);
641 emit_operand(src, dst);
// bit scan reverse, 32-bit: bsrl dst, src.
645 void Assembler::bsrl(Register dst, Register src) {
646 EnsureSpace ensure_space(this);
647 emit_optional_rex_32(dst, src);
650 emit_modrm(dst, src);
// call to a label: emits E8 with a 32-bit displacement, or links into
// the label's fixup chain when the target is not yet bound.
654 void Assembler::call(Label* L) {
655 positions_recorder()->WriteRecordedPositions();
656 EnsureSpace ensure_space(this);
657 // 1110 1000 #32-bit disp.
660 int offset = L->pos() - pc_offset() - sizeof(int32_t);
663 } else if (L->is_linked()) {
665 L->link_to(pc_offset() - sizeof(int32_t));
667 DCHECK(L->is_unused());
668 int32_t current = pc_offset();
// call to a runtime entry address, recorded via relocation info.
675 void Assembler::call(Address entry, RelocInfo::Mode rmode) {
676 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
677 positions_recorder()->WriteRecordedPositions();
678 EnsureSpace ensure_space(this);
679 // 1110 1000 #32-bit disp.
681 emit_runtime_entry(entry, rmode);
// call to a Code object target, recorded via relocation info.
685 void Assembler::call(Handle<Code> target,
686 RelocInfo::Mode rmode,
687 TypeFeedbackId ast_id) {
688 positions_recorder()->WriteRecordedPositions();
689 EnsureSpace ensure_space(this);
690 // 1110 1000 #32-bit disp.
692 emit_code_target(target, rmode, ast_id);
// Indirect call through a register.
696 void Assembler::call(Register adr) {
697 positions_recorder()->WriteRecordedPositions();
698 EnsureSpace ensure_space(this);
699 // Opcode: FF /2 r64.
700 emit_optional_rex_32(adr);
702 emit_modrm(0x2, adr);
// Indirect call through a memory operand.
706 void Assembler::call(const Operand& op) {
707 positions_recorder()->WriteRecordedPositions();
708 EnsureSpace ensure_space(this);
709 // Opcode: FF /2 m64.
710 emit_optional_rex_32(op);
712 emit_operand(0x2, op);
716 // Calls directly to the given address using a relative offset.
717 // Should only ever be used in Code objects for calls within the
718 // same Code object. Should not be used when generating new code (use labels),
719 // but only when patching existing code.
720 void Assembler::call(Address target) {
721 positions_recorder()->WriteRecordedPositions();
722 EnsureSpace ensure_space(this);
723 // 1110 1000 #32-bit disp.
// Displacement is relative to the end of the 4-byte immediate.
725 Address source = pc_ + 4;
726 intptr_t displacement = target - source;
727 DCHECK(is_int32(displacement));
728 emitl(static_cast<int32_t>(displacement));
// Single-byte flag/convert instructions; the emit(opcode) lines are
// elided from this listing.
732 void Assembler::clc() {
733 EnsureSpace ensure_space(this);
738 void Assembler::cld() {
739 EnsureSpace ensure_space(this);
744 void Assembler::cdq() {
745 EnsureSpace ensure_space(this);
// Conditional move, 64-bit. The |always|/|never| special cases fall
// back to plain moves / no code (bodies elided from view).
750 void Assembler::cmovq(Condition cc, Register dst, Register src) {
753 } else if (cc == never) {
756 // No need to check CpuInfo for CMOV support, it's a required part of the
757 // 64-bit architecture.
758 DCHECK(cc >= 0); // Use mov for unconditional moves.
759 EnsureSpace ensure_space(this);
760 // Opcode: REX.W 0f 40 + cc /r.
761 emit_rex_64(dst, src);
764 emit_modrm(dst, src);
// Conditional move, 64-bit, memory source.
768 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
771 } else if (cc == never) {
775 EnsureSpace ensure_space(this);
776 // Opcode: REX.W 0f 40 + cc /r.
777 emit_rex_64(dst, src);
780 emit_operand(dst, src);
// Conditional move, 32-bit.
784 void Assembler::cmovl(Condition cc, Register dst, Register src) {
787 } else if (cc == never) {
791 EnsureSpace ensure_space(this);
792 // Opcode: 0f 40 + cc /r.
793 emit_optional_rex_32(dst, src);
796 emit_modrm(dst, src);
// Conditional move, 32-bit, memory source.
800 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
803 } else if (cc == never) {
807 EnsureSpace ensure_space(this);
808 // Opcode: 0f 40 + cc /r.
809 emit_optional_rex_32(dst, src);
812 emit_operand(dst, src);
// Compare al with an 8-bit immediate (accumulator short form).
816 void Assembler::cmpb_al(Immediate imm8) {
817 DCHECK(is_int8(imm8.value_) || is_uint8(imm8.value_));
818 EnsureSpace ensure_space(this);
824 void Assembler::cpuid() {
825 EnsureSpace ensure_space(this);
831 void Assembler::cqo() {
832 EnsureSpace ensure_space(this);
// Decrement register (FF /1), width chosen by |size|.
838 void Assembler::emit_dec(Register dst, int size) {
839 EnsureSpace ensure_space(this);
842 emit_modrm(0x1, dst);
// Decrement memory operand (FF /1).
846 void Assembler::emit_dec(const Operand& dst, int size) {
847 EnsureSpace ensure_space(this);
850 emit_operand(1, dst);
// 8-bit decrement of a register; non-low-byte registers need REX.
854 void Assembler::decb(Register dst) {
855 EnsureSpace ensure_space(this);
856 if (!dst.is_byte_register()) {
857 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
861 emit_modrm(0x1, dst);
// 8-bit decrement of a memory operand.
865 void Assembler::decb(const Operand& dst) {
866 EnsureSpace ensure_space(this);
867 emit_optional_rex_32(dst);
869 emit_operand(1, dst);
// enter imm16, 0 — the frame-size immediate is always 16 bits.
873 void Assembler::enter(Immediate size) {
874 EnsureSpace ensure_space(this);
876 emitw(size.value_); // 16 bit operand, always.
881 void Assembler::hlt() {
882 EnsureSpace ensure_space(this);
// Signed divide of rdx:rax by src (F7 /7).
887 void Assembler::emit_idiv(Register src, int size) {
888 EnsureSpace ensure_space(this);
891 emit_modrm(0x7, src);
// Unsigned divide of rdx:rax by src (F7 /6).
895 void Assembler::emit_div(Register src, int size) {
896 EnsureSpace ensure_space(this);
899 emit_modrm(0x6, src);
// Signed multiply into rdx:rax (F7 /5).
903 void Assembler::emit_imul(Register src, int size) {
904 EnsureSpace ensure_space(this);
907 emit_modrm(0x5, src);
// Two-operand imul: dst *= src.
911 void Assembler::emit_imul(Register dst, Register src, int size) {
912 EnsureSpace ensure_space(this);
913 emit_rex(dst, src, size);
916 emit_modrm(dst, src);
// Two-operand imul with memory source.
920 void Assembler::emit_imul(Register dst, const Operand& src, int size) {
921 EnsureSpace ensure_space(this);
922 emit_rex(dst, src, size);
925 emit_operand(dst, src);
// Three-operand imul: dst = src * imm, using the short imm8 form when
// the immediate fits.
929 void Assembler::emit_imul(Register dst, Register src, Immediate imm, int size) {
930 EnsureSpace ensure_space(this);
931 emit_rex(dst, src, size);
932 if (is_int8(imm.value_)) {
934 emit_modrm(dst, src);
938 emit_modrm(dst, src);
// Increment register (FF /0).
944 void Assembler::emit_inc(Register dst, int size) {
945 EnsureSpace ensure_space(this);
948 emit_modrm(0x0, dst);
// Increment memory operand (FF /0).
952 void Assembler::emit_inc(const Operand& dst, int size) {
953 EnsureSpace ensure_space(this);
956 emit_operand(0, dst);
// Breakpoint instruction (0xCC).
960 void Assembler::int3() {
961 EnsureSpace ensure_space(this);
// Conditional jump to a label. Bound labels get a short (imm8) or long
// (0F 8x imm32) encoding; unbound labels are linked into the label's
// near or far fixup chain.
966 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
970 } else if (cc == never) {
973 EnsureSpace ensure_space(this);
974 DCHECK(is_uint4(cc));
976 const int short_size = 2;
977 const int long_size = 6;
978 int offs = L->pos() - pc_offset();
980 // Determine whether we can use 1-byte offsets for backwards branches,
981 // which have a max range of 128 bytes.
983 // We also need to check predictable_code_size() flag here, because on x64,
984 // when the full code generator recompiles code for debugging, some places
985 // need to be padded out to a certain size. The debugger is keeping track of
986 // how often it did this so that it can adjust return addresses on the
987 // stack, but if the size of jump instructions can also change, that's not
988 // enough and the calculated offsets would be incorrect.
989 if (is_int8(offs - short_size) && !predictable_code_size()) {
990 // 0111 tttn #8-bit disp.
992 emit((offs - short_size) & 0xFF);
994 // 0000 1111 1000 tttn #32-bit disp.
997 emitl(offs - long_size);
999 } else if (distance == Label::kNear) {
1000 // 0111 tttn #8-bit disp
// Unbound near jump: chain the 8-bit displacement slots together.
1003 if (L->is_near_linked()) {
1004 int offset = L->near_link_pos() - pc_offset();
1005 DCHECK(is_int8(offset));
1006 disp = static_cast<byte>(offset & 0xFF);
1008 L->link_to(pc_offset(), Label::kNear);
1010 } else if (L->is_linked()) {
1011 // 0000 1111 1000 tttn #32-bit disp.
1015 L->link_to(pc_offset() - sizeof(int32_t));
1017 DCHECK(L->is_unused());
1020 int32_t current = pc_offset();
1022 L->link_to(current);
// Conditional jump to a runtime entry address.
1027 void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
1028 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
1029 EnsureSpace ensure_space(this);
1030 DCHECK(is_uint4(cc));
1033 emit_runtime_entry(entry, rmode);
// Conditional jump to a Code object target.
1037 void Assembler::j(Condition cc,
1038 Handle<Code> target,
1039 RelocInfo::Mode rmode) {
1040 EnsureSpace ensure_space(this);
1041 DCHECK(is_uint4(cc));
1042 // 0000 1111 1000 tttn #32-bit disp.
1045 emit_code_target(target, rmode);
// Unconditional jump to a label, mirroring j(): short EB imm8 when the
// backward offset fits, else E9 imm32; unbound labels are chained.
1049 void Assembler::jmp(Label* L, Label::Distance distance) {
1050 EnsureSpace ensure_space(this);
1051 const int short_size = sizeof(int8_t);
1052 const int long_size = sizeof(int32_t);
1053 if (L->is_bound()) {
1054 int offs = L->pos() - pc_offset() - 1;
1056 if (is_int8(offs - short_size) && !predictable_code_size()) {
1057 // 1110 1011 #8-bit disp.
1059 emit((offs - short_size) & 0xFF);
1061 // 1110 1001 #32-bit disp.
1063 emitl(offs - long_size);
1065 } else if (distance == Label::kNear) {
// Unbound near jump: chain the 8-bit displacement slots together.
1068 if (L->is_near_linked()) {
1069 int offset = L->near_link_pos() - pc_offset();
1070 DCHECK(is_int8(offset));
1071 disp = static_cast<byte>(offset & 0xFF);
1073 L->link_to(pc_offset(), Label::kNear);
1075 } else if (L->is_linked()) {
1076 // 1110 1001 #32-bit disp.
1079 L->link_to(pc_offset() - long_size);
1081 // 1110 1001 #32-bit disp.
1082 DCHECK(L->is_unused());
1084 int32_t current = pc_offset();
1086 L->link_to(current);
// Jump to a Code object target.
1091 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1092 EnsureSpace ensure_space(this);
1093 // 1110 1001 #32-bit disp.
1095 emit_code_target(target, rmode);
// Jump to a runtime entry address.
1099 void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
1100 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
1101 EnsureSpace ensure_space(this);
1102 DCHECK(RelocInfo::IsRuntimeEntry(rmode));
1104 emit_runtime_entry(entry, rmode);
// Indirect jump through a register (FF /4).
1108 void Assembler::jmp(Register target) {
1109 EnsureSpace ensure_space(this);
1111 emit_optional_rex_32(target);
1113 emit_modrm(0x4, target);
// Indirect jump through a memory operand (FF /4).
1117 void Assembler::jmp(const Operand& src) {
1118 EnsureSpace ensure_space(this);
1120 emit_optional_rex_32(src);
1122 emit_operand(0x4, src);
// lea dst, [src]: computes the effective address without a memory load.
1126 void Assembler::emit_lea(Register dst, const Operand& src, int size) {
1127 EnsureSpace ensure_space(this);
1128 emit_rex(dst, src, size);
1130 emit_operand(dst, src);
// Loads rax from an absolute address (A1 form); pointer width decides
// whether the REX.W prefix and a full 64-bit address are used.
1134 void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1135 EnsureSpace ensure_space(this);
1136 if (kPointerSize == kInt64Size) {
1137 emit(0x48); // REX.W
1141 DCHECK(kPointerSize == kInt32Size);
1144 // In 64-bit mode, need to zero extend the operand to 8 bytes.
1145 // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1146 // Developer's Manual Volume 2.
// Convenience overload for external references.
1152 void Assembler::load_rax(ExternalReference ref) {
1153 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1157 void Assembler::leave() {
1158 EnsureSpace ensure_space(this);
// 8-bit moves. Registers outside al/bl/cl/dl need a REX prefix to
// select the low byte (spl/bpl/sil/dil/r8b..).
1163 void Assembler::movb(Register dst, const Operand& src) {
1164 EnsureSpace ensure_space(this);
1165 if (!dst.is_byte_register()) {
1166 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1167 emit_rex_32(dst, src);
1169 emit_optional_rex_32(dst, src);
1172 emit_operand(dst, src);
// movb dst, imm8 using the B0+reg short form.
1176 void Assembler::movb(Register dst, Immediate imm) {
1177 EnsureSpace ensure_space(this);
1178 if (!dst.is_byte_register()) {
1181 emit(0xB0 + dst.low_bits());
// movb [dst], src.
1186 void Assembler::movb(const Operand& dst, Register src) {
1187 EnsureSpace ensure_space(this);
1188 if (!src.is_byte_register()) {
1189 emit_rex_32(src, dst);
1191 emit_optional_rex_32(src, dst);
1194 emit_operand(src, dst);
// movb [dst], imm8 (C6 /0).
1198 void Assembler::movb(const Operand& dst, Immediate imm) {
1199 EnsureSpace ensure_space(this);
1200 emit_optional_rex_32(dst);
1202 emit_operand(0x0, dst);
1203 emit(static_cast<byte>(imm.value_));
// 16-bit moves; the 0x66 operand-size override lines are elided.
1207 void Assembler::movw(Register dst, const Operand& src) {
1208 EnsureSpace ensure_space(this);
1210 emit_optional_rex_32(dst, src);
1212 emit_operand(dst, src);
1216 void Assembler::movw(const Operand& dst, Register src) {
1217 EnsureSpace ensure_space(this);
1219 emit_optional_rex_32(src, dst);
1221 emit_operand(src, dst);
// movw [dst], imm16: immediate emitted as two little-endian bytes.
1225 void Assembler::movw(const Operand& dst, Immediate imm) {
1226 EnsureSpace ensure_space(this);
1228 emit_optional_rex_32(dst);
1230 emit_operand(0x0, dst);
1231 emit(static_cast<byte>(imm.value_ & 0xff));
1232 emit(static_cast<byte>(imm.value_ >> 8));
// mov dst, [src] with |size| selecting 32- vs 64-bit operand width.
1236 void Assembler::emit_mov(Register dst, const Operand& src, int size) {
1237 EnsureSpace ensure_space(this);
1238 emit_rex(dst, src, size);
1240 emit_operand(dst, src);
// Register-register mov; swaps direction when src's low bits are 4
// (rsp/r12) — mirroring the arithmetic_op encoding choice.
1244 void Assembler::emit_mov(Register dst, Register src, int size) {
1245 EnsureSpace ensure_space(this);
1246 if (src.low_bits() == 4) {
1247 emit_rex(src, dst, size);
1249 emit_modrm(src, dst);
1251 emit_rex(dst, src, size);
1253 emit_modrm(dst, src);
// mov [dst], src.
1258 void Assembler::emit_mov(const Operand& dst, Register src, int size) {
1259 EnsureSpace ensure_space(this);
1260 emit_rex(src, dst, size);
1262 emit_operand(src, dst);
// mov dst, imm: 64-bit uses the C7 /0 sign-extended-imm32 form, 32-bit
// uses the B8+reg short form.
1266 void Assembler::emit_mov(Register dst, Immediate value, int size) {
1267 EnsureSpace ensure_space(this);
1268 emit_rex(dst, size);
1269 if (size == kInt64Size) {
1271 emit_modrm(0x0, dst);
1273 DCHECK(size == kInt32Size);
1274 emit(0xB8 + dst.low_bits());
// mov [dst], imm (C7 /0).
1280 void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
1281 EnsureSpace ensure_space(this);
1282 emit_rex(dst, size);
1284 emit_operand(0x0, dst);
// Loads a pointer-sized constant with relocation info (B8+reg form).
1289 void Assembler::movp(Register dst, void* value, RelocInfo::Mode rmode) {
1290 EnsureSpace ensure_space(this);
1291 emit_rex(dst, kPointerSize);
1292 emit(0xB8 | dst.low_bits());
1293 emitp(value, rmode);
// Loads a raw 64-bit immediate (movabs).
1297 void Assembler::movq(Register dst, int64_t value) {
1298 EnsureSpace ensure_space(this);
1300 emit(0xB8 | dst.low_bits());
// Unsigned variant forwards to the signed one; same bit pattern.
1305 void Assembler::movq(Register dst, uint64_t value) {
1306 movq(dst, static_cast<int64_t>(value));
1310 // Loads the ip-relative location of the src label into the target location
1311 // (as a 32-bit offset sign extended to 64-bit).
1312 void Assembler::movl(const Operand& dst, Label* src) {
1313 EnsureSpace ensure_space(this);
1314 emit_optional_rex_32(dst);
1316 emit_operand(0, dst);
1317 if (src->is_bound()) {
1318 int offset = src->pos() - pc_offset() - sizeof(int32_t);
1319 DCHECK(offset <= 0);
// Unbound labels link this imm32 slot into the label's fixup chain,
// same scheme as call(Label*)/jmp(Label*).
1321 } else if (src->is_linked()) {
1323 src->link_to(pc_offset() - sizeof(int32_t));
1325 DCHECK(src->is_unused());
1326 int32_t current = pc_offset();
1328 src->link_to(current);
// Sign-extending loads (movsx family); emit(opcode) lines elided.
1333 void Assembler::movsxbl(Register dst, const Operand& src) {
1334 EnsureSpace ensure_space(this);
1335 emit_optional_rex_32(dst, src);
1338 emit_operand(dst, src);
1342 void Assembler::movsxbq(Register dst, const Operand& src) {
1343 EnsureSpace ensure_space(this);
1344 emit_rex_64(dst, src);
1347 emit_operand(dst, src);
1351 void Assembler::movsxwl(Register dst, const Operand& src) {
1352 EnsureSpace ensure_space(this);
1353 emit_optional_rex_32(dst, src);
1356 emit_operand(dst, src);
1360 void Assembler::movsxwq(Register dst, const Operand& src) {
1361 EnsureSpace ensure_space(this);
1362 emit_rex_64(dst, src);
1365 emit_operand(dst, src);
// movsxd: sign-extend a 32-bit value to 64 bits.
1369 void Assembler::movsxlq(Register dst, Register src) {
1370 EnsureSpace ensure_space(this);
1371 emit_rex_64(dst, src);
1373 emit_modrm(dst, src);
1377 void Assembler::movsxlq(Register dst, const Operand& src) {
1378 EnsureSpace ensure_space(this);
1379 emit_rex_64(dst, src);
1381 emit_operand(dst, src);
// Zero-extending loads (movzx family). |size| is accepted for interface
// symmetry but a 32-bit op suffices, as the comments below explain.
1385 void Assembler::emit_movzxb(Register dst, const Operand& src, int size) {
1386 EnsureSpace ensure_space(this);
1387 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1388 // there is no need to make this a 64 bit operation.
1389 emit_optional_rex_32(dst, src);
1392 emit_operand(dst, src);
1396 void Assembler::emit_movzxb(Register dst, Register src, int size) {
1397 EnsureSpace ensure_space(this);
1398 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1399 // there is no need to make this a 64 bit operation.
1400 emit_optional_rex_32(dst, src);
1403 emit_modrm(dst, src);
1407 void Assembler::emit_movzxw(Register dst, const Operand& src, int size) {
1408 EnsureSpace ensure_space(this);
1409 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1410 // there is no need to make this a 64 bit operation.
1411 emit_optional_rex_32(dst, src);
1414 emit_operand(dst, src);
1418 void Assembler::emit_movzxw(Register dst, Register src, int size) {
1419 EnsureSpace ensure_space(this);
1420 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1421 // there is no need to make this a 64 bit operation.
1422 emit_optional_rex_32(dst, src);
1425 emit_modrm(dst, src);
// rep movs string-copy variants; the emit(opcode) lines are elided.
1429 void Assembler::repmovsb() {
1430 EnsureSpace ensure_space(this);
1436 void Assembler::repmovsw() {
1437 EnsureSpace ensure_space(this);
1438 emit(0x66); // Operand size override.
// 32-/64-bit rep movs, width selected by |size|.
1444 void Assembler::emit_repmovs(int size) {
1445 EnsureSpace ensure_space(this);
// Unsigned multiply into rdx:rax (F7 /4).
1452 void Assembler::mul(Register src) {
1453 EnsureSpace ensure_space(this);
1456 emit_modrm(0x4, src);
// Two's-complement negate (F7 /3).
1460 void Assembler::emit_neg(Register dst, int size) {
1461 EnsureSpace ensure_space(this);
1462 emit_rex(dst, size);
1464 emit_modrm(0x3, dst);
1468 void Assembler::emit_neg(const Operand& dst, int size) {
1469 EnsureSpace ensure_space(this);
1472 emit_operand(3, dst);
// Single-byte NOP (0x90).
1476 void Assembler::nop() {
1477 EnsureSpace ensure_space(this);
// Bitwise NOT (F7 /2).
1482 void Assembler::emit_not(Register dst, int size) {
1483 EnsureSpace ensure_space(this);
1484 emit_rex(dst, size);
1486 emit_modrm(0x2, dst);
1490 void Assembler::emit_not(const Operand& dst, int size) {
1491 EnsureSpace ensure_space(this);
1492 emit_rex(dst, size);
1494 emit_operand(2, dst);
// Emit n bytes of NOP padding using the recommended multi-byte encodings.
// NOTE(review): elided listing — the switch emitting the byte sequences is
// omitted after the EnsureSpace line.
1498 void Assembler::Nop(int n) {
1499 // The recommended multi-byte sequences of NOP instructions from the Intel 64
1500 // and IA-32 Architectures Software Developer's Manual.
1502 // Length Assembly Byte Sequence
1503 // 2 bytes 66 NOP 66 90H
1504 // 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1505 // 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1506 // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1507 // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1508 // 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1509 // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1510 // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1513 EnsureSpace ensure_space(this);
// --- Stack operations: pop, push, pushf/popf, ret. ---
// NOTE(review): elided listing — opcode emissions and closing braces omitted.
1575 void Assembler::popq(Register dst) {
1576 EnsureSpace ensure_space(this);
1577 emit_optional_rex_32(dst);
1578 emit(0x58 | dst.low_bits());
1582 void Assembler::popq(const Operand& dst) {
1583 EnsureSpace ensure_space(this);
1584 emit_optional_rex_32(dst);
1586 emit_operand(0, dst);
1590 void Assembler::popfq() {
1591 EnsureSpace ensure_space(this);
1596 void Assembler::pushq(Register src) {
1597 EnsureSpace ensure_space(this);
1598 emit_optional_rex_32(src);
1599 emit(0x50 | src.low_bits());
1603 void Assembler::pushq(const Operand& src) {
1604 EnsureSpace ensure_space(this);
1605 emit_optional_rex_32(src);
1607 emit_operand(6, src);
// Push an immediate: short form for int8, full 32-bit form otherwise.
1611 void Assembler::pushq(Immediate value) {
1612 EnsureSpace ensure_space(this);
1613 if (is_int8(value.value_)) {
1615 emit(value.value_); // Emit low byte of value.
1618 emitl(value.value_);
// Always use the 32-bit immediate push encoding (fixed instruction length).
1623 void Assembler::pushq_imm32(int32_t imm32) {
1624 EnsureSpace ensure_space(this);
1630 void Assembler::pushfq() {
1631 EnsureSpace ensure_space(this);
// Return, optionally popping imm16 bytes of arguments (ret imm16 form).
1636 void Assembler::ret(int imm16) {
1637 EnsureSpace ensure_space(this);
1638 DCHECK(is_uint16(imm16));
1644 emit((imm16 >> 8) & 0xFF);
// Set a byte register to 0/1 based on condition cc. Pseudo-conditions
// (always/never) degenerate to an unconditional movb of the constant.
// NOTE(review): elided listing — opcode emissions omitted between lines.
1649 void Assembler::setcc(Condition cc, Register reg) {
1650 if (cc > last_condition) {
1651 movb(reg, Immediate(cc == always ? 1 : 0));
1654 EnsureSpace ensure_space(this);
1655 DCHECK(is_uint4(cc));
1656 if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
1661 emit_modrm(0x0, reg);
// Double-precision shifts (shld/shrd) by CL. Note the REX/ModRM argument
// order is (src, dst): src supplies the ModRM reg field for these opcodes.
// NOTE(review): elided listing — opcode emissions omitted between lines.
1665 void Assembler::shld(Register dst, Register src) {
1666 EnsureSpace ensure_space(this);
1667 emit_rex_64(src, dst);
1670 emit_modrm(src, dst);
1674 void Assembler::shrd(Register dst, Register src) {
1675 EnsureSpace ensure_space(this);
1676 emit_rex_64(src, dst);
1679 emit_modrm(src, dst);
// Exchange register contents. When either operand is rax the one-byte
// 0x90|reg encoding applies; otherwise pick operand order to avoid an
// unnecessary SIB byte when a low_bits()==4 register is involved.
// NOTE(review): elided listing — opcode emissions omitted between lines.
1683 void Assembler::emit_xchg(Register dst, Register src, int size) {
1684 EnsureSpace ensure_space(this);
1685 if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1686 Register other = src.is(rax) ? dst : src;
1687 emit_rex(other, size);
1688 emit(0x90 | other.low_bits());
1689 } else if (dst.low_bits() == 4) {
1690 emit_rex(dst, src, size);
1692 emit_modrm(dst, src);
1694 emit_rex(src, dst, size);
1696 emit_modrm(src, dst);
// Memory-operand form of xchg.
1701 void Assembler::emit_xchg(Register dst, const Operand& src, int size) {
1702 EnsureSpace ensure_space(this);
1703 emit_rex(dst, src, size);
1705 emit_operand(dst, src);
// Store rax to an absolute address (mov moffs form). Uses REX.W for the
// 64-bit pointer build; the 32-bit-pointer build zero-extends the operand.
// NOTE(review): elided listing — opcode/address emissions omitted.
1709 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1710 EnsureSpace ensure_space(this);
1711 if (kPointerSize == kInt64Size) {
1712 emit(0x48); // REX.W
1716 DCHECK(kPointerSize == kInt32Size);
1719 // In 64-bit mode, need to zero extend the operand to 8 bytes.
1720 // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1721 // Developer's Manual Volume 2.
// Convenience overload: store rax to an external reference address.
1727 void Assembler::store_rax(ExternalReference ref) {
1728 store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
// --- Byte-sized test instructions. REX is emitted when an operand is not
// one of the legacy byte registers (al, bl, cl, dl). ---
// NOTE(review): elided listing — opcode emissions omitted between lines.
1732 void Assembler::testb(Register dst, Register src) {
1733 EnsureSpace ensure_space(this);
1734 if (src.low_bits() == 4) {
1735 emit_rex_32(src, dst);
1737 emit_modrm(src, dst);
1739 if (!dst.is_byte_register() || !src.is_byte_register()) {
1740 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1741 emit_rex_32(dst, src);
1744 emit_modrm(dst, src);
// test reg, imm8 — short form for rax (test al, imm8) elided above the else.
1749 void Assembler::testb(Register reg, Immediate mask) {
1750 DCHECK(is_int8(mask.value_) || is_uint8(mask.value_));
1751 EnsureSpace ensure_space(this);
1754 emit(mask.value_); // Low byte emitted.
1756 if (!reg.is_byte_register()) {
1757 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1761 emit_modrm(0x0, reg);
1762 emit(mask.value_); // Low byte emitted.
// test mem, imm8.
1767 void Assembler::testb(const Operand& op, Immediate mask) {
1768 DCHECK(is_int8(mask.value_) || is_uint8(mask.value_));
1769 EnsureSpace ensure_space(this);
1770 emit_optional_rex_32(rax, op);
1772 emit_operand(rax, op); // Operation code 0
1773 emit(mask.value_); // Low byte emitted.
// test mem, reg.
1777 void Assembler::testb(const Operand& op, Register reg) {
1778 EnsureSpace ensure_space(this);
1779 if (!reg.is_byte_register()) {
1780 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1781 emit_rex_32(reg, op);
1783 emit_optional_rex_32(reg, op);
1786 emit_operand(reg, op);
// --- Size-parameterized test instructions (32/64-bit via emit_rex). ---
// NOTE(review): elided listing — opcode emissions omitted between lines.
1790 void Assembler::emit_test(Register dst, Register src, int size) {
1791 EnsureSpace ensure_space(this);
1792 if (src.low_bits() == 4) {
1793 emit_rex(src, dst, size);
1795 emit_modrm(src, dst);
1797 emit_rex(dst, src, size);
1799 emit_modrm(dst, src);
// test reg, imm — narrows to testb when the mask fits in a byte; the rax
// short form (test rax, imm32) is selected in the elided branch.
1804 void Assembler::emit_test(Register reg, Immediate mask, int size) {
1805 // testl with a mask that fits in the low byte is exactly testb.
1806 if (is_uint8(mask.value_)) {
1810 EnsureSpace ensure_space(this);
1812 emit_rex(rax, size);
1816 emit_rex(reg, size);
1818 emit_modrm(0x0, reg);
// test mem, imm — same byte-mask narrowing as above.
1824 void Assembler::emit_test(const Operand& op, Immediate mask, int size) {
1825 // testl with a mask that fits in the low byte is exactly testb.
1826 if (is_uint8(mask.value_)) {
1830 EnsureSpace ensure_space(this);
1831 emit_rex(rax, op, size);
1833 emit_operand(rax, op); // Operation code 0
// test mem, reg.
1838 void Assembler::emit_test(const Operand& op, Register reg, int size) {
1839 EnsureSpace ensure_space(this);
1840 emit_rex(reg, op, size);
1842 emit_operand(reg, op);
1846 // FPU instructions.
// NOTE(review): elided listing — x87 opcode byte emissions omitted between
// the numbered lines; emit_operand's first argument is the /digit selecting
// the instruction within its opcode group.
1849 void Assembler::fld(int i) {
1850 EnsureSpace ensure_space(this);
1851 emit_farith(0xD9, 0xC0, i);
1855 void Assembler::fld1() {
1856 EnsureSpace ensure_space(this);
1862 void Assembler::fldz() {
1863 EnsureSpace ensure_space(this);
1869 void Assembler::fldpi() {
1870 EnsureSpace ensure_space(this);
1876 void Assembler::fldln2() {
1877 EnsureSpace ensure_space(this);
// Load single/double-precision memory operands onto the FPU stack.
1883 void Assembler::fld_s(const Operand& adr) {
1884 EnsureSpace ensure_space(this);
1885 emit_optional_rex_32(adr);
1887 emit_operand(0, adr);
1891 void Assembler::fld_d(const Operand& adr) {
1892 EnsureSpace ensure_space(this);
1893 emit_optional_rex_32(adr);
1895 emit_operand(0, adr);
// Store-and-pop to single/double-precision memory operands.
1899 void Assembler::fstp_s(const Operand& adr) {
1900 EnsureSpace ensure_space(this);
1901 emit_optional_rex_32(adr);
1903 emit_operand(3, adr);
1907 void Assembler::fstp_d(const Operand& adr) {
1908 EnsureSpace ensure_space(this);
1909 emit_optional_rex_32(adr);
1911 emit_operand(3, adr);
1915 void Assembler::fstp(int index) {
1916 DCHECK(is_uint3(index));
1917 EnsureSpace ensure_space(this);
1918 emit_farith(0xDD, 0xD8, index);
// Integer loads/stores (fild/fist/fistp) in 32- and 64-bit widths.
1922 void Assembler::fild_s(const Operand& adr) {
1923 EnsureSpace ensure_space(this);
1924 emit_optional_rex_32(adr);
1926 emit_operand(0, adr);
1930 void Assembler::fild_d(const Operand& adr) {
1931 EnsureSpace ensure_space(this);
1932 emit_optional_rex_32(adr);
1934 emit_operand(5, adr);
1938 void Assembler::fistp_s(const Operand& adr) {
1939 EnsureSpace ensure_space(this);
1940 emit_optional_rex_32(adr);
1942 emit_operand(3, adr);
// fisttp (truncating store) requires SSE3.
1946 void Assembler::fisttp_s(const Operand& adr) {
1947 DCHECK(IsEnabled(SSE3));
1948 EnsureSpace ensure_space(this);
1949 emit_optional_rex_32(adr);
1951 emit_operand(1, adr);
1955 void Assembler::fisttp_d(const Operand& adr) {
1956 DCHECK(IsEnabled(SSE3));
1957 EnsureSpace ensure_space(this);
1958 emit_optional_rex_32(adr);
1960 emit_operand(1, adr);
1964 void Assembler::fist_s(const Operand& adr) {
1965 EnsureSpace ensure_space(this);
1966 emit_optional_rex_32(adr);
1968 emit_operand(2, adr);
1972 void Assembler::fistp_d(const Operand& adr) {
1973 EnsureSpace ensure_space(this);
1974 emit_optional_rex_32(adr);
1976 emit_operand(7, adr);
// --- x87 arithmetic, transcendental, and control instructions. ---
// NOTE(review): elided listing — two-byte opcode emissions omitted after
// EnsureSpace in the no-argument instructions; emit_farith(b1, b2, i)
// encodes register-stack forms as b1, b2|i.
1980 void Assembler::fabs() {
1981 EnsureSpace ensure_space(this);
1987 void Assembler::fchs() {
1988 EnsureSpace ensure_space(this);
1994 void Assembler::fcos() {
1995 EnsureSpace ensure_space(this);
2001 void Assembler::fsin() {
2002 EnsureSpace ensure_space(this);
2008 void Assembler::fptan() {
2009 EnsureSpace ensure_space(this);
2015 void Assembler::fyl2x() {
2016 EnsureSpace ensure_space(this);
2022 void Assembler::f2xm1() {
2023 EnsureSpace ensure_space(this);
2029 void Assembler::fscale() {
2030 EnsureSpace ensure_space(this);
2036 void Assembler::fninit() {
2037 EnsureSpace ensure_space(this);
2043 void Assembler::fadd(int i) {
2044 EnsureSpace ensure_space(this);
2045 emit_farith(0xDC, 0xC0, i);
2049 void Assembler::fsub(int i) {
2050 EnsureSpace ensure_space(this);
2051 emit_farith(0xDC, 0xE8, i);
2055 void Assembler::fisub_s(const Operand& adr) {
2056 EnsureSpace ensure_space(this);
2057 emit_optional_rex_32(adr);
2059 emit_operand(4, adr);
2063 void Assembler::fmul(int i) {
2064 EnsureSpace ensure_space(this);
2065 emit_farith(0xDC, 0xC8, i);
2069 void Assembler::fdiv(int i) {
2070 EnsureSpace ensure_space(this);
2071 emit_farith(0xDC, 0xF8, i);
// Pop variants of the arithmetic instructions (operate then pop ST(0)).
2075 void Assembler::faddp(int i) {
2076 EnsureSpace ensure_space(this);
2077 emit_farith(0xDE, 0xC0, i);
2081 void Assembler::fsubp(int i) {
2082 EnsureSpace ensure_space(this);
2083 emit_farith(0xDE, 0xE8, i);
2087 void Assembler::fsubrp(int i) {
2088 EnsureSpace ensure_space(this);
2089 emit_farith(0xDE, 0xE0, i);
2093 void Assembler::fmulp(int i) {
2094 EnsureSpace ensure_space(this);
2095 emit_farith(0xDE, 0xC8, i);
2099 void Assembler::fdivp(int i) {
2100 EnsureSpace ensure_space(this);
2101 emit_farith(0xDE, 0xF8, i);
2105 void Assembler::fprem() {
2106 EnsureSpace ensure_space(this);
2112 void Assembler::fprem1() {
2113 EnsureSpace ensure_space(this);
2119 void Assembler::fxch(int i) {
2120 EnsureSpace ensure_space(this);
2121 emit_farith(0xD9, 0xC8, i);
2125 void Assembler::fincstp() {
2126 EnsureSpace ensure_space(this);
2132 void Assembler::ffree(int i) {
2133 EnsureSpace ensure_space(this);
2134 emit_farith(0xDD, 0xC0, i);
2138 void Assembler::ftst() {
2139 EnsureSpace ensure_space(this);
// Compare instructions (various pop/flags-setting variants).
2145 void Assembler::fucomp(int i) {
2146 EnsureSpace ensure_space(this);
2147 emit_farith(0xDD, 0xE8, i);
2151 void Assembler::fucompp() {
2152 EnsureSpace ensure_space(this);
2158 void Assembler::fucomi(int i) {
2159 EnsureSpace ensure_space(this);
2165 void Assembler::fucomip() {
2166 EnsureSpace ensure_space(this);
2172 void Assembler::fcompp() {
2173 EnsureSpace ensure_space(this);
2179 void Assembler::fnstsw_ax() {
2180 EnsureSpace ensure_space(this);
2186 void Assembler::fwait() {
2187 EnsureSpace ensure_space(this);
2192 void Assembler::frndint() {
2193 EnsureSpace ensure_space(this);
2199 void Assembler::fnclex() {
2200 EnsureSpace ensure_space(this);
// sahf is guarded by the SAHF CPU feature; not all 64-bit CPUs support it.
// NOTE(review): elided listing — opcode emissions omitted between lines.
2206 void Assembler::sahf() {
2207 // TODO(X64): Test for presence. Not all 64-bit intel CPU's have sahf
2208 // in 64-bit mode. Test CpuID.
2209 DCHECK(IsEnabled(SAHF));
2210 EnsureSpace ensure_space(this);
// Shared encoder for register-stack x87 forms: emits b1 then b2|i.
2215 void Assembler::emit_farith(int b1, int b2, int i) {
2216 DCHECK(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2217 DCHECK(is_uint3(i)); // illegal stack offset
// --- SSE packed single-precision operations (and/or/xor, add/sub/mul/div).
// Each has a register-register and a register-memory form. ---
// NOTE(review): elided listing — 0F-prefixed opcode emissions omitted.
2225 void Assembler::andps(XMMRegister dst, XMMRegister src) {
2226 EnsureSpace ensure_space(this);
2227 emit_optional_rex_32(dst, src);
2230 emit_sse_operand(dst, src);
2234 void Assembler::andps(XMMRegister dst, const Operand& src) {
2235 EnsureSpace ensure_space(this);
2236 emit_optional_rex_32(dst, src);
2239 emit_sse_operand(dst, src);
2243 void Assembler::orps(XMMRegister dst, XMMRegister src) {
2244 EnsureSpace ensure_space(this);
2245 emit_optional_rex_32(dst, src);
2248 emit_sse_operand(dst, src);
2252 void Assembler::orps(XMMRegister dst, const Operand& src) {
2253 EnsureSpace ensure_space(this);
2254 emit_optional_rex_32(dst, src);
2257 emit_sse_operand(dst, src);
2261 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2262 EnsureSpace ensure_space(this);
2263 emit_optional_rex_32(dst, src);
2266 emit_sse_operand(dst, src);
2270 void Assembler::xorps(XMMRegister dst, const Operand& src) {
2271 EnsureSpace ensure_space(this);
2272 emit_optional_rex_32(dst, src);
2275 emit_sse_operand(dst, src);
2279 void Assembler::addps(XMMRegister dst, XMMRegister src) {
2280 EnsureSpace ensure_space(this);
2281 emit_optional_rex_32(dst, src);
2284 emit_sse_operand(dst, src);
2288 void Assembler::addps(XMMRegister dst, const Operand& src) {
2289 EnsureSpace ensure_space(this);
2290 emit_optional_rex_32(dst, src);
2293 emit_sse_operand(dst, src);
2297 void Assembler::subps(XMMRegister dst, XMMRegister src) {
2298 EnsureSpace ensure_space(this);
2299 emit_optional_rex_32(dst, src);
2302 emit_sse_operand(dst, src);
2306 void Assembler::subps(XMMRegister dst, const Operand& src) {
2307 EnsureSpace ensure_space(this);
2308 emit_optional_rex_32(dst, src);
2311 emit_sse_operand(dst, src);
2315 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
2316 EnsureSpace ensure_space(this);
2317 emit_optional_rex_32(dst, src);
2320 emit_sse_operand(dst, src);
2324 void Assembler::mulps(XMMRegister dst, const Operand& src) {
2325 EnsureSpace ensure_space(this);
2326 emit_optional_rex_32(dst, src);
2329 emit_sse_operand(dst, src);
2333 void Assembler::divps(XMMRegister dst, XMMRegister src) {
2334 EnsureSpace ensure_space(this);
2335 emit_optional_rex_32(dst, src);
2338 emit_sse_operand(dst, src);
2342 void Assembler::divps(XMMRegister dst, const Operand& src) {
2343 EnsureSpace ensure_space(this);
2344 emit_optional_rex_32(dst, src);
2347 emit_sse_operand(dst, src);
2351 // SSE 2 operations.
// NOTE(review): elided listing — prefix/opcode byte emissions omitted
// between the numbered lines.
// movd: 32-bit GPR <-> XMM moves; REX/operand order depends on direction.
2353 void Assembler::movd(XMMRegister dst, Register src) {
2354 EnsureSpace ensure_space(this);
2356 emit_optional_rex_32(dst, src);
2359 emit_sse_operand(dst, src);
2363 void Assembler::movd(Register dst, XMMRegister src) {
2364 EnsureSpace ensure_space(this);
2366 emit_optional_rex_32(src, dst);
2369 emit_sse_operand(src, dst);
// movq: 64-bit GPR <-> XMM moves, hence the mandatory REX.W.
2373 void Assembler::movq(XMMRegister dst, Register src) {
2374 EnsureSpace ensure_space(this);
2376 emit_rex_64(dst, src);
2379 emit_sse_operand(dst, src);
2383 void Assembler::movq(Register dst, XMMRegister src) {
2384 EnsureSpace ensure_space(this);
2386 emit_rex_64(src, dst);
2389 emit_sse_operand(src, dst);
// XMM-to-XMM movq; operand order chosen to avoid an unnecessary SIB byte.
2393 void Assembler::movq(XMMRegister dst, XMMRegister src) {
2394 EnsureSpace ensure_space(this);
2395 if (dst.low_bits() == 4) {
2396 // Avoid unnecessary SIB byte.
2398 emit_optional_rex_32(dst, src);
2401 emit_sse_operand(dst, src);
2404 emit_optional_rex_32(src, dst);
2407 emit_sse_operand(src, dst);
// Aligned/unaligned 128-bit moves.
2412 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2413 EnsureSpace ensure_space(this);
2415 emit_rex_64(src, dst);
2418 emit_sse_operand(src, dst);
2422 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2423 EnsureSpace ensure_space(this);
2425 emit_rex_64(dst, src);
2428 emit_sse_operand(dst, src);
2432 void Assembler::movdqu(const Operand& dst, XMMRegister src) {
2433 EnsureSpace ensure_space(this);
2435 emit_rex_64(src, dst);
2438 emit_sse_operand(src, dst);
2442 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
2443 EnsureSpace ensure_space(this);
2445 emit_rex_64(dst, src);
2448 emit_sse_operand(dst, src);
// extractps requires SSE4.1; imm8 selects the lane.
2452 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2453 DCHECK(IsEnabled(SSE4_1));
2454 DCHECK(is_uint8(imm8));
2455 EnsureSpace ensure_space(this);
2457 emit_optional_rex_32(src, dst);
2461 emit_sse_operand(src, dst);
// --- Scalar/packed floating-point moves and shuffles. F2 prefix = scalar
// double, F3 prefix = scalar single. ---
// NOTE(review): elided listing — opcode emissions omitted between lines.
2466 void Assembler::movsd(const Operand& dst, XMMRegister src) {
2467 EnsureSpace ensure_space(this);
2468 emit(0xF2); // double
2469 emit_optional_rex_32(src, dst);
2471 emit(0x11); // store
2472 emit_sse_operand(src, dst);
2476 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2477 EnsureSpace ensure_space(this);
2478 emit(0xF2); // double
2479 emit_optional_rex_32(dst, src);
2482 emit_sse_operand(dst, src);
2486 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2487 EnsureSpace ensure_space(this);
2488 emit(0xF2); // double
2489 emit_optional_rex_32(dst, src);
2492 emit_sse_operand(dst, src);
// movaps: pick load or store form to avoid an unnecessary SIB byte.
2496 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2497 EnsureSpace ensure_space(this);
2498 if (src.low_bits() == 4) {
2499 // Try to avoid an unnecessary SIB byte.
2500 emit_optional_rex_32(src, dst);
2503 emit_sse_operand(src, dst);
2505 emit_optional_rex_32(dst, src);
2508 emit_sse_operand(dst, src);
// shufps: imm8 (emitted in the elided tail) selects the lane permutation.
2513 void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
2514 DCHECK(is_uint8(imm8));
2515 EnsureSpace ensure_space(this);
2516 emit_optional_rex_32(src, dst);
2519 emit_sse_operand(dst, src);
2524 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2525 EnsureSpace ensure_space(this);
2526 if (src.low_bits() == 4) {
2527 // Try to avoid an unnecessary SIB byte.
2529 emit_optional_rex_32(src, dst);
2532 emit_sse_operand(src, dst);
2535 emit_optional_rex_32(dst, src);
2538 emit_sse_operand(dst, src);
2543 void Assembler::movss(XMMRegister dst, const Operand& src) {
2544 EnsureSpace ensure_space(this);
2545 emit(0xF3); // single
2546 emit_optional_rex_32(dst, src);
2549 emit_sse_operand(dst, src);
2553 void Assembler::movss(const Operand& src, XMMRegister dst) {
2554 EnsureSpace ensure_space(this);
2555 emit(0xF3); // single
2556 emit_optional_rex_32(dst, src);
2558 emit(0x11); // store
2559 emit_sse_operand(dst, src);
// psllq imm8: shift-group instruction; rsi (code 6) selects psllq via ModRM.
2563 void Assembler::psllq(XMMRegister reg, byte imm8) {
2564 EnsureSpace ensure_space(this);
2568 emit_sse_operand(rsi, reg); // rsi == 6
// --- Scalar conversion instructions (cvt[t]{ss,sd}2si, cvt{l,q}si2{ss,sd},
// cvtss2sd/cvtsd2ss). The *q variants use REX.W for 64-bit integers. ---
// NOTE(review): elided listing — prefix/opcode emissions omitted.
2573 void Assembler::cvttss2si(Register dst, const Operand& src) {
2574 EnsureSpace ensure_space(this);
2576 emit_optional_rex_32(dst, src);
2579 emit_operand(dst, src);
2583 void Assembler::cvttss2si(Register dst, XMMRegister src) {
2584 EnsureSpace ensure_space(this);
2586 emit_optional_rex_32(dst, src);
2589 emit_sse_operand(dst, src);
2593 void Assembler::cvttsd2si(Register dst, const Operand& src) {
2594 EnsureSpace ensure_space(this);
2596 emit_optional_rex_32(dst, src);
2599 emit_operand(dst, src);
2603 void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2604 EnsureSpace ensure_space(this);
2606 emit_optional_rex_32(dst, src);
2609 emit_sse_operand(dst, src);
2613 void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2614 EnsureSpace ensure_space(this);
2616 emit_rex_64(dst, src);
2619 emit_sse_operand(dst, src);
2623 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2624 EnsureSpace ensure_space(this);
2626 emit_optional_rex_32(dst, src);
2629 emit_sse_operand(dst, src);
2633 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2634 EnsureSpace ensure_space(this);
2636 emit_optional_rex_32(dst, src);
2639 emit_sse_operand(dst, src);
2643 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2644 EnsureSpace ensure_space(this);
2646 emit_optional_rex_32(dst, src);
2649 emit_sse_operand(dst, src);
2653 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2654 EnsureSpace ensure_space(this);
2656 emit_rex_64(dst, src);
2659 emit_sse_operand(dst, src);
2663 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2664 EnsureSpace ensure_space(this);
2666 emit_optional_rex_32(dst, src);
2669 emit_sse_operand(dst, src);
2673 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2674 EnsureSpace ensure_space(this);
2676 emit_optional_rex_32(dst, src);
2679 emit_sse_operand(dst, src);
2683 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2684 EnsureSpace ensure_space(this);
2686 emit_optional_rex_32(dst, src);
2689 emit_sse_operand(dst, src);
2693 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2694 EnsureSpace ensure_space(this);
2696 emit_optional_rex_32(dst, src);
2699 emit_sse_operand(dst, src);
2703 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2704 EnsureSpace ensure_space(this);
2706 emit_rex_64(dst, src);
2709 emit_sse_operand(dst, src);
// --- Scalar double arithmetic, logical, sqrt, compare, round, movmsk. ---
// NOTE(review): elided listing — prefix/opcode emissions omitted.
2713 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2714 EnsureSpace ensure_space(this);
2716 emit_optional_rex_32(dst, src);
2719 emit_sse_operand(dst, src);
2723 void Assembler::addsd(XMMRegister dst, const Operand& src) {
2724 EnsureSpace ensure_space(this);
2726 emit_optional_rex_32(dst, src);
2729 emit_sse_operand(dst, src);
2733 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2734 EnsureSpace ensure_space(this);
2736 emit_optional_rex_32(dst, src);
2739 emit_sse_operand(dst, src);
2743 void Assembler::mulsd(XMMRegister dst, const Operand& src) {
2744 EnsureSpace ensure_space(this);
2746 emit_optional_rex_32(dst, src);
2749 emit_sse_operand(dst, src);
2753 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2754 EnsureSpace ensure_space(this);
2756 emit_optional_rex_32(dst, src);
2759 emit_sse_operand(dst, src);
2763 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2764 EnsureSpace ensure_space(this);
2766 emit_optional_rex_32(dst, src);
2769 emit_sse_operand(dst, src);
2773 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2774 EnsureSpace ensure_space(this);
2776 emit_optional_rex_32(dst, src);
2779 emit_sse_operand(dst, src);
2783 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2784 EnsureSpace ensure_space(this);
2786 emit_optional_rex_32(dst, src);
2789 emit_sse_operand(dst, src);
2793 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2794 EnsureSpace ensure_space(this);
2796 emit_optional_rex_32(dst, src);
2799 emit_sse_operand(dst, src);
2803 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2804 EnsureSpace ensure_space(this);
2806 emit_optional_rex_32(dst, src);
2809 emit_sse_operand(dst, src);
2813 void Assembler::sqrtsd(XMMRegister dst, const Operand& src) {
2814 EnsureSpace ensure_space(this);
2816 emit_optional_rex_32(dst, src);
2819 emit_sse_operand(dst, src);
// Unordered compare, sets EFLAGS.
2823 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2824 EnsureSpace ensure_space(this);
2826 emit_optional_rex_32(dst, src);
2829 emit_sse_operand(dst, src);
2833 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2834 EnsureSpace ensure_space(this);
2836 emit_optional_rex_32(dst, src);
2839 emit_sse_operand(dst, src);
// cmpsd with the less-than predicate (trailing imm8 = 1).
2843 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
2844 EnsureSpace ensure_space(this);
2846 emit_optional_rex_32(dst, src);
2849 emit_sse_operand(dst, src);
2850 emit(0x01); // LT == 1
// roundsd requires SSE4.1; mode selects the rounding behavior.
2854 void Assembler::roundsd(XMMRegister dst, XMMRegister src,
2855 Assembler::RoundingMode mode) {
2856 DCHECK(IsEnabled(SSE4_1));
2857 EnsureSpace ensure_space(this);
2859 emit_optional_rex_32(dst, src);
2863 emit_sse_operand(dst, src);
2864 // Mask precision exception.
2865 emit(static_cast<byte>(mode) | 0x8);
// Extract sign bits of packed doubles/singles into a GPR.
2869 void Assembler::movmskpd(Register dst, XMMRegister src) {
2870 EnsureSpace ensure_space(this);
2872 emit_optional_rex_32(dst, src);
2875 emit_sse_operand(dst, src);
2879 void Assembler::movmskps(Register dst, XMMRegister src) {
2880 EnsureSpace ensure_space(this);
2881 emit_optional_rex_32(dst, src);
2884 emit_sse_operand(dst, src);
// --- ModRM emission helpers for SSE operands. The register-direct forms
// emit mod=11 with dst in the reg field and src in the r/m field. ---
2888 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
2889 Register ireg = { reg.code() };
2890 emit_operand(ireg, adr);
2894 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
2895 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2899 void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
2900 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2904 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
2905 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
// Emit raw data bytes/words into the instruction stream.
// NOTE(review): elided listing — the emit calls are omitted.
2909 void Assembler::db(uint8_t data) {
2910 EnsureSpace ensure_space(this);
2915 void Assembler::dd(uint32_t data) {
2916 EnsureSpace ensure_space(this);
2921 // Relocation information implementations.
// NOTE(review): elided listing — early returns and closing braces omitted
// between the numbered lines.
// Record relocation info at the current pc, skipping external references
// that the serializer will never need and code-age pseudo entries.
2923 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
2924 DCHECK(!RelocInfo::IsNone(rmode));
2925 // Don't record external references unless the heap will be serialized.
2926 if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
2927 !serializer_enabled() && !emit_debug_code()) {
2929 } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) {
2930 // Don't record pseudo relocation info for code age sequence mode.
2933 RelocInfo rinfo(pc_, rmode, data, NULL);
2934 reloc_info_writer.Write(&rinfo);
// Flush pending source positions, then mark a JS return site.
2938 void Assembler::RecordJSReturn() {
2939 positions_recorder()->WriteRecordedPositions();
2940 EnsureSpace ensure_space(this);
2941 RecordRelocInfo(RelocInfo::JS_RETURN);
2945 void Assembler::RecordDebugBreakSlot() {
2946 positions_recorder()->WriteRecordedPositions();
2947 EnsureSpace ensure_space(this);
2948 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
// Attach a comment to the current pc when code comments are enabled.
2952 void Assembler::RecordComment(const char* msg, bool force) {
2953 if (FLAG_code_comments || force) {
2954 EnsureSpace ensure_space(this);
2955 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
// x64 has no out-of-line constant pool; these are stubs satisfying the
// platform-independent Assembler interface.
2960 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
2961 // No out-of-line constant pool support.
2962 DCHECK(!FLAG_enable_ool_constant_pool);
2963 return isolate->factory()->empty_constant_pool_array();
2967 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
2968 // No out-of-line constant pool support.
2969 DCHECK(!FLAG_enable_ool_constant_pool);
// Modes whose targets must be re-resolved when code moves.
2974 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
2975 1 << RelocInfo::RUNTIME_ENTRY |
2976 1 << RelocInfo::INTERNAL_REFERENCE |
2977 1 << RelocInfo::CODE_AGE_SEQUENCE;
2980 bool RelocInfo::IsCodedSpecially() {
2981 // The deserializer needs to know whether a pointer is specially coded. Being
2982 // specially coded on x64 means that it is a relative 32 bit address, as used
2983 // by branch instructions.
2984 return (1 << rmode_) & kApplyMask;
// NOTE(review): body elided in this listing (x64 has no constant pool, so
// presumably this returns false — confirm against the full source).
2988 bool RelocInfo::IsInConstantPool() {
2993 } } // namespace v8::internal
2995 #endif // V8_TARGET_ARCH_X64