1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_X64
32 #include "macro-assembler.h"
33 #include "serialize.h"
38 // -----------------------------------------------------------------------------
39 // Implementation of CpuFeatures
// Static CpuFeatures state, shared process-wide.
// initialized_ guards against use before Probe(); supported_ is the bitmask
// of usable features; found_by_runtime_probing_only_ records features seen
// only via CPUID (not implied by the platform or the default set).
43 bool CpuFeatures::initialized_ = false;
45 uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
46 uint64_t CpuFeatures::found_by_runtime_probing_only_ = 0;
47 uint64_t CpuFeatures::cross_compile_ = 0;
// Exposes the address of CpuFeatures::supported_ so generated code can test
// CPU features at runtime. Only valid after CpuFeatures::Probe() has run
// (enforced by the ASSERT below).
49 ExternalReference ExternalReference::cpu_features() {
50 ASSERT(CpuFeatures::initialized_);
51 return ExternalReference(&CpuFeatures::supported_);
// Detects supported CPU features via CPUID and combines them with the
// platform-implied features. When serializing (building a snapshot), only
// platform-implied features are kept so the snapshot stays portable.
// NOTE(review): some lines of this listing appear elided (e.g. the `cpu`
// object's declaration and several closing braces) — verify against the
// full file before editing.
56 void CpuFeatures::Probe() {
57 ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
60 supported_ = kDefaultCpuFeatures;
61 if (Serializer::enabled()) {
62 supported_ |= OS::CpuFeaturesImpliedByPlatform();
63 return; // No features if we might serialize.
// Accumulate CPUID-probed features as single-bit flags.
66 uint64_t probed_features = 0;
68 if (cpu.has_sse41()) {
69 probed_features |= static_cast<uint64_t>(1) << SSE4_1;
72 probed_features |= static_cast<uint64_t>(1) << SSE3;
75 // SSE2 must be available on every x64 CPU.
76 ASSERT(cpu.has_sse2());
77 probed_features |= static_cast<uint64_t>(1) << SSE2;
79 // CMOV must be available on every x64 CPU.
80 ASSERT(cpu.has_cmov());
81 probed_features |= static_cast<uint64_t>(1) << CMOV;
83 // SAHF is not generally available in long mode.
85 probed_features |= static_cast<uint64_t>(1) << SAHF;
// Features found only by probing = probed, minus defaults, minus
// anything the platform already guarantees.
88 uint64_t platform_features = OS::CpuFeaturesImpliedByPlatform();
89 supported_ = probed_features | platform_features;
90 found_by_runtime_probing_only_
91 = probed_features & ~kDefaultCpuFeatures & ~platform_features;
95 // -----------------------------------------------------------------------------
96 // Implementation of RelocInfo
98 // Patch the code at the current PC with a call to the target address.
99 // Additional guard int3 instructions can be added if required.
// Patches the code at pc_ with an absolute call to `target` (movp into the
// scratch register followed by an indirect call), then pads with
// `guard_bytes` int3 instructions. The ASSERT_EQ verifies the emitted call
// sequence has the expected fixed length.
100 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
101 int code_size = Assembler::kCallSequenceLength + guard_bytes;
103 // Create a code patcher.
104 CodePatcher patcher(pc_, code_size);
106 // Add a label for checking the size of the code used for returning.
108 Label check_codesize;
109 patcher.masm()->bind(&check_codesize);
// Load the absolute target into the scratch register and call through it.
113 patcher.masm()->movp(kScratchRegister, reinterpret_cast<void*>(target),
114 Assembler::RelocInfoNone());
115 patcher.masm()->call(kScratchRegister);
117 // Check that the size of the code generated is as expected.
118 ASSERT_EQ(Assembler::kCallSequenceLength,
119 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
121 // Add the requested number of int3 instructions after the call.
122 for (int i = 0; i < guard_bytes; i++) {
123 patcher.masm()->int3();
// Overwrites `instruction_count` bytes at pc_ with the supplied bytes and
// flushes the instruction cache so the CPU sees the new code.
128 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
129 // Patch the code at the current address with the supplied instructions.
130 for (int i = 0; i < instruction_count; i++) {
131 *(pc_ + i) = *(instructions + i);
134 // Indicate that code has changed.
135 CPU::FlushICache(pc_, instruction_count);
139 // -----------------------------------------------------------------------------
140 // Register constants.
// Mapping between the register allocator's dense indices and hardware
// register codes. The two tables are inverses of each other; -1 marks
// register codes that are not allocatable (e.g. rsp, rbp, r10, r12, r13
// per the gaps below).
143 Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
144 // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
145 0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
// Inverse table: hardware register code -> allocation index (or -1).
148 const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
149 0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
153 // -----------------------------------------------------------------------------
154 // Implementation of Operand
// Operand constructors: build the ModR/M (+ optional SIB, + displacement)
// encoding for a memory operand into buf_, choosing the shortest
// displacement form (none / 8-bit / 32-bit).
// NOTE(review): several emit/set_modrm/set_disp lines are elided in this
// excerpt; consult the full file before modifying encoding logic.

// [base + disp]
156 Operand::Operand(Register base, int32_t disp) : rex_(0) {
158 if (base.is(rsp) || base.is(r12)) {
159 // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
160 set_sib(times_1, rsp, base);
// Zero displacement is encodable only when base is not rbp/r13 (those
// encodings mean "no base / RIP-relative" in mode 0).
163 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
165 } else if (is_int8(disp)) {
// [base + index*scale + disp]
175 Operand::Operand(Register base,
178 int32_t disp) : rex_(0) {
179 ASSERT(!index.is(rsp));
181 set_sib(scale, index, base);
182 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
183 // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
184 // possibly set by set_sib.
186 } else if (is_int8(disp)) {
// [index*scale + disp] — no base register; rbp in the SIB base slot with
// mode 0 encodes "no base", so a 32-bit displacement is always used.
196 Operand::Operand(Register index,
198 int32_t disp) : rex_(0) {
199 ASSERT(!index.is(rsp));
202 set_sib(scale, index, rbp);
// Copies `operand` while adding `offset` to its displacement, re-encoding
// with the shortest displacement form that still round-trips. Decodes the
// existing ModR/M (and SIB, if present) to find the current mode and
// displacement, checks for signed overflow, then rewrites buf_.
207 Operand::Operand(const Operand& operand, int32_t offset) {
208 ASSERT(operand.len_ >= 1);
209 // Operand encodes REX ModR/M [SIB] [Disp].
210 byte modrm = operand.buf_[0];
211 ASSERT(modrm < 0xC0); // Disallow mode 3 (register target).
212 bool has_sib = ((modrm & 0x07) == 0x04);
213 byte mode = modrm & 0xC0;
214 int disp_offset = has_sib ? 2 : 1;
215 int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
216 // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
218 bool is_baseless = (mode == 0) && (base_reg == 0x05); // No base or RIP base.
219 int32_t disp_value = 0;
220 if (mode == 0x80 || is_baseless) {
221 // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
222 disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
223 } else if (mode == 0x40) {
224 // Mode 1: Byte displacement.
225 disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
228 // Write new operand with same registers, but with modified displacement.
229 ASSERT(offset >= 0 ? disp_value + offset > disp_value
230 : disp_value + offset < disp_value); // No overflow.
231 disp_value += offset;
233 if (!is_int8(disp_value) || is_baseless) {
234 // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
235 buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
236 len_ = disp_offset + 4;
237 Memory::int32_at(&buf_[disp_offset]) = disp_value;
238 } else if (disp_value != 0 || (base_reg == 0x05)) {
239 // Need 8 bits of displacement.
240 buf_[0] = (modrm & 0x3f) | 0x40; // Mode 1.
241 len_ = disp_offset + 1;
242 buf_[disp_offset] = static_cast<byte>(disp_value);
244 // Need no displacement.
245 buf_[0] = (modrm & 0x3f); // Mode 0.
// Preserve the SIB byte (registers are unchanged, only disp was edited).
249 buf_[1] = operand.buf_[1];
// Returns true if this memory operand's address computation reads `reg`,
// decoding base (and, when a SIB byte is present, index) including the
// REX.B / REX.X extension bits. The rbp-with-mode-0 encodings mean "no
// base register" and are explicitly excluded.
254 bool Operand::AddressUsesRegister(Register reg) const {
255 int code = reg.code();
256 ASSERT((buf_[0] & 0xC0) != 0xC0); // Always a memory operand.
257 // Start with only low three bits of base register. Initial decoding doesn't
258 // distinguish on the REX.B bit.
259 int base_code = buf_[0] & 0x07;
260 if (base_code == rsp.code()) {
261 // SIB byte present in buf_[1].
262 // Check the index register from the SIB byte + REX.X prefix.
263 int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
264 // Index code (including REX.X) of 0x04 (rsp) means no index register.
265 if (index_code != rsp.code() && index_code == code) return true;
266 // Add REX.B to get the full base register code.
267 base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
268 // A base register of 0x05 (rbp) with mod = 0 means no base register.
269 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
270 return code == base_code;
// No SIB byte: the ModR/M r/m field is the base register directly.
272 // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
274 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
275 base_code |= ((rex_ & 0x01) << 3);
276 return code == base_code;
281 // -----------------------------------------------------------------------------
282 // Implementation of Assembler.
284 #ifdef GENERATED_CODE_COVERAGE
285 static void InitCoverageLog();
// Constructs an assembler over `buffer` (or a base-class-allocated buffer
// when none is supplied) and positions the relocation writer at the end of
// the buffer, growing backwards toward pc_.
288 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
289 : AssemblerBase(isolate, buffer, buffer_size),
291 positions_recorder_(this) {
292 // Clear the buffer in debug mode unless it was provided by the
293 // caller in which case we can't be sure it's okay to overwrite
294 // existing code in it.
297 memset(buffer_, 0xCC, buffer_size_); // int3
301 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
304 #ifdef GENERATED_CODE_COVERAGE
// Finalizes assembly and fills in `desc` with the buffer, instruction size,
// and relocation info size. Instructions grow forward from buffer_ while
// reloc info grows backward from the end; the ASSERT checks they never met.
310 void Assembler::GetCode(CodeDesc* desc) {
311 // Finalize code (at this point overflow() may be true, but the gap ensures
312 // that we are still not overlapping instructions and relocation info).
313 ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
314 // Set up code descriptor.
315 desc->buffer = buffer_;
316 desc->buffer_size = buffer_size_;
317 desc->instr_size = pc_offset();
318 ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system.
// Reloc info occupies the tail of the buffer, from writer pos to the end.
320 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
// Pads the instruction stream so the next instruction starts at a multiple
// of m (m must be a power of two).
325 void Assembler::Align(int m) {
326 ASSERT(IsPowerOf2(m));
327 int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
// Aligns jump targets to the preferred 16-byte boundary.
332 void Assembler::CodeTargetAlign() {
333 Align(16); // Preferred alignment of jump targets on x64.
// Recognizes a nop at `addr`: any number of 0x66 prefixes followed by
// 0x90 (nop), or the 0x0F 0x1F multi-byte nop encoding.
337 bool Assembler::IsNop(Address addr) {
339 while (*a == 0x66) a++;
340 if (*a == 0x90) return true;
341 if (a[0] == 0xf && a[1] == 0x1f) return true;
// Binds label L to position `pos`, back-patching every pending use. The
// long-linked list stores 32-bit links in the displacement slots; the
// near-linked list stores negative 8-bit deltas chaining to the previous
// near use (0 terminates the chain).
346 void Assembler::bind_to(Label* L, int pos) {
347 ASSERT(!L->is_bound()); // Label may only be bound once.
348 ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid.
349 if (L->is_linked()) {
350 int current = L->pos();
351 int next = long_at(current);
352 while (next != current) {
353 // Relative address, relative to point after address.
354 int imm32 = pos - (current + sizeof(int32_t));
355 long_at_put(current, imm32);
// Follow the chain to the next pending fixup.
357 next = long_at(next);
359 // Fix up last fixup on linked list.
360 int last_imm32 = pos - (current + sizeof(int32_t));
361 long_at_put(current, last_imm32);
// Patch each near (8-bit) use; offset_to_next links to the prior use.
363 while (L->is_near_linked()) {
364 int fixup_pos = L->near_link_pos();
366 static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
367 ASSERT(offset_to_next <= 0);
368 int disp = pos - (fixup_pos + sizeof(int8_t));
369 CHECK(is_int8(disp));
370 set_byte_at(fixup_pos, disp);
371 if (offset_to_next < 0) {
372 L->link_to(fixup_pos + offset_to_next, Label::kNear);
// Binds L to the current assembly position.
381 void Assembler::bind(Label* L) {
382 bind_to(L, pc_offset());
// Grows the code buffer (doubling, minimum 4KB), copies instructions to the
// front and relocation info to the back of the new buffer, updates internal
// pointers, and relocates internal references. Fatal on external buffers or
// when the new size exceeds the allowed maximum.
386 void Assembler::GrowBuffer() {
387 ASSERT(buffer_overflow());
388 if (!own_buffer_) FATAL("external code buffer is too small");
390 // Compute new buffer size.
391 CodeDesc desc; // the new buffer
392 if (buffer_size_ < 4*KB) {
393 desc.buffer_size = 4*KB;
395 desc.buffer_size = 2*buffer_size_;
397 // Some internal data structures overflow for very large buffers,
398 // they must ensure that kMaximalBufferSize is not too large.
399 if ((desc.buffer_size > kMaximalBufferSize) ||
400 (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
401 V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
404 // Set up new buffer.
405 desc.buffer = NewArray<byte>(desc.buffer_size);
406 desc.instr_size = pc_offset();
// Reloc info size = distance from the writer position to the buffer end.
408 static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
410 // Clear the buffer in debug mode. Use 'int3' instructions to make
411 // sure to get into problems if we ever run uninitialized code.
413 memset(desc.buffer, 0xCC, desc.buffer_size);
// pc_delta shifts code addresses; rc_delta shifts reloc-info addresses
// (reloc info is anchored to the buffer end, so its delta differs).
417 intptr_t pc_delta = desc.buffer - buffer_;
418 intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
419 (buffer_ + buffer_size_);
420 OS::MemMove(desc.buffer, buffer_, desc.instr_size);
421 OS::MemMove(rc_delta + reloc_info_writer.pos(),
422 reloc_info_writer.pos(), desc.reloc_size);
// Recycle the old buffer as the isolate's spare when it is the minimal
// size; otherwise free it.
425 if (isolate() != NULL &&
426 isolate()->assembler_spare_buffer() == NULL &&
427 buffer_size_ == kMinimalBufferSize) {
428 isolate()->set_assembler_spare_buffer(buffer_);
430 DeleteArray(buffer_);
432 buffer_ = desc.buffer;
433 buffer_size_ = desc.buffer_size;
435 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
436 reloc_info_writer.last_pc() + pc_delta);
438 // Relocate runtime entries.
439 for (RelocIterator it(desc); !it.done(); it.next()) {
440 RelocInfo::Mode rmode = it.rinfo()->rmode();
441 if (rmode == RelocInfo::INTERNAL_REFERENCE) {
442 intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
443 if (*p != 0) { // 0 means uninitialized.
449 ASSERT(!buffer_overflow());
// Emits a pre-encoded memory operand, merging the 3-bit `code` (register or
// opcode extension) into the reg field of the ModR/M byte, then copying the
// remaining SIB/displacement bytes verbatim.
453 void Assembler::emit_operand(int code, const Operand& adr) {
454 ASSERT(is_uint3(code));
455 const unsigned length = adr.len_;
458 // Emit updated ModR/M byte containing the given register.
459 ASSERT((adr.buf_[0] & 0x38) == 0);
460 pc_[0] = adr.buf_[0] | code << 3;
462 // Emit the rest of the encoded operand.
463 for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
468 // Assembler Instruction implementations.
// Generic binary ALU instruction emitters (ADD/SUB/AND/OR/... selected by
// `opcode`) in 64-, 32-, and 16-bit widths. When the r/m register's low
// bits are 4 (rsp/r12), a register-to-register form would force a SIB
// byte, so operands are swapped and the opcode's direction bit (0x02) is
// toggled to get an equivalent encoding.

// 64-bit: reg <op> [mem].
470 void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
471 EnsureSpace ensure_space(this);
472 emit_rex_64(reg, op);
474 emit_operand(reg, op);
// 64-bit: reg <op> reg.
478 void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
479 EnsureSpace ensure_space(this);
480 ASSERT((opcode & 0xC6) == 2);
481 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
482 // Swap reg and rm_reg and change opcode operand order.
483 emit_rex_64(rm_reg, reg);
485 emit_modrm(rm_reg, reg);
487 emit_rex_64(reg, rm_reg);
489 emit_modrm(reg, rm_reg);
// 16-bit: reg <op> reg (0x66 operand-size prefix elided in this excerpt).
494 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
495 EnsureSpace ensure_space(this);
496 ASSERT((opcode & 0xC6) == 2);
497 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
498 // Swap reg and rm_reg and change opcode operand order.
500 emit_optional_rex_32(rm_reg, reg);
502 emit_modrm(rm_reg, reg);
505 emit_optional_rex_32(reg, rm_reg);
507 emit_modrm(reg, rm_reg);
// 16-bit: reg <op> [mem].
512 void Assembler::arithmetic_op_16(byte opcode,
514 const Operand& rm_reg) {
515 EnsureSpace ensure_space(this);
517 emit_optional_rex_32(reg, rm_reg);
519 emit_operand(reg, rm_reg);
// 32-bit: reg <op> reg.
523 void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
524 EnsureSpace ensure_space(this);
525 ASSERT((opcode & 0xC6) == 2);
526 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
527 // Swap reg and rm_reg and change opcode operand order.
528 emit_optional_rex_32(rm_reg, reg);
529 emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
530 emit_modrm(rm_reg, reg);
532 emit_optional_rex_32(reg, rm_reg);
534 emit_modrm(reg, rm_reg);
// 32-bit: reg <op> [mem].
539 void Assembler::arithmetic_op_32(byte opcode,
541 const Operand& rm_reg) {
542 EnsureSpace ensure_space(this);
543 emit_optional_rex_32(reg, rm_reg);
545 emit_operand(reg, rm_reg);
// ALU-with-immediate emitters in 64/32/16/8-bit widths. `subcode` selects
// the operation via the ModR/M reg field (opcode-extension form 0x81/0x83).
// Short encodings are used for 8-bit immediates (0x83) and for rax as
// destination (single-byte opcode 0x05 | subcode<<3).

// 64-bit: reg <op>= imm.
549 void Assembler::immediate_arithmetic_op(byte subcode,
552 EnsureSpace ensure_space(this);
554 if (is_int8(src.value_)) {
556 emit_modrm(subcode, dst);
558 } else if (dst.is(rax)) {
559 emit(0x05 | (subcode << 3));
563 emit_modrm(subcode, dst);
// 64-bit: [mem] <op>= imm.
568 void Assembler::immediate_arithmetic_op(byte subcode,
571 EnsureSpace ensure_space(this);
573 if (is_int8(src.value_)) {
575 emit_operand(subcode, dst);
579 emit_operand(subcode, dst);
// 16-bit: reg <op>= imm (note the 0x66 operand-size prefix).
585 void Assembler::immediate_arithmetic_op_16(byte subcode,
588 EnsureSpace ensure_space(this);
589 emit(0x66); // Operand size override prefix.
590 emit_optional_rex_32(dst);
591 if (is_int8(src.value_)) {
593 emit_modrm(subcode, dst);
595 } else if (dst.is(rax)) {
596 emit(0x05 | (subcode << 3));
600 emit_modrm(subcode, dst);
// 16-bit: [mem] <op>= imm.
606 void Assembler::immediate_arithmetic_op_16(byte subcode,
609 EnsureSpace ensure_space(this);
610 emit(0x66); // Operand size override prefix.
611 emit_optional_rex_32(dst);
612 if (is_int8(src.value_)) {
614 emit_operand(subcode, dst);
618 emit_operand(subcode, dst);
// 32-bit: reg <op>= imm.
624 void Assembler::immediate_arithmetic_op_32(byte subcode,
627 EnsureSpace ensure_space(this);
628 emit_optional_rex_32(dst);
629 if (is_int8(src.value_)) {
631 emit_modrm(subcode, dst);
633 } else if (dst.is(rax)) {
634 emit(0x05 | (subcode << 3));
638 emit_modrm(subcode, dst);
// 32-bit: [mem] <op>= imm.
644 void Assembler::immediate_arithmetic_op_32(byte subcode,
647 EnsureSpace ensure_space(this);
648 emit_optional_rex_32(dst);
649 if (is_int8(src.value_)) {
651 emit_operand(subcode, dst);
655 emit_operand(subcode, dst);
// 8-bit: [mem] <op>= imm8.
661 void Assembler::immediate_arithmetic_op_8(byte subcode,
664 EnsureSpace ensure_space(this);
665 emit_optional_rex_32(dst);
666 ASSERT(is_int8(src.value_) || is_uint8(src.value_));
668 emit_operand(subcode, dst);
// 8-bit: reg <op>= imm8. Non-legacy byte registers need a REX prefix to be
// addressable as byte registers.
673 void Assembler::immediate_arithmetic_op_8(byte subcode,
676 EnsureSpace ensure_space(this);
677 if (!dst.is_byte_register()) {
678 // Use 64-bit mode byte registers.
681 ASSERT(is_int8(src.value_) || is_uint8(src.value_));
683 emit_modrm(subcode, dst);
// Shift/rotate emitters; `subcode` selects the operation via the ModR/M reg
// field. A shift count of 1 has its own shorter opcode form; otherwise the
// count is emitted as an immediate byte. The _32 variants also have
// count-in-cl forms (no immediate).

// 64-bit shift by immediate (count must fit in 6 bits).
688 void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
689 EnsureSpace ensure_space(this);
690 ASSERT(is_uint6(shift_amount.value_)); // illegal shift count
691 if (shift_amount.value_ == 1) {
694 emit_modrm(subcode, dst);
698 emit_modrm(subcode, dst);
699 emit(shift_amount.value_);
// 64-bit shift by cl.
704 void Assembler::shift(Register dst, int subcode) {
705 EnsureSpace ensure_space(this);
708 emit_modrm(subcode, dst);
// 32-bit shift by cl.
712 void Assembler::shift_32(Register dst, int subcode) {
713 EnsureSpace ensure_space(this);
714 emit_optional_rex_32(dst);
716 emit_modrm(subcode, dst);
// 32-bit shift by immediate (count must fit in 5 bits).
720 void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
721 EnsureSpace ensure_space(this);
722 ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
723 if (shift_amount.value_ == 1) {
724 emit_optional_rex_32(dst);
726 emit_modrm(subcode, dst);
728 emit_optional_rex_32(dst);
730 emit_modrm(subcode, dst);
731 emit(shift_amount.value_);
// Bit-test instructions (0x0F-prefixed two-byte opcodes; the opcode bytes
// are elided in this excerpt).

// bt [mem], src — test bit, sets CF.
736 void Assembler::bt(const Operand& dst, Register src) {
737 EnsureSpace ensure_space(this);
738 emit_rex_64(src, dst);
741 emit_operand(src, dst);
// bts [mem], src — test and set bit.
745 void Assembler::bts(const Operand& dst, Register src) {
746 EnsureSpace ensure_space(this);
747 emit_rex_64(src, dst);
750 emit_operand(src, dst);
// bsrl dst, src — 32-bit bit-scan-reverse.
754 void Assembler::bsrl(Register dst, Register src) {
755 EnsureSpace ensure_space(this);
756 emit_optional_rex_32(dst, src);
759 emit_modrm(dst, src);
// Call emitters. The label form emits E8 with a 32-bit relative
// displacement, threading unresolved uses onto the label's linked list;
// register/operand forms use FF /2.

// call <label> — resolved immediately if bound, otherwise linked.
763 void Assembler::call(Label* L) {
764 positions_recorder()->WriteRecordedPositions();
765 EnsureSpace ensure_space(this);
766 // 1110 1000 #32-bit disp.
769 int offset = L->pos() - pc_offset() - sizeof(int32_t);
772 } else if (L->is_linked()) {
// Append this site to the label's fixup chain.
774 L->link_to(pc_offset() - sizeof(int32_t));
776 ASSERT(L->is_unused());
777 int32_t current = pc_offset();
// call <runtime entry> — relocatable runtime-entry displacement.
784 void Assembler::call(Address entry, RelocInfo::Mode rmode) {
785 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
786 positions_recorder()->WriteRecordedPositions();
787 EnsureSpace ensure_space(this);
788 // 1110 1000 #32-bit disp.
790 emit_runtime_entry(entry, rmode);
// call <code object> — relocatable code target.
794 void Assembler::call(Handle<Code> target,
795 RelocInfo::Mode rmode,
796 TypeFeedbackId ast_id) {
797 positions_recorder()->WriteRecordedPositions();
798 EnsureSpace ensure_space(this);
799 // 1110 1000 #32-bit disp.
801 emit_code_target(target, rmode, ast_id);
// call reg — indirect through a register.
805 void Assembler::call(Register adr) {
806 positions_recorder()->WriteRecordedPositions();
807 EnsureSpace ensure_space(this);
808 // Opcode: FF /2 r64.
809 emit_optional_rex_32(adr);
811 emit_modrm(0x2, adr);
// call [mem] — indirect through memory.
815 void Assembler::call(const Operand& op) {
816 positions_recorder()->WriteRecordedPositions();
817 EnsureSpace ensure_space(this);
818 // Opcode: FF /2 m64.
819 emit_optional_rex_32(op);
821 emit_operand(0x2, op);
825 // Calls directly to the given address using a relative offset.
826 // Should only ever be used in Code objects for calls within the
827 // same Code object. Should not be used when generating new code (use labels),
828 // but only when patching existing code.
829 void Assembler::call(Address target) {
830 positions_recorder()->WriteRecordedPositions();
831 EnsureSpace ensure_space(this);
832 // 1110 1000 #32-bit disp.
// Displacement is relative to the end of the 4-byte immediate.
834 Address source = pc_ + 4;
835 intptr_t displacement = target - source;
836 ASSERT(is_int32(displacement));
837 emitl(static_cast<int32_t>(displacement));
// Single-byte flag/convert instructions (opcode bytes elided in this
// excerpt): clc clears CF, cld clears DF, cdq sign-extends eax into edx.
841 void Assembler::clc() {
842 EnsureSpace ensure_space(this);
847 void Assembler::cld() {
848 EnsureSpace ensure_space(this);
853 void Assembler::cdq() {
854 EnsureSpace ensure_space(this);
// Conditional-move emitters (0F 40+cc /r). Each overload special-cases
// `always`/`never` conditions (handling elided in this excerpt) before
// emitting the real cmov.

// 64-bit cmov reg, reg.
859 void Assembler::cmovq(Condition cc, Register dst, Register src) {
862 } else if (cc == never) {
865 // No need to check CpuInfo for CMOV support, it's a required part of the
866 // 64-bit architecture.
867 ASSERT(cc >= 0); // Use mov for unconditional moves.
868 EnsureSpace ensure_space(this);
869 // Opcode: REX.W 0f 40 + cc /r.
870 emit_rex_64(dst, src);
873 emit_modrm(dst, src);
// 64-bit cmov reg, [mem].
877 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
880 } else if (cc == never) {
884 EnsureSpace ensure_space(this);
885 // Opcode: REX.W 0f 40 + cc /r.
886 emit_rex_64(dst, src);
889 emit_operand(dst, src);
// 32-bit cmov reg, reg.
893 void Assembler::cmovl(Condition cc, Register dst, Register src) {
896 } else if (cc == never) {
900 EnsureSpace ensure_space(this);
901 // Opcode: 0f 40 + cc /r.
902 emit_optional_rex_32(dst, src);
905 emit_modrm(dst, src);
// 32-bit cmov reg, [mem].
909 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
912 } else if (cc == never) {
916 EnsureSpace ensure_space(this);
917 // Opcode: 0f 40 + cc /r.
918 emit_optional_rex_32(dst, src);
921 emit_operand(dst, src);
// cmpb_al: compare al with an 8-bit immediate (short rax-form encoding).
925 void Assembler::cmpb_al(Immediate imm8) {
926 ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
927 EnsureSpace ensure_space(this);
// cpuid / cqo: opcode emission elided in this excerpt.
933 void Assembler::cpuid() {
934 EnsureSpace ensure_space(this);
940 void Assembler::cqo() {
941 EnsureSpace ensure_space(this);
// Decrement emitters (FF /1 family; opcode bytes partly elided). `size`
// selects 32- vs 64-bit via the REX prefix helpers.

// dec reg (size-parameterized).
947 void Assembler::emit_dec(Register dst, int size) {
948 EnsureSpace ensure_space(this);
951 emit_modrm(0x1, dst);
// dec [mem] (size-parameterized).
955 void Assembler::emit_dec(const Operand& dst, int size) {
956 EnsureSpace ensure_space(this);
959 emit_operand(1, dst);
// decb reg — byte decrement; non-legacy byte registers need REX.
963 void Assembler::decb(Register dst) {
964 EnsureSpace ensure_space(this);
965 if (!dst.is_byte_register()) {
966 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
970 emit_modrm(0x1, dst);
// decb [mem].
974 void Assembler::decb(const Operand& dst) {
975 EnsureSpace ensure_space(this);
976 emit_optional_rex_32(dst);
978 emit_operand(1, dst);
// enter imm16 — build stack frame; operand is always 16-bit.
982 void Assembler::enter(Immediate size) {
983 EnsureSpace ensure_space(this);
985 emitw(size.value_); // 16 bit operand, always.
// hlt — halt (opcode byte elided in this excerpt).
990 void Assembler::hlt() {
991 EnsureSpace ensure_space(this);
// Division and multiplication emitters; `size` selects operand width via
// the emit_rex helpers.

// idiv src — signed divide of rdx:rax (F7 /7 form).
996 void Assembler::emit_idiv(Register src, int size) {
997 EnsureSpace ensure_space(this);
1000 emit_modrm(0x7, src);
// imul src — one-operand signed multiply into rdx:rax (F7 /5 form).
1004 void Assembler::emit_imul(Register src, int size) {
1005 EnsureSpace ensure_space(this);
1006 emit_rex(src, size);
1008 emit_modrm(0x5, src);
// imul dst, src — two-operand form.
1012 void Assembler::emit_imul(Register dst, Register src, int size) {
1013 EnsureSpace ensure_space(this);
1014 emit_rex(dst, src, size);
1017 emit_modrm(dst, src);
// imul dst, [mem].
1021 void Assembler::emit_imul(Register dst, const Operand& src, int size) {
1022 EnsureSpace ensure_space(this);
1023 emit_rex(dst, src, size);
1026 emit_operand(dst, src);
// imul dst, src, imm — three-operand form; 8-bit immediates get the short
// encoding.
1030 void Assembler::emit_imul(Register dst, Register src, Immediate imm, int size) {
1031 EnsureSpace ensure_space(this);
1032 emit_rex(dst, src, size);
1033 if (is_int8(imm.value_)) {
1035 emit_modrm(dst, src);
1039 emit_modrm(dst, src);
// Increment emitters (FF /0 family) and int3 breakpoint.

// inc reg (size-parameterized).
1045 void Assembler::emit_inc(Register dst, int size) {
1046 EnsureSpace ensure_space(this);
1047 emit_rex(dst, size);
1049 emit_modrm(0x0, dst);
// inc [mem] (size-parameterized).
1053 void Assembler::emit_inc(const Operand& dst, int size) {
1054 EnsureSpace ensure_space(this);
1055 emit_rex(dst, size);
1057 emit_operand(0, dst);
// int3 — software breakpoint (opcode byte elided in this excerpt).
1061 void Assembler::int3() {
1062 EnsureSpace ensure_space(this);
// Conditional jump emitters. The label form picks between the 2-byte short
// (7x disp8) and 6-byte long (0F 8x disp32) encodings and threads
// unresolved uses onto the label's near/far fixup chains.
1067 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1071 } else if (cc == never) {
1074 EnsureSpace ensure_space(this);
1075 ASSERT(is_uint4(cc));
1076 if (L->is_bound()) {
1077 const int short_size = 2;
1078 const int long_size = 6;
1079 int offs = L->pos() - pc_offset();
1081 // Determine whether we can use 1-byte offsets for backwards branches,
1082 // which have a max range of 128 bytes.
1084 // We also need to check predictable_code_size() flag here, because on x64,
1085 // when the full code generator recompiles code for debugging, some places
1086 // need to be padded out to a certain size. The debugger is keeping track of
1087 // how often it did this so that it can adjust return addresses on the
1088 // stack, but if the size of jump instructions can also change, that's not
1089 // enough and the calculated offsets would be incorrect.
1090 if (is_int8(offs - short_size) && !predictable_code_size()) {
1091 // 0111 tttn #8-bit disp.
1093 emit((offs - short_size) & 0xFF);
1095 // 0000 1111 1000 tttn #32-bit disp.
1098 emitl(offs - long_size);
1100 } else if (distance == Label::kNear) {
1101 // 0111 tttn #8-bit disp
// Link into the near-jump chain: store the delta to the previous near
// use (or 0 when this is the first use).
1104 if (L->is_near_linked()) {
1105 int offset = L->near_link_pos() - pc_offset();
1106 ASSERT(is_int8(offset));
1107 disp = static_cast<byte>(offset & 0xFF);
1109 L->link_to(pc_offset(), Label::kNear);
1111 } else if (L->is_linked()) {
1112 // 0000 1111 1000 tttn #32-bit disp.
1116 L->link_to(pc_offset() - sizeof(int32_t));
1118 ASSERT(L->is_unused());
1121 int32_t current = pc_offset();
1123 L->link_to(current);
// jcc to a runtime entry (relocatable 32-bit displacement).
1128 void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
1129 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1130 EnsureSpace ensure_space(this);
1131 ASSERT(is_uint4(cc));
1134 emit_runtime_entry(entry, rmode);
// jcc to a code object (relocatable code target).
1138 void Assembler::j(Condition cc,
1139 Handle<Code> target,
1140 RelocInfo::Mode rmode) {
1141 EnsureSpace ensure_space(this);
1142 ASSERT(is_uint4(cc));
1143 // 0000 1111 1000 tttn #32-bit disp.
1146 emit_code_target(target, rmode);
// Unconditional jump emitters. The label form mirrors j(): short EB disp8
// vs long E9 disp32, with near/far fixup chains for unbound labels;
// register/operand forms use FF /4.
1150 void Assembler::jmp(Label* L, Label::Distance distance) {
1151 EnsureSpace ensure_space(this);
1152 const int short_size = sizeof(int8_t);
1153 const int long_size = sizeof(int32_t);
1154 if (L->is_bound()) {
1155 int offs = L->pos() - pc_offset() - 1;
1157 if (is_int8(offs - short_size) && !predictable_code_size()) {
1158 // 1110 1011 #8-bit disp.
1160 emit((offs - short_size) & 0xFF);
1162 // 1110 1001 #32-bit disp.
1164 emitl(offs - long_size);
1166 } else if (distance == Label::kNear) {
// Link into the near-jump chain (delta to previous near use, 0 first).
1169 if (L->is_near_linked()) {
1170 int offset = L->near_link_pos() - pc_offset();
1171 ASSERT(is_int8(offset));
1172 disp = static_cast<byte>(offset & 0xFF);
1174 L->link_to(pc_offset(), Label::kNear);
1176 } else if (L->is_linked()) {
1177 // 1110 1001 #32-bit disp.
1180 L->link_to(pc_offset() - long_size);
1182 // 1110 1001 #32-bit disp.
1183 ASSERT(L->is_unused());
1185 int32_t current = pc_offset();
1187 L->link_to(current);
// jmp to a code object (relocatable code target).
1192 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1193 EnsureSpace ensure_space(this);
1194 // 1110 1001 #32-bit disp.
1196 emit_code_target(target, rmode);
// jmp to a runtime entry.
1200 void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
1201 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1202 EnsureSpace ensure_space(this);
1203 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1205 emit_runtime_entry(entry, rmode);
// jmp reg — indirect (FF /4).
1209 void Assembler::jmp(Register target) {
1210 EnsureSpace ensure_space(this);
1212 emit_optional_rex_32(target);
1214 emit_modrm(0x4, target);
// jmp [mem] — indirect (FF /4).
1218 void Assembler::jmp(const Operand& src) {
1219 EnsureSpace ensure_space(this);
1221 emit_optional_rex_32(src);
1223 emit_operand(0x4, src);
// lea dst, [mem] — size-parameterized address computation.
1227 void Assembler::emit_lea(Register dst, const Operand& src, int size) {
1228 EnsureSpace ensure_space(this);
1229 emit_rex(dst, src, size);
1231 emit_operand(dst, src);
// load_rax: load rax from an absolute address (moffs form). With 64-bit
// pointers a REX.W prefix is used; with 32-bit pointers the address is
// zero-extended per the architecture manual.
1235 void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1236 EnsureSpace ensure_space(this);
1237 if (kPointerSize == kInt64Size) {
1238 emit(0x48); // REX.W
1242 ASSERT(kPointerSize == kInt32Size);
1245 // In 64-bit mode, need to zero extend the operand to 8 bytes.
1246 // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1247 // Developer's Manual Volume 2.
// Convenience overload for external references.
1253 void Assembler::load_rax(ExternalReference ref) {
1254 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
// leave — tear down stack frame (opcode byte elided in this excerpt).
1258 void Assembler::leave() {
1259 EnsureSpace ensure_space(this);
// Byte and word move emitters. For byte moves, registers outside al/bl/
// cl/dl require a REX prefix to be addressable as byte registers; word
// moves use the 0x66 operand-size prefix (partly elided in this excerpt).

// movb reg, [mem].
1264 void Assembler::movb(Register dst, const Operand& src) {
1265 EnsureSpace ensure_space(this);
1266 if (!dst.is_byte_register()) {
1267 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1268 emit_rex_32(dst, src);
1270 emit_optional_rex_32(dst, src);
1273 emit_operand(dst, src);
// movb reg, imm8 (B0+rb short form).
1277 void Assembler::movb(Register dst, Immediate imm) {
1278 EnsureSpace ensure_space(this);
1279 if (!dst.is_byte_register()) {
1282 emit(0xB0 + dst.low_bits());
// movb [mem], reg.
1287 void Assembler::movb(const Operand& dst, Register src) {
1288 EnsureSpace ensure_space(this);
1289 if (!src.is_byte_register()) {
1290 emit_rex_32(src, dst);
1292 emit_optional_rex_32(src, dst);
1295 emit_operand(src, dst);
// movb [mem], imm8 (C6 /0 form).
1299 void Assembler::movb(const Operand& dst, Immediate imm) {
1300 EnsureSpace ensure_space(this);
1301 emit_optional_rex_32(dst);
1303 emit_operand(0x0, dst);
1304 emit(static_cast<byte>(imm.value_));
// movw reg, [mem].
1308 void Assembler::movw(Register dst, const Operand& src) {
1309 EnsureSpace ensure_space(this);
1311 emit_optional_rex_32(dst, src);
1313 emit_operand(dst, src);
// movw [mem], reg.
1317 void Assembler::movw(const Operand& dst, Register src) {
1318 EnsureSpace ensure_space(this);
1320 emit_optional_rex_32(src, dst);
1322 emit_operand(src, dst);
// movw [mem], imm16 — immediate emitted little-endian, byte by byte.
1326 void Assembler::movw(const Operand& dst, Immediate imm) {
1327 EnsureSpace ensure_space(this);
1329 emit_optional_rex_32(dst);
1331 emit_operand(0x0, dst);
1332 emit(static_cast<byte>(imm.value_ & 0xff));
1333 emit(static_cast<byte>(imm.value_ >> 8));
// General move emitters, size-parameterized (32/64-bit via emit_rex).

// mov reg, [mem].
1337 void Assembler::emit_mov(Register dst, const Operand& src, int size) {
1338 EnsureSpace ensure_space(this);
1339 emit_rex(dst, src, size);
1341 emit_operand(dst, src);
// mov reg, reg — when src's low bits are 4 (rsp/r12), the reversed
// direction encoding is used to avoid a SIB byte.
1345 void Assembler::emit_mov(Register dst, Register src, int size) {
1346 EnsureSpace ensure_space(this);
1347 if (src.low_bits() == 4) {
1348 emit_rex(src, dst, size);
1350 emit_modrm(src, dst);
1352 emit_rex(dst, src, size);
1354 emit_modrm(dst, src);
// mov [mem], reg.
1359 void Assembler::emit_mov(const Operand& dst, Register src, int size) {
1360 EnsureSpace ensure_space(this);
1361 emit_rex(src, dst, size);
1363 emit_operand(src, dst);
// mov reg, imm — 64-bit uses C7 /0 (sign-extended imm32); 32-bit uses the
// short B8+rd form.
1367 void Assembler::emit_mov(Register dst, Immediate value, int size) {
1368 EnsureSpace ensure_space(this);
1369 emit_rex(dst, size);
1370 if (size == kInt64Size) {
1372 emit_modrm(0x0, dst);
1374 ASSERT(size == kInt32Size);
1375 emit(0xB8 + dst.low_bits());
// mov [mem], imm.
1381 void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
1382 EnsureSpace ensure_space(this);
1383 emit_rex(dst, size);
1385 emit_operand(0x0, dst);
// movp reg, ptr — pointer-sized immediate with relocation info.
1390 void Assembler::movp(Register dst, void* value, RelocInfo::Mode rmode) {
1391 EnsureSpace ensure_space(this);
1392 emit_rex(dst, kPointerSize);
1393 emit(0xB8 | dst.low_bits());
1394 emitp(value, rmode);
// movq reg, imm64 — full 64-bit immediate (B8+rd with REX.W).
1398 void Assembler::movq(Register dst, int64_t value) {
1399 EnsureSpace ensure_space(this);
1401 emit(0xB8 | dst.low_bits());
// Unsigned convenience wrapper over the signed form.
1406 void Assembler::movq(Register dst, uint64_t value) {
1407 movq(dst, static_cast<int64_t>(value));
1411 // Loads the ip-relative location of the src label into the target location
1412 // (as a 32-bit offset sign extended to 64-bit).
// If the label is unbound, this site is threaded onto its fixup chain and
// patched later by bind_to().
1413 void Assembler::movl(const Operand& dst, Label* src) {
1414 EnsureSpace ensure_space(this);
1415 emit_optional_rex_32(dst);
1417 emit_operand(0, dst);
1418 if (src->is_bound()) {
1419 int offset = src->pos() - pc_offset() - sizeof(int32_t);
1420 ASSERT(offset <= 0);
1422 } else if (src->is_linked()) {
1424 src->link_to(pc_offset() - sizeof(int32_t));
1426 ASSERT(src->is_unused());
1427 int32_t current = pc_offset();
1429 src->link_to(current);
1434 void Assembler::movsxbq(Register dst, const Operand& src) {
1435 EnsureSpace ensure_space(this);
1436 emit_rex_64(dst, src);
1439 emit_operand(dst, src);
1443 void Assembler::movsxwq(Register dst, const Operand& src) {
1444 EnsureSpace ensure_space(this);
1445 emit_rex_64(dst, src);
1448 emit_operand(dst, src);
1452 void Assembler::movsxlq(Register dst, Register src) {
1453 EnsureSpace ensure_space(this);
1454 emit_rex_64(dst, src);
1456 emit_modrm(dst, src);
1460 void Assembler::movsxlq(Register dst, const Operand& src) {
1461 EnsureSpace ensure_space(this);
1462 emit_rex_64(dst, src);
1464 emit_operand(dst, src);
1468 void Assembler::emit_movzxb(Register dst, const Operand& src, int size) {
1469 EnsureSpace ensure_space(this);
1470 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1471 // there is no need to make this a 64 bit operation.
1472 emit_optional_rex_32(dst, src);
1475 emit_operand(dst, src);
1479 void Assembler::emit_movzxw(Register dst, const Operand& src, int size) {
1480 EnsureSpace ensure_space(this);
1481 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1482 // there is no need to make this a 64 bit operation.
1483 emit_optional_rex_32(dst, src);
1486 emit_operand(dst, src);
1490 void Assembler::emit_movzxw(Register dst, Register src, int size) {
1491 EnsureSpace ensure_space(this);
1492 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1493 // there is no need to make this a 64 bit operation.
1494 emit_optional_rex_32(dst, src);
1497 emit_modrm(dst, src);
1501 void Assembler::repmovsb() {
1502 EnsureSpace ensure_space(this);
1508 void Assembler::repmovsw() {
1509 EnsureSpace ensure_space(this);
1510 emit(0x66); // Operand size override.
1516 void Assembler::emit_repmovs(int size) {
1517 EnsureSpace ensure_space(this);
1524 void Assembler::mul(Register src) {
1525 EnsureSpace ensure_space(this);
1528 emit_modrm(0x4, src);
1532 void Assembler::emit_neg(Register dst, int size) {
1533 EnsureSpace ensure_space(this);
1534 emit_rex(dst, size);
1536 emit_modrm(0x3, dst);
1540 void Assembler::emit_neg(const Operand& dst, int size) {
1541 EnsureSpace ensure_space(this);
1544 emit_operand(3, dst);
1548 void Assembler::nop() {
1549 EnsureSpace ensure_space(this);
1554 void Assembler::emit_not(Register dst, int size) {
1555 EnsureSpace ensure_space(this);
1556 emit_rex(dst, size);
1558 emit_modrm(0x2, dst);
1562 void Assembler::emit_not(const Operand& dst, int size) {
1563 EnsureSpace ensure_space(this);
1564 emit_rex(dst, size);
1566 emit_operand(2, dst);
1570 void Assembler::Nop(int n) {
1571 // The recommended muti-byte sequences of NOP instructions from the Intel 64
1572 // and IA-32 Architectures Software Developer's Manual.
1574 // Length Assembly Byte Sequence
1575 // 2 bytes 66 NOP 66 90H
1576 // 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1577 // 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1578 // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1579 // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1580 // 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1581 // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1582 // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1585 EnsureSpace ensure_space(this);
1647 void Assembler::popq(Register dst) {
1648 EnsureSpace ensure_space(this);
1649 emit_optional_rex_32(dst);
1650 emit(0x58 | dst.low_bits());
1654 void Assembler::popq(const Operand& dst) {
1655 EnsureSpace ensure_space(this);
1656 emit_optional_rex_32(dst);
1658 emit_operand(0, dst);
1662 void Assembler::popfq() {
1663 EnsureSpace ensure_space(this);
1668 void Assembler::pushq(Register src) {
1669 EnsureSpace ensure_space(this);
1670 emit_optional_rex_32(src);
1671 emit(0x50 | src.low_bits());
1675 void Assembler::pushq(const Operand& src) {
1676 EnsureSpace ensure_space(this);
1677 emit_optional_rex_32(src);
1679 emit_operand(6, src);
1683 void Assembler::pushq(Immediate value) {
1684 EnsureSpace ensure_space(this);
1685 if (is_int8(value.value_)) {
1687 emit(value.value_); // Emit low byte of value.
1690 emitl(value.value_);
1695 void Assembler::pushq_imm32(int32_t imm32) {
1696 EnsureSpace ensure_space(this);
1702 void Assembler::pushfq() {
1703 EnsureSpace ensure_space(this);
1708 void Assembler::ret(int imm16) {
1709 EnsureSpace ensure_space(this);
1710 ASSERT(is_uint16(imm16));
1716 emit((imm16 >> 8) & 0xFF);
1721 void Assembler::setcc(Condition cc, Register reg) {
1722 if (cc > last_condition) {
1723 movb(reg, Immediate(cc == always ? 1 : 0));
1726 EnsureSpace ensure_space(this);
1727 ASSERT(is_uint4(cc));
1728 if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
1733 emit_modrm(0x0, reg);
1737 void Assembler::shld(Register dst, Register src) {
1738 EnsureSpace ensure_space(this);
1739 emit_rex_64(src, dst);
1742 emit_modrm(src, dst);
1746 void Assembler::shrd(Register dst, Register src) {
1747 EnsureSpace ensure_space(this);
1748 emit_rex_64(src, dst);
1751 emit_modrm(src, dst);
1755 void Assembler::emit_xchg(Register dst, Register src, int size) {
1756 EnsureSpace ensure_space(this);
1757 if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1758 Register other = src.is(rax) ? dst : src;
1759 emit_rex(other, size);
1760 emit(0x90 | other.low_bits());
1761 } else if (dst.low_bits() == 4) {
1762 emit_rex(dst, src, size);
1764 emit_modrm(dst, src);
1766 emit_rex(src, dst, size);
1768 emit_modrm(src, dst);
1773 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1774 EnsureSpace ensure_space(this);
1775 if (kPointerSize == kInt64Size) {
1776 emit(0x48); // REX.W
1780 ASSERT(kPointerSize == kInt32Size);
1783 // In 64-bit mode, need to zero extend the operand to 8 bytes.
1784 // See 2.2.1.4 in Intel64 and IA32 Architectures Software
1785 // Developer's Manual Volume 2.
1791 void Assembler::store_rax(ExternalReference ref) {
1792 store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1796 void Assembler::testb(Register dst, Register src) {
1797 EnsureSpace ensure_space(this);
1798 if (src.low_bits() == 4) {
1799 emit_rex_32(src, dst);
1801 emit_modrm(src, dst);
1803 if (!dst.is_byte_register() || !src.is_byte_register()) {
1804 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1805 emit_rex_32(dst, src);
1808 emit_modrm(dst, src);
1813 void Assembler::testb(Register reg, Immediate mask) {
1814 ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
1815 EnsureSpace ensure_space(this);
1818 emit(mask.value_); // Low byte emitted.
1820 if (!reg.is_byte_register()) {
1821 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1825 emit_modrm(0x0, reg);
1826 emit(mask.value_); // Low byte emitted.
1831 void Assembler::testb(const Operand& op, Immediate mask) {
1832 ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
1833 EnsureSpace ensure_space(this);
1834 emit_optional_rex_32(rax, op);
1836 emit_operand(rax, op); // Operation code 0
1837 emit(mask.value_); // Low byte emitted.
1841 void Assembler::testb(const Operand& op, Register reg) {
1842 EnsureSpace ensure_space(this);
1843 if (!reg.is_byte_register()) {
1844 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1845 emit_rex_32(reg, op);
1847 emit_optional_rex_32(reg, op);
1850 emit_operand(reg, op);
1854 void Assembler::emit_test(Register dst, Register src, int size) {
1855 EnsureSpace ensure_space(this);
1856 if (src.low_bits() == 4) {
1857 emit_rex(src, dst, size);
1859 emit_modrm(src, dst);
1861 emit_rex(dst, src, size);
1863 emit_modrm(dst, src);
1868 void Assembler::emit_test(Register reg, Immediate mask, int size) {
1869 // testl with a mask that fits in the low byte is exactly testb.
1870 if (is_uint8(mask.value_)) {
1874 EnsureSpace ensure_space(this);
1876 emit_rex(rax, size);
1880 emit_rex(reg, size);
1882 emit_modrm(0x0, reg);
1888 void Assembler::emit_test(const Operand& op, Immediate mask, int size) {
1889 // testl with a mask that fits in the low byte is exactly testb.
1890 if (is_uint8(mask.value_)) {
1894 EnsureSpace ensure_space(this);
1895 emit_rex(rax, op, size);
1897 emit_operand(rax, op); // Operation code 0
1902 void Assembler::emit_test(const Operand& op, Register reg, int size) {
1903 EnsureSpace ensure_space(this);
1904 emit_rex(reg, op, size);
1906 emit_operand(reg, op);
1910 // FPU instructions.
1913 void Assembler::fld(int i) {
1914 EnsureSpace ensure_space(this);
1915 emit_farith(0xD9, 0xC0, i);
1919 void Assembler::fld1() {
1920 EnsureSpace ensure_space(this);
1926 void Assembler::fldz() {
1927 EnsureSpace ensure_space(this);
1933 void Assembler::fldpi() {
1934 EnsureSpace ensure_space(this);
1940 void Assembler::fldln2() {
1941 EnsureSpace ensure_space(this);
1947 void Assembler::fld_s(const Operand& adr) {
1948 EnsureSpace ensure_space(this);
1949 emit_optional_rex_32(adr);
1951 emit_operand(0, adr);
1955 void Assembler::fld_d(const Operand& adr) {
1956 EnsureSpace ensure_space(this);
1957 emit_optional_rex_32(adr);
1959 emit_operand(0, adr);
1963 void Assembler::fstp_s(const Operand& adr) {
1964 EnsureSpace ensure_space(this);
1965 emit_optional_rex_32(adr);
1967 emit_operand(3, adr);
1971 void Assembler::fstp_d(const Operand& adr) {
1972 EnsureSpace ensure_space(this);
1973 emit_optional_rex_32(adr);
1975 emit_operand(3, adr);
1979 void Assembler::fstp(int index) {
1980 ASSERT(is_uint3(index));
1981 EnsureSpace ensure_space(this);
1982 emit_farith(0xDD, 0xD8, index);
1986 void Assembler::fild_s(const Operand& adr) {
1987 EnsureSpace ensure_space(this);
1988 emit_optional_rex_32(adr);
1990 emit_operand(0, adr);
1994 void Assembler::fild_d(const Operand& adr) {
1995 EnsureSpace ensure_space(this);
1996 emit_optional_rex_32(adr);
1998 emit_operand(5, adr);
2002 void Assembler::fistp_s(const Operand& adr) {
2003 EnsureSpace ensure_space(this);
2004 emit_optional_rex_32(adr);
2006 emit_operand(3, adr);
2010 void Assembler::fisttp_s(const Operand& adr) {
2011 ASSERT(IsEnabled(SSE3));
2012 EnsureSpace ensure_space(this);
2013 emit_optional_rex_32(adr);
2015 emit_operand(1, adr);
2019 void Assembler::fisttp_d(const Operand& adr) {
2020 ASSERT(IsEnabled(SSE3));
2021 EnsureSpace ensure_space(this);
2022 emit_optional_rex_32(adr);
2024 emit_operand(1, adr);
2028 void Assembler::fist_s(const Operand& adr) {
2029 EnsureSpace ensure_space(this);
2030 emit_optional_rex_32(adr);
2032 emit_operand(2, adr);
2036 void Assembler::fistp_d(const Operand& adr) {
2037 EnsureSpace ensure_space(this);
2038 emit_optional_rex_32(adr);
2040 emit_operand(7, adr);
2044 void Assembler::fabs() {
2045 EnsureSpace ensure_space(this);
2051 void Assembler::fchs() {
2052 EnsureSpace ensure_space(this);
2058 void Assembler::fcos() {
2059 EnsureSpace ensure_space(this);
2065 void Assembler::fsin() {
2066 EnsureSpace ensure_space(this);
2072 void Assembler::fptan() {
2073 EnsureSpace ensure_space(this);
2079 void Assembler::fyl2x() {
2080 EnsureSpace ensure_space(this);
2086 void Assembler::f2xm1() {
2087 EnsureSpace ensure_space(this);
2093 void Assembler::fscale() {
2094 EnsureSpace ensure_space(this);
2100 void Assembler::fninit() {
2101 EnsureSpace ensure_space(this);
2107 void Assembler::fadd(int i) {
2108 EnsureSpace ensure_space(this);
2109 emit_farith(0xDC, 0xC0, i);
2113 void Assembler::fsub(int i) {
2114 EnsureSpace ensure_space(this);
2115 emit_farith(0xDC, 0xE8, i);
2119 void Assembler::fisub_s(const Operand& adr) {
2120 EnsureSpace ensure_space(this);
2121 emit_optional_rex_32(adr);
2123 emit_operand(4, adr);
2127 void Assembler::fmul(int i) {
2128 EnsureSpace ensure_space(this);
2129 emit_farith(0xDC, 0xC8, i);
2133 void Assembler::fdiv(int i) {
2134 EnsureSpace ensure_space(this);
2135 emit_farith(0xDC, 0xF8, i);
2139 void Assembler::faddp(int i) {
2140 EnsureSpace ensure_space(this);
2141 emit_farith(0xDE, 0xC0, i);
2145 void Assembler::fsubp(int i) {
2146 EnsureSpace ensure_space(this);
2147 emit_farith(0xDE, 0xE8, i);
2151 void Assembler::fsubrp(int i) {
2152 EnsureSpace ensure_space(this);
2153 emit_farith(0xDE, 0xE0, i);
2157 void Assembler::fmulp(int i) {
2158 EnsureSpace ensure_space(this);
2159 emit_farith(0xDE, 0xC8, i);
2163 void Assembler::fdivp(int i) {
2164 EnsureSpace ensure_space(this);
2165 emit_farith(0xDE, 0xF8, i);
2169 void Assembler::fprem() {
2170 EnsureSpace ensure_space(this);
2176 void Assembler::fprem1() {
2177 EnsureSpace ensure_space(this);
2183 void Assembler::fxch(int i) {
2184 EnsureSpace ensure_space(this);
2185 emit_farith(0xD9, 0xC8, i);
2189 void Assembler::fincstp() {
2190 EnsureSpace ensure_space(this);
2196 void Assembler::ffree(int i) {
2197 EnsureSpace ensure_space(this);
2198 emit_farith(0xDD, 0xC0, i);
2202 void Assembler::ftst() {
2203 EnsureSpace ensure_space(this);
2209 void Assembler::fucomp(int i) {
2210 EnsureSpace ensure_space(this);
2211 emit_farith(0xDD, 0xE8, i);
2215 void Assembler::fucompp() {
2216 EnsureSpace ensure_space(this);
2222 void Assembler::fucomi(int i) {
2223 EnsureSpace ensure_space(this);
2229 void Assembler::fucomip() {
2230 EnsureSpace ensure_space(this);
2236 void Assembler::fcompp() {
2237 EnsureSpace ensure_space(this);
2243 void Assembler::fnstsw_ax() {
2244 EnsureSpace ensure_space(this);
2250 void Assembler::fwait() {
2251 EnsureSpace ensure_space(this);
2256 void Assembler::frndint() {
2257 EnsureSpace ensure_space(this);
2263 void Assembler::fnclex() {
2264 EnsureSpace ensure_space(this);
2270 void Assembler::sahf() {
2271 // TODO(X64): Test for presence. Not all 64-bit intel CPU's have sahf
2272 // in 64-bit mode. Test CpuID.
2273 EnsureSpace ensure_space(this);
2278 void Assembler::emit_farith(int b1, int b2, int i) {
2279 ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2280 ASSERT(is_uint3(i)); // illegal stack offset
2288 void Assembler::andps(XMMRegister dst, XMMRegister src) {
2289 EnsureSpace ensure_space(this);
2290 emit_optional_rex_32(dst, src);
2293 emit_sse_operand(dst, src);
2297 void Assembler::andps(XMMRegister dst, const Operand& src) {
2298 EnsureSpace ensure_space(this);
2299 emit_optional_rex_32(dst, src);
2302 emit_sse_operand(dst, src);
2306 void Assembler::orps(XMMRegister dst, XMMRegister src) {
2307 EnsureSpace ensure_space(this);
2308 emit_optional_rex_32(dst, src);
2311 emit_sse_operand(dst, src);
2315 void Assembler::orps(XMMRegister dst, const Operand& src) {
2316 EnsureSpace ensure_space(this);
2317 emit_optional_rex_32(dst, src);
2320 emit_sse_operand(dst, src);
2324 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2325 EnsureSpace ensure_space(this);
2326 emit_optional_rex_32(dst, src);
2329 emit_sse_operand(dst, src);
2333 void Assembler::xorps(XMMRegister dst, const Operand& src) {
2334 EnsureSpace ensure_space(this);
2335 emit_optional_rex_32(dst, src);
2338 emit_sse_operand(dst, src);
2342 void Assembler::addps(XMMRegister dst, XMMRegister src) {
2343 EnsureSpace ensure_space(this);
2344 emit_optional_rex_32(dst, src);
2347 emit_sse_operand(dst, src);
2351 void Assembler::addps(XMMRegister dst, const Operand& src) {
2352 EnsureSpace ensure_space(this);
2353 emit_optional_rex_32(dst, src);
2356 emit_sse_operand(dst, src);
2360 void Assembler::subps(XMMRegister dst, XMMRegister src) {
2361 EnsureSpace ensure_space(this);
2362 emit_optional_rex_32(dst, src);
2365 emit_sse_operand(dst, src);
2369 void Assembler::subps(XMMRegister dst, const Operand& src) {
2370 EnsureSpace ensure_space(this);
2371 emit_optional_rex_32(dst, src);
2374 emit_sse_operand(dst, src);
2378 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
2379 EnsureSpace ensure_space(this);
2380 emit_optional_rex_32(dst, src);
2383 emit_sse_operand(dst, src);
2387 void Assembler::mulps(XMMRegister dst, const Operand& src) {
2388 EnsureSpace ensure_space(this);
2389 emit_optional_rex_32(dst, src);
2392 emit_sse_operand(dst, src);
2396 void Assembler::divps(XMMRegister dst, XMMRegister src) {
2397 EnsureSpace ensure_space(this);
2398 emit_optional_rex_32(dst, src);
2401 emit_sse_operand(dst, src);
2405 void Assembler::divps(XMMRegister dst, const Operand& src) {
2406 EnsureSpace ensure_space(this);
2407 emit_optional_rex_32(dst, src);
2410 emit_sse_operand(dst, src);
2414 // SSE 2 operations.
2416 void Assembler::movd(XMMRegister dst, Register src) {
2417 EnsureSpace ensure_space(this);
2419 emit_optional_rex_32(dst, src);
2422 emit_sse_operand(dst, src);
2426 void Assembler::movd(Register dst, XMMRegister src) {
2427 EnsureSpace ensure_space(this);
2429 emit_optional_rex_32(src, dst);
2432 emit_sse_operand(src, dst);
2436 void Assembler::movq(XMMRegister dst, Register src) {
2437 EnsureSpace ensure_space(this);
2439 emit_rex_64(dst, src);
2442 emit_sse_operand(dst, src);
2446 void Assembler::movq(Register dst, XMMRegister src) {
2447 EnsureSpace ensure_space(this);
2449 emit_rex_64(src, dst);
2452 emit_sse_operand(src, dst);
2456 void Assembler::movq(XMMRegister dst, XMMRegister src) {
2457 EnsureSpace ensure_space(this);
2458 if (dst.low_bits() == 4) {
2459 // Avoid unnecessary SIB byte.
2461 emit_optional_rex_32(dst, src);
2464 emit_sse_operand(dst, src);
2467 emit_optional_rex_32(src, dst);
2470 emit_sse_operand(src, dst);
2475 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2476 EnsureSpace ensure_space(this);
2478 emit_rex_64(src, dst);
2481 emit_sse_operand(src, dst);
2485 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2486 EnsureSpace ensure_space(this);
2488 emit_rex_64(dst, src);
2491 emit_sse_operand(dst, src);
2495 void Assembler::movdqu(const Operand& dst, XMMRegister src) {
2496 EnsureSpace ensure_space(this);
2498 emit_rex_64(src, dst);
2501 emit_sse_operand(src, dst);
2505 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
2506 EnsureSpace ensure_space(this);
2508 emit_rex_64(dst, src);
2511 emit_sse_operand(dst, src);
2515 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2516 ASSERT(IsEnabled(SSE4_1));
2517 ASSERT(is_uint8(imm8));
2518 EnsureSpace ensure_space(this);
2520 emit_optional_rex_32(src, dst);
2524 emit_sse_operand(src, dst);
2529 void Assembler::insertps(XMMRegister dst, XMMRegister src, byte imm8) {
2530 ASSERT(CpuFeatures::IsSupported(SSE4_1));
2531 ASSERT(is_uint8(imm8));
2532 EnsureSpace ensure_space(this);
2534 emit_optional_rex_32(dst, src);
2538 emit_sse_operand(dst, src);
2543 void Assembler::pinsrd(XMMRegister dst, Register src, byte imm8) {
2544 ASSERT(CpuFeatures::IsSupported(SSE4_1));
2545 ASSERT(is_uint8(imm8));
2546 EnsureSpace ensure_space(this);
2548 emit_optional_rex_32(dst, src);
2552 emit_sse_operand(dst, src);
2557 void Assembler::movsd(const Operand& dst, XMMRegister src) {
2558 EnsureSpace ensure_space(this);
2559 emit(0xF2); // double
2560 emit_optional_rex_32(src, dst);
2562 emit(0x11); // store
2563 emit_sse_operand(src, dst);
2567 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2568 EnsureSpace ensure_space(this);
2569 emit(0xF2); // double
2570 emit_optional_rex_32(dst, src);
2573 emit_sse_operand(dst, src);
2577 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2578 EnsureSpace ensure_space(this);
2579 emit(0xF2); // double
2580 emit_optional_rex_32(dst, src);
2583 emit_sse_operand(dst, src);
2587 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2588 EnsureSpace ensure_space(this);
2589 if (src.low_bits() == 4) {
2590 // Try to avoid an unnecessary SIB byte.
2591 emit_optional_rex_32(src, dst);
2594 emit_sse_operand(src, dst);
2596 emit_optional_rex_32(dst, src);
2599 emit_sse_operand(dst, src);
2604 void Assembler::movups(XMMRegister dst, const Operand& src) {
2605 EnsureSpace ensure_space(this);
2606 emit_optional_rex_32(dst, src);
2609 emit_sse_operand(dst, src);
2613 void Assembler::movups(const Operand& dst, XMMRegister src) {
2614 EnsureSpace ensure_space(this);
2615 emit_optional_rex_32(src, dst);
2618 emit_sse_operand(src, dst);
2622 void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
2623 ASSERT(is_uint8(imm8));
2624 EnsureSpace ensure_space(this);
2625 emit_optional_rex_32(dst, src);
2628 emit_sse_operand(dst, src);
2633 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2634 EnsureSpace ensure_space(this);
2635 if (src.low_bits() == 4) {
2636 // Try to avoid an unnecessary SIB byte.
2638 emit_optional_rex_32(src, dst);
2641 emit_sse_operand(src, dst);
2644 emit_optional_rex_32(dst, src);
2647 emit_sse_operand(dst, src);
2652 void Assembler::movss(XMMRegister dst, const Operand& src) {
2653 EnsureSpace ensure_space(this);
2654 emit(0xF3); // single
2655 emit_optional_rex_32(dst, src);
2658 emit_sse_operand(dst, src);
2662 void Assembler::movss(const Operand& src, XMMRegister dst) {
2663 EnsureSpace ensure_space(this);
2664 emit(0xF3); // single
2665 emit_optional_rex_32(dst, src);
2667 emit(0x11); // store
2668 emit_sse_operand(dst, src);
2672 void Assembler::psllq(XMMRegister reg, byte imm8) {
2673 EnsureSpace ensure_space(this);
2677 emit_sse_operand(rsi, reg); // rsi == 6
2682 void Assembler::cvttss2si(Register dst, const Operand& src) {
2683 EnsureSpace ensure_space(this);
2685 emit_optional_rex_32(dst, src);
2688 emit_operand(dst, src);
2692 void Assembler::cvttss2si(Register dst, XMMRegister src) {
2693 EnsureSpace ensure_space(this);
2695 emit_optional_rex_32(dst, src);
2698 emit_sse_operand(dst, src);
2702 void Assembler::cvttsd2si(Register dst, const Operand& src) {
2703 EnsureSpace ensure_space(this);
2705 emit_optional_rex_32(dst, src);
2708 emit_operand(dst, src);
2712 void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2713 EnsureSpace ensure_space(this);
2715 emit_optional_rex_32(dst, src);
2718 emit_sse_operand(dst, src);
2722 void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2723 EnsureSpace ensure_space(this);
2725 emit_rex_64(dst, src);
2728 emit_sse_operand(dst, src);
2732 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2733 EnsureSpace ensure_space(this);
2735 emit_optional_rex_32(dst, src);
2738 emit_sse_operand(dst, src);
2742 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2743 EnsureSpace ensure_space(this);
2745 emit_optional_rex_32(dst, src);
2748 emit_sse_operand(dst, src);
2752 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2753 EnsureSpace ensure_space(this);
2755 emit_optional_rex_32(dst, src);
2758 emit_sse_operand(dst, src);
2762 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2763 EnsureSpace ensure_space(this);
2765 emit_rex_64(dst, src);
2768 emit_sse_operand(dst, src);
2772 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2773 EnsureSpace ensure_space(this);
2775 emit_optional_rex_32(dst, src);
2778 emit_sse_operand(dst, src);
2782 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2783 EnsureSpace ensure_space(this);
2785 emit_optional_rex_32(dst, src);
2788 emit_sse_operand(dst, src);
2792 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2793 EnsureSpace ensure_space(this);
2795 emit_optional_rex_32(dst, src);
2798 emit_sse_operand(dst, src);
2802 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2803 EnsureSpace ensure_space(this);
2805 emit_optional_rex_32(dst, src);
2808 emit_sse_operand(dst, src);
2812 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2813 EnsureSpace ensure_space(this);
2815 emit_rex_64(dst, src);
2818 emit_sse_operand(dst, src);
2822 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2823 EnsureSpace ensure_space(this);
2825 emit_optional_rex_32(dst, src);
2828 emit_sse_operand(dst, src);
2832 void Assembler::addsd(XMMRegister dst, const Operand& src) {
2833 EnsureSpace ensure_space(this);
2835 emit_optional_rex_32(dst, src);
2838 emit_sse_operand(dst, src);
2842 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2843 EnsureSpace ensure_space(this);
2845 emit_optional_rex_32(dst, src);
2848 emit_sse_operand(dst, src);
2852 void Assembler::mulsd(XMMRegister dst, const Operand& src) {
2853 EnsureSpace ensure_space(this);
2855 emit_optional_rex_32(dst, src);
2858 emit_sse_operand(dst, src);
2862 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2863 EnsureSpace ensure_space(this);
2865 emit_optional_rex_32(dst, src);
2868 emit_sse_operand(dst, src);
2872 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2873 EnsureSpace ensure_space(this);
2875 emit_optional_rex_32(dst, src);
2878 emit_sse_operand(dst, src);
2882 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2883 EnsureSpace ensure_space(this);
2885 emit_optional_rex_32(dst, src);
2888 emit_sse_operand(dst, src);
2892 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2893 EnsureSpace ensure_space(this);
2895 emit_optional_rex_32(dst, src);
2898 emit_sse_operand(dst, src);
2902 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2903 EnsureSpace ensure_space(this);
2905 emit_optional_rex_32(dst, src);
2908 emit_sse_operand(dst, src);
2912 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2913 EnsureSpace ensure_space(this);
2915 emit_optional_rex_32(dst, src);
2918 emit_sse_operand(dst, src);
2922 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2923 EnsureSpace ensure_space(this);
2925 emit_optional_rex_32(dst, src);
2928 emit_sse_operand(dst, src);
2932 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2933 EnsureSpace ensure_space(this);
2935 emit_optional_rex_32(dst, src);
2938 emit_sse_operand(dst, src);
2942 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
2943 EnsureSpace ensure_space(this);
2945 emit_optional_rex_32(dst, src);
2948 emit_sse_operand(dst, src);
2949 emit(0x01); // LT == 1
2953 void Assembler::cmpps(XMMRegister dst, XMMRegister src, int8_t cmp) {
2954 EnsureSpace ensure_space(this);
2955 emit_optional_rex_32(dst, src);
2958 emit_sse_operand(dst, src);
2963 void Assembler::cmpeqps(XMMRegister dst, XMMRegister src) {
2964 cmpps(dst, src, 0x0);
2968 void Assembler::cmpltps(XMMRegister dst, XMMRegister src) {
2969 cmpps(dst, src, 0x1);
2973 void Assembler::cmpleps(XMMRegister dst, XMMRegister src) {
2974 cmpps(dst, src, 0x2);
2978 void Assembler::cmpneqps(XMMRegister dst, XMMRegister src) {
2979 cmpps(dst, src, 0x4);
2983 void Assembler::cmpnltps(XMMRegister dst, XMMRegister src) {
2984 cmpps(dst, src, 0x5);
2988 void Assembler::cmpnleps(XMMRegister dst, XMMRegister src) {
2989 cmpps(dst, src, 0x6);
2993 void Assembler::pslld(XMMRegister reg, int8_t shift) {
2994 EnsureSpace ensure_space(this);
2996 emit_optional_rex_32(reg);
2999 emit_sse_operand(rsi, reg); // rsi == 6
3004 void Assembler::pslld(XMMRegister dst, XMMRegister src) {
3005 EnsureSpace ensure_space(this);
3007 emit_optional_rex_32(dst, src);
3010 emit_sse_operand(dst, src);
3014 void Assembler::psrld(XMMRegister reg, int8_t shift) {
3015 EnsureSpace ensure_space(this);
3017 emit_optional_rex_32(reg);
3020 emit_sse_operand(rdx, reg); // rdx == 2
3025 void Assembler::psrld(XMMRegister dst, XMMRegister src) {
3026 EnsureSpace ensure_space(this);
3028 emit_optional_rex_32(dst, src);
3031 emit_sse_operand(dst, src);
3035 void Assembler::psrad(XMMRegister reg, int8_t shift) {
3036 EnsureSpace ensure_space(this);
3038 emit_optional_rex_32(reg);
3041 emit_sse_operand(rsp, reg); // rsp == 4
3046 void Assembler::psrad(XMMRegister dst, XMMRegister src) {
3047 EnsureSpace ensure_space(this);
3049 emit_optional_rex_32(dst, src);
3052 emit_sse_operand(dst, src);
3056 void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
3057 EnsureSpace ensure_space(this);
3059 emit_optional_rex_32(dst, src);
3062 emit_sse_operand(dst, src);
3066 void Assembler::pcmpgtd(XMMRegister dst, XMMRegister src) {
3067 EnsureSpace ensure_space(this);
3069 emit_optional_rex_32(dst, src);
3072 emit_sse_operand(dst, src);
3076 void Assembler::roundsd(XMMRegister dst, XMMRegister src,
3077 Assembler::RoundingMode mode) {
3078 ASSERT(IsEnabled(SSE4_1));
3079 EnsureSpace ensure_space(this);
3081 emit_optional_rex_32(dst, src);
3085 emit_sse_operand(dst, src);
3086 // Mask precision exeption.
3087 emit(static_cast<byte>(mode) | 0x8);
3091 void Assembler::movmskpd(Register dst, XMMRegister src) {
3092 EnsureSpace ensure_space(this);
3094 emit_optional_rex_32(dst, src);
3097 emit_sse_operand(dst, src);
3101 void Assembler::movmskps(Register dst, XMMRegister src) {
3102 EnsureSpace ensure_space(this);
3103 emit_optional_rex_32(dst, src);
3106 emit_sse_operand(dst, src);
3110 void Assembler::minps(XMMRegister dst, XMMRegister src) {
3111 EnsureSpace ensure_space(this);
3112 emit_optional_rex_32(dst, src);
3115 emit_sse_operand(dst, src);
3119 void Assembler::minps(XMMRegister dst, const Operand& src) {
3120 EnsureSpace ensure_space(this);
3121 emit_optional_rex_32(dst, src);
3124 emit_sse_operand(dst, src);
3128 void Assembler::maxps(XMMRegister dst, XMMRegister src) {
3129 EnsureSpace ensure_space(this);
3130 emit_optional_rex_32(dst, src);
3133 emit_sse_operand(dst, src);
3137 void Assembler::maxps(XMMRegister dst, const Operand& src) {
3138 EnsureSpace ensure_space(this);
3139 emit_optional_rex_32(dst, src);
3142 emit_sse_operand(dst, src);
3146 void Assembler::rcpps(XMMRegister dst, XMMRegister src) {
3147 EnsureSpace ensure_space(this);
3148 emit_optional_rex_32(dst, src);
3151 emit_sse_operand(dst, src);
3155 void Assembler::rcpps(XMMRegister dst, const Operand& src) {
3156 EnsureSpace ensure_space(this);
3157 emit_optional_rex_32(dst, src);
3160 emit_sse_operand(dst, src);
3164 void Assembler::rsqrtps(XMMRegister dst, XMMRegister src) {
3165 EnsureSpace ensure_space(this);
3166 emit_optional_rex_32(dst, src);
3169 emit_sse_operand(dst, src);
3173 void Assembler::rsqrtps(XMMRegister dst, const Operand& src) {
3174 EnsureSpace ensure_space(this);
3175 emit_optional_rex_32(dst, src);
3178 emit_sse_operand(dst, src);
3182 void Assembler::sqrtps(XMMRegister dst, XMMRegister src) {
3183 EnsureSpace ensure_space(this);
3184 emit_optional_rex_32(dst, src);
3187 emit_sse_operand(dst, src);
3191 void Assembler::sqrtps(XMMRegister dst, const Operand& src) {
3192 EnsureSpace ensure_space(this);
3193 emit_optional_rex_32(dst, src);
3196 emit_sse_operand(dst, src);
3200 void Assembler::cvtdq2ps(XMMRegister dst, XMMRegister src) {
3201 EnsureSpace ensure_space(this);
3202 emit_optional_rex_32(dst, src);
3205 emit_sse_operand(dst, src);
3209 void Assembler::cvtdq2ps(XMMRegister dst, const Operand& src) {
3210 EnsureSpace ensure_space(this);
3211 emit_optional_rex_32(dst, src);
3214 emit_sse_operand(dst, src);
3218 void Assembler::paddd(XMMRegister dst, XMMRegister src) {
3219 EnsureSpace ensure_space(this);
3221 emit_optional_rex_32(dst, src);
3224 emit_sse_operand(dst, src);
3228 void Assembler::paddd(XMMRegister dst, const Operand& src) {
3229 EnsureSpace ensure_space(this);
3231 emit_optional_rex_32(dst, src);
3234 emit_sse_operand(dst, src);
3238 void Assembler::psubd(XMMRegister dst, XMMRegister src) {
3239 EnsureSpace ensure_space(this);
3241 emit_optional_rex_32(dst, src);
3244 emit_sse_operand(dst, src);
3248 void Assembler::psubd(XMMRegister dst, const Operand& src) {
3249 EnsureSpace ensure_space(this);
3251 emit_optional_rex_32(dst, src);
3254 emit_sse_operand(dst, src);
3258 void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
3259 ASSERT(IsEnabled(SSE4_1));
3260 EnsureSpace ensure_space(this);
3262 emit_optional_rex_32(dst, src);
3266 emit_sse_operand(dst, src);
3270 void Assembler::pmulld(XMMRegister dst, const Operand& src) {
3271 EnsureSpace ensure_space(this);
3273 emit_optional_rex_32(dst, src);
3276 emit_sse_operand(dst, src);
3280 void Assembler::pmuludq(XMMRegister dst, XMMRegister src) {
3281 EnsureSpace ensure_space(this);
3283 emit_optional_rex_32(dst, src);
3286 emit_sse_operand(dst, src);
3290 void Assembler::pmuludq(XMMRegister dst, const Operand& src) {
3291 EnsureSpace ensure_space(this);
3293 emit_optional_rex_32(dst, src);
3296 emit_sse_operand(dst, src);
3300 void Assembler::punpackldq(XMMRegister dst, XMMRegister src) {
3301 EnsureSpace ensure_space(this);
3303 emit_optional_rex_32(dst, src);
3306 emit_sse_operand(dst, src);
3310 void Assembler::punpackldq(XMMRegister dst, const Operand& src) {
3311 EnsureSpace ensure_space(this);
3313 emit_optional_rex_32(dst, src);
3316 emit_sse_operand(dst, src);
3320 void Assembler::psrldq(XMMRegister dst, uint8_t shift) {
3321 EnsureSpace ensure_space(this);
3323 emit_optional_rex_32(dst);
3326 emit_sse_operand(dst);
3331 void Assembler::cvtps2dq(XMMRegister dst, XMMRegister src) {
3332 EnsureSpace ensure_space(this);
3334 emit_optional_rex_32(dst, src);
3337 emit_sse_operand(dst, src);
3341 void Assembler::cvtps2dq(XMMRegister dst, const Operand& src) {
3342 EnsureSpace ensure_space(this);
3344 emit_optional_rex_32(dst, src);
3347 emit_sse_operand(dst, src);
3351 void Assembler::pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
3352 EnsureSpace ensure_space(this);
3354 emit_optional_rex_32(dst, src);
3357 emit_sse_operand(dst, src);
3362 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
3363 Register ireg = { reg.code() };
3364 emit_operand(ireg, adr);
3368 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
3369 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3373 void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
3374 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3378 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
3379 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3383 void Assembler::emit_sse_operand(XMMRegister dst) {
3384 emit(0xD8 | dst.low_bits());
3388 void Assembler::db(uint8_t data) {
3389 EnsureSpace ensure_space(this);
3394 void Assembler::dd(uint32_t data) {
3395 EnsureSpace ensure_space(this);
3400 // Relocation information implementations.
3402 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3403 ASSERT(!RelocInfo::IsNone(rmode));
3404 if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
3405 // Don't record external references unless the heap will be serialized.
3407 if (!Serializer::enabled()) {
3408 Serializer::TooLateToEnableNow();
3411 if (!Serializer::enabled() && !emit_debug_code()) {
3414 } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) {
3415 // Don't record psuedo relocation info for code age sequence mode.
3418 RelocInfo rinfo(pc_, rmode, data, NULL);
3419 reloc_info_writer.Write(&rinfo);
3423 void Assembler::RecordJSReturn() {
3424 positions_recorder()->WriteRecordedPositions();
3425 EnsureSpace ensure_space(this);
3426 RecordRelocInfo(RelocInfo::JS_RETURN);
3430 void Assembler::RecordDebugBreakSlot() {
3431 positions_recorder()->WriteRecordedPositions();
3432 EnsureSpace ensure_space(this);
3433 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
3437 void Assembler::RecordComment(const char* msg, bool force) {
3438 if (FLAG_code_comments || force) {
3439 EnsureSpace ensure_space(this);
3440 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
3445 MaybeObject* Assembler::AllocateConstantPool(Heap* heap) {
3446 // No out-of-line constant pool support.
3452 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
3453 // No out-of-line constant pool support.
3458 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3459 1 << RelocInfo::RUNTIME_ENTRY |
3460 1 << RelocInfo::INTERNAL_REFERENCE |
3461 1 << RelocInfo::CODE_AGE_SEQUENCE;
3464 bool RelocInfo::IsCodedSpecially() {
3465 // The deserializer needs to know whether a pointer is specially coded. Being
3466 // specially coded on x64 means that it is a relative 32 bit address, as used
3467 // by branch instructions.
3468 return (1 << rmode_) & kApplyMask;
3472 bool RelocInfo::IsInConstantPool() {
3477 } } // namespace v8::internal
3479 #endif // V8_TARGET_ARCH_X64