2 // Copyright (c) 1994-2006 Sun Microsystems Inc.
3 // All Rights Reserved.
5 // Redistribution and use in source and binary forms, with or without
6 // modification, are permitted provided that the following conditions are
9 // - Redistributions of source code must retain the above copyright notice,
10 // this list of conditions and the following disclaimer.
12 // - Redistribution in binary form must reproduce the above copyright
13 // notice, this list of conditions and the following disclaimer in the
14 // documentation and/or other materials provided with the distribution.
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
21 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
22 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
24 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
25 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
26 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
27 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
28 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
29 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
30 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 // The original source code covered by the above license above has been
33 // modified significantly by Google Inc.
34 // Copyright 2012 the V8 project authors. All rights reserved.
37 #ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
38 #define V8_MIPS_ASSEMBLER_MIPS_INL_H_
40 #include "src/mips64/assembler-mips64.h"
42 #include "src/assembler.h"
43 #include "src/debug.h"
50 bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }
53 // -----------------------------------------------------------------------------
54 // Operand and MemOperand.
56 Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
63 Operand::Operand(const ExternalReference& f) {
65 imm64_ = reinterpret_cast<int64_t>(f.address());
66 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
70 Operand::Operand(Smi* value) {
72 imm64_ = reinterpret_cast<intptr_t>(value);
73 rmode_ = RelocInfo::NONE32;
77 Operand::Operand(Register rm) {
82 bool Operand::is_reg() const {
83 return rm_.is_valid();
87 int Register::NumAllocatableRegisters() {
88 return kMaxNumAllocatableRegisters;
92 int DoubleRegister::NumRegisters() {
93 return FPURegister::kMaxNumRegisters;
97 int DoubleRegister::NumAllocatableRegisters() {
98 return FPURegister::kMaxNumAllocatableRegisters;
102 int DoubleRegister::NumAllocatableAliasedRegisters() {
103 return NumAllocatableRegisters();
107 int FPURegister::ToAllocationIndex(FPURegister reg) {
108 DCHECK(reg.code() % 2 == 0);
109 DCHECK(reg.code() / 2 < kMaxNumAllocatableRegisters);
110 DCHECK(reg.is_valid());
111 DCHECK(!reg.is(kDoubleRegZero));
112 DCHECK(!reg.is(kLithiumScratchDouble));
113 return (reg.code() / 2);
117 // -----------------------------------------------------------------------------
120 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
121 if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
122 // Absolute code pointer inside code object moves with the code object.
123 byte* p = reinterpret_cast<byte*>(pc_);
124 int count = Assembler::RelocateInternalReference(rmode_, p, delta);
125 CpuFeatures::FlushICache(p, count * sizeof(uint32_t));
130 Address RelocInfo::target_address() {
131 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
132 return Assembler::target_address_at(pc_, host_);
136 Address RelocInfo::target_address_address() {
137 DCHECK(IsCodeTarget(rmode_) ||
138 IsRuntimeEntry(rmode_) ||
139 rmode_ == EMBEDDED_OBJECT ||
140 rmode_ == EXTERNAL_REFERENCE);
141 // Read the address of the word containing the target_address in an
142 // instruction stream.
143 // The only architecture-independent user of this function is the serializer.
144 // The serializer uses it to find out how many raw bytes of instruction to
145 // output before the next target.
146 // For an instruction like LUI/ORI where the target bits are mixed into the
147 // instruction bits, the size of the target will be zero, indicating that the
148 // serializer should not step forward in memory after a target is resolved
149 // and written. In this case the target_address_address function should
150 // return the end of the instructions to be patched, allowing the
151 // deserializer to deserialize the instructions as raw bytes and put them in
152 // place, ready to be patched with the target. After jump optimization,
153 // that is the address of the instruction that follows J/JAL/JR/JALR
155 // return reinterpret_cast<Address>(
156 // pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
157 return reinterpret_cast<Address>(
158 pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
162 Address RelocInfo::constant_pool_entry_address() {
168 int RelocInfo::target_address_size() {
169 return Assembler::kSpecialTargetSize;
173 void RelocInfo::set_target_address(Address target,
174 WriteBarrierMode write_barrier_mode,
175 ICacheFlushMode icache_flush_mode) {
176 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
177 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
178 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
179 host() != NULL && IsCodeTarget(rmode_)) {
180 Object* target_code = Code::GetCodeFromTargetAddress(target);
181 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
182 host(), this, HeapObject::cast(target_code));
187 Address Assembler::target_address_from_return_address(Address pc) {
188 return pc - kCallTargetAddressOffset;
192 Address Assembler::break_address_from_return_address(Address pc) {
193 return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
197 void Assembler::set_target_internal_reference_encoded_at(Address pc,
199 // Encoded internal references are lui/ori load of 48-bit absolute address.
200 Instr instr_lui = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);
201 Instr instr_ori = Assembler::instr_at(pc + 1 * Assembler::kInstrSize);
202 Instr instr_ori2 = Assembler::instr_at(pc + 3 * Assembler::kInstrSize);
203 DCHECK(Assembler::IsLui(instr_lui));
204 DCHECK(Assembler::IsOri(instr_ori));
205 DCHECK(Assembler::IsOri(instr_ori2));
206 instr_lui &= ~kImm16Mask;
207 instr_ori &= ~kImm16Mask;
208 instr_ori2 &= ~kImm16Mask;
209 int64_t imm = reinterpret_cast<int64_t>(target);
210 DCHECK((imm & 3) == 0);
211 Assembler::instr_at_put(pc + 0 * Assembler::kInstrSize,
212 instr_lui | ((imm >> 32) & kImm16Mask));
213 Assembler::instr_at_put(pc + 1 * Assembler::kInstrSize,
214 instr_ori | ((imm >> 16) & kImm16Mask));
215 Assembler::instr_at_put(pc + 3 * Assembler::kInstrSize,
216 instr_ori | (imm & kImm16Mask));
217 // Currently used only by deserializer, and all code will be flushed
218 // after complete deserialization, no need to flush on each reference.
222 void Assembler::deserialization_set_target_internal_reference_at(
223 Address pc, Address target, RelocInfo::Mode mode) {
224 if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
225 DCHECK(IsLui(instr_at(pc)));
226 set_target_internal_reference_encoded_at(pc, target);
228 DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
229 Memory::Address_at(pc) = target;
234 Object* RelocInfo::target_object() {
235 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
236 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
240 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
241 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
242 return Handle<Object>(reinterpret_cast<Object**>(
243 Assembler::target_address_at(pc_, host_)));
247 void RelocInfo::set_target_object(Object* target,
248 WriteBarrierMode write_barrier_mode,
249 ICacheFlushMode icache_flush_mode) {
250 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
251 Assembler::set_target_address_at(pc_, host_,
252 reinterpret_cast<Address>(target),
254 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
256 target->IsHeapObject()) {
257 host()->GetHeap()->incremental_marking()->RecordWrite(
258 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
263 Address RelocInfo::target_external_reference() {
264 DCHECK(rmode_ == EXTERNAL_REFERENCE);
265 return Assembler::target_address_at(pc_, host_);
269 Address RelocInfo::target_internal_reference() {
270 if (rmode_ == INTERNAL_REFERENCE) {
271 return Memory::Address_at(pc_);
273 // Encoded internal references are lui/ori load of 48-bit absolute address.
274 DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
275 Instr instr_lui = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
276 Instr instr_ori = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
277 Instr instr_ori2 = Assembler::instr_at(pc_ + 3 * Assembler::kInstrSize);
278 DCHECK(Assembler::IsLui(instr_lui));
279 DCHECK(Assembler::IsOri(instr_ori));
280 DCHECK(Assembler::IsOri(instr_ori2));
281 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 32;
282 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 16;
283 imm |= (instr_ori2 & static_cast<int64_t>(kImm16Mask));
284 return reinterpret_cast<Address>(imm);
289 Address RelocInfo::target_internal_reference_address() {
290 DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
291 return reinterpret_cast<Address>(pc_);
295 Address RelocInfo::target_runtime_entry(Assembler* origin) {
296 DCHECK(IsRuntimeEntry(rmode_));
297 return target_address();
301 void RelocInfo::set_target_runtime_entry(Address target,
302 WriteBarrierMode write_barrier_mode,
303 ICacheFlushMode icache_flush_mode) {
304 DCHECK(IsRuntimeEntry(rmode_));
305 if (target_address() != target)
306 set_target_address(target, write_barrier_mode, icache_flush_mode);
310 Handle<Cell> RelocInfo::target_cell_handle() {
311 DCHECK(rmode_ == RelocInfo::CELL);
312 Address address = Memory::Address_at(pc_);
313 return Handle<Cell>(reinterpret_cast<Cell**>(address));
317 Cell* RelocInfo::target_cell() {
318 DCHECK(rmode_ == RelocInfo::CELL);
319 return Cell::FromValueAddress(Memory::Address_at(pc_));
323 void RelocInfo::set_target_cell(Cell* cell,
324 WriteBarrierMode write_barrier_mode,
325 ICacheFlushMode icache_flush_mode) {
326 DCHECK(rmode_ == RelocInfo::CELL);
327 Address address = cell->address() + Cell::kValueOffset;
328 Memory::Address_at(pc_) = address;
329 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
330 // TODO(1550) We are passing NULL as a slot because cell can never be on
331 // evacuation candidate.
332 host()->GetHeap()->incremental_marking()->RecordWrite(
// Byte length of the code-age prologue sequence (9 instructions).
static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;
341 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
342 UNREACHABLE(); // This should never be reached on Arm.
343 return Handle<Object>();
347 Code* RelocInfo::code_age_stub() {
348 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
349 return Code::GetCodeFromTargetAddress(
350 Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
354 void RelocInfo::set_code_age_stub(Code* stub,
355 ICacheFlushMode icache_flush_mode) {
356 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
357 Assembler::set_target_address_at(pc_ + Assembler::kInstrSize,
359 stub->instruction_start());
363 Address RelocInfo::call_address() {
364 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
365 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
366 // The pc_ offset of 0 assumes mips patched return sequence per
367 // debug-mips.cc BreakLocation::SetDebugBreakAtReturn(), or
368 // debug break slot per BreakLocation::SetDebugBreakAtSlot().
369 return Assembler::target_address_at(pc_, host_);
373 void RelocInfo::set_call_address(Address target) {
374 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
375 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
376 // The pc_ offset of 0 assumes mips patched return sequence per
377 // debug-mips.cc BreakLocation::SetDebugBreakAtReturn(), or
378 // debug break slot per BreakLocation::SetDebugBreakAtSlot().
379 Assembler::set_target_address_at(pc_, host_, target);
380 if (host() != NULL) {
381 Object* target_code = Code::GetCodeFromTargetAddress(target);
382 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
383 host(), this, HeapObject::cast(target_code));
388 Object* RelocInfo::call_object() {
389 return *call_object_address();
393 Object** RelocInfo::call_object_address() {
394 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
395 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
396 return reinterpret_cast<Object**>(pc_ + 6 * Assembler::kInstrSize);
400 void RelocInfo::set_call_object(Object* target) {
401 *call_object_address() = target;
405 void RelocInfo::WipeOut() {
406 DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
407 IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
408 IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
409 if (IsInternalReference(rmode_)) {
410 Memory::Address_at(pc_) = NULL;
411 } else if (IsInternalReferenceEncoded(rmode_)) {
412 Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
414 Assembler::set_target_address_at(pc_, host_, NULL);
419 bool RelocInfo::IsPatchedReturnSequence() {
420 Instr instr0 = Assembler::instr_at(pc_); // lui.
421 Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize); // ori.
422 Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize); // dsll.
423 Instr instr3 = Assembler::instr_at(pc_ + 3 * Assembler::kInstrSize); // ori.
424 Instr instr4 = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize); // jalr.
426 bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
427 (instr1 & kOpcodeMask) == ORI &&
428 (instr2 & kFunctionFieldMask) == DSLL &&
429 (instr3 & kOpcodeMask) == ORI &&
430 (instr4 & kFunctionFieldMask) == JALR);
431 return patched_return;
435 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
436 Instr current_instr = Assembler::instr_at(pc_);
437 return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
441 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
442 RelocInfo::Mode mode = rmode();
443 if (mode == RelocInfo::EMBEDDED_OBJECT) {
444 visitor->VisitEmbeddedPointer(this);
445 } else if (RelocInfo::IsCodeTarget(mode)) {
446 visitor->VisitCodeTarget(this);
447 } else if (mode == RelocInfo::CELL) {
448 visitor->VisitCell(this);
449 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
450 visitor->VisitExternalReference(this);
451 } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
452 mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
453 visitor->VisitInternalReference(this);
454 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
455 visitor->VisitCodeAgeSequence(this);
456 } else if (((RelocInfo::IsJSReturn(mode) &&
457 IsPatchedReturnSequence()) ||
458 (RelocInfo::IsDebugBreakSlot(mode) &&
459 IsPatchedDebugBreakSlotSequence())) &&
460 isolate->debug()->has_break_points()) {
461 visitor->VisitDebugTarget(this);
462 } else if (RelocInfo::IsRuntimeEntry(mode)) {
463 visitor->VisitRuntimeEntry(this);
468 template<typename StaticVisitor>
469 void RelocInfo::Visit(Heap* heap) {
470 RelocInfo::Mode mode = rmode();
471 if (mode == RelocInfo::EMBEDDED_OBJECT) {
472 StaticVisitor::VisitEmbeddedPointer(heap, this);
473 } else if (RelocInfo::IsCodeTarget(mode)) {
474 StaticVisitor::VisitCodeTarget(heap, this);
475 } else if (mode == RelocInfo::CELL) {
476 StaticVisitor::VisitCell(heap, this);
477 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
478 StaticVisitor::VisitExternalReference(this);
479 } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
480 mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
481 StaticVisitor::VisitInternalReference(this);
482 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
483 StaticVisitor::VisitCodeAgeSequence(heap, this);
484 } else if (heap->isolate()->debug()->has_break_points() &&
485 ((RelocInfo::IsJSReturn(mode) &&
486 IsPatchedReturnSequence()) ||
487 (RelocInfo::IsDebugBreakSlot(mode) &&
488 IsPatchedDebugBreakSlotSequence()))) {
489 StaticVisitor::VisitDebugTarget(heap, this);
490 } else if (RelocInfo::IsRuntimeEntry(mode)) {
491 StaticVisitor::VisitRuntimeEntry(this);
496 // -----------------------------------------------------------------------------
500 void Assembler::CheckBuffer() {
501 if (buffer_space() <= kGap) {
507 void Assembler::CheckTrampolinePoolQuick() {
508 if (pc_offset() >= next_buffer_check_) {
509 CheckTrampolinePool();
514 void Assembler::emit(Instr x) {
515 if (!is_buffer_growth_blocked()) {
518 *reinterpret_cast<Instr*>(pc_) = x;
520 CheckTrampolinePoolQuick();
524 void Assembler::emit(uint64_t x) {
525 if (!is_buffer_growth_blocked()) {
528 *reinterpret_cast<uint64_t*>(pc_) = x;
529 pc_ += kInstrSize * 2;
530 CheckTrampolinePoolQuick();
534 } } // namespace v8::internal
536 #endif // V8_MIPS_ASSEMBLER_MIPS_INL_H_