2 // Copyright (c) 1994-2006 Sun Microsystems Inc.
3 // All Rights Reserved.
5 // Redistribution and use in source and binary forms, with or without
6 // modification, are permitted provided that the following conditions are
9 // - Redistributions of source code must retain the above copyright notice,
10 // this list of conditions and the following disclaimer.
12 // - Redistribution in binary form must reproduce the above copyright
13 // notice, this list of conditions and the following disclaimer in the
14 // documentation and/or other materials provided with the distribution.
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
21 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
22 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
24 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
25 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
26 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
27 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
28 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
29 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
30 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 // The original source code covered by the above license above has been
33 // modified significantly by Google Inc.
34 // Copyright 2012 the V8 project authors. All rights reserved.
37 #ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
38 #define V8_MIPS_ASSEMBLER_MIPS_INL_H_
40 #include "src/mips64/assembler-mips64.h"
42 #include "src/assembler.h"
43 #include "src/debug.h"
// Crankshaft (the optimizing compiler) requires FPU support on this port.
50 bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }
53 // -----------------------------------------------------------------------------
54 // Operand and MemOperand.
// Builds an immediate operand carrying an explicit relocation mode.
56 Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
// Builds an immediate operand from an external (C++) reference address.
63 Operand::Operand(const ExternalReference& f) {
65 imm64_ = reinterpret_cast<int64_t>(f.address());
66 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
// Builds an immediate operand holding a tagged Smi value.
70 Operand::Operand(Smi* value) {
72 imm64_ = reinterpret_cast<intptr_t>(value);
// NOTE(review): NONE32 for a 64-bit immediate mirrors upstream; confirm intent.
73 rmode_ = RelocInfo::NONE32;
// Builds a register operand (no immediate payload).
77 Operand::Operand(Register rm) {
// True when this operand designates a register rather than an immediate.
82 bool Operand::is_reg() const {
83 return rm_.is_valid();
// Number of general-purpose registers the register allocator may use.
87 int Register::NumAllocatableRegisters() {
88 return kMaxNumAllocatableRegisters;
// Total number of FPU registers on this architecture.
92 int DoubleRegister::NumRegisters() {
93 return FPURegister::kMaxNumRegisters;
// Number of FPU registers the register allocator may use.
97 int DoubleRegister::NumAllocatableRegisters() {
98 return FPURegister::kMaxNumAllocatableRegisters;
// Maps an even-numbered FPU register to its allocation index (code / 2);
// only even registers are allocatable, and the reserved zero/scratch
// doubles are excluded by the DCHECKs.
102 int FPURegister::ToAllocationIndex(FPURegister reg) {
103 DCHECK(reg.code() % 2 == 0);
104 DCHECK(reg.code() / 2 < kMaxNumAllocatableRegisters);
105 DCHECK(reg.is_valid());
106 DCHECK(!reg.is(kDoubleRegZero));
107 DCHECK(!reg.is(kLithiumScratchDouble));
108 return (reg.code() / 2);
112 // -----------------------------------------------------------------------------
// Relocates this entry after the code object moved by `delta` bytes.
// Only internal references need patching; the rewritten words are then
// flushed from the instruction cache.
115 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
116 if (IsInternalReference(rmode_)) {
117 // Absolute code pointer inside code object moves with the code object.
118 byte* p = reinterpret_cast<byte*>(pc_);
119 int count = Assembler::RelocateInternalReference(p, delta);
120 CpuFeatures::FlushICache(p, count * sizeof(uint32_t));
// Address this code-target or runtime-entry reloc currently points at.
125 Address RelocInfo::target_address() {
126 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
127 return Assembler::target_address_at(pc_, host_);
// Address of the instruction-stream location holding the target; used by
// the serializer to know how many raw bytes precede the next target.
131 Address RelocInfo::target_address_address() {
132 DCHECK(IsCodeTarget(rmode_) ||
133 IsRuntimeEntry(rmode_) ||
134 rmode_ == EMBEDDED_OBJECT ||
135 rmode_ == EXTERNAL_REFERENCE);
136 // Read the address of the word containing the target_address in an
137 // instruction stream.
138 // The only architecture-independent user of this function is the serializer.
139 // The serializer uses it to find out how many raw bytes of instruction to
140 // output before the next target.
141 // For an instruction like LUI/ORI where the target bits are mixed into the
142 // instruction bits, the size of the target will be zero, indicating that the
143 // serializer should not step forward in memory after a target is resolved
144 // and written. In this case the target_address_address function should
145 // return the end of the instructions to be patched, allowing the
146 // deserializer to deserialize the instructions as raw bytes and put them in
147 // place, ready to be patched with the target. After jump optimization,
148 // that is the address of the instruction that follows J/JAL/JR/JALR
149 // (the 32-bit variant is kept below for reference only).
150 // return reinterpret_cast<Address>(
151 // pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
152 return reinterpret_cast<Address>(
153 pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
// NOTE(review): body truncated here; presumably unreachable since this
// port does not use constant pools — confirm against the full file.
157 Address RelocInfo::constant_pool_entry_address() {
// Size in bytes of the serialized target for this reloc entry.
163 int RelocInfo::target_address_size() {
164 return Assembler::kSpecialTargetSize;
// Patches the target address and, for code targets with barriers enabled,
// records the embedded code object with the incremental marker.
168 void RelocInfo::set_target_address(Address target,
169 WriteBarrierMode write_barrier_mode,
170 ICacheFlushMode icache_flush_mode) {
171 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
172 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
173 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
174 host() != NULL && IsCodeTarget(rmode_)) {
175 Object* target_code = Code::GetCodeFromTargetAddress(target);
176 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
177 host(), this, HeapObject::cast(target_code));
// The call target address lies a fixed offset before the return address.
182 Address Assembler::target_address_from_return_address(Address pc) {
183 return pc - kCallTargetAddressOffset;
// Heap object this embedded-object/code-target reloc entry references.
187 Object* RelocInfo::target_object() {
188 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
189 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
// Handle view of the referenced heap object's slot.
193 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
194 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
195 return Handle<Object>(reinterpret_cast<Object**>(
196 Assembler::target_address_at(pc_, host_)));
// Replaces the referenced object; records a write barrier when the new
// target is a heap object and barriers are requested.
200 void RelocInfo::set_target_object(Object* target,
201 WriteBarrierMode write_barrier_mode,
202 ICacheFlushMode icache_flush_mode) {
203 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
204 Assembler::set_target_address_at(pc_, host_,
205 reinterpret_cast<Address>(target),
207 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
209 target->IsHeapObject()) {
210 host()->GetHeap()->incremental_marking()->RecordWrite(
211 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
// External reference address stored at this reloc entry.
216 Address RelocInfo::target_reference() {
217 DCHECK(rmode_ == EXTERNAL_REFERENCE);
218 return Assembler::target_address_at(pc_, host_);
// Runtime-entry target; encoded the same way as a plain code target.
222 Address RelocInfo::target_runtime_entry(Assembler* origin) {
223 DCHECK(IsRuntimeEntry(rmode_));
224 return target_address();
// Updates the runtime entry, skipping the patch when already up to date.
228 void RelocInfo::set_target_runtime_entry(Address target,
229 WriteBarrierMode write_barrier_mode,
230 ICacheFlushMode icache_flush_mode) {
231 DCHECK(IsRuntimeEntry(rmode_));
232 if (target_address() != target)
233 set_target_address(target, write_barrier_mode, icache_flush_mode);
// Handle to the Cell referenced by this CELL reloc entry.
237 Handle<Cell> RelocInfo::target_cell_handle() {
238 DCHECK(rmode_ == RelocInfo::CELL);
239 Address address = Memory::Address_at(pc_);
240 return Handle<Cell>(reinterpret_cast<Cell**>(address));
// The Cell referenced by this CELL reloc entry (recovered from the
// stored value-slot address).
244 Cell* RelocInfo::target_cell() {
245 DCHECK(rmode_ == RelocInfo::CELL);
246 return Cell::FromValueAddress(Memory::Address_at(pc_));
// Stores the address of `cell`'s value slot at pc_ and, when requested,
// records a write barrier for it.
250 void RelocInfo::set_target_cell(Cell* cell,
251 WriteBarrierMode write_barrier_mode,
252 ICacheFlushMode icache_flush_mode) {
253 DCHECK(rmode_ == RelocInfo::CELL);
254 Address address = cell->address() + Cell::kValueOffset;
255 Memory::Address_at(pc_) = address;
256 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
257 // TODO(1550) We are passing NULL as a slot because cell can never be on
258 // evacuation candidate.
259 host()->GetHeap()->incremental_marking()->RecordWrite(
// Byte length of the code-age prologue sequence: nine instructions.
265 static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;
// Handle-based code-age access is not used on this port; callers go
// through code_age_stub() instead.
268 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
269 UNREACHABLE(); // This should never be reached on MIPS.
270 return Handle<Object>();
// Code-age stub referenced by the sequence; its target is stored one
// instruction past pc_.
274 Code* RelocInfo::code_age_stub() {
275 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
276 return Code::GetCodeFromTargetAddress(
277 Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
// Patches the code-age sequence so it calls `stub`.
281 void RelocInfo::set_code_age_stub(Code* stub,
282 ICacheFlushMode icache_flush_mode) {
283 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
284 Assembler::set_target_address_at(pc_ + Assembler::kInstrSize,
286 stub->instruction_start());
// Address called by a patched return or debug-break-slot sequence.
290 Address RelocInfo::call_address() {
291 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
292 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
293 // The pc_ offset of 0 assumes mips patched return sequence per
294 // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
295 // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
296 return Assembler::target_address_at(pc_, host_);
// Patches the call target of a return/debug-break sequence and records
// a write barrier for the embedded code object.
300 void RelocInfo::set_call_address(Address target) {
301 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
302 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
303 // The pc_ offset of 0 assumes mips patched return sequence per
304 // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
305 // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
306 Assembler::set_target_address_at(pc_, host_, target);
307 if (host() != NULL) {
308 Object* target_code = Code::GetCodeFromTargetAddress(target);
309 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
310 host(), this, HeapObject::cast(target_code));
// Object stored in the call sequence's object slot.
315 Object* RelocInfo::call_object() {
316 return *call_object_address();
// Location of the object slot: six instructions past pc_.
320 Object** RelocInfo::call_object_address() {
321 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
322 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
323 return reinterpret_cast<Object**>(pc_ + 6 * Assembler::kInstrSize);
// Stores `target` into the call sequence's object slot.
327 void RelocInfo::set_call_object(Object* target) {
328 *call_object_address() = target;
// Clears the target so the entry no longer references a live address.
332 void RelocInfo::WipeOut() {
333 DCHECK(IsEmbeddedObject(rmode_) ||
334 IsCodeTarget(rmode_) ||
335 IsRuntimeEntry(rmode_) ||
336 IsExternalReference(rmode_));
337 Assembler::set_target_address_at(pc_, host_, NULL);
// Recognizes the five-instruction patched return sequence
// (lui / ori / dsll / ori / jalr) that the debugger emits.
341 bool RelocInfo::IsPatchedReturnSequence() {
342 Instr instr0 = Assembler::instr_at(pc_); // lui.
343 Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize); // ori.
344 Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize); // dsll.
345 Instr instr3 = Assembler::instr_at(pc_ + 3 * Assembler::kInstrSize); // ori.
346 Instr instr4 = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize); // jalr.
// dsll and jalr are identified by their function field, the rest by opcode.
348 bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
349 (instr1 & kOpcodeMask) == ORI &&
350 (instr2 & kFunctionFieldMask) == DSLL &&
351 (instr3 & kOpcodeMask) == ORI &&
352 (instr4 & kFunctionFieldMask) == JALR);
353 return patched_return;
// A debug-break slot counts as patched once its first instruction is no
// longer the designated debug-break nop.
357 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
358 Instr current_instr = Assembler::instr_at(pc_);
359 return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
// Dispatches this reloc entry to the matching ObjectVisitor callback;
// debug targets are only visited while break points are active.
363 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
364 RelocInfo::Mode mode = rmode();
365 if (mode == RelocInfo::EMBEDDED_OBJECT) {
366 visitor->VisitEmbeddedPointer(this);
367 } else if (RelocInfo::IsCodeTarget(mode)) {
368 visitor->VisitCodeTarget(this);
369 } else if (mode == RelocInfo::CELL) {
370 visitor->VisitCell(this);
371 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
372 visitor->VisitExternalReference(this);
373 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
374 visitor->VisitCodeAgeSequence(this);
375 } else if (((RelocInfo::IsJSReturn(mode) &&
376 IsPatchedReturnSequence()) ||
377 (RelocInfo::IsDebugBreakSlot(mode) &&
378 IsPatchedDebugBreakSlotSequence())) &&
379 isolate->debug()->has_break_points()) {
380 visitor->VisitDebugTarget(this);
381 } else if (RelocInfo::IsRuntimeEntry(mode)) {
382 visitor->VisitRuntimeEntry(this);
// Static-visitor variant of Visit(); the dispatch order mirrors the
// dynamic overload above.
387 template<typename StaticVisitor>
388 void RelocInfo::Visit(Heap* heap) {
389 RelocInfo::Mode mode = rmode();
390 if (mode == RelocInfo::EMBEDDED_OBJECT) {
391 StaticVisitor::VisitEmbeddedPointer(heap, this);
392 } else if (RelocInfo::IsCodeTarget(mode)) {
393 StaticVisitor::VisitCodeTarget(heap, this);
394 } else if (mode == RelocInfo::CELL) {
395 StaticVisitor::VisitCell(heap, this);
396 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
397 StaticVisitor::VisitExternalReference(this);
398 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
399 StaticVisitor::VisitCodeAgeSequence(heap, this);
400 } else if (heap->isolate()->debug()->has_break_points() &&
401 ((RelocInfo::IsJSReturn(mode) &&
402 IsPatchedReturnSequence()) ||
403 (RelocInfo::IsDebugBreakSlot(mode) &&
404 IsPatchedDebugBreakSlotSequence()))) {
405 StaticVisitor::VisitDebugTarget(heap, this);
406 } else if (RelocInfo::IsRuntimeEntry(mode)) {
407 StaticVisitor::VisitRuntimeEntry(this);
412 // -----------------------------------------------------------------------------
// Grows the code buffer once fewer than kGap bytes remain.
416 void Assembler::CheckBuffer() {
417 if (buffer_space() <= kGap) {
// Emits a trampoline pool once pc_offset() reaches the next check mark.
423 void Assembler::CheckTrampolinePoolQuick() {
424 if (pc_offset() >= next_buffer_check_) {
425 CheckTrampolinePool();
// Writes one 32-bit instruction at pc_, checking buffer growth first.
430 void Assembler::emit(Instr x) {
431 if (!is_buffer_growth_blocked()) {
434 *reinterpret_cast<Instr*>(pc_) = x;
436 CheckTrampolinePoolQuick();
// Writes a 64-bit value, occupying two instruction slots, at pc_.
440 void Assembler::emit(uint64_t x) {
441 if (!is_buffer_growth_blocked()) {
444 *reinterpret_cast<uint64_t*>(pc_) = x;
445 pc_ += kInstrSize * 2;
446 CheckTrampolinePoolQuick();
450 } } // namespace v8::internal
452 #endif // V8_MIPS_ASSEMBLER_MIPS_INL_H_