// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
40 #include "arm/assembler-arm.h"
50 int Register::NumAllocatableRegisters() {
51 return kMaxNumAllocatableRegisters;
55 int DwVfpRegister::NumRegisters() {
56 return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
60 int DwVfpRegister::NumReservedRegisters() {
61 return kNumReservedRegisters;
65 int DwVfpRegister::NumAllocatableRegisters() {
66 return NumRegisters() - kNumReservedRegisters;
70 int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
71 ASSERT(!reg.is(kDoubleRegZero));
72 ASSERT(!reg.is(kScratchDoubleReg));
73 if (reg.code() > kDoubleRegZero.code()) {
74 return reg.code() - kNumReservedRegisters;
80 DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
81 ASSERT(index >= 0 && index < NumAllocatableRegisters());
82 ASSERT(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
83 kNumReservedRegisters - 1);
84 if (index >= kDoubleRegZero.code()) {
85 return from_code(index + kNumReservedRegisters);
87 return from_code(index);
91 void RelocInfo::apply(intptr_t delta) {
92 if (RelocInfo::IsInternalReference(rmode_)) {
93 // absolute code pointer inside code object moves with the code object.
94 int32_t* p = reinterpret_cast<int32_t*>(pc_);
95 *p += delta; // relocate entry
97 // We do not use pc relative addressing on ARM, so there is
98 // nothing else to do.
102 Address RelocInfo::target_address() {
103 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
104 return Assembler::target_address_at(pc_);
108 Address RelocInfo::target_address_address() {
109 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
110 || rmode_ == EMBEDDED_OBJECT
111 || rmode_ == EXTERNAL_REFERENCE);
112 return Assembler::target_pointer_address_at(pc_);
116 int RelocInfo::target_address_size() {
121 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
122 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
123 Assembler::set_target_address_at(pc_, target);
124 if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
125 Object* target_code = Code::GetCodeFromTargetAddress(target);
126 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
127 host(), this, HeapObject::cast(target_code));
132 Object* RelocInfo::target_object() {
133 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
134 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
138 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
139 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
140 return Handle<Object>(reinterpret_cast<Object**>(
141 Assembler::target_address_at(pc_)));
145 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
146 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
147 ASSERT(!target->IsConsString());
148 Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
149 if (mode == UPDATE_WRITE_BARRIER &&
151 target->IsHeapObject()) {
152 host()->GetHeap()->incremental_marking()->RecordWrite(
153 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
158 Address RelocInfo::target_reference() {
159 ASSERT(rmode_ == EXTERNAL_REFERENCE);
160 return Assembler::target_address_at(pc_);
164 Address RelocInfo::target_runtime_entry(Assembler* origin) {
165 ASSERT(IsRuntimeEntry(rmode_));
166 return target_address();
170 void RelocInfo::set_target_runtime_entry(Address target,
171 WriteBarrierMode mode) {
172 ASSERT(IsRuntimeEntry(rmode_));
173 if (target_address() != target) set_target_address(target, mode);
177 Handle<Cell> RelocInfo::target_cell_handle() {
178 ASSERT(rmode_ == RelocInfo::CELL);
179 Address address = Memory::Address_at(pc_);
180 return Handle<Cell>(reinterpret_cast<Cell**>(address));
184 Cell* RelocInfo::target_cell() {
185 ASSERT(rmode_ == RelocInfo::CELL);
186 return Cell::FromValueAddress(Memory::Address_at(pc_));
190 void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
191 ASSERT(rmode_ == RelocInfo::CELL);
192 Address address = cell->address() + Cell::kValueOffset;
193 Memory::Address_at(pc_) = address;
194 if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
195 // TODO(1550) We are passing NULL as a slot because cell can never be on
196 // evacuation candidate.
197 host()->GetHeap()->incremental_marking()->RecordWrite(
203 static const int kNoCodeAgeSequenceLength = 3;
206 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
207 UNREACHABLE(); // This should never be reached on Arm.
208 return Handle<Object>();
212 Code* RelocInfo::code_age_stub() {
213 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
214 return Code::GetCodeFromTargetAddress(
215 Memory::Address_at(pc_ + Assembler::kInstrSize *
216 (kNoCodeAgeSequenceLength - 1)));
220 void RelocInfo::set_code_age_stub(Code* stub) {
221 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
222 Memory::Address_at(pc_ + Assembler::kInstrSize *
223 (kNoCodeAgeSequenceLength - 1)) =
224 stub->instruction_start();
228 Address RelocInfo::call_address() {
229 // The 2 instructions offset assumes patched debug break slot or return
231 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
232 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
233 return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
237 void RelocInfo::set_call_address(Address target) {
238 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
239 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
240 Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
241 if (host() != NULL) {
242 Object* target_code = Code::GetCodeFromTargetAddress(target);
243 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
244 host(), this, HeapObject::cast(target_code));
249 Object* RelocInfo::call_object() {
250 return *call_object_address();
254 void RelocInfo::set_call_object(Object* target) {
255 *call_object_address() = target;
259 Object** RelocInfo::call_object_address() {
260 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
261 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
262 return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
266 void RelocInfo::WipeOut() {
267 ASSERT(IsEmbeddedObject(rmode_) ||
268 IsCodeTarget(rmode_) ||
269 IsRuntimeEntry(rmode_) ||
270 IsExternalReference(rmode_));
271 Assembler::set_target_address_at(pc_, NULL);
275 bool RelocInfo::IsPatchedReturnSequence() {
276 Instr current_instr = Assembler::instr_at(pc_);
277 Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
278 // A patched return sequence is:
281 return ((current_instr & kLdrPCMask) == kLdrPCPattern)
282 && ((next_instr & kBlxRegMask) == kBlxRegPattern);
286 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
287 Instr current_instr = Assembler::instr_at(pc_);
288 return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
292 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
293 RelocInfo::Mode mode = rmode();
294 if (mode == RelocInfo::EMBEDDED_OBJECT) {
295 visitor->VisitEmbeddedPointer(this);
296 } else if (RelocInfo::IsCodeTarget(mode)) {
297 visitor->VisitCodeTarget(this);
298 } else if (mode == RelocInfo::CELL) {
299 visitor->VisitCell(this);
300 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
301 visitor->VisitExternalReference(this);
302 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
303 visitor->VisitCodeAgeSequence(this);
304 #ifdef ENABLE_DEBUGGER_SUPPORT
305 } else if (((RelocInfo::IsJSReturn(mode) &&
306 IsPatchedReturnSequence()) ||
307 (RelocInfo::IsDebugBreakSlot(mode) &&
308 IsPatchedDebugBreakSlotSequence())) &&
309 isolate->debug()->has_break_points()) {
310 visitor->VisitDebugTarget(this);
312 } else if (RelocInfo::IsRuntimeEntry(mode)) {
313 visitor->VisitRuntimeEntry(this);
318 template<typename StaticVisitor>
319 void RelocInfo::Visit(Heap* heap) {
320 RelocInfo::Mode mode = rmode();
321 if (mode == RelocInfo::EMBEDDED_OBJECT) {
322 StaticVisitor::VisitEmbeddedPointer(heap, this);
323 } else if (RelocInfo::IsCodeTarget(mode)) {
324 StaticVisitor::VisitCodeTarget(heap, this);
325 } else if (mode == RelocInfo::CELL) {
326 StaticVisitor::VisitCell(heap, this);
327 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
328 StaticVisitor::VisitExternalReference(this);
329 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
330 StaticVisitor::VisitCodeAgeSequence(heap, this);
331 #ifdef ENABLE_DEBUGGER_SUPPORT
332 } else if (heap->isolate()->debug()->has_break_points() &&
333 ((RelocInfo::IsJSReturn(mode) &&
334 IsPatchedReturnSequence()) ||
335 (RelocInfo::IsDebugBreakSlot(mode) &&
336 IsPatchedDebugBreakSlotSequence()))) {
337 StaticVisitor::VisitDebugTarget(heap, this);
339 } else if (RelocInfo::IsRuntimeEntry(mode)) {
340 StaticVisitor::VisitRuntimeEntry(this);
345 Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
352 Operand::Operand(const ExternalReference& f) {
354 imm32_ = reinterpret_cast<int32_t>(f.address());
355 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
359 Operand::Operand(Smi* value) {
361 imm32_ = reinterpret_cast<intptr_t>(value);
362 rmode_ = RelocInfo::NONE32;
366 Operand::Operand(Register rm) {
374 bool Operand::is_reg() const {
375 return rm_.is_valid() &&
382 void Assembler::CheckBuffer() {
383 if (buffer_space() <= kGap) {
386 if (pc_offset() >= next_buffer_check_) {
387 CheckConstPool(false, true);
392 void Assembler::emit(Instr x) {
394 *reinterpret_cast<Instr*>(pc_) = x;
399 Address Assembler::target_pointer_address_at(Address pc) {
400 Instr instr = Memory::int32_at(pc);
401 return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
405 Address Assembler::target_address_at(Address pc) {
406 if (IsMovW(Memory::int32_at(pc))) {
407 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
408 Instruction* instr = Instruction::At(pc);
409 Instruction* next_instr = Instruction::At(pc + kInstrSize);
410 return reinterpret_cast<Address>(
411 (next_instr->ImmedMovwMovtValue() << 16) |
412 instr->ImmedMovwMovtValue());
414 ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
415 return Memory::Address_at(target_pointer_address_at(pc));
419 Address Assembler::target_address_from_return_address(Address pc) {
420 // Returns the address of the call target from the return address that will
421 // be returned to after a call.
422 // Call sequence on V7 or later is :
423 // movw ip, #... @ call address low 16
424 // movt ip, #... @ call address high 16
427 // Or pre-V7 or cases that need frequent patching:
428 // ldr ip, [pc, #...] @ call address
431 Address candidate = pc - 2 * Assembler::kInstrSize;
432 Instr candidate_instr(Memory::int32_at(candidate));
433 if (IsLdrPcImmediateOffset(candidate_instr)) {
436 candidate = pc - 3 * Assembler::kInstrSize;
437 ASSERT(IsMovW(Memory::int32_at(candidate)) &&
438 IsMovT(Memory::int32_at(candidate + kInstrSize)));
443 Address Assembler::return_address_from_call_start(Address pc) {
444 if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
445 return pc + kInstrSize * 2;
447 ASSERT(IsMovW(Memory::int32_at(pc)));
448 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
449 return pc + kInstrSize * 3;
454 void Assembler::deserialization_set_special_target_at(
455 Address constant_pool_entry, Address target) {
456 Memory::Address_at(constant_pool_entry) = target;
460 static Instr EncodeMovwImmediate(uint32_t immediate) {
461 ASSERT(immediate < 0x10000);
462 return ((immediate & 0xf000) << 4) | (immediate & 0xfff);
466 void Assembler::set_target_address_at(Address pc, Address target) {
467 if (IsMovW(Memory::int32_at(pc))) {
468 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
469 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
470 uint32_t immediate = reinterpret_cast<uint32_t>(target);
471 uint32_t intermediate = instr_ptr[0];
472 intermediate &= ~EncodeMovwImmediate(0xFFFF);
473 intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
474 instr_ptr[0] = intermediate;
475 intermediate = instr_ptr[1];
476 intermediate &= ~EncodeMovwImmediate(0xFFFF);
477 intermediate |= EncodeMovwImmediate(immediate >> 16);
478 instr_ptr[1] = intermediate;
479 ASSERT(IsMovW(Memory::int32_at(pc)));
480 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
481 CPU::FlushICache(pc, 2 * kInstrSize);
483 ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
484 Memory::Address_at(target_pointer_address_at(pc)) = target;
485 // Intuitively, we would think it is necessary to always flush the
486 // instruction cache after patching a target address in the code as follows:
487 // CPU::FlushICache(pc, sizeof(target));
488 // However, on ARM, no instruction is actually patched in the case
489 // of embedded constants of the form:
490 // ldr ip, [pc, #...]
491 // since the instruction accessing this address in the constant pool remains
497 } } // namespace v8::internal
499 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_