// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.
#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "src/arm/assembler-arm.h"

#include "src/assembler.h"
#include "src/debug.h"

namespace v8 {
namespace internal {
50 bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
51 bool CpuFeatures::SupportsSIMD128InCrankshaft() { return false; }
54 int Register::NumAllocatableRegisters() {
55 return kMaxNumAllocatableRegisters;
59 int DwVfpRegister::NumRegisters() {
60 return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
64 int DwVfpRegister::NumReservedRegisters() {
65 return kNumReservedRegisters;
69 int DwVfpRegister::NumAllocatableRegisters() {
70 return NumRegisters() - kNumReservedRegisters;
74 int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
75 DCHECK(!reg.is(kDoubleRegZero));
76 DCHECK(!reg.is(kScratchDoubleReg));
77 if (reg.code() > kDoubleRegZero.code()) {
78 return reg.code() - kNumReservedRegisters;
84 DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
85 DCHECK(index >= 0 && index < NumAllocatableRegisters());
86 DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
87 kNumReservedRegisters - 1);
88 if (index >= kDoubleRegZero.code()) {
89 return from_code(index + kNumReservedRegisters);
91 return from_code(index);
95 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
96 if (RelocInfo::IsInternalReference(rmode_)) {
97 // absolute code pointer inside code object moves with the code object.
98 int32_t* p = reinterpret_cast<int32_t*>(pc_);
99 *p += delta; // relocate entry
101 // We do not use pc relative addressing on ARM, so there is
102 // nothing else to do.
106 Address RelocInfo::target_address() {
107 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
108 return Assembler::target_address_at(pc_, host_);
112 Address RelocInfo::target_address_address() {
113 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
114 || rmode_ == EMBEDDED_OBJECT
115 || rmode_ == EXTERNAL_REFERENCE);
116 if (FLAG_enable_ool_constant_pool ||
117 Assembler::IsMovW(Memory::int32_at(pc_))) {
118 // We return the PC for ool constant pool since this function is used by the
119 // serializerer and expects the address to reside within the code object.
120 return reinterpret_cast<Address>(pc_);
122 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
123 return constant_pool_entry_address();
128 Address RelocInfo::constant_pool_entry_address() {
129 DCHECK(IsInConstantPool());
130 return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
134 int RelocInfo::target_address_size() {
139 void RelocInfo::set_target_address(Address target,
140 WriteBarrierMode write_barrier_mode,
141 ICacheFlushMode icache_flush_mode) {
142 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
143 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
144 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
145 host() != NULL && IsCodeTarget(rmode_)) {
146 Object* target_code = Code::GetCodeFromTargetAddress(target);
147 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
148 host(), this, HeapObject::cast(target_code));
153 Object* RelocInfo::target_object() {
154 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
155 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
159 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
160 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
161 return Handle<Object>(reinterpret_cast<Object**>(
162 Assembler::target_address_at(pc_, host_)));
166 void RelocInfo::set_target_object(Object* target,
167 WriteBarrierMode write_barrier_mode,
168 ICacheFlushMode icache_flush_mode) {
169 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
170 Assembler::set_target_address_at(pc_, host_,
171 reinterpret_cast<Address>(target),
173 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
175 target->IsHeapObject()) {
176 host()->GetHeap()->incremental_marking()->RecordWrite(
177 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
182 Address RelocInfo::target_reference() {
183 DCHECK(rmode_ == EXTERNAL_REFERENCE);
184 return Assembler::target_address_at(pc_, host_);
188 Address RelocInfo::target_runtime_entry(Assembler* origin) {
189 DCHECK(IsRuntimeEntry(rmode_));
190 return target_address();
194 void RelocInfo::set_target_runtime_entry(Address target,
195 WriteBarrierMode write_barrier_mode,
196 ICacheFlushMode icache_flush_mode) {
197 DCHECK(IsRuntimeEntry(rmode_));
198 if (target_address() != target)
199 set_target_address(target, write_barrier_mode, icache_flush_mode);
203 Handle<Cell> RelocInfo::target_cell_handle() {
204 DCHECK(rmode_ == RelocInfo::CELL);
205 Address address = Memory::Address_at(pc_);
206 return Handle<Cell>(reinterpret_cast<Cell**>(address));
210 Cell* RelocInfo::target_cell() {
211 DCHECK(rmode_ == RelocInfo::CELL);
212 return Cell::FromValueAddress(Memory::Address_at(pc_));
216 void RelocInfo::set_target_cell(Cell* cell,
217 WriteBarrierMode write_barrier_mode,
218 ICacheFlushMode icache_flush_mode) {
219 DCHECK(rmode_ == RelocInfo::CELL);
220 Address address = cell->address() + Cell::kValueOffset;
221 Memory::Address_at(pc_) = address;
222 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
223 // TODO(1550) We are passing NULL as a slot because cell can never be on
224 // evacuation candidate.
225 host()->GetHeap()->incremental_marking()->RecordWrite(
231 static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;
234 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
235 UNREACHABLE(); // This should never be reached on Arm.
236 return Handle<Object>();
240 Code* RelocInfo::code_age_stub() {
241 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
242 return Code::GetCodeFromTargetAddress(
243 Memory::Address_at(pc_ +
244 (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
248 void RelocInfo::set_code_age_stub(Code* stub,
249 ICacheFlushMode icache_flush_mode) {
250 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
251 Memory::Address_at(pc_ +
252 (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
253 stub->instruction_start();
257 Address RelocInfo::call_address() {
258 // The 2 instructions offset assumes patched debug break slot or return
260 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
261 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
262 return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
266 void RelocInfo::set_call_address(Address target) {
267 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
268 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
269 Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
270 if (host() != NULL) {
271 Object* target_code = Code::GetCodeFromTargetAddress(target);
272 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
273 host(), this, HeapObject::cast(target_code));
278 Object* RelocInfo::call_object() {
279 return *call_object_address();
283 void RelocInfo::set_call_object(Object* target) {
284 *call_object_address() = target;
288 Object** RelocInfo::call_object_address() {
289 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
290 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
291 return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
295 void RelocInfo::WipeOut() {
296 DCHECK(IsEmbeddedObject(rmode_) ||
297 IsCodeTarget(rmode_) ||
298 IsRuntimeEntry(rmode_) ||
299 IsExternalReference(rmode_));
300 Assembler::set_target_address_at(pc_, host_, NULL);
304 bool RelocInfo::IsPatchedReturnSequence() {
305 Instr current_instr = Assembler::instr_at(pc_);
306 Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
307 // A patched return sequence is:
310 return Assembler::IsLdrPcImmediateOffset(current_instr) &&
311 Assembler::IsBlxReg(next_instr);
315 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
316 Instr current_instr = Assembler::instr_at(pc_);
317 return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
321 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
322 RelocInfo::Mode mode = rmode();
323 if (mode == RelocInfo::EMBEDDED_OBJECT) {
324 visitor->VisitEmbeddedPointer(this);
325 } else if (RelocInfo::IsCodeTarget(mode)) {
326 visitor->VisitCodeTarget(this);
327 } else if (mode == RelocInfo::CELL) {
328 visitor->VisitCell(this);
329 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
330 visitor->VisitExternalReference(this);
331 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
332 visitor->VisitCodeAgeSequence(this);
333 } else if (((RelocInfo::IsJSReturn(mode) &&
334 IsPatchedReturnSequence()) ||
335 (RelocInfo::IsDebugBreakSlot(mode) &&
336 IsPatchedDebugBreakSlotSequence())) &&
337 isolate->debug()->has_break_points()) {
338 visitor->VisitDebugTarget(this);
339 } else if (RelocInfo::IsRuntimeEntry(mode)) {
340 visitor->VisitRuntimeEntry(this);
345 template<typename StaticVisitor>
346 void RelocInfo::Visit(Heap* heap) {
347 RelocInfo::Mode mode = rmode();
348 if (mode == RelocInfo::EMBEDDED_OBJECT) {
349 StaticVisitor::VisitEmbeddedPointer(heap, this);
350 } else if (RelocInfo::IsCodeTarget(mode)) {
351 StaticVisitor::VisitCodeTarget(heap, this);
352 } else if (mode == RelocInfo::CELL) {
353 StaticVisitor::VisitCell(heap, this);
354 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
355 StaticVisitor::VisitExternalReference(this);
356 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
357 StaticVisitor::VisitCodeAgeSequence(heap, this);
358 } else if (heap->isolate()->debug()->has_break_points() &&
359 ((RelocInfo::IsJSReturn(mode) &&
360 IsPatchedReturnSequence()) ||
361 (RelocInfo::IsDebugBreakSlot(mode) &&
362 IsPatchedDebugBreakSlotSequence()))) {
363 StaticVisitor::VisitDebugTarget(heap, this);
364 } else if (RelocInfo::IsRuntimeEntry(mode)) {
365 StaticVisitor::VisitRuntimeEntry(this);
370 Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
377 Operand::Operand(const ExternalReference& f) {
379 imm32_ = reinterpret_cast<int32_t>(f.address());
380 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
384 Operand::Operand(Smi* value) {
386 imm32_ = reinterpret_cast<intptr_t>(value);
387 rmode_ = RelocInfo::NONE32;
391 Operand::Operand(Register rm) {
399 bool Operand::is_reg() const {
400 return rm_.is_valid() &&
407 void Assembler::CheckBuffer() {
408 if (buffer_space() <= kGap) {
411 if (pc_offset() >= next_buffer_check_) {
412 CheckConstPool(false, true);
417 void Assembler::emit(Instr x) {
419 *reinterpret_cast<Instr*>(pc_) = x;
424 Address Assembler::target_address_from_return_address(Address pc) {
425 // Returns the address of the call target from the return address that will
426 // be returned to after a call.
427 // Call sequence on V7 or later is :
428 // movw ip, #... @ call address low 16
429 // movt ip, #... @ call address high 16
432 // Or pre-V7 or cases that need frequent patching, the address is in the
433 // constant pool. It could be a small constant pool load:
434 // ldr ip, [pc / pp, #...] @ call address
437 // Or an extended constant pool load:
440 // ldr ip, [pc, ip] @ call address
443 Address candidate = pc - 2 * Assembler::kInstrSize;
444 Instr candidate_instr(Memory::int32_at(candidate));
445 if (IsLdrPcImmediateOffset(candidate_instr) |
446 IsLdrPpImmediateOffset(candidate_instr)) {
448 } else if (IsLdrPpRegOffset(candidate_instr)) {
449 candidate = pc - 4 * Assembler::kInstrSize;
450 DCHECK(IsMovW(Memory::int32_at(candidate)) &&
451 IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
454 candidate = pc - 3 * Assembler::kInstrSize;
455 DCHECK(IsMovW(Memory::int32_at(candidate)) &&
456 IsMovT(Memory::int32_at(candidate + kInstrSize)));
462 Address Assembler::break_address_from_return_address(Address pc) {
463 return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
467 Address Assembler::return_address_from_call_start(Address pc) {
468 if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
469 IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
470 // Load from constant pool, small section.
471 return pc + kInstrSize * 2;
473 DCHECK(IsMovW(Memory::int32_at(pc)));
474 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
475 if (IsLdrPpRegOffset(Memory::int32_at(pc + kInstrSize))) {
476 // Load from constant pool, extended section.
477 return pc + kInstrSize * 4;
479 // A movw / movt load immediate.
480 return pc + kInstrSize * 3;
486 void Assembler::deserialization_set_special_target_at(
487 Address constant_pool_entry, Code* code, Address target) {
488 if (FLAG_enable_ool_constant_pool) {
489 set_target_address_at(constant_pool_entry, code, target);
491 Memory::Address_at(constant_pool_entry) = target;
496 bool Assembler::is_constant_pool_load(Address pc) {
497 return !Assembler::IsMovW(Memory::int32_at(pc)) ||
498 (FLAG_enable_ool_constant_pool &&
499 Assembler::IsLdrPpRegOffset(
500 Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
504 Address Assembler::constant_pool_entry_address(
505 Address pc, ConstantPoolArray* constant_pool) {
506 if (FLAG_enable_ool_constant_pool) {
507 DCHECK(constant_pool != NULL);
509 if (IsMovW(Memory::int32_at(pc))) {
510 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
511 IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
512 // This is an extended constant pool lookup.
513 Instruction* movw_instr = Instruction::At(pc);
514 Instruction* movt_instr = Instruction::At(pc + kInstrSize);
515 cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
516 movw_instr->ImmedMovwMovtValue();
518 // This is a small constant pool lookup.
519 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
520 cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
522 return reinterpret_cast<Address>(constant_pool) + cp_offset;
524 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
525 Instr instr = Memory::int32_at(pc);
526 return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
531 Address Assembler::target_address_at(Address pc,
532 ConstantPoolArray* constant_pool) {
533 if (is_constant_pool_load(pc)) {
534 // This is a constant pool lookup. Return the value in the constant pool.
535 return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
537 // This is an movw_movt immediate load. Return the immediate.
538 DCHECK(IsMovW(Memory::int32_at(pc)) &&
539 IsMovT(Memory::int32_at(pc + kInstrSize)));
540 Instruction* movw_instr = Instruction::At(pc);
541 Instruction* movt_instr = Instruction::At(pc + kInstrSize);
542 return reinterpret_cast<Address>(
543 (movt_instr->ImmedMovwMovtValue() << 16) |
544 movw_instr->ImmedMovwMovtValue());
549 void Assembler::set_target_address_at(Address pc,
550 ConstantPoolArray* constant_pool,
552 ICacheFlushMode icache_flush_mode) {
553 if (is_constant_pool_load(pc)) {
554 // This is a constant pool lookup. Update the entry in the constant pool.
555 Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
556 // Intuitively, we would think it is necessary to always flush the
557 // instruction cache after patching a target address in the code as follows:
558 // CpuFeatures::FlushICache(pc, sizeof(target));
559 // However, on ARM, no instruction is actually patched in the case
560 // of embedded constants of the form:
561 // ldr ip, [pp, #...]
562 // since the instruction accessing this address in the constant pool remains
565 // This is an movw_movt immediate load. Patch the immediate embedded in the
567 DCHECK(IsMovW(Memory::int32_at(pc)));
568 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
569 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
570 uint32_t immediate = reinterpret_cast<uint32_t>(target);
571 instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
572 instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
573 DCHECK(IsMovW(Memory::int32_at(pc)));
574 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
575 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
576 CpuFeatures::FlushICache(pc, 2 * kInstrSize);
582 } } // namespace v8::internal
584 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_