// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2014 the V8 project authors. All rights reserved.

#ifndef V8_PPC_ASSEMBLER_PPC_INL_H_
#define V8_PPC_ASSEMBLER_PPC_INL_H_

#include "src/ppc/assembler-ppc.h"

#include "src/assembler.h"
#include "src/debug.h"


namespace v8 {
namespace internal {

bool CpuFeatures::SupportsCrankshaft() { return true; }

void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  // Absolute code pointer inside code object moves with the code object.
  if (IsInternalReference(rmode_)) {
    // Jump table entry.
    Address target = Memory::Address_at(pc_);
    Memory::Address_at(pc_) = target + delta;
  } else {
    // mov sequence.
    DCHECK(IsInternalReferenceEncoded(rmode_));
    Address target = Assembler::target_address_at(pc_, host_);
    Assembler::set_target_address_at(pc_, host_, target + delta,
                                     icache_flush_mode);
  }
}

Address RelocInfo::target_internal_reference() {
  if (IsInternalReference(rmode_)) {
    // Jump table entry.
    return Memory::Address_at(pc_);
  } else {
    // mov sequence.
    DCHECK(IsInternalReferenceEncoded(rmode_));
    return Assembler::target_address_at(pc_, host_);
  }
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  return reinterpret_cast<Address>(pc_);
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE);

  if (FLAG_enable_embedded_constant_pool &&
      Assembler::IsConstantPoolLoadStart(pc_)) {
    // We return the PC for embedded constant pool since this function is used
    // by the serializer and expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  }

  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LIS/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written.
  return reinterpret_cast<Address>(pc_);
}

Address RelocInfo::constant_pool_entry_address() {
  if (FLAG_enable_embedded_constant_pool) {
    Address constant_pool = host_->constant_pool();
    DCHECK(constant_pool);
    ConstantPoolEntry::Access access;
    if (Assembler::IsConstantPoolLoadStart(pc_, &access))
      return Assembler::target_constant_pool_address_at(
          pc_, constant_pool, access, ConstantPoolEntry::INTPTR);
  }
  UNREACHABLE();
  return NULL;
}

int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }

void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Address Assembler::break_address_from_return_address(Address pc) {
  return target_address_from_return_address(pc);
}

Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence is:
  //  mov  ip, @ call address   (len instructions)
  //  (two more instructions that move ip into the branch register and
  //   perform the branch-and-link)
  //                            @ return address
  int len;
  ConstantPoolEntry::Access access;
  if (FLAG_enable_embedded_constant_pool &&
      IsConstantPoolLoadEnd(pc - 3 * kInstrSize, &access)) {
    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
  } else {
    len = kMovInstructionsNoConstantPool;
  }
  return pc - (len + 2) * kInstrSize;
}

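// Note: target_address_from_return_address() above and
// return_address_from_call_start() below apply the same
// (len + 2) * kInstrSize offset in opposite directions: len instructions
// materialize the call target and two more perform the branch, so the return
// address sits (len + 2) instructions past the start of the mov sequence.
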
Address Assembler::return_address_from_call_start(Address pc) {
  int len;
  ConstantPoolEntry::Access access;
  if (FLAG_enable_embedded_constant_pool &&
      IsConstantPoolLoadStart(pc, &access)) {
    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
  } else {
    len = kMovInstructionsNoConstantPool;
  }
  return pc + (len + 2) * kInstrSize;
}

Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}

Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(
      reinterpret_cast<Object**>(Assembler::target_address_at(pc_, host_)));
}

void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(
      pc_, host_, reinterpret_cast<Address>(target), icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}

Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}

void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be on
    // an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(host(), NULL, cell);
  }
}

static const int kNoCodeAgeInstructions =
    FLAG_enable_embedded_constant_pool ? 7 : 6;
static const int kCodeAgingInstructions =
    Assembler::kMovInstructionsNoConstantPool + 3;
static const int kNoCodeAgeSequenceInstructions =
    ((kNoCodeAgeInstructions >= kCodeAgingInstructions)
         ? kNoCodeAgeInstructions
         : kCodeAgingInstructions);
static const int kNoCodeAgeSequenceNops =
    (kNoCodeAgeSequenceInstructions - kNoCodeAgeInstructions);
static const int kCodeAgingSequenceNops =
    (kNoCodeAgeSequenceInstructions - kCodeAgingInstructions);
static const int kCodeAgingTargetDelta = 1 * Assembler::kInstrSize;
static const int kNoCodeAgeSequenceLength =
    (kNoCodeAgeSequenceInstructions * Assembler::kInstrSize);

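// The unaged ("young") prologue and the code-aging call sequence must occupy
// the same number of instructions, so whichever is shorter is padded with
// nops (kNoCodeAgeSequenceNops / kCodeAgingSequenceNops). kCodeAgingTargetDelta
// is the offset within the aging sequence at which the stub address is
// encoded (see code_age_stub() below). As an illustration only, assuming the
// 64-bit mov sequence is five instructions and no embedded constant pool, the
// aging sequence needs 5 + 3 = 8 instructions versus 6 for the young
// prologue, so the young sequence would get two padding nops.
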
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on PPC.
  return Handle<Object>();
}

Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + kCodeAgingTargetDelta, host_));
}

void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + kCodeAgingTargetDelta, host_,
                                   stub->instruction_start(),
                                   icache_flush_mode);
}

Address RelocInfo::call_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes a patched return sequence per
  // BreakLocation::SetDebugBreakAtReturn(), or a debug break
  // slot per BreakLocation::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_, host_);
}

void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Assembler::set_target_address_at(pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::call_object() { return *call_object_address(); }

void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}

void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    // Jump table entry.
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    // mov sequence.
    // Currently used only by the deserializer, so there is no need to flush
    // the instruction cache.
    Assembler::set_target_address_at(pc_, host_, NULL, SKIP_ICACHE_FLUSH);
  } else {
    Assembler::set_target_address_at(pc_, host_, NULL);
  }
}

bool RelocInfo::IsPatchedReturnSequence() {
  // The patched return sequence is defined by
  // BreakLocation::SetDebugBreakAtReturn().
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
#if V8_TARGET_ARCH_PPC64
  Instr instr3 = Assembler::instr_at(pc_ + (3 * Assembler::kInstrSize));
  Instr instr4 = Assembler::instr_at(pc_ + (4 * Assembler::kInstrSize));
  Instr binstr = Assembler::instr_at(pc_ + (7 * Assembler::kInstrSize));
#else
  Instr binstr = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize);
#endif
  bool patched_return =
      ((instr0 & kOpcodeMask) == ADDIS && (instr1 & kOpcodeMask) == ORI &&
#if V8_TARGET_ARCH_PPC64
       (instr3 & kOpcodeMask) == ORIS && (instr4 & kOpcodeMask) == ORI &&
#endif
       (binstr == 0x7d821008));  // twge r2, r2
  return patched_return;
}

bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}

void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}

template <typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) && IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

Operand::Operand(intptr_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm_ = immediate;
  rmode_ = rmode;
}

Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm_ = reinterpret_cast<intptr_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}

Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm_ = reinterpret_cast<intptr_t>(value);
  rmode_ = kRelocInfo_NONEPTR;
}

Operand::Operand(Register rm) {
  rm_ = rm;
  rmode_ = kRelocInfo_NONEPTR;  // PPC - why doesn't ARM do this?
}

void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}

void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_buffer_check_) {
    CheckTrampolinePool();
  }
}

void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}

bool Operand::is_reg() const { return rm_.is_valid(); }

// Fetch the pointer-sized value materialized by the FIXED_SEQUENCE lis/ori
// mov (or load it from the embedded constant pool).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access))
      return Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR));
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret the 2 instructions generated by lis/ori.
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    // Assemble the 64-bit value.
    uint64_t hi = (static_cast<uint32_t>((instr1 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr2 & kImm16Mask));
    uint64_t lo = (static_cast<uint32_t>((instr4 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr5 & kImm16Mask));
    return reinterpret_cast<Address>((hi << 32) | lo);
#else
    // Assemble the 32-bit value.
    return reinterpret_cast<Address>(((instr1 & kImm16Mask) << 16) |
                                     (instr2 & kImm16Mask));
#endif
  }

  UNREACHABLE();
  return NULL;
}

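// Example of the lis/ori encoding decoded above (32-bit case): materializing
// 0x12345678 emits "lis rX, 0x1234" followed by "ori rX, rX, 0x5678", so
// ((instr1 & kImm16Mask) << 16) | (instr2 & kImm16Mask) reassembles
// 0x12345678. The 64-bit case does the same for the high and low halves.
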
#if V8_TARGET_ARCH_PPC64
const int kLoadIntptrOpcode = LD;
#else
const int kLoadIntptrOpcode = LWZ;
#endif

// Constant pool load sequence detection:
// 1) REGULAR access:
//    load <dst>, kConstantPoolRegister + <offset>
//
// 2) OVERFLOWED access:
//    addis <scratch>, kConstantPoolRegister, <offset_high>
//    load <dst>, <scratch> + <offset_low>
bool Assembler::IsConstantPoolLoadStart(Address pc,
                                        ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  int opcode = instr & kOpcodeMask;
  if (!GetRA(instr).is(kConstantPoolRegister)) return false;
  bool overflowed = (opcode == ADDIS);
#ifdef DEBUG
  if (overflowed) {
    opcode = instr_at(pc + kInstrSize) & kOpcodeMask;
  }
  DCHECK(opcode == kLoadIntptrOpcode || opcode == LFD);
#endif
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}

bool Assembler::IsConstantPoolLoadEnd(Address pc,
                                      ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  int opcode = instr & kOpcodeMask;
  bool overflowed = false;
  if (!(opcode == kLoadIntptrOpcode || opcode == LFD)) return false;
  if (!GetRA(instr).is(kConstantPoolRegister)) {
    instr = instr_at(pc - kInstrSize);
    opcode = instr & kOpcodeMask;
    if ((opcode != ADDIS) || !GetRA(instr).is(kConstantPoolRegister)) {
      return false;
    }
    overflowed = true;
  }
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}

int Assembler::GetConstantPoolOffset(Address pc,
                                     ConstantPoolEntry::Access access,
                                     ConstantPoolEntry::Type type) {
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  int offset;
  if (overflowed) {
    offset = (instr_at(pc) & kImm16Mask) << 16;
    offset += SIGN_EXT_IMM16(instr_at(pc + kInstrSize) & kImm16Mask);
    DCHECK(!is_int16(offset));
  } else {
    offset = SIGN_EXT_IMM16((instr_at(pc) & kImm16Mask));
  }
  return offset;
}

void Assembler::PatchConstantPoolAccessInstruction(
    int pc_offset, int offset, ConstantPoolEntry::Access access,
    ConstantPoolEntry::Type type) {
  Address pc = buffer_ + pc_offset;
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
  DCHECK(overflowed != is_int16(offset));
#endif
  if (overflowed) {
    int hi_word = static_cast<int>(offset >> 16);
    int lo_word = static_cast<int>(offset & 0xffff);
    if (lo_word & 0x8000) hi_word++;

    Instr instr1 = instr_at(pc);
    Instr instr2 = instr_at(pc + kInstrSize);
    instr1 &= ~kImm16Mask;
    instr1 |= (hi_word & kImm16Mask);
    instr2 &= ~kImm16Mask;
    instr2 |= (lo_word & kImm16Mask);
    instr_at_put(pc, instr1);
    instr_at_put(pc + kInstrSize, instr2);
  } else {
    Instr instr = instr_at(pc);
    instr &= ~kImm16Mask;
    instr |= (offset & kImm16Mask);
    instr_at_put(pc, instr);
  }
}

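// Example of the OVERFLOWED patch above: for offset 0x18000 the split gives
// hi_word = 0x1 and lo_word = 0x8000; because the load's 16-bit displacement
// is sign-extended (0x8000 becomes -0x8000), hi_word is bumped to 0x2 so that
// (0x2 << 16) + (-0x8000) == 0x18000. GetConstantPoolOffset() reverses this
// with SIGN_EXT_IMM16 when reading the offset back.
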
Address Assembler::target_constant_pool_address_at(
    Address pc, Address constant_pool, ConstantPoolEntry::Access access,
    ConstantPoolEntry::Type type) {
  Address addr = constant_pool;
  DCHECK(addr);
  addr += GetConstantPoolOffset(pc, access, type);
  return addr;
}

// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code. The serializer
// has already deserialized the mov instructions etc.
// There is a FIXED_SEQUENCE assumption here.
void Assembler::deserialization_set_special_target_at(
    Address instruction_payload, Code* code, Address target) {
  set_target_address_at(instruction_payload, code, target);
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  if (RelocInfo::IsInternalReferenceEncoded(mode)) {
    Code* code = NULL;
    set_target_address_at(pc, code, target, SKIP_ICACHE_FLUSH);
  } else {
    Memory::Address_at(pc) = target;
  }
}

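// Note on the patching below: on PPC64 the target is assumed to be spread
// across four 16-bit immediates in the 1st, 2nd, 4th and 5th instructions of
// the mov sequence; the middle (3rd) slot is left untouched (presumably the
// shift that positions the upper half) and only the immediate fields of the
// other four instructions are rewritten.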
// This code assumes the FIXED_SEQUENCE of lis/ori.
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access)) {
      Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR)) = target;
      return;
    }
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret the 2 instructions generated by lis/ori.
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    // Needs to be fixed up when mov changes to handle 64-bit values.
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uintptr_t itarget = reinterpret_cast<uintptr_t>(target);

    instr5 &= ~kImm16Mask;
    instr5 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr4 &= ~kImm16Mask;
    instr4 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr2 &= ~kImm16Mask;
    instr2 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr1 &= ~kImm16Mask;
    instr1 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    *p = instr1;
    *(p + 1) = instr2;
    *(p + 3) = instr4;
    *(p + 4) = instr5;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(p, 5 * kInstrSize);
    }
#else
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uint32_t itarget = reinterpret_cast<uint32_t>(target);
    int lo_word = itarget & kImm16Mask;
    int hi_word = itarget >> 16;
    instr1 &= ~kImm16Mask;
    instr1 |= hi_word;
    instr2 &= ~kImm16Mask;
    instr2 |= lo_word;

    *p = instr1;
    *(p + 1) = instr2;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(p, 2 * kInstrSize);
    }
#endif
    return;
  }
  UNREACHABLE();
}

} }  // namespace v8::internal

#endif  // V8_PPC_ASSEMBLER_PPC_INL_H_