Upstream version 10.39.233.0
[platform/framework/web/crosswalk.git] / src / v8 / src / arm / assembler-arm-inl.h
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32
// The original source code covered by the above license has been modified
// significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
39
40 #include "src/arm/assembler-arm.h"
41
42 #include "src/assembler.h"
43 #include "src/debug.h"
44
45
46 namespace v8 {
47 namespace internal {
48
49
// Crankshaft requires VFP3 support on ARM.
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
// SIMD128 is never supported in Crankshaft on ARM.
bool CpuFeatures::SupportsSIMD128InCrankshaft() { return false; }
52
53
// Number of general-purpose registers available to the register allocator.
int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}
57
58
59 int DwVfpRegister::NumRegisters() {
60   return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
61 }
62
63
// Number of D-registers reserved by the VM and withheld from allocation.
int DwVfpRegister::NumReservedRegisters() {
  return kNumReservedRegisters;
}
67
68
// Allocatable D-registers: all existing registers minus the reserved ones.
int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}
72
73
74 int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
75   DCHECK(!reg.is(kDoubleRegZero));
76   DCHECK(!reg.is(kScratchDoubleReg));
77   if (reg.code() > kDoubleRegZero.code()) {
78     return reg.code() - kNumReservedRegisters;
79   }
80   return reg.code();
81 }
82
83
84 DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
85   DCHECK(index >= 0 && index < NumAllocatableRegisters());
86   DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
87          kNumReservedRegisters - 1);
88   if (index >= kDoubleRegZero.code()) {
89     return from_code(index + kNumReservedRegisters);
90   }
91   return from_code(index);
92 }
93
94
// Relocates this entry by |delta| when the host code object moves.  Only
// internal references hold absolute pointers that need adjusting.
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
}
104
105
// Returns the call/jump target address this reloc entry refers to.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
110
111
// Returns the address of the location holding the target, for use by the
// serializer.  For OOL constant pools and movw/movt sequences this is pc_
// itself; otherwise it is the (pc-relative) constant pool slot.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_ool_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for ool constant pool since this function is used by the
    // serializer and expects the address to reside within the code object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}
126
127
// Address of this entry's slot within the host's constant pool.
Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}
132
133
// Targets are always stored as full pointer-sized words on ARM.
int RelocInfo::target_address_size() {
  return kPointerSize;
}
137
138
// Patches the target address at this entry.  For code targets with a live
// host object, notifies the incremental marker about the new code reference
// unless the caller suppressed the write barrier.
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
151
152
// Returns the embedded object (the stored target address reinterpreted as
// an Object pointer).
Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}
157
158
// Wraps the location holding the embedded object in a Handle.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}
164
165
// Replaces the embedded object.  Heap-object targets are reported to the
// incremental marker unless the caller suppressed the write barrier.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
180
181
// Address of the external reference stored at this entry.
Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}
186
187
// Runtime entries use the same encoding as code target addresses.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
192
193
194 void RelocInfo::set_target_runtime_entry(Address target,
195                                          WriteBarrierMode write_barrier_mode,
196                                          ICacheFlushMode icache_flush_mode) {
197   DCHECK(IsRuntimeEntry(rmode_));
198   if (target_address() != target)
199     set_target_address(target, write_barrier_mode, icache_flush_mode);
200 }
201
202
// Wraps the cell address stored at pc_ in a Handle.
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}
208
209
// Reconstructs the Cell from the value address stored at pc_.
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
214
215
// Stores the address of the cell's value slot at pc_ and records a write
// barrier unless the caller suppressed it.
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
229
230
// Byte length of the code age sequence (three instructions).
static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;


// Code age stubs are not accessed via handles on ARM.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on Arm.
  return Handle<Object>();
}
238
239
// Reads the code age stub from the address stored in the last word of the
// age sequence.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}
246
247
// Writes the stub's entry point into the last word of the age sequence.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}
255
256
// Reads the patched call target, stored as a data word two instructions
// past pc_ (after the ldr/blx pair).
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}
264
265
// Patches the call target word and notifies the incremental marker about
// the new code reference when there is a host object.
void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
276
277
// Returns the object stored in the call target slot.
Object* RelocInfo::call_object() {
  return *call_object_address();
}


// Replaces the object stored in the call target slot.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


// Location of the call target slot: the word two instructions past pc_
// (after the patched ldr/blx pair).
Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
293
294
// Clears the recorded target by overwriting it with NULL.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}
302
303
304 bool RelocInfo::IsPatchedReturnSequence() {
305   Instr current_instr = Assembler::instr_at(pc_);
306   Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
307   // A patched return sequence is:
308   //  ldr ip, [pc, #0]
309   //  blx ip
310   return Assembler::IsLdrPcImmediateOffset(current_instr) &&
311          Assembler::IsBlxReg(next_instr);
312 }
313
314
// A debug break slot counts as patched once its marker nop has been
// replaced by something else.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
319
320
// Dispatches this reloc entry to the matching ObjectVisitor callback based
// on its mode.  Debug targets are only visited when break points are active
// and the code at pc_ has actually been patched.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
343
344
// Static-visitor variant of Visit() above; same dispatch by mode, but the
// break-point check is evaluated before the patched-sequence predicates.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
368
369
// Immediate operand with an explicit relocation mode.
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


// Immediate operand holding the address of an external reference.
Operand::Operand(const ExternalReference& f)  {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


// Immediate operand holding a Smi; no relocation information is recorded.
Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ =  reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


// Plain register operand (LSL #0, i.e. no shift applied).
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}
397
398
399 bool Operand::is_reg() const {
400   return rm_.is_valid() &&
401          rs_.is(no_reg) &&
402          shift_op_ == LSL &&
403          shift_imm_ == 0;
404 }
405
406
// Grows the instruction buffer when remaining space drops to kGap, and
// runs the constant pool check once the next check offset is reached.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}
415
416
// Writes one 32-bit instruction at the current pc and advances it.
// CheckBuffer() first ensures there is room (and may emit the const pool).
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
422
423
424 Address Assembler::target_address_from_return_address(Address pc) {
425   // Returns the address of the call target from the return address that will
426   // be returned to after a call.
427   // Call sequence on V7 or later is:
428   //  movw  ip, #... @ call address low 16
429   //  movt  ip, #... @ call address high 16
430   //  blx   ip
431   //                      @ return address
432   // For V6 when the constant pool is unavailable, it is:
433   //  mov  ip, #...     @ call address low 8
434   //  orr  ip, ip, #... @ call address 2nd 8
435   //  orr  ip, ip, #... @ call address 3rd 8
436   //  orr  ip, ip, #... @ call address high 8
437   //  blx   ip
438   //                      @ return address
439   // In cases that need frequent patching, the address is in the
440   // constant pool.  It could be a small constant pool load:
441   //  ldr   ip, [pc / pp, #...] @ call address
442   //  blx   ip
443   //                      @ return address
444   // Or an extended constant pool load (ARMv7):
445   //  movw  ip, #...
446   //  movt  ip, #...
447   //  ldr   ip, [pc, ip]  @ call address
448   //  blx   ip
449   //                      @ return address
450   // Or an extended constant pool load (ARMv6):
451   //  mov  ip, #...
452   //  orr  ip, ip, #...
453   //  orr  ip, ip, #...
454   //  orr  ip, ip, #...
455   //  ldr   ip, [pc, ip]  @ call address
456   //  blx   ip
457   //                      @ return address
458   Address candidate = pc - 2 * Assembler::kInstrSize;
459   Instr candidate_instr(Memory::int32_at(candidate));
460   if (IsLdrPcImmediateOffset(candidate_instr) |
461       IsLdrPpImmediateOffset(candidate_instr)) {
462     return candidate;
463   } else {
464     if (IsLdrPpRegOffset(candidate_instr)) {
465       candidate -= Assembler::kInstrSize;
466     }
467     if (CpuFeatures::IsSupported(ARMv7)) {
468       candidate -= 1 * Assembler::kInstrSize;
469       DCHECK(IsMovW(Memory::int32_at(candidate)) &&
470              IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
471     } else {
472       candidate -= 3 * Assembler::kInstrSize;
473       DCHECK(
474           IsMovImmed(Memory::int32_at(candidate)) &&
475           IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
476           IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
477           IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
478     }
479     return candidate;
480   }
481 }
482
483
// Maps a return address inside a debug break slot back to the break address.
Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}
487
488
489 Address Assembler::return_address_from_call_start(Address pc) {
490   if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
491       IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
492     // Load from constant pool, small section.
493     return pc + kInstrSize * 2;
494   } else {
495     if (CpuFeatures::IsSupported(ARMv7)) {
496       DCHECK(IsMovW(Memory::int32_at(pc)));
497       DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
498       if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
499         // Load from constant pool, extended section.
500         return pc + kInstrSize * 4;
501       } else {
502         // A movw / movt load immediate.
503         return pc + kInstrSize * 3;
504       }
505     } else {
506       DCHECK(IsMovImmed(Memory::int32_at(pc)));
507       DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
508       DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
509       DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
510       if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
511         // Load from constant pool, extended section.
512         return pc + kInstrSize * 6;
513       } else {
514         // A mov / orr load immediate.
515         return pc + kInstrSize * 5;
516       }
517     }
518   }
519 }
520
521
// Used by the deserializer: writes |target| into an already-located
// constant pool entry.  With the OOL constant pool the generic patching
// path is used; otherwise the slot is a plain data word stored directly.
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_ool_constant_pool) {
    set_target_address_at(constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}
530
531
// Returns true if the sequence at |pc| loads its value from a constant pool
// rather than materializing it with immediate moves.  On ARMv7 that is
// anything other than a movw, or a movw/movt followed by an ldr [pp, reg]
// (extended OOL entry); on ARMv6 the same with mov/orr in place of
// movw/movt.
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}
545
546
// Computes the address of the constant pool slot read by the load sequence
// at |pc|.  With the OOL constant pool the offset is decoded from a
// mov/orr sequence (ARMv6), a movw/movt pair (ARMv7), or a small ldr
// immediate; otherwise the classic pc-relative ldr encoding is used.
Address Assembler::constant_pool_entry_address(
    Address pc, ConstantPoolArray* constant_pool) {
  if (FLAG_enable_ool_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      // Each instruction contributes one byte of the 32-bit offset.
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      // movw supplies the low 16 bits, movt the high 16 bits.
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return reinterpret_cast<Address>(constant_pool) + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}
584
585
// Reads the target address encoded at |pc|: either from its constant pool
// slot, or decoded from the immediate-move sequence (movw/movt on ARMv7,
// mov/orr/orr/orr on ARMv6).
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is an mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    // Each instruction contributes one byte of the 32-bit address.
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}
616
617
// Patches the target address encoded at |pc|.  Constant pool entries are
// updated as plain data (no icache flush needed since no instruction
// changes); immediate-move sequences are re-encoded in place and flushed
// unless the caller asked to skip the flush.
void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CpuFeatures::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // movw takes the low half, movt the high half.
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 2 * kInstrSize);
    }
  } else {
    // This is an mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // Each of the four instructions carries one byte of the address.
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 4 * kInstrSize);
    }
  }
}
669
670
671 } }  // namespace v8::internal
672
673 #endif  // V8_ARM_ASSEMBLER_ARM_INL_H_