Upstream version 11.40.271.0
[platform/framework/web/crosswalk.git] / src / v8 / src / arm / assembler-arm-inl.h
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32
// The original source code covered by the above license has been modified
// significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
39
40 #include "src/arm/assembler-arm.h"
41
42 #include "src/assembler.h"
43 #include "src/debug.h"
44
45
46 namespace v8 {
47 namespace internal {
48
49
// Crankshaft (the optimizing compiler) requires VFP3 floating-point support.
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
// SIMD128 in Crankshaft is not implemented on ARM.
bool CpuFeatures::SupportsSIMD128InCrankshaft() { return false; }


// Number of general-purpose registers available to the register allocator.
int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}
57
58
// Total number of D (double-precision VFP) registers: 32 when the VFP32DREGS
// feature is present, otherwise 16.
int DwVfpRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


// Number of D registers reserved (not handed out by the allocator).
int DwVfpRegister::NumReservedRegisters() {
  return kNumReservedRegisters;
}


// D registers the allocator may use: total minus the reserved ones.
int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}


// static
// Allocatable D registers that alias S registers (the low bank only).
int DwVfpRegister::NumAllocatableAliasedRegisters() {
  return LowDwVfpRegister::kMaxNumLowRegisters - kNumReservedRegisters;
}
78
79
80 int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
81   DCHECK(!reg.is(kDoubleRegZero));
82   DCHECK(!reg.is(kScratchDoubleReg));
83   if (reg.code() > kDoubleRegZero.code()) {
84     return reg.code() - kNumReservedRegisters;
85   }
86   return reg.code();
87 }
88
89
90 DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
91   DCHECK(index >= 0 && index < NumAllocatableRegisters());
92   DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
93          kNumReservedRegisters - 1);
94   if (index >= kDoubleRegZero.code()) {
95     return from_code(index + kNumReservedRegisters);
96   }
97   return from_code(index);
98 }
99
100
// Adjusts this relocation entry after the code object has moved by delta.
// Only internal references store an absolute pointer that needs fixing.
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
}
110
111
// Returns the call/jump target encoded at this relocation site.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
116
117
// Returns the address of the memory word that holds the target, i.e. the
// location a serializer must read/patch. For movw/movt or ool-constant-pool
// encodings this is pc_ itself; otherwise it is the constant pool slot.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_ool_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for ool constant pool since this function is used by
    // the serializer, which expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}
132
133
// Address of this entry's slot inside the constant pool.
Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}


// Targets are full 32-bit pointers on ARM.
int RelocInfo::target_address_size() {
  return kPointerSize;
}
143
144
// Patches the target address at this site. For code targets with a host code
// object, also records the write for incremental marking so the GC sees the
// newly referenced code object.
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
157
158
// Returns the embedded heap object pointer stored at this site.
Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


// Same as target_object(), but wrapped as a Handle for GC-safe use.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}
170
171
// Patches the embedded object pointer; records a write barrier when the new
// target is a heap object and a host code object is present.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
186
187
// Returns the external reference address stored at this site.
Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


// Runtime entries use the same encoding as code targets.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
198
199
200 void RelocInfo::set_target_runtime_entry(Address target,
201                                          WriteBarrierMode write_barrier_mode,
202                                          ICacheFlushMode icache_flush_mode) {
203   DCHECK(IsRuntimeEntry(rmode_));
204   if (target_address() != target)
205     set_target_address(target, write_barrier_mode, icache_flush_mode);
206 }
207
208
// Returns a handle to the Cell whose value address is stored at pc_.
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


// Returns the Cell itself (recovered from the stored value address).
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
220
221
// Stores the address of cell's value slot at pc_ and records the write for
// incremental marking when requested.
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
235
236
237 static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;
238
239
// Not used on ARM (see the UNREACHABLE below); code-age stubs are accessed
// via code_age_stub()/set_code_age_stub() instead.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on Arm.
  return Handle<Object>();
}
244
245
// Reads the code-age stub address stored in the last word of the
// code-age sequence (kNoCodeAgeSequenceLength bytes, address in final slot).
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}


// Patches the stored stub address in the code-age sequence. Note that
// icache_flush_mode is not consulted here.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}
261
262
// Returns the call target stored two instructions past pc_ in a patched
// return/debug-break-slot sequence.
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}


// Overwrites the call target in a patched sequence and records the write
// for incremental marking when a host code object is present.
void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
282
283
// Reads the object stored in the call-target slot.
Object* RelocInfo::call_object() {
  return *call_object_address();
}


// Writes the object stored in the call-target slot.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


// Location of the call-target slot: two instructions past pc_ in a patched
// return/debug-break-slot sequence.
Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
299
300
// Resets the target address at this relocation to NULL.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}
308
309
310 bool RelocInfo::IsPatchedReturnSequence() {
311   Instr current_instr = Assembler::instr_at(pc_);
312   Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
313   // A patched return sequence is:
314   //  ldr ip, [pc, #0]
315   //  blx ip
316   return Assembler::IsLdrPcImmediateOffset(current_instr) &&
317          Assembler::IsBlxReg(next_instr);
318 }
319
320
// A debug-break slot is "patched" when its special NOP has been replaced.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
325
326
// Dispatches this relocation entry to the matching ObjectVisitor callback.
// Debug targets are only visited when break points are active and the site
// has actually been patched.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
349
350
// Static-dispatch twin of Visit(Isolate*, ObjectVisitor*): same mode
// dispatch, routed through StaticVisitor's static member functions.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
374
375
// Immediate operand with an explicit relocation mode.
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


// Immediate operand holding an external reference address.
Operand::Operand(const ExternalReference& f)  {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


// Immediate operand holding a Smi (already tagged, no reloc info needed).
Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ =  reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


// Plain register operand (identity shift: LSL #0).
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}
403
404
405 bool Operand::is_reg() const {
406   return rm_.is_valid() &&
407          rs_.is(no_reg) &&
408          shift_op_ == LSL &&
409          shift_imm_ == 0;
410 }
411
412
// Ensures there is room for the next instruction: grows the buffer when it
// is nearly full, then emits a pending constant pool if the scheduled check
// offset has been reached.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}


// Writes one 32-bit instruction at the current pc and advances pc.
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
428
429
430 Address Assembler::target_address_from_return_address(Address pc) {
431   // Returns the address of the call target from the return address that will
432   // be returned to after a call.
433   // Call sequence on V7 or later is:
434   //  movw  ip, #... @ call address low 16
435   //  movt  ip, #... @ call address high 16
436   //  blx   ip
437   //                      @ return address
438   // For V6 when the constant pool is unavailable, it is:
439   //  mov  ip, #...     @ call address low 8
440   //  orr  ip, ip, #... @ call address 2nd 8
441   //  orr  ip, ip, #... @ call address 3rd 8
442   //  orr  ip, ip, #... @ call address high 8
443   //  blx   ip
444   //                      @ return address
445   // In cases that need frequent patching, the address is in the
446   // constant pool.  It could be a small constant pool load:
447   //  ldr   ip, [pc / pp, #...] @ call address
448   //  blx   ip
449   //                      @ return address
450   // Or an extended constant pool load (ARMv7):
451   //  movw  ip, #...
452   //  movt  ip, #...
453   //  ldr   ip, [pc, ip]  @ call address
454   //  blx   ip
455   //                      @ return address
456   // Or an extended constant pool load (ARMv6):
457   //  mov  ip, #...
458   //  orr  ip, ip, #...
459   //  orr  ip, ip, #...
460   //  orr  ip, ip, #...
461   //  ldr   ip, [pc, ip]  @ call address
462   //  blx   ip
463   //                      @ return address
464   Address candidate = pc - 2 * Assembler::kInstrSize;
465   Instr candidate_instr(Memory::int32_at(candidate));
466   if (IsLdrPcImmediateOffset(candidate_instr) |
467       IsLdrPpImmediateOffset(candidate_instr)) {
468     return candidate;
469   } else {
470     if (IsLdrPpRegOffset(candidate_instr)) {
471       candidate -= Assembler::kInstrSize;
472     }
473     if (CpuFeatures::IsSupported(ARMv7)) {
474       candidate -= 1 * Assembler::kInstrSize;
475       DCHECK(IsMovW(Memory::int32_at(candidate)) &&
476              IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
477     } else {
478       candidate -= 3 * Assembler::kInstrSize;
479       DCHECK(
480           IsMovImmed(Memory::int32_at(candidate)) &&
481           IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
482           IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
483           IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
484     }
485     return candidate;
486   }
487 }
488
489
// Maps a return address back to the start of its debug-break slot.
Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}
493
494
495 Address Assembler::return_address_from_call_start(Address pc) {
496   if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
497       IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
498     // Load from constant pool, small section.
499     return pc + kInstrSize * 2;
500   } else {
501     if (CpuFeatures::IsSupported(ARMv7)) {
502       DCHECK(IsMovW(Memory::int32_at(pc)));
503       DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
504       if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
505         // Load from constant pool, extended section.
506         return pc + kInstrSize * 4;
507       } else {
508         // A movw / movt load immediate.
509         return pc + kInstrSize * 3;
510       }
511     } else {
512       DCHECK(IsMovImmed(Memory::int32_at(pc)));
513       DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
514       DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
515       DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
516       if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
517         // Load from constant pool, extended section.
518         return pc + kInstrSize * 6;
519       } else {
520         // A mov / orr load immediate.
521         return pc + kInstrSize * 5;
522       }
523     }
524   }
525 }
526
527
// Deserializer hook: patches the target behind a constant pool entry. With
// an out-of-line pool the patch routes through set_target_address_at;
// otherwise the pool slot is written directly.
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_ool_constant_pool) {
    set_target_address_at(constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}
536
537
// True when the sequence at pc loads its value from the constant pool rather
// than via an immediate load. Anything that does not start with a movw
// (ARMv7) / mov (pre-ARMv7) is a pool load; with out-of-line constant pools,
// an immediate sequence followed by an ldr [pp, reg] is an extended pool
// load and also counts.
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}
551
552
// Computes the address of the constant pool slot referenced by the load
// sequence at pc. For out-of-line pools the offset is decoded from the
// immediate-load sequence (mov/orr on ARMv6, movw/movt on ARMv7) or from a
// small ldr [pp, #imm]; otherwise it is a classic pc-relative ldr.
Address Assembler::constant_pool_entry_address(
    Address pc, ConstantPoolArray* constant_pool) {
  if (FLAG_enable_ool_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      // The offset is spread over four 8-bit immediates; OR them together.
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      // movw holds the low 16 bits of the offset, movt the high 16.
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return reinterpret_cast<Address>(constant_pool) + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}
590
591
// Reads the 32-bit target encoded at pc: from the constant pool entry, or by
// reassembling the immediates of a movw/movt pair (ARMv7) or a mov + 3 x orr
// sequence (pre-ARMv7).
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is an mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    // Each instruction contributes one byte of the address; OR them together.
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}
622
623
// Patches the 32-bit target encoded at pc, mirroring the three encodings
// handled by target_address_at: constant pool slot (no icache flush needed),
// movw/movt pair (ARMv7), or mov + 3 x orr sequence (pre-ARMv7).
void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CpuFeatures::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // movw gets the low 16 bits, movt the high 16.
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 2 * kInstrSize);
    }
  } else {
    // This is an mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // One byte of the address per instruction, lowest byte first.
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 4 * kInstrSize);
    }
  }
}
675
676
677 } }  // namespace v8::internal
678
679 #endif  // V8_ARM_ASSEMBLER_ARM_INL_H_