deps: update v8 to 4.3.61.21
[platform/upstream/nodejs.git] / deps / v8 / src / arm / assembler-arm-inl.h
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32
33 // The original source code covered by the above license above has been modified
34 // significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
39
40 #include "src/arm/assembler-arm.h"
41
42 #include "src/assembler.h"
43 #include "src/debug.h"
44
45
46 namespace v8 {
47 namespace internal {
48
49
50 bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
51
52
// Number of general-purpose registers the register allocator may use.
int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}
56
57
58 int DwVfpRegister::NumRegisters() {
59   return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
60 }
61
62
// Number of double registers reserved by the VM (not allocatable).
int DwVfpRegister::NumReservedRegisters() {
  return kNumReservedRegisters;
}
66
67
68 int DwVfpRegister::NumAllocatableRegisters() {
69   return NumRegisters() - kNumReservedRegisters;
70 }
71
72
// static
// Allocatable double registers that alias single-precision registers
// (only the low d0-d15 range aliases s0-s31), minus the reserved ones.
int DwVfpRegister::NumAllocatableAliasedRegisters() {
  return LowDwVfpRegister::kMaxNumLowRegisters - kNumReservedRegisters;
}
77
78
79 int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
80   DCHECK(!reg.is(kDoubleRegZero));
81   DCHECK(!reg.is(kScratchDoubleReg));
82   if (reg.code() > kDoubleRegZero.code()) {
83     return reg.code() - kNumReservedRegisters;
84   }
85   return reg.code();
86 }
87
88
89 DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
90   DCHECK(index >= 0 && index < NumAllocatableRegisters());
91   DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
92          kNumReservedRegisters - 1);
93   if (index >= kDoubleRegZero.code()) {
94     return from_code(index + kNumReservedRegisters);
95   }
96   return from_code(index);
97 }
98
99
100 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
101   if (RelocInfo::IsInternalReference(rmode_)) {
102     // absolute code pointer inside code object moves with the code object.
103     int32_t* p = reinterpret_cast<int32_t*>(pc_);
104     *p += delta;  // relocate entry
105   }
106   // We do not use pc relative addressing on ARM, so there is
107   // nothing else to do.
108 }
109
110
// Returns the call/jump target encoded at this reloc position.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
115
116
// Returns the address of the memory cell that holds the target address.
// For ool constant pools and movw/movt sequences this is pc_ itself (the
// serializer requires an address inside the code object); otherwise it is
// the constant pool entry the ldr instruction loads from.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_ool_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for ool constant pool since this function is used by the
    // serializer and expects the address to reside within the code object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}
131
132
// Address of the constant pool slot backing this reloc entry.
Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}
137
138
// Size in bytes of a stored target address (one pointer on ARM).
int RelocInfo::target_address_size() {
  return kPointerSize;
}
142
143
// Patches the target address at this reloc position and, for code targets,
// records the write with the incremental marker so the GC sees the new
// reference from the host code object.
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
156
157
// Returns the embedded heap object referenced at this reloc position.
Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}
162
163
// Returns a handle wrapping the slot that holds the embedded object.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}
169
170
// Patches the embedded object pointer and notifies the incremental marker
// when the new target is a heap object, keeping the GC's view consistent.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
185
186
// Returns the external (C++) address referenced at this reloc position.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}
191
192
// Returns the internal-reference address stored directly at pc_.
Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}
197
198
// Returns the address of the cell holding the internal reference (pc_).
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}
203
204
// Returns the runtime-entry target; same encoding as a code target.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
209
210
211 void RelocInfo::set_target_runtime_entry(Address target,
212                                          WriteBarrierMode write_barrier_mode,
213                                          ICacheFlushMode icache_flush_mode) {
214   DCHECK(IsRuntimeEntry(rmode_));
215   if (target_address() != target)
216     set_target_address(target, write_barrier_mode, icache_flush_mode);
217 }
218
219
// Returns a handle for the Cell whose address is stored at pc_.
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}
225
226
// Returns the Cell referenced at pc_ (stored as its value address).
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
231
232
// Stores the address of the cell's value slot at pc_ and records the
// write with the incremental marker.
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
246
247
// Byte length of the code-age sequence (three instructions).
static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;
249
250
// Not used on ARM; the code-age stub is accessed via code_age_stub().
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on Arm.
  return Handle<Object>();
}
255
256
// Returns the code-age stub whose address is embedded in the last
// instruction slot of the code-age sequence.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}
263
264
// Patches the stub address embedded in the code-age sequence; the slot
// location mirrors the one read by code_age_stub() above.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}
272
273
// Returns the call target stored after the patched break/return sequence.
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}
281
282
// Patches the call target of a patched break/return sequence and records
// the new code reference with the incremental marker.
void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
293
294
// Reads the object stored in the call-object slot.
Object* RelocInfo::call_object() {
  return *call_object_address();
}
298
299
// Writes the object stored in the call-object slot (no write barrier;
// callers are responsible for GC bookkeeping if needed).
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}
303
304
// Address of the call-object slot, two instructions past the patched
// break/return sequence start.
Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
310
311
// Clears the stored target so stale pointers are not kept alive; internal
// references are wiped in place, other modes via the target-patching path.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(pc_, host_, NULL);
  }
}
322
323
324 bool RelocInfo::IsPatchedReturnSequence() {
325   Instr current_instr = Assembler::instr_at(pc_);
326   Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
327   // A patched return sequence is:
328   //  ldr ip, [pc, #0]
329   //  blx ip
330   return Assembler::IsLdrPcImmediateOffset(current_instr) &&
331          Assembler::IsBlxReg(next_instr);
332 }
333
334
// A debug-break slot is "patched" once its padding nop has been replaced.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
339
340
// Dispatches this reloc entry to the matching ObjectVisitor callback.
// The branch order must stay in sync with the static template overload of
// Visit in this file.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    // Only patched return sites / break slots reference a debug target.
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
365
366
// Static-dispatch variant of Visit used by the GC; must mirror the
// virtual-visitor overload above branch for branch.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
392
393
// Immediate operand with an explicit relocation mode.
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}
399
400
// Immediate operand holding an external (C++) address.
Operand::Operand(const ExternalReference& f)  {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}
406
407
408 Operand::Operand(Smi* value) {
409   rm_ = no_reg;
410   imm32_ =  reinterpret_cast<intptr_t>(value);
411   rmode_ = RelocInfo::NONE32;
412 }
413
414
// Plain register operand (no shift applied).
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}
421
422
423 bool Operand::is_reg() const {
424   return rm_.is_valid() &&
425          rs_.is(no_reg) &&
426          shift_op_ == LSL &&
427          shift_imm_ == 0;
428 }
429
430
// Ensures there is room to emit the next instruction: grows the buffer if
// within kGap of the end, and emits a pending constant pool if due.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}
439
440
// Writes one 32-bit instruction at the current position and advances pc_.
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
446
447
448 Address Assembler::target_address_from_return_address(Address pc) {
449   // Returns the address of the call target from the return address that will
450   // be returned to after a call.
451   // Call sequence on V7 or later is:
452   //  movw  ip, #... @ call address low 16
453   //  movt  ip, #... @ call address high 16
454   //  blx   ip
455   //                      @ return address
456   // For V6 when the constant pool is unavailable, it is:
457   //  mov  ip, #...     @ call address low 8
458   //  orr  ip, ip, #... @ call address 2nd 8
459   //  orr  ip, ip, #... @ call address 3rd 8
460   //  orr  ip, ip, #... @ call address high 8
461   //  blx   ip
462   //                      @ return address
463   // In cases that need frequent patching, the address is in the
464   // constant pool.  It could be a small constant pool load:
465   //  ldr   ip, [pc / pp, #...] @ call address
466   //  blx   ip
467   //                      @ return address
468   // Or an extended constant pool load (ARMv7):
469   //  movw  ip, #...
470   //  movt  ip, #...
471   //  ldr   ip, [pc, ip]  @ call address
472   //  blx   ip
473   //                      @ return address
474   // Or an extended constant pool load (ARMv6):
475   //  mov  ip, #...
476   //  orr  ip, ip, #...
477   //  orr  ip, ip, #...
478   //  orr  ip, ip, #...
479   //  ldr   ip, [pc, ip]  @ call address
480   //  blx   ip
481   //                      @ return address
482   Address candidate = pc - 2 * Assembler::kInstrSize;
483   Instr candidate_instr(Memory::int32_at(candidate));
484   if (IsLdrPcImmediateOffset(candidate_instr) |
485       IsLdrPpImmediateOffset(candidate_instr)) {
486     return candidate;
487   } else {
488     if (IsLdrPpRegOffset(candidate_instr)) {
489       candidate -= Assembler::kInstrSize;
490     }
491     if (CpuFeatures::IsSupported(ARMv7)) {
492       candidate -= 1 * Assembler::kInstrSize;
493       DCHECK(IsMovW(Memory::int32_at(candidate)) &&
494              IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
495     } else {
496       candidate -= 3 * Assembler::kInstrSize;
497       DCHECK(
498           IsMovImmed(Memory::int32_at(candidate)) &&
499           IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
500           IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
501           IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
502     }
503     return candidate;
504   }
505 }
506
507
// Maps a return address back to the start of its debug-break slot.
Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}
511
512
513 Address Assembler::return_address_from_call_start(Address pc) {
514   if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
515       IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
516     // Load from constant pool, small section.
517     return pc + kInstrSize * 2;
518   } else {
519     if (CpuFeatures::IsSupported(ARMv7)) {
520       DCHECK(IsMovW(Memory::int32_at(pc)));
521       DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
522       if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
523         // Load from constant pool, extended section.
524         return pc + kInstrSize * 4;
525       } else {
526         // A movw / movt load immediate.
527         return pc + kInstrSize * 3;
528       }
529     } else {
530       DCHECK(IsMovImmed(Memory::int32_at(pc)));
531       DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
532       DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
533       DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
534       if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
535         // Load from constant pool, extended section.
536         return pc + kInstrSize * 6;
537       } else {
538         // A mov / orr load immediate.
539         return pc + kInstrSize * 5;
540       }
541     }
542   }
543 }
544
545
// Deserializer hook: writes a target either through the normal patching
// path (ool constant pool) or directly into the constant pool slot.
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_ool_constant_pool) {
    set_target_address_at(constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}
554
555
// Deserializer hook: internal references are stored as raw addresses.
void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}
560
561
// True when the instruction(s) at pc load the target from a constant pool
// (small ldr form, or the extended movw/movt+ldr / mov/orr+ldr forms used
// with out-of-line constant pools), as opposed to a pure immediate load.
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}
575
576
// Decodes the constant pool slot address referenced by the load at pc.
// With ool constant pools the offset is reassembled from the immediate
// materialization (mov/orr on ARMv6, movw/movt on ARMv7) or read from the
// small ldr form; otherwise it is a pc-relative ldr offset.
Address Assembler::constant_pool_entry_address(
    Address pc, ConstantPoolArray* constant_pool) {
  if (FLAG_enable_ool_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return reinterpret_cast<Address>(constant_pool) + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}
614
615
// Reads the target address encoded at pc: from the constant pool slot, or
// reassembled from a movw/movt (ARMv7) or mov/orr (pre-v7) immediate load.
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
         movw_instr->ImmedMovwMovtValue());
  } else {
    // This is an mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}
646
647
// Patches the target address encoded at pc, mirroring the three encodings
// read by target_address_at, and flushes the icache when instructions were
// actually rewritten (constant pool updates touch only data, so no flush).
void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CpuFeatures::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is an movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 2 * kInstrSize);
    }
  } else {
    // This is an mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 4 * kInstrSize);
    }
  }
}
699
700
701 } }  // namespace v8::internal
702
703 #endif  // V8_ARM_ASSEMBLER_ARM_INL_H_