// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "arm/assembler-arm.h"

#include "cpu.h"
#include "debug.h"


namespace v8 {
namespace internal {


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}

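// 32 D-registers are available when the CPU supports VFP32DREGS, otherwise 16.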
int DwVfpRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


int DwVfpRegister::NumReservedRegisters() {
  return kNumReservedRegisters;
}


int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}

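// Allocation indices skip the two reserved registers (kDoubleRegZero and
// kScratchDoubleReg): codes above kDoubleRegZero shift down by
// kNumReservedRegisters, codes below map to themselves.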
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kScratchDoubleReg));
  if (reg.code() > kDoubleRegZero.code()) {
    return reg.code() - kNumReservedRegisters;
  }
  return reg.code();
}


DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
  ASSERT(index >= 0 && index < NumAllocatableRegisters());
  ASSERT(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
         kNumReservedRegisters - 1);
  if (index >= kDoubleRegZero.code()) {
    return from_code(index + kNumReservedRegisters);
  }
  return from_code(index);
}


void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_pointer_address_at(pc_);
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_)));
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_reference() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be on
    // an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}

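// Length, in instructions, of the code age sequence; the code age stub address
// is stored in the last word of the sequence (see code_age_stub below).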
static const int kNoCodeAgeSequenceLength = 3;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on Arm.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ + Assembler::kInstrSize *
                         (kNoCodeAgeSequenceLength - 1)));
}


void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ + Assembler::kInstrSize *
                     (kNoCodeAgeSequenceLength - 1)) =
      stub->instruction_start();
}


Address RelocInfo::call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::WipeOut() {
  ASSERT(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, NULL);
}


bool RelocInfo::IsPatchedReturnSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
  // A patched return sequence is:
  //  ldr ip, [pc, #0]
  //  blx ip
  return ((current_instr & kLdrPCMask) == kLdrPCPattern)
          && ((next_instr & kBlxRegMask) == kBlxRegPattern);
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}


bool Operand::is_reg() const {
  return rm_.is_valid() &&
         rs_.is(no_reg) &&
         shift_op_ == LSL &&
         shift_imm_ == 0;
}

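// Grow the buffer if there is not enough room for the next instruction, and
// emit the pending constant pool once the next check offset has been reached.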
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}


void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}

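// For a pc-relative ldr at |pc|, compute the address of the constant pool
// entry it loads from. kPcLoadDelta accounts for the offset between the
// instruction address and the value the instruction reads from pc.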
Address Assembler::target_pointer_address_at(Address pc) {
  Instr instr = Memory::int32_at(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
}

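// Read the target address at |pc|: either reassemble it from the 16-bit
// immediates of a movw/movt pair, or load it from the constant pool entry
// referenced by a pc-relative ldr.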
Address Assembler::target_address_at(Address pc) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* instr = Instruction::At(pc);
    Instruction* next_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (next_instr->ImmedMovwMovtValue() << 16) |
        instr->ImmedMovwMovtValue());
  }
  ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on V7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // Or pre-V7 or cases that need frequent patching:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                      @ return address
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr)) {
    return candidate;
  }
  candidate = pc - 3 * Assembler::kInstrSize;
  ASSERT(IsMovW(Memory::int32_at(candidate)) &&
         IsMovT(Memory::int32_at(candidate + kInstrSize)));
  return candidate;
}


Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
    return pc + kInstrSize * 2;
  } else {
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    return pc + kInstrSize * 3;
  }
}


void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}

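// Split a 16-bit value into the imm4:imm12 immediate fields of a movw/movt
// encoding (instruction bits 19:16 and 11:0).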
static Instr EncodeMovwImmediate(uint32_t immediate) {
  ASSERT(immediate < 0x10000);
  return ((immediate & 0xf000) << 4) | (immediate & 0xfff);
}

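// Patch the target address at |pc|: rewrite the movw/movt immediates in place
// (which requires an icache flush), or rewrite the constant pool entry used by
// the pc-relative ldr (which leaves the instruction itself unchanged).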
void Assembler::set_target_address_at(Address pc, Address target) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    uint32_t intermediate = instr_ptr[0];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
    instr_ptr[0] = intermediate;
    intermediate = instr_ptr[1];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate >> 16);
    instr_ptr[1] = intermediate;
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    CPU::FlushICache(pc, 2 * kInstrSize);
  } else {
    ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Memory::Address_at(target_pointer_address_at(pc)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CPU::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pc, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  }
}


} }  // namespace v8::internal

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_