1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 // The original source code covered by the above license above has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
35 // A light-weight IA32 Assembler.
37 #ifndef V8_IA32_ASSEMBLER_IA32_INL_H_
38 #define V8_IA32_ASSEMBLER_IA32_INL_H_
40 #include "ia32/assembler-ia32.h"
// 0xE8 is the IA-32 near CALL opcode with a 32-bit relative displacement.
49 static const byte kCallOpcode = 0xE8;
// Length in bytes of the code-age call sequence: 1 opcode byte + 4
// displacement bytes (matches the kCallOpcode check in apply() below).
50 static const int kNoCodeAgeSequenceLength = 5;
53 // The modes possibly affected by apply must be in kApplyMask.
// Adjusts the relocated word at pc_ after the host code object moved by
// `delta` bytes: pc-relative entries are re-biased by subtracting delta,
// absolute internal references by adding delta.  Every patched word is
// flushed from the instruction cache.
54 void RelocInfo::apply(intptr_t delta) {
55 if (IsRuntimeEntry(rmode_) || IsCodeTarget(rmode_)) {
// pc-relative target: moving the code by delta changes the displacement
// by -delta.
56 int32_t* p = reinterpret_cast<int32_t*>(pc_);
57 *p -= delta; // Relocate entry.
58 CPU::FlushICache(p, sizeof(uint32_t));
59 } else if (rmode_ == CODE_AGE_SEQUENCE) {
// Only patch if the age sequence currently starts with the call opcode;
// the rel32 operand begins one byte past the opcode.
60 if (*pc_ == kCallOpcode) {
61 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
62 *p -= delta; // Relocate entry.
63 CPU::FlushICache(p, sizeof(uint32_t));
65 } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
66 // Special handling of js_return when a break point is set (call
67 // instruction has been inserted).
68 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
69 *p -= delta; // Relocate entry.
70 CPU::FlushICache(p, sizeof(uint32_t));
71 } else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) {
72 // Special handling of a debug break slot when a break point is set (call
73 // instruction has been inserted).
74 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
75 *p -= delta; // Relocate entry.
76 CPU::FlushICache(p, sizeof(uint32_t));
77 } else if (IsInternalReference(rmode_)) {
78 // absolute code pointer inside code object moves with the code object.
79 int32_t* p = reinterpret_cast<int32_t*>(pc_);
80 *p += delta; // Relocate entry.
81 CPU::FlushICache(p, sizeof(uint32_t));
// Returns the absolute target of a code-target / runtime-entry reloc,
// decoded from the rel32 stored at pc_ (see Assembler::target_address_at).
86 Address RelocInfo::target_address() {
87 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
88 return Assembler::target_address_at(pc_);
// Returns the address of the cell in the instruction stream that holds
// the target; on ia32 that is pc_ itself.
92 Address RelocInfo::target_address_address() {
93 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
94 || rmode_ == EMBEDDED_OBJECT
95 || rmode_ == EXTERNAL_REFERENCE);
96 return reinterpret_cast<Address>(pc_);
// Size in bytes of an embedded target address (4 on ia32).
100 int RelocInfo::target_address_size() {
101 return Assembler::kSpecialTargetSize;
// Rewrites the rel32 at pc_ to point at `target` and, for code targets with
// UPDATE_WRITE_BARRIER, notifies incremental marking of the new code-to-code
// reference so the write barrier stays consistent.
105 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
106 Assembler::set_target_address_at(pc_, target);
107 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
108 if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
109 Object* target_code = Code::GetCodeFromTargetAddress(target);
110 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
111 host(), this, HeapObject::cast(target_code));
// Reads the object pointer embedded directly in the instruction stream.
116 Object* RelocInfo::target_object() {
117 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
118 return Memory::Object_at(pc_);
// Reads the embedded pointer as a Handle (the word at pc_ is treated as a
// handle location); `origin` is unused on ia32.
122 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
123 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
124 return Memory::Object_Handle_at(pc_);
// Returns the slot in the instruction stream holding the embedded object,
// so the GC can update it in place.
128 Object** RelocInfo::target_object_address() {
129 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
130 return &Memory::Object_at(pc_);
// Stores a new embedded object pointer at pc_, flushes the icache for the
// patched word, and (for heap objects under UPDATE_WRITE_BARRIER) records
// the write with incremental marking.  ConsStrings are disallowed because
// embedded strings must be flat.
134 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
135 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
136 ASSERT(!target->IsConsString());
137 Memory::Object_at(pc_) = target;
138 CPU::FlushICache(pc_, sizeof(Address));
139 if (mode == UPDATE_WRITE_BARRIER &&
141 target->IsHeapObject()) {
142 host()->GetHeap()->incremental_marking()->RecordWrite(
143 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
// Returns the slot holding an external (outside-the-heap) reference.
148 Address* RelocInfo::target_reference_address() {
149 ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
150 return reinterpret_cast<Address*>(pc_);
// Reads the runtime-entry target stored as a raw 32-bit word at pc_;
// `origin` is unused on ia32.
154 Address RelocInfo::target_runtime_entry(Assembler* origin) {
155 ASSERT(IsRuntimeEntry(rmode_));
156 return reinterpret_cast<Address>(*reinterpret_cast<int32_t*>(pc_));
// Updates a runtime-entry target; delegates to set_target_address and
// skips the write entirely when the target is unchanged.
160 void RelocInfo::set_target_runtime_entry(Address target,
161 WriteBarrierMode mode) {
162 ASSERT(IsRuntimeEntry(rmode_));
163 if (target_address() != target) set_target_address(target, mode);
// Wraps the cell address embedded at pc_ in a Handle<Cell>.
167 Handle<Cell> RelocInfo::target_cell_handle() {
168 ASSERT(rmode_ == RelocInfo::CELL);
169 Address address = Memory::Address_at(pc_);
170 return Handle<Cell>(reinterpret_cast<Cell**>(address));
// Decodes the embedded value address back into its owning Cell
// (the inverse of the +kValueOffset encoding used in set_target_cell).
174 Cell* RelocInfo::target_cell() {
175 ASSERT(rmode_ == RelocInfo::CELL);
176 return Cell::FromValueAddress(Memory::Address_at(pc_));
// Embeds the address of `cell`'s value field at pc_ (not the cell itself),
// flushes the icache, and records the write with incremental marking.
180 void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
181 ASSERT(rmode_ == RelocInfo::CELL);
182 Address address = cell->address() + Cell::kValueOffset;
183 Memory::Address_at(pc_) = address;
184 CPU::FlushICache(pc_, sizeof(Address));
185 if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
186 // TODO(1550) We are passing NULL as a slot because cell can never be on
187 // evacuation candidate.
188 host()->GetHeap()->incremental_marking()->RecordWrite(
// Returns a handle to the code-age stub referenced by the call in the
// age sequence; the operand lives one byte past the call opcode.
194 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
195 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
196 ASSERT(*pc_ == kCallOpcode);
197 return Memory::Object_Handle_at(pc_ + 1);
// Decodes the rel32 of the age-sequence call into the Code stub it targets.
201 Code* RelocInfo::code_age_stub() {
202 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
203 ASSERT(*pc_ == kCallOpcode);
204 return Code::GetCodeFromTargetAddress(
205 Assembler::target_address_at(pc_ + 1));
// Repoints the age-sequence call at a (possibly different) age stub.
209 void RelocInfo::set_code_age_stub(Code* stub) {
210 ASSERT(*pc_ == kCallOpcode);
211 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
212 Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
// Target of the debugger-patched call at a JS-return site or debug break
// slot; the rel32 operand starts one byte past the call opcode.
216 Address RelocInfo::call_address() {
217 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
218 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
219 return Assembler::target_address_at(pc_ + 1);
// Repoints the debugger-patched call and unconditionally records the new
// code-to-code reference with incremental marking (no WriteBarrierMode
// parameter here, unlike set_target_address).
223 void RelocInfo::set_call_address(Address target) {
224 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
225 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
226 Assembler::set_target_address_at(pc_ + 1, target);
227 if (host() != NULL) {
228 Object* target_code = Code::GetCodeFromTargetAddress(target);
229 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
230 host(), this, HeapObject::cast(target_code));
// Convenience read through call_object_address().
235 Object* RelocInfo::call_object() {
236 return *call_object_address();
// Convenience write through call_object_address(); note: no write barrier
// or icache flush here, callers are responsible for any needed follow-up.
240 void RelocInfo::set_call_object(Object* target) {
241 *call_object_address() = target;
// Slot holding the call operand of a patched return / debug-break-slot
// sequence (one byte past the call opcode).
245 Object** RelocInfo::call_object_address() {
246 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
247 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
248 return reinterpret_cast<Object**>(pc_ + 1);
// A JS return site is "patched" when the debugger replaced it with a call
// instruction, i.e. the first byte at pc_ is the call opcode.
252 bool RelocInfo::IsPatchedReturnSequence() {
253 return *pc_ == kCallOpcode;
// A debug break slot is "patched" when its nop padding has been replaced;
// anything other than a nop at pc() counts as patched.
257 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
258 return !Assembler::IsNop(pc());
// Dynamic-dispatch visitation: routes this reloc entry to the matching
// ObjectVisitor callback.  Visits that may rewrite the embedded word
// (embedded pointers, external references) flush the icache afterwards.
// Debugger branches are compiled in only with ENABLE_DEBUGGER_SUPPORT and
// fire only when break points are active and the site is actually patched.
262 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
263 RelocInfo::Mode mode = rmode();
264 if (mode == RelocInfo::EMBEDDED_OBJECT) {
265 visitor->VisitEmbeddedPointer(this);
266 CPU::FlushICache(pc_, sizeof(Address));
267 } else if (RelocInfo::IsCodeTarget(mode)) {
268 visitor->VisitCodeTarget(this);
269 } else if (mode == RelocInfo::CELL) {
270 visitor->VisitCell(this);
271 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
272 visitor->VisitExternalReference(this);
273 CPU::FlushICache(pc_, sizeof(Address));
274 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
275 visitor->VisitCodeAgeSequence(this);
276 #ifdef ENABLE_DEBUGGER_SUPPORT
277 } else if (((RelocInfo::IsJSReturn(mode) &&
278 IsPatchedReturnSequence()) ||
279 (RelocInfo::IsDebugBreakSlot(mode) &&
280 IsPatchedDebugBreakSlotSequence())) &&
281 isolate->debug()->has_break_points()) {
282 visitor->VisitDebugTarget(this);
284 } else if (IsRuntimeEntry(mode)) {
285 visitor->VisitRuntimeEntry(this);
// Static-dispatch twin of Visit(Isolate*, ObjectVisitor*): same mode
// routing, but calls StaticVisitor's static members (resolved at compile
// time) instead of virtual visitor methods.  Keep the two in sync.
290 template<typename StaticVisitor>
291 void RelocInfo::Visit(Heap* heap) {
292 RelocInfo::Mode mode = rmode();
293 if (mode == RelocInfo::EMBEDDED_OBJECT) {
294 StaticVisitor::VisitEmbeddedPointer(heap, this);
295 CPU::FlushICache(pc_, sizeof(Address));
296 } else if (RelocInfo::IsCodeTarget(mode)) {
297 StaticVisitor::VisitCodeTarget(heap, this);
298 } else if (mode == RelocInfo::CELL) {
299 StaticVisitor::VisitCell(heap, this);
300 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
301 StaticVisitor::VisitExternalReference(this);
302 CPU::FlushICache(pc_, sizeof(Address));
303 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
304 StaticVisitor::VisitCodeAgeSequence(heap, this);
305 #ifdef ENABLE_DEBUGGER_SUPPORT
306 } else if (heap->isolate()->debug()->has_break_points() &&
307 ((RelocInfo::IsJSReturn(mode) &&
308 IsPatchedReturnSequence()) ||
309 (RelocInfo::IsDebugBreakSlot(mode) &&
310 IsPatchedDebugBreakSlotSequence()))) {
311 StaticVisitor::VisitDebugTarget(heap, this);
313 } else if (IsRuntimeEntry(mode)) {
314 StaticVisitor::VisitRuntimeEntry(this);
// Plain 32-bit integer immediate; carries no relocation information.
320 Immediate::Immediate(int x) {
322 rmode_ = RelocInfo::NONE32;
// Immediate holding the raw address of an external reference; tagged
// EXTERNAL_REFERENCE so the GC/serializer can find and fix it up.
326 Immediate::Immediate(const ExternalReference& ext) {
327 x_ = reinterpret_cast<int32_t>(ext.address());
328 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
// Internal (code-relative) reference: stores the Label pointer itself in
// x_; Assembler::emit(const Immediate&) resolves it to a real offset.
332 Immediate::Immediate(Label* internal_offset) {
333 x_ = reinterpret_cast<int32_t>(internal_offset);
334 rmode_ = RelocInfo::INTERNAL_REFERENCE;
// Immediate from an object handle.  Heap objects embed the handle location
// and are tagged EMBEDDED_OBJECT (never allowed to be in new space, since
// embedded pointers are not updated by scavenges); smis embed the raw
// value with no relocation.
338 Immediate::Immediate(Handle<Object> handle) {
339 AllowDeferredHandleDereference using_raw_address;
340 // Verify all Objects referred by code are NOT in new space.
341 Object* obj = *handle;
342 if (obj->IsHeapObject()) {
343 ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
344 x_ = reinterpret_cast<intptr_t>(handle.location());
345 rmode_ = RelocInfo::EMBEDDED_OBJECT;
347 // no relocation needed
348 x_ = reinterpret_cast<intptr_t>(obj);
349 rmode_ = RelocInfo::NONE32;
// Smi immediate: the tagged value is embedded directly, no relocation.
354 Immediate::Immediate(Smi* value) {
355 x_ = reinterpret_cast<intptr_t>(value);
356 rmode_ = RelocInfo::NONE32;
// Raw address immediate with no relocation (caller guarantees the address
// stays valid without GC fix-up).
360 Immediate::Immediate(Address addr) {
361 x_ = reinterpret_cast<int32_t>(addr);
362 rmode_ = RelocInfo::NONE32;
// Appends a raw 32-bit little-endian word to the instruction stream.
366 void Assembler::emit(uint32_t x) {
367 *reinterpret_cast<uint32_t*>(pc_) = x;
368 pc_ += sizeof(uint32_t);
// Emits an object reference: heap objects go out as an EMBEDDED_OBJECT
// handle location, smis as their raw word.  Mirrors the encoding choice in
// Immediate::Immediate(Handle<Object>).
372 void Assembler::emit(Handle<Object> handle) {
373 AllowDeferredHandleDereference heap_object_check;
374 // Verify all Objects referred by code are NOT in new space.
375 Object* obj = *handle;
376 ASSERT(!isolate()->heap()->InNewSpace(obj));
377 if (obj->IsHeapObject()) {
378 emit(reinterpret_cast<intptr_t>(handle.location()),
379 RelocInfo::EMBEDDED_OBJECT);
381 // no relocation needed
382 emit(reinterpret_cast<intptr_t>(obj));
// Emits a word with reloc info.  A code target carrying a type-feedback id
// is recorded as CODE_TARGET_WITH_ID (id stored as the reloc data);
// otherwise any non-NONE mode except CODE_AGE_SEQUENCE is recorded as-is.
387 void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, TypeFeedbackId id) {
388 if (rmode == RelocInfo::CODE_TARGET && !id.IsNone()) {
389 RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, id.ToInt());
390 } else if (!RelocInfo::IsNone(rmode)
391 && rmode != RelocInfo::CODE_AGE_SEQUENCE) {
392 RecordRelocInfo(rmode);
// Emits a code-object reference by its handle location, forwarding the
// reloc mode and feedback id to the word-emitting overload above.
398 void Assembler::emit(Handle<Code> code,
399 RelocInfo::Mode rmode,
401 AllowDeferredHandleDereference embedding_raw_address;
402 emit(reinterpret_cast<intptr_t>(code.location()), rmode, id);
// Emits an Immediate.  INTERNAL_REFERENCE immediates hold a Label* in x_
// (see Immediate::Immediate(Label*)) and are emitted as a code-relative
// offset; all other modes record their reloc info before the word goes out.
406 void Assembler::emit(const Immediate& x) {
407 if (x.rmode_ == RelocInfo::INTERNAL_REFERENCE) {
408 Label* label = reinterpret_cast<Label*>(x.x_);
409 emit_code_relative_offset(label);
412 if (!RelocInfo::IsNone(x.rmode_)) RecordRelocInfo(x.rmode_);
// Emits a label as an offset relative to the start of the code object.
// Bound labels resolve immediately (position + header, untagged); unbound
// labels are linked via a CODE_RELATIVE displacement for later fix-up.
417 void Assembler::emit_code_relative_offset(Label* label) {
418 if (label->is_bound()) {
420 pos = label->pos() + Code::kHeaderSize - kHeapObjectTag;
423 emit_disp(label, Displacement::CODE_RELATIVE);
// Emits the low 16 bits of an immediate; relocated immediates are not
// representable in 16 bits, hence the IsNone assert.
428 void Assembler::emit_w(const Immediate& x) {
429 ASSERT(RelocInfo::IsNone(x.rmode_));
430 uint16_t value = static_cast<uint16_t>(x.x_);
431 reinterpret_cast<uint16_t*>(pc_)[0] = value;
432 pc_ += sizeof(uint16_t);
// Decodes the rel32 displacement stored at pc into an absolute target:
// target = pc + 4 + disp (displacement is relative to the following word).
436 Address Assembler::target_address_at(Address pc) {
437 return pc + sizeof(int32_t) + *reinterpret_cast<int32_t*>(pc);
// Inverse of target_address_at: stores target as a displacement relative
// to the word following pc, then flushes the patched word from the icache.
441 void Assembler::set_target_address_at(Address pc, Address target) {
442 int32_t* p = reinterpret_cast<int32_t*>(pc);
443 *p = target - (pc + sizeof(int32_t));
444 CPU::FlushICache(p, sizeof(int32_t));
// Given the return address of a call, steps back to where the call's
// target word is stored in the instruction stream.
448 Address Assembler::target_address_from_return_address(Address pc) {
449 return pc - kCallTargetAddressOffset;
// Reads the Displacement encoded at an (unbound) label's link position.
453 Displacement Assembler::disp_at(Label* L) {
454 return Displacement(long_at(L->pos()));
// Overwrites the Displacement stored at a label's link position.
458 void Assembler::disp_at_put(Label* L, Displacement disp) {
459 long_at_put(L->pos(), disp.data());
// Emits a displacement for an unresolved label and threads the label's
// link chain through the just-emitted word.
463 void Assembler::emit_disp(Label* L, Displacement::Type type) {
464 Displacement disp(L, type);
465 L->link_to(pc_offset());
466 emit(static_cast<int>(disp.data()));
// Emits an 8-bit (near) displacement byte for a label.  An already
// near-linked label yields its backward offset (must fit in int8);
// otherwise the label is linked at the current position as a near link.
470 void Assembler::emit_near_disp(Label* L) {
472 if (L->is_near_linked()) {
473 int offset = L->near_link_pos() - pc_offset();
474 ASSERT(is_int8(offset));
475 disp = static_cast<byte>(offset & 0xFF);
477 L->link_to(pc_offset(), Label::kNear);
// Writes the ModR/M byte: mod in bits 7-6 (assert keeps it to 2 bits),
// r/m register code in bits 2-0.
482 void Operand::set_modrm(int mod, Register rm) {
483 ASSERT((mod & -4) == 0);
484 buf_[0] = mod << 6 | rm.code();
// Writes the SIB byte: scale (2 bits) in 7-6, index register in 5-3,
// base register in 2-0.  An esp "index" encodes no-index, so it is only
// legal with esp as base.
489 void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
491 ASSERT((scale & -4) == 0);
492 // Use SIB with no index register only for base esp.
493 ASSERT(!index.is(esp) || base.is(esp));
494 buf_[1] = scale << 6 | index.code() << 3 | base.code();
// Appends an 8-bit displacement after the ModR/M (and optional SIB) byte.
499 void Operand::set_disp8(int8_t disp) {
500 ASSERT(len_ == 1 || len_ == 2);
501 *reinterpret_cast<int8_t*>(&buf_[len_++]) = disp;
// Appends a 32-bit displacement (with its reloc mode) after the ModR/M
// and optional SIB byte.
505 void Operand::set_dispr(int32_t disp, RelocInfo::Mode rmode) {
506 ASSERT(len_ == 1 || len_ == 2);
507 int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
509 len_ += sizeof(int32_t);
// Register-direct operand (ModR/M mod == 3, no displacement).
513 Operand::Operand(Register reg) {
// XMM register operand: reuses the general-register encoding path by
// wrapping the xmm register code in a Register value.
519 Operand::Operand(XMMRegister xmm_reg) {
520 Register reg = { xmm_reg.code() };
// Absolute-address operand: a bare 32-bit displacement with reloc mode.
525 Operand::Operand(int32_t disp, RelocInfo::Mode rmode) {
528 set_dispr(disp, rmode);
531 } } // namespace v8::internal
533 #endif // V8_IA32_ASSEMBLER_IA32_INL_H_