static const int kNodeIsIndependentShift = 3;
static const int kNodeIsPartiallyDependentShift = 4;
- static const int kJSObjectType = 0xbf;
+ static const int kJSObjectType = 0xbe;
static const int kFirstNonstringType = 0x80;
static const int kOddballType = 0x83;
static const int kForeignType = 0x87;
DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
|| rmode_ == EMBEDDED_OBJECT
|| rmode_ == EXTERNAL_REFERENCE);
- if (FLAG_enable_ool_constant_pool ||
+ if (FLAG_enable_embedded_constant_pool ||
Assembler::IsMovW(Memory::int32_at(pc_))) {
- // We return the PC for ool constant pool since this function is used by the
- // serializer and expects the address to reside within the code object.
+ // We return the PC for embedded constant pool since this function is used by
+ // the serializer and expects the address to reside within the code object.
void Assembler::deserialization_set_special_target_at(
Address constant_pool_entry, Code* code, Address target) {
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
set_target_address_at(constant_pool_entry, code, target);
} else {
Memory::Address_at(constant_pool_entry) = target;
bool Assembler::is_constant_pool_load(Address pc) {
if (CpuFeatures::IsSupported(ARMv7)) {
return !Assembler::IsMovW(Memory::int32_at(pc)) ||
- (FLAG_enable_ool_constant_pool &&
+ (FLAG_enable_embedded_constant_pool &&
Assembler::IsLdrPpRegOffset(
Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
} else {
return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
- (FLAG_enable_ool_constant_pool &&
+ (FLAG_enable_embedded_constant_pool &&
Assembler::IsLdrPpRegOffset(
Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
}
}
-Address Assembler::constant_pool_entry_address(
- Address pc, ConstantPoolArray* constant_pool) {
- if (FLAG_enable_ool_constant_pool) {
+Address Assembler::constant_pool_entry_address(Address pc,
+ Address constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
DCHECK(constant_pool != NULL);
int cp_offset;
if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
}
- return reinterpret_cast<Address>(constant_pool) + cp_offset;
+ return constant_pool + cp_offset;
} else {
DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
Instr instr = Memory::int32_at(pc);
}
-Address Assembler::target_address_at(Address pc,
- ConstantPoolArray* constant_pool) {
+Address Assembler::target_address_at(Address pc, Address constant_pool) {
if (is_constant_pool_load(pc)) {
// This is a constant pool lookup. Return the value in the constant pool.
return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
}
-void Assembler::set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
+void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
if (is_constant_pool_load(pc)) {
bool RelocInfo::IsCodedSpecially() {
// The deserializer needs to know whether a pointer is specially coded.  Being
// specially coded on ARM means that it is a movw/movt instruction, or is an
- // out of line constant pool entry.  These only occur if
- // FLAG_enable_ool_constant_pool is true.
- return FLAG_enable_ool_constant_pool;
+ // embedded constant pool entry.  These only occur if
+ // FLAG_enable_embedded_constant_pool is true.
+ return FLAG_enable_embedded_constant_pool;
}
Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
: AssemblerBase(isolate, buffer, buffer_size),
recorded_ast_id_(TypeFeedbackId::None()),
- constant_pool_builder_(),
+ constant_pool_builder_(kLdrMaxReachBits, kVldrMaxReachBits),
positions_recorder_(this) {
reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
- num_pending_32_bit_reloc_info_ = 0;
- num_pending_64_bit_reloc_info_ = 0;
+ num_pending_32_bit_constants_ = 0;
+ num_pending_64_bit_constants_ = 0;
next_buffer_check_ = 0;
const_pool_blocked_nesting_ = 0;
no_const_pool_before_ = 0;
void Assembler::GetCode(CodeDesc* desc) {
reloc_info_writer.Finish();
- if (!FLAG_enable_ool_constant_pool) {
- // Emit constant pool if necessary.
+
+ // Emit constant pool if necessary.
+ int constant_pool_offset = 0;
+ if (FLAG_enable_embedded_constant_pool) {
+ constant_pool_offset = EmitEmbeddedConstantPool();
+ } else {
CheckConstPool(true, false);
- DCHECK(num_pending_32_bit_reloc_info_ == 0);
- DCHECK(num_pending_64_bit_reloc_info_ == 0);
+ DCHECK(num_pending_32_bit_constants_ == 0);
+ DCHECK(num_pending_64_bit_constants_ == 0);
}
// Set up code descriptor.
desc->buffer = buffer_;
desc->buffer_size = buffer_size_;
desc->instr_size = pc_offset();
desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
+ desc->constant_pool_size =
+ (constant_pool_offset ? desc->instr_size - constant_pool_offset : 0);
desc->origin = this;
}
Instr Assembler::GetConsantPoolLoadPattern() {
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
return kLdrPpImmedPattern;
} else {
return kLdrPCImmedPattern;
Instr Assembler::GetConsantPoolLoadMask() {
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
return kLdrPpImmedMask;
} else {
return kLdrPCImmedMask;
static bool use_mov_immediate_load(const Operand& x,
const Assembler* assembler) {
- if (FLAG_enable_ool_constant_pool && assembler != NULL &&
- !assembler->is_ool_constant_pool_available()) {
+ if (FLAG_enable_embedded_constant_pool && assembler != NULL &&
+ !assembler->is_constant_pool_available()) {
return true;
} else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
(assembler == NULL || !assembler->predictable_code_size())) {
if (use_mov_immediate_load(*this, assembler)) {
// A movw / movt or mov / orr immediate load.
instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4;
- } else if (assembler != NULL && assembler->use_extended_constant_pool()) {
- // An extended constant pool load.
+ } else if (assembler != NULL &&
+ assembler->ConstantPoolAccessIsInOverflow()) {
+ // An overflowed constant pool load.
instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5;
} else {
// A small constant pool load.
void Assembler::move_32_bit_immediate(Register rd,
const Operand& x,
Condition cond) {
- RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL);
uint32_t imm32 = static_cast<uint32_t>(x.imm32_);
if (x.must_output_reloc_info(this)) {
- RecordRelocInfo(rinfo);
+ RecordRelocInfo(x.rmode_);
}
if (use_mov_immediate_load(x, this)) {
Register target = rd.code() == pc.code() ? ip : rd;
if (CpuFeatures::IsSupported(ARMv7)) {
- if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) {
+ if (!FLAG_enable_embedded_constant_pool &&
+ x.must_output_reloc_info(this)) {
// Make sure the movw/movt doesn't get separated.
BlockConstPoolFor(2);
}
movw(target, imm32 & 0xffff, cond);
movt(target, imm32 >> 16, cond);
} else {
- DCHECK(FLAG_enable_ool_constant_pool);
+ DCHECK(FLAG_enable_embedded_constant_pool);
mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond);
orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond);
orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond);
mov(rd, target, LeaveCC, cond);
}
} else {
- DCHECK(!FLAG_enable_ool_constant_pool || is_ool_constant_pool_available());
- ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo);
- if (section == ConstantPoolArray::EXTENDED_SECTION) {
- DCHECK(FLAG_enable_ool_constant_pool);
+ DCHECK(!FLAG_enable_embedded_constant_pool || is_constant_pool_available());
+ ConstantPoolEntry::Access access =
+ ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_);
+ if (access == ConstantPoolEntry::OVERFLOWED) {
+ DCHECK(FLAG_enable_embedded_constant_pool);
Register target = rd.code() == pc.code() ? ip : rd;
// Emit instructions to load constant pool offset.
if (CpuFeatures::IsSupported(ARMv7)) {
// Load from constant pool at offset.
ldr(rd, MemOperand(pp, target), cond);
} else {
- DCHECK(section == ConstantPoolArray::SMALL_SECTION);
- ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond);
+ DCHECK(access == ConstantPoolEntry::REGULAR);
+ ldr(rd, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0),
+ cond);
}
}
}
int vd, d;
dst.split_code(&vd, &d);
emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
- } else if (FLAG_enable_vldr_imm && is_ool_constant_pool_available()) {
+ } else if (FLAG_enable_vldr_imm && is_constant_pool_available()) {
// TODO(jfb) Temporarily turned off until we have constant blinding or
// some equivalent mitigation: an attacker can otherwise control
// generated data which also happens to be executable, a Very Bad
// The code could also randomize the order of values, though
// that's tricky because vldr has a limited reach. Furthermore
// it breaks load locality.
- RelocInfo rinfo(pc_, imm);
- ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo);
- if (section == ConstantPoolArray::EXTENDED_SECTION) {
- DCHECK(FLAG_enable_ool_constant_pool);
+ ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm);
+ if (access == ConstantPoolEntry::OVERFLOWED) {
+ DCHECK(FLAG_enable_embedded_constant_pool);
// Emit instructions to load constant pool offset.
movw(ip, 0);
movt(ip, 0);
// Load from constant pool at offset.
vldr(dst, MemOperand(pp, ip));
} else {
- DCHECK(section == ConstantPoolArray::SMALL_SECTION);
- vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0));
+ DCHECK(access == ConstantPoolEntry::REGULAR);
+ vldr(dst, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0));
}
} else {
// Synthesise the double from ARM immediates.
// None of our relocation types are pc relative pointing outside the code
// buffer nor pc absolute pointing inside the code buffer, so there is no need
// to relocate any emitted relocation entries.
-
- // Relocate pending relocation entries.
- for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
- RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
- DCHECK(rinfo.rmode() != RelocInfo::COMMENT &&
- rinfo.rmode() != RelocInfo::POSITION);
- if (rinfo.rmode() != RelocInfo::JS_RETURN) {
- rinfo.set_pc(rinfo.pc() + pc_delta);
- }
- }
- for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
- RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
- DCHECK(rinfo.rmode() == RelocInfo::NONE64);
- rinfo.set_pc(rinfo.pc() + pc_delta);
- }
- constant_pool_builder_.Relocate(pc_delta);
}
// No relocation info should be pending while using db. db is used
// to write pure data with no pointers and the constant pool should
// be emitted before using db.
- DCHECK(num_pending_32_bit_reloc_info_ == 0);
- DCHECK(num_pending_64_bit_reloc_info_ == 0);
+ DCHECK(num_pending_32_bit_constants_ == 0);
+ DCHECK(num_pending_64_bit_constants_ == 0);
CheckBuffer();
*reinterpret_cast<uint8_t*>(pc_) = data;
pc_ += sizeof(uint8_t);
// No relocation info should be pending while using dd. dd is used
// to write pure data with no pointers and the constant pool should
// be emitted before using dd.
- DCHECK(num_pending_32_bit_reloc_info_ == 0);
- DCHECK(num_pending_64_bit_reloc_info_ == 0);
+ DCHECK(num_pending_32_bit_constants_ == 0);
+ DCHECK(num_pending_64_bit_constants_ == 0);
CheckBuffer();
*reinterpret_cast<uint32_t*>(pc_) = data;
pc_ += sizeof(uint32_t);
}
+void Assembler::dq(uint64_t value) {
+ // No relocation info should be pending while using dq. dq is used
+ // to write pure data with no pointers and the constant pool should
+ // be emitted before using dq.
+ DCHECK(num_pending_32_bit_constants_ == 0);
+ DCHECK(num_pending_64_bit_constants_ == 0);
+ CheckBuffer();
+ *reinterpret_cast<uint64_t*>(pc_) = value;
+ pc_ += sizeof(uint64_t);
+}
+
+
void Assembler::emit_code_stub_address(Code* stub) {
CheckBuffer();
*reinterpret_cast<uint32_t*>(pc_) =
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
+ if (RelocInfo::IsNone(rmode) ||
+ // Don't record external references unless the heap will be serialized.
+ (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() &&
+ !emit_debug_code())) {
+ return;
+ }
+ DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
+ if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
+ data = RecordedAstId().ToInt();
+ ClearRecordedAstId();
+ }
RelocInfo rinfo(pc_, rmode, data, NULL);
- RecordRelocInfo(rinfo);
+ reloc_info_writer.Write(&rinfo);
}
-void Assembler::RecordRelocInfo(const RelocInfo& rinfo) {
- if (!RelocInfo::IsNone(rinfo.rmode())) {
- // Don't record external references unless the heap will be serialized.
- if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE &&
- !serializer_enabled() && !emit_debug_code()) {
- return;
- }
- DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
- if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) {
- RelocInfo reloc_info_with_ast_id(rinfo.pc(),
- rinfo.rmode(),
- RecordedAstId().ToInt(),
- NULL);
- ClearRecordedAstId();
- reloc_info_writer.Write(&reloc_info_with_ast_id);
- } else {
- reloc_info_writer.Write(&rinfo);
+ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
+ RelocInfo::Mode rmode,
+ intptr_t value) {
+ DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::POSITION &&
+ rmode != RelocInfo::STATEMENT_POSITION &&
+ rmode != RelocInfo::CONST_POOL && rmode != RelocInfo::NONE64);
+ bool sharing_ok = RelocInfo::IsNone(rmode) ||
+ !(serializer_enabled() || rmode < RelocInfo::CELL);
+ if (FLAG_enable_embedded_constant_pool) {
+ return constant_pool_builder_.AddEntry(position, value, sharing_ok);
+ } else {
+ DCHECK(num_pending_32_bit_constants_ < kMaxNumPending32Constants);
+ if (num_pending_32_bit_constants_ == 0) {
+ first_const_pool_32_use_ = position;
}
+ ConstantPoolEntry entry(position, value, sharing_ok);
+ pending_32_bit_constants_[num_pending_32_bit_constants_++] = entry;
+
+ // Make sure the constant pool is not emitted in place of the next
+ // instruction for which we just recorded relocation info.
+ BlockConstPoolFor(1);
+ return ConstantPoolEntry::REGULAR;
}
}
-ConstantPoolArray::LayoutSection Assembler::ConstantPoolAddEntry(
- const RelocInfo& rinfo) {
- if (FLAG_enable_ool_constant_pool) {
- return constant_pool_builder_.AddEntry(this, rinfo);
+ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
+ double value) {
+ if (FLAG_enable_embedded_constant_pool) {
+ return constant_pool_builder_.AddEntry(position, value);
} else {
- if (rinfo.rmode() == RelocInfo::NONE64) {
- DCHECK(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo);
- if (num_pending_64_bit_reloc_info_ == 0) {
- first_const_pool_64_use_ = pc_offset();
- }
- pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
- } else {
- DCHECK(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
- if (num_pending_32_bit_reloc_info_ == 0) {
- first_const_pool_32_use_ = pc_offset();
- }
- pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
+ DCHECK(num_pending_64_bit_constants_ < kMaxNumPending64Constants);
+ if (num_pending_64_bit_constants_ == 0) {
+ first_const_pool_64_use_ = position;
}
+ ConstantPoolEntry entry(position, value);
+ pending_64_bit_constants_[num_pending_64_bit_constants_++] = entry;
+
// Make sure the constant pool is not emitted in place of the next
// instruction for which we just recorded relocation info.
BlockConstPoolFor(1);
- return ConstantPoolArray::SMALL_SECTION;
+ return ConstantPoolEntry::REGULAR;
}
}
void Assembler::BlockConstPoolFor(int instructions) {
- if (FLAG_enable_ool_constant_pool) {
- // Should be a no-op if using an out-of-line constant pool.
- DCHECK(num_pending_32_bit_reloc_info_ == 0);
- DCHECK(num_pending_64_bit_reloc_info_ == 0);
+ if (FLAG_enable_embedded_constant_pool) {
+ // Should be a no-op if using an embedded constant pool.
+ DCHECK(num_pending_32_bit_constants_ == 0);
+ DCHECK(num_pending_64_bit_constants_ == 0);
return;
}
// Max pool start (if we need a jump and an alignment).
#ifdef DEBUG
int start = pc_limit + kInstrSize + 2 * kPointerSize;
- DCHECK((num_pending_32_bit_reloc_info_ == 0) ||
+ DCHECK((num_pending_32_bit_constants_ == 0) ||
(start - first_const_pool_32_use_ +
- num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool));
- DCHECK((num_pending_64_bit_reloc_info_ == 0) ||
+ num_pending_64_bit_constants_ * kDoubleSize <
+ kMaxDistToIntPool));
+ DCHECK((num_pending_64_bit_constants_ == 0) ||
(start - first_const_pool_64_use_ < kMaxDistToFPPool));
#endif
no_const_pool_before_ = pc_limit;
void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
- if (FLAG_enable_ool_constant_pool) {
- // Should be a no-op if using an out-of-line constant pool.
- DCHECK(num_pending_32_bit_reloc_info_ == 0);
- DCHECK(num_pending_64_bit_reloc_info_ == 0);
+ if (FLAG_enable_embedded_constant_pool) {
+ // Should be a no-op if using an embedded constant pool.
+ DCHECK(num_pending_32_bit_constants_ == 0);
+ DCHECK(num_pending_64_bit_constants_ == 0);
return;
}
}
// There is nothing to do if there are no pending constant pool entries.
- if ((num_pending_32_bit_reloc_info_ == 0) &&
- (num_pending_64_bit_reloc_info_ == 0)) {
+ if ((num_pending_32_bit_constants_ == 0) &&
+ (num_pending_64_bit_constants_ == 0)) {
// Calculate the offset of the next check.
next_buffer_check_ = pc_offset() + kCheckPoolInterval;
return;
// the gap to the relocation information).
int jump_instr = require_jump ? kInstrSize : 0;
int size_up_to_marker = jump_instr + kInstrSize;
- int size_after_marker = num_pending_32_bit_reloc_info_ * kPointerSize;
- bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0);
+ int size_after_marker = num_pending_32_bit_constants_ * kPointerSize;
+ bool has_fp_values = (num_pending_64_bit_constants_ > 0);
bool require_64_bit_align = false;
if (has_fp_values) {
require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7);
if (require_64_bit_align) {
size_after_marker += kInstrSize;
}
- size_after_marker += num_pending_64_bit_reloc_info_ * kDoubleSize;
+ size_after_marker += num_pending_64_bit_constants_ * kDoubleSize;
}
int size = size_up_to_marker + size_after_marker;
DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0));
bool need_emit = false;
if (has_fp_values) {
- int dist64 = pc_offset() +
- size -
- num_pending_32_bit_reloc_info_ * kPointerSize -
+ int dist64 = pc_offset() + size -
+ num_pending_32_bit_constants_ * kPointerSize -
first_const_pool_64_use_;
if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) ||
(!require_jump && (dist64 >= kMaxDistToFPPool / 2))) {
// Emit 64-bit constant pool entries first: their range is smaller than
// 32-bit entries.
- for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
- RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
+ for (int i = 0; i < num_pending_64_bit_constants_; i++) {
+ ConstantPoolEntry& entry = pending_64_bit_constants_[i];
DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment.
- Instr instr = instr_at(rinfo.pc());
+ Instr instr = instr_at(entry.position());
// Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
DCHECK((IsVldrDPcImmediateOffset(instr) &&
GetVldrDRegisterImmediateOffset(instr) == 0));
- int delta = pc_ - rinfo.pc() - kPcLoadDelta;
+ int delta = pc_offset() - entry.position() - kPcLoadDelta;
DCHECK(is_uint10(delta));
bool found = false;
- uint64_t value = rinfo.raw_data64();
+ uint64_t value = entry.value64();
for (int j = 0; j < i; j++) {
- RelocInfo& rinfo2 = pending_64_bit_reloc_info_[j];
- if (value == rinfo2.raw_data64()) {
+ ConstantPoolEntry& entry2 = pending_64_bit_constants_[j];
+ if (value == entry2.value64()) {
found = true;
- DCHECK(rinfo2.rmode() == RelocInfo::NONE64);
- Instr instr2 = instr_at(rinfo2.pc());
+ Instr instr2 = instr_at(entry2.position());
DCHECK(IsVldrDPcImmediateOffset(instr2));
delta = GetVldrDRegisterImmediateOffset(instr2);
- delta += rinfo2.pc() - rinfo.pc();
+ delta += entry2.position() - entry.position();
break;
}
}
- instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta));
+ instr_at_put(entry.position(),
+ SetVldrDRegisterImmediateOffset(instr, delta));
if (!found) {
- uint64_t uint_data = rinfo.raw_data64();
- emit(uint_data & 0xFFFFFFFF);
- emit(uint_data >> 32);
+ dq(entry.value64());
}
}
// Emit 32-bit constant pool entries.
- for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
- RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
- DCHECK(rinfo.rmode() != RelocInfo::COMMENT &&
- rinfo.rmode() != RelocInfo::POSITION &&
- rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
- rinfo.rmode() != RelocInfo::CONST_POOL &&
- rinfo.rmode() != RelocInfo::NONE64);
-
- Instr instr = instr_at(rinfo.pc());
+ for (int i = 0; i < num_pending_32_bit_constants_; i++) {
+ ConstantPoolEntry& entry = pending_32_bit_constants_[i];
+ Instr instr = instr_at(entry.position());
// 64-bit loads shouldn't get here.
DCHECK(!IsVldrDPcImmediateOffset(instr));
if (IsLdrPcImmediateOffset(instr) &&
GetLdrRegisterImmediateOffset(instr) == 0) {
- int delta = pc_ - rinfo.pc() - kPcLoadDelta;
+ int delta = pc_offset() - entry.position() - kPcLoadDelta;
DCHECK(is_uint12(delta));
// 0 is the smallest delta:
// ldr rd, [pc, #0]
// data
bool found = false;
- if (!serializer_enabled() && rinfo.rmode() >= RelocInfo::CELL) {
+ if (entry.sharing_ok()) {
for (int j = 0; j < i; j++) {
- RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j];
+ ConstantPoolEntry& entry2 = pending_32_bit_constants_[j];
- if ((rinfo2.data() == rinfo.data()) &&
- (rinfo2.rmode() == rinfo.rmode())) {
- Instr instr2 = instr_at(rinfo2.pc());
+ if (entry2.value() == entry.value()) {
+ Instr instr2 = instr_at(entry2.position());
if (IsLdrPcImmediateOffset(instr2)) {
delta = GetLdrRegisterImmediateOffset(instr2);
- delta += rinfo2.pc() - rinfo.pc();
+ delta += entry2.position() - entry.position();
found = true;
break;
}
}
}
- instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
+ instr_at_put(entry.position(),
+ SetLdrRegisterImmediateOffset(instr, delta));
if (!found) {
- emit(rinfo.data());
+ emit(entry.value());
}
} else {
DCHECK(IsMovW(instr));
}
}
- num_pending_32_bit_reloc_info_ = 0;
- num_pending_64_bit_reloc_info_ = 0;
+ num_pending_32_bit_constants_ = 0;
+ num_pending_64_bit_constants_ = 0;
first_const_pool_32_use_ = -1;
first_const_pool_64_use_ = -1;
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- if (!FLAG_enable_ool_constant_pool) {
- return isolate->factory()->empty_constant_pool_array();
- }
- return constant_pool_builder_.New(isolate);
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- constant_pool_builder_.Populate(this, constant_pool);
-}
-
-
-ConstantPoolBuilder::ConstantPoolBuilder()
- : entries_(), current_section_(ConstantPoolArray::SMALL_SECTION) {}
-
-
-bool ConstantPoolBuilder::IsEmpty() {
- return entries_.size() == 0;
-}
-
-
-ConstantPoolArray::Type ConstantPoolBuilder::GetConstantPoolType(
- RelocInfo::Mode rmode) {
- if (rmode == RelocInfo::NONE64) {
- return ConstantPoolArray::INT64;
- } else if (!RelocInfo::IsGCRelocMode(rmode)) {
- return ConstantPoolArray::INT32;
- } else if (RelocInfo::IsCodeTarget(rmode)) {
- return ConstantPoolArray::CODE_PTR;
- } else {
- DCHECK(RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode));
- return ConstantPoolArray::HEAP_PTR;
- }
-}
-
-
-ConstantPoolArray::LayoutSection ConstantPoolBuilder::AddEntry(
- Assembler* assm, const RelocInfo& rinfo) {
- RelocInfo::Mode rmode = rinfo.rmode();
- DCHECK(rmode != RelocInfo::COMMENT &&
- rmode != RelocInfo::POSITION &&
- rmode != RelocInfo::STATEMENT_POSITION &&
- rmode != RelocInfo::CONST_POOL);
-
- // Try to merge entries which won't be patched.
- int merged_index = -1;
- ConstantPoolArray::LayoutSection entry_section = current_section_;
- if (RelocInfo::IsNone(rmode) ||
- (!assm->serializer_enabled() && (rmode >= RelocInfo::CELL))) {
- size_t i;
- std::vector<ConstantPoolEntry>::const_iterator it;
- for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
- if (RelocInfo::IsEqual(rinfo, it->rinfo_)) {
- // Merge with found entry.
- merged_index = i;
- entry_section = entries_[i].section_;
- break;
- }
- }
- }
- DCHECK(entry_section <= current_section_);
- entries_.push_back(ConstantPoolEntry(rinfo, entry_section, merged_index));
-
- if (merged_index == -1) {
- // Not merged, so update the appropriate count.
- number_of_entries_[entry_section].increment(GetConstantPoolType(rmode));
- }
-
- // Check if we still have room for another entry in the small section
- // given Arm's ldr and vldr immediate offset range.
- if (current_section_ == ConstantPoolArray::SMALL_SECTION &&
- !(is_uint12(ConstantPoolArray::SizeFor(*small_entries())) &&
- is_uint10(ConstantPoolArray::MaxInt64Offset(
- small_entries()->count_of(ConstantPoolArray::INT64))))) {
- current_section_ = ConstantPoolArray::EXTENDED_SECTION;
- }
- return entry_section;
-}
-
-
-void ConstantPoolBuilder::Relocate(int pc_delta) {
- for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
- entry != entries_.end(); entry++) {
- DCHECK(entry->rinfo_.rmode() != RelocInfo::JS_RETURN);
- entry->rinfo_.set_pc(entry->rinfo_.pc() + pc_delta);
- }
-}
-
+void Assembler::PatchConstantPoolAccessInstruction(
+ int pc_offset, int offset, ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ DCHECK(FLAG_enable_embedded_constant_pool);
+ Address pc = buffer_ + pc_offset;
-Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) {
- if (IsEmpty()) {
- return isolate->factory()->empty_constant_pool_array();
- } else if (extended_entries()->is_empty()) {
- return isolate->factory()->NewConstantPoolArray(*small_entries());
- } else {
- DCHECK(current_section_ == ConstantPoolArray::EXTENDED_SECTION);
- return isolate->factory()->NewExtendedConstantPoolArray(
- *small_entries(), *extended_entries());
- }
-}
-
-
-void ConstantPoolBuilder::Populate(Assembler* assm,
- ConstantPoolArray* constant_pool) {
- DCHECK_EQ(extended_entries()->is_empty(),
- !constant_pool->is_extended_layout());
- DCHECK(small_entries()->equals(ConstantPoolArray::NumberOfEntries(
- constant_pool, ConstantPoolArray::SMALL_SECTION)));
- if (constant_pool->is_extended_layout()) {
- DCHECK(extended_entries()->equals(ConstantPoolArray::NumberOfEntries(
- constant_pool, ConstantPoolArray::EXTENDED_SECTION)));
- }
-
- // Set up initial offsets.
- int offsets[ConstantPoolArray::NUMBER_OF_LAYOUT_SECTIONS]
- [ConstantPoolArray::NUMBER_OF_TYPES];
- for (int section = 0; section <= constant_pool->final_section(); section++) {
- int section_start = (section == ConstantPoolArray::EXTENDED_SECTION)
- ? small_entries()->total_count()
- : 0;
- for (int i = 0; i < ConstantPoolArray::NUMBER_OF_TYPES; i++) {
- ConstantPoolArray::Type type = static_cast<ConstantPoolArray::Type>(i);
- if (number_of_entries_[section].count_of(type) != 0) {
- offsets[section][type] = constant_pool->OffsetOfElementAt(
- number_of_entries_[section].base_of(type) + section_start);
- }
- }
- }
-
- for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
- entry != entries_.end(); entry++) {
- RelocInfo rinfo = entry->rinfo_;
- RelocInfo::Mode rmode = entry->rinfo_.rmode();
- ConstantPoolArray::Type type = GetConstantPoolType(rmode);
-
- // Update constant pool if necessary and get the entry's offset.
- int offset;
- if (entry->merged_index_ == -1) {
- offset = offsets[entry->section_][type];
- offsets[entry->section_][type] += ConstantPoolArray::entry_size(type);
- if (type == ConstantPoolArray::INT64) {
- constant_pool->set_at_offset(offset, rinfo.data64());
- } else if (type == ConstantPoolArray::INT32) {
- constant_pool->set_at_offset(offset,
- static_cast<int32_t>(rinfo.data()));
- } else if (type == ConstantPoolArray::CODE_PTR) {
- constant_pool->set_at_offset(offset,
- reinterpret_cast<Address>(rinfo.data()));
- } else {
- DCHECK(type == ConstantPoolArray::HEAP_PTR);
- constant_pool->set_at_offset(offset,
- reinterpret_cast<Object*>(rinfo.data()));
- }
- offset -= kHeapObjectTag;
- entry->merged_index_ = offset; // Stash offset for merged entries.
- } else {
- DCHECK(entry->merged_index_ < (entry - entries_.begin()));
- offset = entries_[entry->merged_index_].merged_index_;
- }
-
- // Patch vldr/ldr instruction with correct offset.
- Instr instr = assm->instr_at(rinfo.pc());
- if (entry->section_ == ConstantPoolArray::EXTENDED_SECTION) {
- if (CpuFeatures::IsSupported(ARMv7)) {
- // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
- Instr next_instr = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
- DCHECK((Assembler::IsMovW(instr) &&
- Instruction::ImmedMovwMovtValue(instr) == 0));
- DCHECK((Assembler::IsMovT(next_instr) &&
- Instruction::ImmedMovwMovtValue(next_instr) == 0));
- assm->instr_at_put(
- rinfo.pc(), Assembler::PatchMovwImmediate(instr, offset & 0xffff));
- assm->instr_at_put(
- rinfo.pc() + Assembler::kInstrSize,
- Assembler::PatchMovwImmediate(next_instr, offset >> 16));
- } else {
- // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
- Instr instr_2 = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
- Instr instr_3 = assm->instr_at(rinfo.pc() + 2 * Assembler::kInstrSize);
- Instr instr_4 = assm->instr_at(rinfo.pc() + 3 * Assembler::kInstrSize);
- DCHECK((Assembler::IsMovImmed(instr) &&
- Instruction::Immed8Value(instr) == 0));
- DCHECK((Assembler::IsOrrImmed(instr_2) &&
- Instruction::Immed8Value(instr_2) == 0) &&
- Assembler::GetRn(instr_2).is(Assembler::GetRd(instr_2)));
- DCHECK((Assembler::IsOrrImmed(instr_3) &&
- Instruction::Immed8Value(instr_3) == 0) &&
- Assembler::GetRn(instr_3).is(Assembler::GetRd(instr_3)));
- DCHECK((Assembler::IsOrrImmed(instr_4) &&
- Instruction::Immed8Value(instr_4) == 0) &&
- Assembler::GetRn(instr_4).is(Assembler::GetRd(instr_4)));
- assm->instr_at_put(
- rinfo.pc(), Assembler::PatchShiftImm(instr, (offset & kImm8Mask)));
- assm->instr_at_put(
- rinfo.pc() + Assembler::kInstrSize,
- Assembler::PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
- assm->instr_at_put(
- rinfo.pc() + 2 * Assembler::kInstrSize,
- Assembler::PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
- assm->instr_at_put(
- rinfo.pc() + 3 * Assembler::kInstrSize,
- Assembler::PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
- }
- } else if (type == ConstantPoolArray::INT64) {
- // Instruction to patch must be 'vldr rd, [pp, #0]'.
- DCHECK((Assembler::IsVldrDPpImmediateOffset(instr) &&
- Assembler::GetVldrDRegisterImmediateOffset(instr) == 0));
- DCHECK(is_uint10(offset));
- assm->instr_at_put(rinfo.pc(), Assembler::SetVldrDRegisterImmediateOffset(
- instr, offset));
+ // Patch vldr/ldr instruction with correct offset.
+ Instr instr = instr_at(pc);
+ if (access == ConstantPoolEntry::OVERFLOWED) {
+ if (CpuFeatures::IsSupported(ARMv7)) {
+ // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
+ Instr next_instr = instr_at(pc + kInstrSize);
+ DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0));
+ DCHECK((IsMovT(next_instr) &&
+ Instruction::ImmedMovwMovtValue(next_instr) == 0));
+ instr_at_put(pc, PatchMovwImmediate(instr, offset & 0xffff));
+ instr_at_put(pc + kInstrSize,
+ PatchMovwImmediate(next_instr, offset >> 16));
} else {
- // Instruction to patch must be 'ldr rd, [pp, #0]'.
- DCHECK((Assembler::IsLdrPpImmediateOffset(instr) &&
- Assembler::GetLdrRegisterImmediateOffset(instr) == 0));
- DCHECK(is_uint12(offset));
- assm->instr_at_put(
- rinfo.pc(), Assembler::SetLdrRegisterImmediateOffset(instr, offset));
+ // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
+ Instr instr_2 = instr_at(pc + kInstrSize);
+ Instr instr_3 = instr_at(pc + 2 * kInstrSize);
+ Instr instr_4 = instr_at(pc + 3 * kInstrSize);
+ DCHECK((IsMovImmed(instr) && Instruction::Immed8Value(instr) == 0));
+ DCHECK((IsOrrImmed(instr_2) && Instruction::Immed8Value(instr_2) == 0) &&
+ GetRn(instr_2).is(GetRd(instr_2)));
+ DCHECK((IsOrrImmed(instr_3) && Instruction::Immed8Value(instr_3) == 0) &&
+ GetRn(instr_3).is(GetRd(instr_3)));
+ DCHECK((IsOrrImmed(instr_4) && Instruction::Immed8Value(instr_4) == 0) &&
+ GetRn(instr_4).is(GetRd(instr_4)));
+ instr_at_put(pc, PatchShiftImm(instr, (offset & kImm8Mask)));
+ instr_at_put(pc + kInstrSize,
+ PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
+ instr_at_put(pc + 2 * kInstrSize,
+ PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
+ instr_at_put(pc + 3 * kInstrSize,
+ PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
}
+ } else if (type == ConstantPoolEntry::DOUBLE) {
+ // Instruction to patch must be 'vldr rd, [pp, #0]'.
+ DCHECK((IsVldrDPpImmediateOffset(instr) &&
+ GetVldrDRegisterImmediateOffset(instr) == 0));
+ DCHECK(is_uint10(offset));
+ instr_at_put(pc, SetVldrDRegisterImmediateOffset(instr, offset));
+ } else {
+ // Instruction to patch must be 'ldr rd, [pp, #0]'.
+ DCHECK((IsLdrPpImmediateOffset(instr) &&
+ GetLdrRegisterImmediateOffset(instr) == 0));
+ DCHECK(is_uint12(offset));
+ instr_at_put(pc, SetLdrRegisterImmediateOffset(instr, offset));
}
}
struct Register {
static const int kNumRegisters = 16;
static const int kMaxNumAllocatableRegisters =
- FLAG_enable_ool_constant_pool ? 8 : 9;
+ FLAG_enable_embedded_constant_pool ? 8 : 9;
static const int kSizeInBytes = 4;
inline static int NumAllocatableRegisters();
"r7",
"r8",
};
- if (FLAG_enable_ool_constant_pool && (index >= 7)) {
+ if (FLAG_enable_embedded_constant_pool && (index >= 7)) {
return names[index + 1];
}
return names[index];
const Register r6 = { kRegister_r6_Code };
// Used as context register.
const Register r7 = {kRegister_r7_Code};
-// Used as constant pool pointer register if FLAG_enable_ool_constant_pool.
+// Used as constant pool pointer register if FLAG_enable_embedded_constant_pool.
const Register r8 = { kRegister_r8_Code };
// Used as lithium codegen scratch register.
const Register r9 = { kRegister_r9_Code };
};
-// Class used to build a constant pool.
-class ConstantPoolBuilder BASE_EMBEDDED {
- public:
- ConstantPoolBuilder();
- ConstantPoolArray::LayoutSection AddEntry(Assembler* assm,
- const RelocInfo& rinfo);
- void Relocate(int pc_delta);
- bool IsEmpty();
- Handle<ConstantPoolArray> New(Isolate* isolate);
- void Populate(Assembler* assm, ConstantPoolArray* constant_pool);
-
- inline ConstantPoolArray::LayoutSection current_section() const {
- return current_section_;
- }
-
- inline ConstantPoolArray::NumberOfEntries* number_of_entries(
- ConstantPoolArray::LayoutSection section) {
- return &number_of_entries_[section];
- }
-
- inline ConstantPoolArray::NumberOfEntries* small_entries() {
- return number_of_entries(ConstantPoolArray::SMALL_SECTION);
- }
-
- inline ConstantPoolArray::NumberOfEntries* extended_entries() {
- return number_of_entries(ConstantPoolArray::EXTENDED_SECTION);
- }
-
- private:
- struct ConstantPoolEntry {
- ConstantPoolEntry(RelocInfo rinfo, ConstantPoolArray::LayoutSection section,
- int merged_index)
- : rinfo_(rinfo), section_(section), merged_index_(merged_index) {}
-
- RelocInfo rinfo_;
- ConstantPoolArray::LayoutSection section_;
- int merged_index_;
- };
-
- ConstantPoolArray::Type GetConstantPoolType(RelocInfo::Mode rmode);
-
- std::vector<ConstantPoolEntry> entries_;
- ConstantPoolArray::LayoutSection current_section_;
- ConstantPoolArray::NumberOfEntries number_of_entries_[2];
-};
-
struct VmovIndex {
unsigned char index;
};
// Return the address in the constant pool of the code target address used by
// the branch/call instruction at pc, or the object in a mov.
- INLINE(static Address constant_pool_entry_address(
- Address pc, ConstantPoolArray* constant_pool));
+ INLINE(static Address constant_pool_entry_address(Address pc,
+ Address constant_pool));
// Read/Modify the code target address in the branch/call instruction at pc.
- INLINE(static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool));
- INLINE(static void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED));
+ INLINE(static Address target_address_at(Address pc, Address constant_pool));
+ INLINE(static void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
INLINE(static Address target_address_at(Address pc, Code* code)) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
INLINE(static void set_target_address_at(Address pc,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED)) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target, icache_flush_mode);
}
void RecordConstPool(int size);
// Writes a single byte or word of data in the code stream. Used
- // for inline tables, e.g., jump-tables. The constant pool should be
- // emitted before any use of db and dd to ensure that constant pools
+ // for inline tables, e.g., jump-tables. CheckConstantPool() should be
+ // called before any use of db/dd/dq/dp to ensure that constant pools
// are not emitted as part of the tables generated.
void db(uint8_t data);
void dd(uint32_t data);
+ void dq(uint64_t data);
+ void dp(uintptr_t data) { dd(data); }
// Emits the address of the code stub's first instruction.
void emit_code_stub_address(Code* stub);
static const int kMaxDistToIntPool = 4*KB;
static const int kMaxDistToFPPool = 1*KB;
// All relocations could be integer, it therefore acts as the limit.
- static const int kMaxNumPending32RelocInfo = kMaxDistToIntPool/kInstrSize;
- static const int kMaxNumPending64RelocInfo = kMaxDistToFPPool/kInstrSize;
+ static const int kMaxNumPending32Constants = kMaxDistToIntPool / kInstrSize;
+ static const int kMaxNumPending64Constants = kMaxDistToFPPool / kInstrSize;
// Postpone the generation of the constant pool for the specified number of
// instructions.
// Check if is time to emit a constant pool.
void CheckConstPool(bool force_emit, bool require_jump);
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ int EmitEmbeddedConstantPool() {
+ DCHECK(FLAG_enable_embedded_constant_pool);
+ return constant_pool_builder_.Emit(this);
+ }
- bool use_extended_constant_pool() const {
- return constant_pool_builder_.current_section() ==
- ConstantPoolArray::EXTENDED_SECTION;
+ bool ConstantPoolAccessIsInOverflow() const {
+ return constant_pool_builder_.NextAccess(ConstantPoolEntry::INTPTR) ==
+ ConstantPoolEntry::OVERFLOWED;
}
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type);
protected:
// Relocation for a type-recording IC has the AST id added to it. This
// Max pool start (if we need a jump and an alignment).
int start = pc_offset() + kInstrSize + 2 * kPointerSize;
// Check the constant pool hasn't been blocked for too long.
- DCHECK((num_pending_32_bit_reloc_info_ == 0) ||
- (start + num_pending_64_bit_reloc_info_ * kDoubleSize <
+ DCHECK((num_pending_32_bit_constants_ == 0) ||
+ (start + num_pending_64_bit_constants_ * kDoubleSize <
(first_const_pool_32_use_ + kMaxDistToIntPool)));
- DCHECK((num_pending_64_bit_reloc_info_ == 0) ||
+ DCHECK((num_pending_64_bit_constants_ == 0) ||
(start < (first_const_pool_64_use_ + kMaxDistToFPPool)));
#endif
// Two cases:
static const int kMaxRelocSize = RelocInfoWriter::kMaxSize;
RelocInfoWriter reloc_info_writer;
- // Relocation info records are also used during code generation as temporary
+ // ConstantPoolEntry records are used during code generation as temporary
// containers for constants and code target addresses until they are emitted
- // to the constant pool. These pending relocation info records are temporarily
- // stored in a separate buffer until a constant pool is emitted.
+ // to the constant pool. These records are temporarily stored in a separate
+ // buffer until a constant pool is emitted.
// If every instruction in a long sequence is accessing the pool, we need one
// pending relocation entry per instruction.
- // The buffers of pending relocation info.
- RelocInfo pending_32_bit_reloc_info_[kMaxNumPending32RelocInfo];
- RelocInfo pending_64_bit_reloc_info_[kMaxNumPending64RelocInfo];
- // Number of pending reloc info entries in the 32 bits buffer.
- int num_pending_32_bit_reloc_info_;
- // Number of pending reloc info entries in the 64 bits buffer.
- int num_pending_64_bit_reloc_info_;
+ // The buffers of pending constant pool entries.
+ ConstantPoolEntry pending_32_bit_constants_[kMaxNumPending32Constants];
+ ConstantPoolEntry pending_64_bit_constants_[kMaxNumPending64Constants];
+ // Number of pending constant pool entries in the 32 bits buffer.
+ int num_pending_32_bit_constants_;
+ // Number of pending constant pool entries in the 64 bits buffer.
+ int num_pending_64_bit_constants_;
ConstantPoolBuilder constant_pool_builder_;
void bind_to(Label* L, int pos);
void next(Label* L);
- enum UseConstantPoolMode {
- USE_CONSTANT_POOL,
- DONT_USE_CONSTANT_POOL
- };
-
// Record reloc info for current pc_
void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
- void RecordRelocInfo(const RelocInfo& rinfo);
- ConstantPoolArray::LayoutSection ConstantPoolAddEntry(const RelocInfo& rinfo);
+ ConstantPoolEntry::Access ConstantPoolAddEntry(int position,
+ RelocInfo::Mode rmode,
+ intptr_t value);
+ ConstantPoolEntry::Access ConstantPoolAddEntry(int position, double value);
friend class RelocInfo;
friend class CodePatcher;
// r2: receiver
// r3: argc
// r4: argv
- // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
+ // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Clear the context before we push it when entering the internal frame.
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ mov(r5, Operand(r4));
__ mov(r6, Operand(r4));
- if (!FLAG_enable_ool_constant_pool) {
+ if (!FLAG_enable_embedded_constant_pool) {
__ mov(r8, Operand(r4));
}
if (kR9Available == 1) {
__ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
{ ConstantPoolUnavailableScope constant_pool_unavailable(masm);
- if (FLAG_enable_ool_constant_pool) {
- __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset));
+ if (FLAG_enable_embedded_constant_pool) {
+ __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
}
// Load the OSR entrypoint offset from the deoptimization data.
__ SmiTag(r0);
__ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
- (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
- fp.bit() | lr.bit());
+ (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
+ fp.bit() | lr.bit());
__ add(fp, sp,
Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
__ ldr(r1, MemOperand(r1));
__ mov(r2, Operand(pending_handler_offset_address));
__ ldr(r2, MemOperand(r2));
- if (FLAG_enable_ool_constant_pool) {
- __ ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset));
+ if (FLAG_enable_embedded_constant_pool) {
+ __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1);
}
__ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
__ add(pc, r1, r2);
// r3: argc
// r4: argv
int marker = type();
- if (FLAG_enable_ool_constant_pool) {
- __ mov(r8, Operand(isolate()->factory()->empty_constant_pool_array()));
+ if (FLAG_enable_embedded_constant_pool) {
+ __ mov(r8, Operand::Zero());
}
__ mov(r7, Operand(Smi::FromInt(marker)));
__ mov(r6, Operand(Smi::FromInt(marker)));
__ ldr(r5, MemOperand(r5));
__ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
- (FLAG_enable_ool_constant_pool ? r8.bit() : 0) |
- ip.bit());
+ (FLAG_enable_embedded_constant_pool ? r8.bit() : 0) |
+ ip.bit());
// Set up frame pointer for the frame to be pushed.
__ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
const int kPCRegister = 15;
const int kNoRegister = -1;
+// Used in embedded constant pool builder - max reach in bits for
+// various load instructions (unsigned)
+const int kLdrMaxReachBits = 12;
+const int kVldrMaxReachBits = 10;
+
// -----------------------------------------------------------------------------
// Conditions.
StandardFrameConstants::kConstantPoolOffset - kPointerSize));
// Pop return address, frame and constant pool pointer (if
- // FLAG_enable_ool_constant_pool).
+ // FLAG_enable_embedded_constant_pool).
__ LeaveFrame(StackFrame::INTERNAL);
{ ConstantPoolUnavailableScope constant_pool_unavailable(masm);
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- DCHECK(FLAG_enable_ool_constant_pool);
+ DCHECK(FLAG_enable_embedded_constant_pool);
SetFrameSlot(offset, value);
}
Register JavaScriptFrame::fp_register() { return v8::internal::fp; }
Register JavaScriptFrame::context_register() { return cp; }
Register JavaScriptFrame::constant_pool_pointer_register() {
- DCHECK(FLAG_enable_ool_constant_pool);
+ DCHECK(FLAG_enable_embedded_constant_pool);
return pp;
}
Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; }
Register StubFailureTrampolineFrame::context_register() { return cp; }
Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
- DCHECK(FLAG_enable_ool_constant_pool);
+ DCHECK(FLAG_enable_embedded_constant_pool);
return pp;
}
-Object*& ExitFrame::constant_pool_slot() const {
- DCHECK(FLAG_enable_ool_constant_pool);
- const int offset = ExitFrameConstants::kConstantPoolOffset;
- return Memory::Object_at(fp() + offset);
-}
-
-
} // namespace internal
} // namespace v8
class ExitFrameConstants : public AllStatic {
public:
- static const int kFrameSize = FLAG_enable_ool_constant_pool ?
- 3 * kPointerSize : 2 * kPointerSize;
+ static const int kFrameSize =
+ FLAG_enable_embedded_constant_pool ? 3 * kPointerSize : 2 * kPointerSize;
- static const int kConstantPoolOffset = FLAG_enable_ool_constant_pool ?
- -3 * kPointerSize : 0;
+ static const int kConstantPoolOffset =
+ FLAG_enable_embedded_constant_pool ? -3 * kPointerSize : 0;
static const int kCodeOffset = -2 * kPointerSize;
static const int kSPOffset = -1 * kPointerSize;
// The live registers are:
// o r1: the JS function object being called (i.e., ourselves)
// o cp: our context
-// o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
+// o pp: our caller's constant pool pointer (if enabled)
// o fp: our caller's frame pointer
// o sp: stack pointer
// o lr: return address
__ bind(&resume_frame);
// lr = return address.
// fp = caller's frame pointer.
- // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
+ // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
// cp = callee's context,
// r4 = callee's JS function.
__ PushFixedFrame(r4);
__ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
{ ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Load the new code object's constant pool pointer.
- __ ldr(pp,
- MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
+ __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
}
__ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
static Address GetInterruptImmediateLoadAddress(Address pc) {
Address load_address = pc - 2 * Assembler::kInstrSize;
- if (!FLAG_enable_ool_constant_pool) {
+ if (!FLAG_enable_embedded_constant_pool) {
DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
} else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
// This is an extended constant pool lookup.
// r1: Callee's JS function.
// cp: Callee's context.
- // pp: Callee's constant pool pointer (if FLAG_enable_ool_constant_pool)
+ // pp: Callee's constant pool pointer (if enabled)
// fp: Caller's frame pointer.
// lr: Caller's pc.
safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
- if (FLAG_enable_ool_constant_pool && (kind & Safepoint::kWithRegisters)) {
- // Register pp always contains a pointer to the constant pool.
- safepoint.DefinePointerRegister(pp, zone());
- }
}
void MacroAssembler::PushFixedFrame(Register marker_reg) {
DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
- stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
- cp.bit() |
- (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
- fp.bit() |
- lr.bit());
+ stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() |
+ (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
+ fp.bit() | lr.bit());
}
void MacroAssembler::PopFixedFrame(Register marker_reg) {
DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
- ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
- cp.bit() |
- (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
- fp.bit() |
- lr.bit());
+ ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() |
+ (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
+ fp.bit() | lr.bit());
}
}
+void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
+ Register code_target_address) {
+ DCHECK(FLAG_enable_embedded_constant_pool);
+ ldr(pp, MemOperand(code_target_address,
+ Code::kConstantPoolOffset - Code::kHeaderSize));
+ add(pp, pp, code_target_address);
+}
+
+
void MacroAssembler::LoadConstantPoolPointerRegister() {
- if (FLAG_enable_ool_constant_pool) {
- int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize -
- pc_offset() - Instruction::kPCReadOffset;
- DCHECK(ImmediateFitsAddrMode2Instruction(constant_pool_offset));
- ldr(pp, MemOperand(pc, constant_pool_offset));
- }
+ DCHECK(FLAG_enable_embedded_constant_pool);
+ int entry_offset = pc_offset() + Instruction::kPCReadOffset;
+ sub(ip, pc, Operand(entry_offset));
+ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}
Push(Smi::FromInt(StackFrame::STUB));
// Adjust FP to point to saved FP.
add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
LoadConstantPoolPointerRegister();
- set_ool_constant_pool_available(true);
+ set_constant_pool_available(true);
}
}
add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
}
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
LoadConstantPoolPointerRegister();
- set_ool_constant_pool_available(true);
+ set_constant_pool_available(true);
}
}
bool load_constant_pool_pointer_reg) {
// r0-r3: preserved
PushFixedFrame();
- if (FLAG_enable_ool_constant_pool && load_constant_pool_pointer_reg) {
+ if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
LoadConstantPoolPointerRegister();
}
mov(ip, Operand(Smi::FromInt(type)));
// Drop the execution stack down to the frame pointer and restore
// the caller frame pointer, return address and constant pool pointer
- // (if FLAG_enable_ool_constant_pool).
+ // (if FLAG_enable_embedded_constant_pool).
int frame_ends;
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
frame_ends = pc_offset();
ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
mov(ip, Operand::Zero());
str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
}
mov(ip, Operand(CodeObject()));
// fp - ExitFrameConstants::kFrameSize -
// DwVfpRegister::kMaxNumRegisters * kDoubleSize,
// since the sp slot, code slot and constant pool slot (if
- // FLAG_enable_ool_constant_pool) were pushed after the fp.
+ // FLAG_enable_embedded_constant_pool) were pushed after the fp.
}
// Reserve place for the return address and stack space and align the frame
#endif
// Tear down the exit frame, pop the arguments, and return.
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
}
mov(sp, Operand(fp));
Label small_constant_pool_load, load_result;
ldr(result, MemOperand(ldr_location));
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Check if this is an extended constant pool load.
and_(scratch, result, Operand(GetConsantPoolLoadMask()));
teq(scratch, Operand(GetConsantPoolLoadPattern()));
bind(&load_result);
// Get the address of the constant.
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
add(result, pp, Operand(result));
} else {
add(result, ldr_location, Operand(result));
}
// Push a fixed frame, consisting of lr, fp, constant pool (if
- // FLAG_enable_ool_constant_pool), context and JS function / marker id if
+ // FLAG_enable_embedded_constant_pool), context and JS function / marker id if
// marker_reg is a valid register.
void PushFixedFrame(Register marker_reg = no_reg);
void PopFixedFrame(Register marker_reg = no_reg);
void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0,
Register scratch1, Label* found);
+ // Loads the constant pool pointer (pp) register.
+ void LoadConstantPoolPointerRegisterFromCodeTargetAddress(
+ Register code_target_address);
+ void LoadConstantPoolPointerRegister();
+
private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,
MemOperand SafepointRegisterSlot(Register reg);
MemOperand SafepointRegistersAndDoublesSlot(Register reg);
- // Loads the constant pool pointer (pp) register.
- void LoadConstantPoolPointerRegister();
-
bool generating_stub_;
bool has_frame_;
// This handle will be patched with the code object on installation.
// Read/Modify the code target address in the branch/call instruction at pc.
-Address Assembler::target_address_at(Address pc,
- ConstantPoolArray* constant_pool) {
+Address Assembler::target_address_at(Address pc, Address constant_pool) {
return Memory::Address_at(target_pointer_address_at(pc));
}
Address Assembler::target_address_at(Address pc, Code* code) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
}
-void Assembler::set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
+void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
Memory::Address_at(target_pointer_address_at(pc)) = target;
Code* code,
Address target,
ICacheFlushMode icache_flush_mode) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target, icache_flush_mode);
}
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return;
-}
-
-
void PatchingAssembler::PatchAdrFar(int64_t target_offset) {
// The code at the current instruction should be:
// adr rd, 0
inline static Address target_pointer_address_at(Address pc);
// Read/Modify the code target address in the branch/call instruction at pc.
- inline static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool);
- inline static void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED);
+ inline static Address target_address_at(Address pc, Address constant_pool);
+ inline static void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
static inline Address target_address_at(Address pc, Code* code);
static inline void set_target_address_at(Address pc,
Code* code,
// Required by V8.
void dd(uint32_t data) { dc32(data); }
void db(uint8_t data) { dc8(data); }
+ void dq(uint64_t data) { dc64(data); }
+ void dp(uintptr_t data) { dc64(data); }
// Code generation helpers --------------------------------------------------
// Check if is time to emit a constant pool.
void CheckConstPool(bool force_emit, bool require_jump);
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ // No embedded constant pool support.
+ UNREACHABLE();
+ }
// Returns true if we should emit a veneer as soon as possible for a branch
// which can at most reach to specified pc.
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- // No out-of-line constant pool support.
+ // No embedded constant pool support.
UNREACHABLE();
}
}
-Object*& ExitFrame::constant_pool_slot() const {
- UNREACHABLE();
- return Memory::Object_at(NULL);
-}
-
-
} // namespace internal
} // namespace v8
predictable_code_size_(false),
// We may use the assembler without an isolate.
serializer_enabled_(isolate && isolate->serializer_enabled()),
- ool_constant_pool_available_(false) {
+ constant_pool_available_(false) {
if (FLAG_mask_constants_with_cookie && isolate != NULL) {
jit_cookie_ = isolate->random_number_generator()->NextInt();
}
}
+ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits,
+ int double_reach_bits) {
+ info_[ConstantPoolEntry::INTPTR].entries.reserve(64);
+ info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits;
+ info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits;
+}
+
+
+ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess(
+ ConstantPoolEntry::Type type) const {
+ const PerTypeEntryInfo& info = info_[type];
+
+ if (info.overflow()) return ConstantPoolEntry::OVERFLOWED;
+
+ int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count;
+ int dbl_offset = dbl_count * kDoubleSize;
+ int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count;
+ int ptr_offset = ptr_count * kPointerSize + dbl_offset;
+
+ if (type == ConstantPoolEntry::DOUBLE) {
+ // Double overflow detection must take into account the reach for both types.
+ int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits;
+ if (!is_uintn(dbl_offset, info.regular_reach_bits) ||
+ (ptr_count > 0 &&
+ !is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) {
+ return ConstantPoolEntry::OVERFLOWED;
+ }
+ } else {
+ DCHECK(type == ConstantPoolEntry::INTPTR);
+ if (!is_uintn(ptr_offset, info.regular_reach_bits)) {
+ return ConstantPoolEntry::OVERFLOWED;
+ }
+ }
+
+ return ConstantPoolEntry::REGULAR;
+}
+
+
+ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry(
+ ConstantPoolEntry& entry, ConstantPoolEntry::Type type) {
+ DCHECK(!emitted_label_.is_bound());
+ PerTypeEntryInfo& info = info_[type];
+ const int entry_size = ConstantPoolEntry::size(type);
+ bool merged = false;
+
+ if (entry.sharing_ok()) {
+ // Try to merge entries
+ std::vector<ConstantPoolEntry>::iterator it = info.shared_entries.begin();
+ int end = static_cast<int>(info.shared_entries.size());
+ for (int i = 0; i < end; i++, it++) {
+ if ((entry_size == kPointerSize) ? entry.value() == it->value()
+ : entry.value64() == it->value64()) {
+ // Merge with found entry.
+ entry.set_merged_index(i);
+ merged = true;
+ break;
+ }
+ }
+ }
+
+ // By definition, merged entries have regular access.
+ DCHECK(!merged || entry.merged_index() < info.regular_count);
+ ConstantPoolEntry::Access access =
+ (merged ? ConstantPoolEntry::REGULAR : NextAccess(type));
+
+ // Enforce an upper bound on search time by limiting the search to
+ // unique sharable entries which fit in the regular section.
+ if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) {
+ info.shared_entries.push_back(entry);
+ } else {
+ info.entries.push_back(entry);
+ }
+
+ // We're done if we found a match or have already triggered the
+ // overflow state.
+ if (merged || info.overflow()) return access;
+
+ if (access == ConstantPoolEntry::REGULAR) {
+ info.regular_count++;
+ } else {
+ info.overflow_start = static_cast<int>(info.entries.size()) - 1;
+ }
+
+ return access;
+}
+
+
+void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,
+ ConstantPoolEntry::Type type) {
+ PerTypeEntryInfo& info = info_[type];
+ std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
+ const int entry_size = ConstantPoolEntry::size(type);
+ int base = emitted_label_.pos();
+ DCHECK(base > 0);
+ int shared_end = static_cast<int>(shared_entries.size());
+ std::vector<ConstantPoolEntry>::iterator shared_it = shared_entries.begin();
+ for (int i = 0; i < shared_end; i++, shared_it++) {
+ int offset = assm->pc_offset() - base;
+ shared_it->set_offset(offset); // Save offset for merged entries.
+ if (entry_size == kPointerSize) {
+ assm->dp(shared_it->value());
+ } else {
+ assm->dq(shared_it->value64());
+ }
+ DCHECK(is_uintn(offset, info.regular_reach_bits));
+
+ // Patch load sequence with correct offset.
+ assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset,
+ ConstantPoolEntry::REGULAR, type);
+ }
+}
+
+
+void ConstantPoolBuilder::EmitGroup(Assembler* assm,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ PerTypeEntryInfo& info = info_[type];
+ const bool overflow = info.overflow();
+ std::vector<ConstantPoolEntry>& entries = info.entries;
+ std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
+ const int entry_size = ConstantPoolEntry::size(type);
+ int base = emitted_label_.pos();
+ DCHECK(base > 0);
+ int begin;
+ int end;
+
+ if (access == ConstantPoolEntry::REGULAR) {
+ // Emit any shared entries first
+ EmitSharedEntries(assm, type);
+ }
+
+ if (access == ConstantPoolEntry::REGULAR) {
+ begin = 0;
+ end = overflow ? info.overflow_start : static_cast<int>(entries.size());
+ } else {
+ DCHECK(access == ConstantPoolEntry::OVERFLOWED);
+ if (!overflow) return;
+ begin = info.overflow_start;
+ end = static_cast<int>(entries.size());
+ }
+
+ std::vector<ConstantPoolEntry>::iterator it = entries.begin();
+ if (begin > 0) std::advance(it, begin);
+ for (int i = begin; i < end; i++, it++) {
+ // Update constant pool if necessary and get the entry's offset.
+ int offset;
+ ConstantPoolEntry::Access entry_access;
+ if (!it->is_merged()) {
+ // Emit new entry
+ offset = assm->pc_offset() - base;
+ entry_access = access;
+ if (entry_size == kPointerSize) {
+ assm->dp(it->value());
+ } else {
+ assm->dq(it->value64());
+ }
+ } else {
+ // Retrieve offset from shared entry.
+ offset = shared_entries[it->merged_index()].offset();
+ entry_access = ConstantPoolEntry::REGULAR;
+ }
+
+ DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED ||
+ is_uintn(offset, info.regular_reach_bits));
+
+ // Patch load sequence with correct offset.
+ assm->PatchConstantPoolAccessInstruction(it->position(), offset,
+ entry_access, type);
+ }
+}
+
+
+// Emit and return position of pool. Zero implies no constant pool.
+int ConstantPoolBuilder::Emit(Assembler* assm) {
+ bool emitted = emitted_label_.is_bound();
+ bool empty = IsEmpty();
+
+ if (!emitted) {
+ // Mark start of constant pool. Align if necessary.
+ if (!empty) assm->Align(kDoubleSize);
+ assm->bind(&emitted_label_);
+ if (!empty) {
+ // Emit in groups based on access and type.
+ // Emit doubles first for alignment purposes.
+ EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE);
+ EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR);
+ if (info_[ConstantPoolEntry::DOUBLE].overflow()) {
+ assm->Align(kDoubleSize);
+ EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
+ ConstantPoolEntry::DOUBLE);
+ }
+ if (info_[ConstantPoolEntry::INTPTR].overflow()) {
+ EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
+ ConstantPoolEntry::INTPTR);
+ }
+ }
+ }
+
+ return !empty ? emitted_label_.pos() : 0;
+}
+
+
// Platform specific but identical code for all the platforms.
return (enabled_cpu_features_ & (static_cast<uint64_t>(1) << f)) != 0;
}
- bool is_ool_constant_pool_available() const {
- if (FLAG_enable_ool_constant_pool) {
- return ool_constant_pool_available_;
+ bool is_constant_pool_available() const {
+ if (FLAG_enable_embedded_constant_pool) {
+ return constant_pool_available_;
} else {
- // Out-of-line constant pool not supported on this architecture.
+ // Embedded constant pool not supported on this architecture.
UNREACHABLE();
return false;
}
int buffer_size_;
bool own_buffer_;
- void set_ool_constant_pool_available(bool available) {
- if (FLAG_enable_ool_constant_pool) {
- ool_constant_pool_available_ = available;
+ void set_constant_pool_available(bool available) {
+ if (FLAG_enable_embedded_constant_pool) {
+ constant_pool_available_ = available;
} else {
- // Out-of-line constant pool not supported on this architecture.
+ // Embedded constant pool not supported on this architecture.
UNREACHABLE();
}
}
// Indicates whether the constant pool can be accessed, which is only possible
// if the pp register points to the current code object's constant pool.
- bool ool_constant_pool_available_;
+ bool constant_pool_available_;
// Constant pool.
friend class FrameAndConstantPoolScope;
RelocInfo(byte* pc, Mode rmode, intptr_t data, Code* host)
: pc_(pc), rmode_(rmode), data_(data), host_(host) {
}
- RelocInfo(byte* pc, double data64)
- : pc_(pc), rmode_(NONE64), data64_(data64), host_(NULL) {
- }
static inline bool IsRealRelocMode(Mode mode) {
return mode >= FIRST_REAL_RELOC_MODE &&
}
static inline int ModeMask(Mode mode) { return 1 << mode; }
- // Returns true if the first RelocInfo has the same mode and raw data as the
- // second one.
- static inline bool IsEqual(RelocInfo first, RelocInfo second) {
- return first.rmode() == second.rmode() &&
- (first.rmode() == RelocInfo::NONE64 ?
- first.raw_data64() == second.raw_data64() :
- first.data() == second.data());
- }
-
// Accessors
byte* pc() const { return pc_; }
void set_pc(byte* pc) { pc_ = pc; }
Mode rmode() const { return rmode_; }
intptr_t data() const { return data_; }
- double data64() const { return data64_; }
- uint64_t raw_data64() { return bit_cast<uint64_t>(data64_); }
Code* host() const { return host_; }
void set_host(Code* host) { host_ = host; }
// comment).
byte* pc_;
Mode rmode_;
- union {
- intptr_t data_;
- double data64_;
- };
+ intptr_t data_;
Code* host_;
// External-reference pointers are also split across instruction-pairs
// on some platforms, but are accessed via indirect pointers. This location
};
+// -----------------------------------------------------------------------------
+// Constant pool support
+
+class ConstantPoolEntry {
+ public:
+ ConstantPoolEntry() {}
+ ConstantPoolEntry(int position, intptr_t value, bool sharing_ok)
+ : position_(position),
+ merged_index_(sharing_ok ? SHARING_ALLOWED : SHARING_PROHIBITED),
+ value_(value) {}
+ ConstantPoolEntry(int position, double value)
+ : position_(position), merged_index_(SHARING_ALLOWED), value64_(value) {}
+
+ int position() const { return position_; }
+ bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; }
+ bool is_merged() const { return merged_index_ >= 0; }
+ int merged_index(void) const {
+ DCHECK(is_merged());
+ return merged_index_;
+ }
+ void set_merged_index(int index) {
+ merged_index_ = index;
+ DCHECK(is_merged());
+ }
+ int offset(void) const {
+ DCHECK(merged_index_ >= 0);
+ return merged_index_;
+ }
+ void set_offset(int offset) {
+ DCHECK(offset >= 0);
+ merged_index_ = offset;
+ }
+ intptr_t value() const { return value_; }
+ uint64_t value64() const { return bit_cast<uint64_t>(value64_); }
+
+ enum Type { INTPTR, DOUBLE, NUMBER_OF_TYPES };
+
+ static int size(Type type) {
+ return (type == INTPTR) ? kPointerSize : kDoubleSize;
+ }
+
+ enum Access { REGULAR, OVERFLOWED };
+
+ private:
+ int position_;
+ int merged_index_;
+ union {
+ intptr_t value_;
+ double value64_;
+ };
+ enum { SHARING_PROHIBITED = -2, SHARING_ALLOWED = -1 };
+};
+
+
+// -----------------------------------------------------------------------------
+// Embedded constant pool support
+
+class ConstantPoolBuilder BASE_EMBEDDED {
+ public:
+ ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits);
+
+ // Add pointer-sized constant to the embedded constant pool
+ ConstantPoolEntry::Access AddEntry(int position, intptr_t value,
+ bool sharing_ok) {
+ ConstantPoolEntry entry(position, value, sharing_ok);
+ return AddEntry(entry, ConstantPoolEntry::INTPTR);
+ }
+
+ // Add double constant to the embedded constant pool
+ ConstantPoolEntry::Access AddEntry(int position, double value) {
+ ConstantPoolEntry entry(position, value);
+ return AddEntry(entry, ConstantPoolEntry::DOUBLE);
+ }
+
+ // Previews the access type required for the next new entry to be added.
+ ConstantPoolEntry::Access NextAccess(ConstantPoolEntry::Type type) const;
+
+ bool IsEmpty() {
+ return info_[ConstantPoolEntry::INTPTR].entries.empty() &&
+ info_[ConstantPoolEntry::INTPTR].shared_entries.empty() &&
+ info_[ConstantPoolEntry::DOUBLE].entries.empty() &&
+ info_[ConstantPoolEntry::DOUBLE].shared_entries.empty();
+ }
+
+ // Emit the constant pool. Invoke only after all entries have been
+ // added and all instructions have been emitted.
+ // Returns position of the emitted pool (zero implies no constant pool).
+ int Emit(Assembler* assm);
+
+ // Returns the label associated with the start of the constant pool.
+ // Linking to this label in the function prologue may provide an
+ // efficient means of constant pool pointer register initialization
+ // on some architectures.
+ inline Label* EmittedPosition() { return &emitted_label_; }
+
+ private:
+ ConstantPoolEntry::Access AddEntry(ConstantPoolEntry& entry,
+ ConstantPoolEntry::Type type);
+ void EmitSharedEntries(Assembler* assm, ConstantPoolEntry::Type type);
+ void EmitGroup(Assembler* assm, ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type);
+
+ struct PerTypeEntryInfo {
+ PerTypeEntryInfo() : regular_count(0), overflow_start(-1) {}
+ bool overflow() const {
+ return (overflow_start >= 0 &&
+ overflow_start < static_cast<int>(entries.size()));
+ }
+ int regular_reach_bits;
+ int regular_count;
+ int overflow_start;
+ std::vector<ConstantPoolEntry> entries;
+ std::vector<ConstantPoolEntry> shared_entries;
+ };
+
+ Label emitted_label_; // Records pc_offset of emitted pool
+ PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES];
+};
+
+
} } // namespace v8::internal
#endif // V8_ASSEMBLER_H_
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
bool saved_pp;
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
__ Push(lr, fp, pp);
// Adjust FP to point to saved FP.
__ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
int register_save_area_size = 0;
RegList frame_saves = fp.bit();
__ mflr(r0);
- __ Push(r0, fp);
- __ mr(fp, sp);
+ if (FLAG_enable_embedded_constant_pool) {
+ __ Push(r0, fp, kConstantPoolRegister);
+ // Adjust FP to point to saved FP.
+ __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
+ register_save_area_size += kPointerSize;
+ frame_saves |= kConstantPoolRegister.bit();
+ } else {
+ __ Push(r0, fp);
+ __ mr(fp, sp);
+ }
// Save callee-saved registers.
const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves;
for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
}
// Restore registers.
RegList frame_saves = fp.bit();
+ if (FLAG_enable_embedded_constant_pool) {
+ frame_saves |= kConstantPoolRegister.bit();
+ }
const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves;
if (saves != 0) {
__ MultiPop(saves);
reinterpret_cast<intptr_t>(new_pc));
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Update constant pool pointer for new code.
frame->set_constant_pool(new_code->constant_pool());
}
DCHECK(!is_bottommost || !has_alignment_padding_ ||
(fp_value & kPointerSize) != 0);
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// For the bottommost output frame the constant pool pointer can be gotten
// from the input frame. For subsequent output frames, it can be read from
// the previous frame.
output_frame->SetPc(pc_value);
// Update constant pool.
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
output_frame->SetConstantPool(constant_pool_value);
fp_value, output_offset, value);
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Read the caller's constant pool from the previous frame.
output_offset -= kPointerSize;
value = output_[frame_index - 1]->GetConstantPool();
adaptor_trampoline->instruction_start() +
isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
output_frame->SetPc(pc_value);
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
output_frame->SetConstantPool(constant_pool_value);
fp_value, output_offset, value);
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Read the caller's constant pool from the previous frame.
output_offset -= kPointerSize;
value = output_[frame_index - 1]->GetConstantPool();
construct_stub->instruction_start() +
isolate_->heap()->construct_stub_deopt_pc_offset()->value());
output_frame->SetPc(pc);
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
reinterpret_cast<intptr_t>(construct_stub->constant_pool());
output_frame->SetConstantPool(constant_pool_value);
// We need 1 stack entry for the return address and enough entries for the
// StackFrame::INTERNAL (FP, context, frame type, code object and constant
- // pool (if FLAG_enable_ool_constant_pool)- see MacroAssembler::EnterFrame).
+ // pool (if enabled) - see MacroAssembler::EnterFrame).
// For a setter stub frame we need one additional entry for the implicit
// return value, see StoreStubCompiler::CompileStoreViaSetter.
unsigned fixed_frame_entries =
fp_value, output_offset, value);
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Read the caller's constant pool from the previous frame.
output_offset -= kPointerSize;
value = output_[frame_index - 1]->GetConstantPool();
intptr_t pc = reinterpret_cast<intptr_t>(
accessor_stub->instruction_start() + offset->value());
output_frame->SetPc(pc);
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
output_frame->SetConstantPool(constant_pool_value);
top_address + output_frame_offset, output_frame_offset, value);
}
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
// Read the caller's constant pool from the input frame.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
DCHECK(trampoline != NULL);
output_frame->SetPc(reinterpret_cast<intptr_t>(
trampoline->instruction_start()));
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
Register constant_pool_reg =
StubFailureTrampolineFrame::constant_pool_pointer_register();
intptr_t constant_pool_value =
}
-Handle<ConstantPoolArray> Factory::NewConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small) {
- DCHECK(small.total_count() > 0);
- CALL_HEAP_FUNCTION(
- isolate(),
- isolate()->heap()->AllocateConstantPoolArray(small),
- ConstantPoolArray);
-}
-
-
-Handle<ConstantPoolArray> Factory::NewExtendedConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small,
- const ConstantPoolArray::NumberOfEntries& extended) {
- DCHECK(small.total_count() > 0);
- DCHECK(extended.total_count() > 0);
- CALL_HEAP_FUNCTION(
- isolate(),
- isolate()->heap()->AllocateExtendedConstantPoolArray(small, extended),
- ConstantPoolArray);
-}
-
-
Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kMinCapacity);
}
}
-Handle<ConstantPoolArray> Factory::CopyConstantPoolArray(
- Handle<ConstantPoolArray> array) {
- CALL_HEAP_FUNCTION(isolate(),
- isolate()->heap()->CopyConstantPoolArray(*array),
- ConstantPoolArray);
-}
-
-
Handle<Object> Factory::NewNumber(double value,
PretenureFlag pretenure) {
// We need to distinguish the minus zero value and this cannot be
int prologue_offset,
bool is_debug) {
Handle<ByteArray> reloc_info = NewByteArray(desc.reloc_size, TENURED);
- Handle<ConstantPoolArray> constant_pool =
- desc.origin->NewConstantPool(isolate());
// Compute size.
int body_size = RoundUp(desc.instr_size, kObjectAlignment);
code->set_next_code_link(*undefined_value());
code->set_handler_table(*empty_fixed_array(), SKIP_WRITE_BARRIER);
code->set_prologue_offset(prologue_offset);
+ if (FLAG_enable_embedded_constant_pool) {
+ code->set_constant_pool_offset(desc.instr_size - desc.constant_pool_size);
+ }
if (code->kind() == Code::OPTIMIZED_FUNCTION) {
code->set_marked_for_deoptimization(false);
}
code->set_has_debug_break_slots(true);
}
- desc.origin->PopulateConstantPool(*constant_pool);
- code->set_constant_pool(*constant_pool);
-
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_ref.is_null()) *(self_ref.location()) = *code;
int size,
PretenureFlag pretenure = NOT_TENURED);
- Handle<ConstantPoolArray> NewConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small);
-
- Handle<ConstantPoolArray> NewExtendedConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small,
- const ConstantPoolArray::NumberOfEntries& extended);
-
Handle<OrderedHashSet> NewOrderedHashSet();
Handle<OrderedHashMap> NewOrderedHashMap();
Handle<FixedDoubleArray> CopyFixedDoubleArray(
Handle<FixedDoubleArray> array);
- Handle<ConstantPoolArray> CopyConstantPoolArray(
- Handle<ConstantPoolArray> array);
-
// Numbers (e.g. literals) are pretenured by the parser.
// The return value may be a smi or a heap number.
Handle<Object> NewNumber(double value,
#define FLAG FLAG_READONLY
// assembler.h
-DEFINE_BOOL(enable_ool_constant_pool, V8_OOL_CONSTANT_POOL,
- "enable use of out-of-line constant pools (ARM only)")
+DEFINE_BOOL(enable_embedded_constant_pool, V8_EMBEDDED_CONSTANT_POOL,
+ "enable use of embedded constant pools (ARM/PPC only)")
DEFINE_BOOL(unbox_double_fields, V8_DOUBLE_FIELDS_UNBOXING,
"enable in-object double fields unboxing (64-bit only)")
if (!IsValidStackAddress(sp)) return false;
StackFrame::State state;
ExitFrame::FillState(fp, sp, &state);
- if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
- return false;
- }
return *state.pc_address != NULL;
}
#endif
-void StackFrame::IteratePc(ObjectVisitor* v,
- Address* pc_address,
- Code* holder) {
+void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address,
+ Address* constant_pool_address, Code* holder) {
Address pc = *pc_address;
DCHECK(GcSafeCodeContains(holder, pc));
unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
holder = reinterpret_cast<Code*>(code);
pc = holder->instruction_start() + pc_offset;
*pc_address = pc;
+ if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
+ *constant_pool_address = holder->constant_pool();
+ }
}
}
state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
state->pc_address = ResolveReturnAddressLocation(
reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
state->constant_pool_address = reinterpret_cast<Address*>(
fp() + ExitFrameConstants::kConstantPoolOffset);
}
void ExitFrame::Iterate(ObjectVisitor* v) const {
// The arguments are traversed as part of the expression stack of
// the calling frame.
- IteratePc(v, pc_address(), LookupCode());
+ IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
v->VisitPointer(&code_slot());
- if (FLAG_enable_ool_constant_pool) {
- v->VisitPointer(&constant_pool_slot());
- }
}
state->fp = fp;
state->pc_address = ResolveReturnAddressLocation(
reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
- state->constant_pool_address =
- reinterpret_cast<Address*>(fp + ExitFrameConstants::kConstantPoolOffset);
+ // The constant pool recorded in the exit frame is not associated
+ // with the pc in this state (the return address into a C entry
+ // stub). ComputeCallerState will retrieve the constant pool
+ // together with the associated caller pc.
+ state->constant_pool_address = NULL;
}
}
// Visit the return address in the callee and incoming arguments.
- IteratePc(v, pc_address(), code);
+ IteratePc(v, pc_address(), constant_pool_address(), code);
// Visit the context in stub frame and JavaScript frame.
// Visit the function in JavaScript frame.
void EntryFrame::Iterate(ObjectVisitor* v) const {
- IteratePc(v, pc_address(), LookupCode());
+ IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
IterateExpressions(v);
- IteratePc(v, pc_address(), LookupCode());
+ IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
// Internal frames only have object pointers on the expression stack
// as they never have any arguments.
IterateExpressions(v);
- IteratePc(v, pc_address(), LookupCode());
+ IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
const int offset = StandardFrameConstants::kLastObjectOffset;
limit = &Memory::Object_at(fp() + offset) + 1;
v->VisitPointers(base, limit);
- IteratePc(v, pc_address(), LookupCode());
+ IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
class StandardFrameConstants : public AllStatic {
public:
// Fixed part of the frame consists of return address, caller fp,
- // constant pool (if FLAG_enable_ool_constant_pool), context, and function.
- // StandardFrame::IterateExpressions assumes that kLastObjectOffset is the
- // last object pointer.
+ // constant pool (if FLAG_enable_embedded_constant_pool), context, and
+ // function. StandardFrame::IterateExpressions assumes that kLastObjectOffset
+ // is the last object pointer.
static const int kCPSlotSize =
- FLAG_enable_ool_constant_pool ? kPointerSize : 0;
+ FLAG_enable_embedded_constant_pool ? kPointerSize : 0;
static const int kFixedFrameSizeFromFp = 2 * kPointerSize + kCPSlotSize;
- static const int kFixedFrameSize = kPCOnStackSize + kFPOnStackSize +
- kFixedFrameSizeFromFp;
- static const int kExpressionsOffset = -3 * kPointerSize - kCPSlotSize;
- static const int kMarkerOffset = -2 * kPointerSize - kCPSlotSize;
- static const int kContextOffset = -1 * kPointerSize - kCPSlotSize;
- static const int kConstantPoolOffset = FLAG_enable_ool_constant_pool ?
- -1 * kPointerSize : 0;
- static const int kCallerFPOffset = 0 * kPointerSize;
- static const int kCallerPCOffset = +1 * kFPOnStackSize;
- static const int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize;
-
- static const int kLastObjectOffset = FLAG_enable_ool_constant_pool ?
- kConstantPoolOffset : kContextOffset;
+ static const int kFixedFrameSize =
+ kPCOnStackSize + kFPOnStackSize + kFixedFrameSizeFromFp;
+ static const int kExpressionsOffset = -3 * kPointerSize - kCPSlotSize;
+ static const int kMarkerOffset = -2 * kPointerSize - kCPSlotSize;
+ static const int kContextOffset = -1 * kPointerSize - kCPSlotSize;
+ static const int kConstantPoolOffset = kCPSlotSize ? -1 * kPointerSize : 0;
+ static const int kCallerFPOffset = 0 * kPointerSize;
+ static const int kCallerPCOffset = +1 * kFPOnStackSize;
+ static const int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize;
+
+ static const int kLastObjectOffset = kContextOffset;
};
void set_pc(Address pc) { *pc_address() = pc; }
Address constant_pool() const { return *constant_pool_address(); }
- void set_constant_pool(ConstantPoolArray* constant_pool) {
- *constant_pool_address() = reinterpret_cast<Address>(constant_pool);
+ void set_constant_pool(Address constant_pool) {
+ *constant_pool_address() = constant_pool;
}
virtual void SetCallerFp(Address caller_fp) = 0;
unsigned* stack_slots);
virtual void Iterate(ObjectVisitor* v) const = 0;
- static void IteratePc(ObjectVisitor* v, Address* pc_address, Code* holder);
+ static void IteratePc(ObjectVisitor* v, Address* pc_address,
+ Address* constant_pool_address, Code* holder);
// Sets a callback function for return-address rewriting profilers
// to resolve the location of a return address to the location of the
virtual Code* unchecked_code() const;
Object*& code_slot() const;
- Object*& constant_pool_slot() const;
// Garbage collection support.
virtual void Iterate(ObjectVisitor* v) const;
#endif
#endif
-// Determine whether the architecture uses an out-of-line constant pool.
-#define V8_OOL_CONSTANT_POOL 0
+// Determine whether the architecture uses an embedded constant pool
+// (contiguous constant pool embedded in code object).
+#if V8_TARGET_ARCH_PPC
+#define V8_EMBEDDED_CONSTANT_POOL 1
+#else
+#define V8_EMBEDDED_CONSTANT_POOL 0
+#endif
#ifdef V8_TARGET_ARCH_ARM
// Set stack limit lower for ARM than for other architectures because
// A CodeDesc describes a buffer holding instructions and relocation
// information. The instructions start at the beginning of the buffer
// and grow forward, the relocation information starts at the end of
-// the buffer and grows backward.
+// the buffer and grows backward. A constant pool may exist at the
+// end of the instructions.
//
-// |<--------------- buffer_size ---------------->|
-// |<-- instr_size -->| |<-- reloc_size -->|
-// +==================+========+==================+
-// | instructions | free | reloc info |
-// +==================+========+==================+
+// |<--------------- buffer_size ----------------------------------->|
+// |<------------- instr_size ---------->| |<-- reloc_size -->|
+// | |<- const_pool_size ->| |
+// +=====================================+========+==================+
+// | instructions | data | free | reloc info |
+// +=====================================+========+==================+
// ^
// |
// buffer
int buffer_size;
int instr_size;
int reloc_size;
+ int constant_pool_size;
Assembler* origin;
};
SetInternalReference(code, entry,
"gc_metadata", code->gc_metadata(),
Code::kGCMetadataOffset);
- SetInternalReference(code, entry,
- "constant_pool", code->constant_pool(),
- Code::kConstantPoolOffset);
if (code->kind() == Code::OPTIMIZED_FUNCTION) {
SetWeakReference(code, entry,
"next_code_link", code->next_code_link(),
}
-AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
- if (src->length() == 0) return src;
- return CopyConstantPoolArrayWithMap(src, src->map());
-}
-
-
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
AllocationSpace retry_space,
AllocationAlignment alignment) {
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) ==
0); // NOLINT
-STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & kDoubleAlignmentMask) ==
- 0); // NOLINT
-STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset &
- kDoubleAlignmentMask) == 0); // NOLINT
STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) ==
0); // NOLINT
#ifdef V8_HOST_ARCH_32_BIT
ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
- ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel,
- constant_pool_array);
#undef ALLOCATE_PARTIAL_MAP
}
}
set_empty_descriptor_array(DescriptorArray::cast(obj));
- // Allocate the constant pool array.
- {
- AllocationResult allocation = AllocateEmptyConstantPoolArray();
- if (!allocation.To(&obj)) return false;
- }
- set_empty_constant_pool_array(ConstantPoolArray::cast(obj));
-
// Fix the instance_descriptors for the existing maps.
meta_map()->set_code_cache(empty_fixed_array());
meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
null_map()->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
}
- constant_pool_array_map()->set_code_cache(empty_fixed_array());
- constant_pool_array_map()->set_dependent_code(
- DependentCode::cast(empty_fixed_array()));
- constant_pool_array_map()->set_raw_transitions(Smi::FromInt(0));
- constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array());
- if (FLAG_unbox_double_fields) {
- constant_pool_array_map()->set_layout_descriptor(
- LayoutDescriptor::FastPointerLayout());
- }
-
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
meta_map()->set_constructor_or_backpointer(null_value());
null_map()->set_prototype(null_value());
null_map()->set_constructor_or_backpointer(null_value());
- constant_pool_array_map()->set_prototype(null_value());
- constant_pool_array_map()->set_constructor_or_backpointer(null_value());
-
{ // Map allocation
#define ALLOCATE_MAP(instance_type, size, field_name) \
{ \
AllocationResult Heap::CopyCode(Code* code) {
AllocationResult allocation;
- HeapObject* new_constant_pool;
- if (FLAG_enable_ool_constant_pool &&
- code->constant_pool() != empty_constant_pool_array()) {
- // Copy the constant pool, since edits to the copied code may modify
- // the constant pool.
- allocation = CopyConstantPoolArray(code->constant_pool());
- if (!allocation.To(&new_constant_pool)) return allocation;
- } else {
- new_constant_pool = empty_constant_pool_array();
- }
HeapObject* result = NULL;
// Allocate an object the same size as the code object.
CopyBlock(new_addr, old_addr, obj_size);
Code* new_code = Code::cast(result);
- // Update the constant pool.
- new_code->set_constant_pool(new_constant_pool);
-
// Relocate the copy.
DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
- // Allocate ByteArray and ConstantPoolArray before the Code object, so that we
- // do not risk leaving uninitialized Code object (and breaking the heap).
+ // Allocate ByteArray before the Code object, so that we do not risk
+ // leaving uninitialized Code object (and breaking the heap).
ByteArray* reloc_info_array;
{
AllocationResult allocation =
AllocateByteArray(reloc_info.length(), TENURED);
if (!allocation.To(&reloc_info_array)) return allocation;
}
- HeapObject* new_constant_pool;
- if (FLAG_enable_ool_constant_pool &&
- code->constant_pool() != empty_constant_pool_array()) {
- // Copy the constant pool, since edits to the copied code may modify
- // the constant pool.
- AllocationResult allocation = CopyConstantPoolArray(code->constant_pool());
- if (!allocation.To(&new_constant_pool)) return allocation;
- } else {
- new_constant_pool = empty_constant_pool_array();
- }
int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
Code* new_code = Code::cast(result);
new_code->set_relocation_info(reloc_info_array);
- // Update constant pool.
- new_code->set_constant_pool(new_constant_pool);
-
// Copy patched rinfo.
CopyBytes(new_code->relocation_start(), reloc_info.start(),
static_cast<size_t>(reloc_info.length()));
}
-AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
- Map* map) {
- HeapObject* obj;
- if (src->is_extended_layout()) {
- ConstantPoolArray::NumberOfEntries small(src,
- ConstantPoolArray::SMALL_SECTION);
- ConstantPoolArray::NumberOfEntries extended(
- src, ConstantPoolArray::EXTENDED_SECTION);
- AllocationResult allocation =
- AllocateExtendedConstantPoolArray(small, extended);
- if (!allocation.To(&obj)) return allocation;
- } else {
- ConstantPoolArray::NumberOfEntries small(src,
- ConstantPoolArray::SMALL_SECTION);
- AllocationResult allocation = AllocateConstantPoolArray(small);
- if (!allocation.To(&obj)) return allocation;
- }
- obj->set_map_no_write_barrier(map);
- CopyBlock(obj->address() + ConstantPoolArray::kFirstEntryOffset,
- src->address() + ConstantPoolArray::kFirstEntryOffset,
- src->size() - ConstantPoolArray::kFirstEntryOffset);
- return obj;
-}
-
-
AllocationResult Heap::AllocateRawFixedArray(int length,
PretenureFlag pretenure) {
if (length < 0 || length > FixedArray::kMaxLength) {
}
-AllocationResult Heap::AllocateConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small) {
- CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
- int size = ConstantPoolArray::SizeFor(small);
- AllocationSpace space = SelectSpace(size, TENURED);
-
- HeapObject* object = nullptr;
- {
- AllocationResult allocation =
- AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
- if (!allocation.To(&object)) return allocation;
- }
- object->set_map_no_write_barrier(constant_pool_array_map());
-
- ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
- constant_pool->Init(small);
- constant_pool->ClearPtrEntries(isolate());
- return constant_pool;
-}
-
-
-AllocationResult Heap::AllocateExtendedConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small,
- const ConstantPoolArray::NumberOfEntries& extended) {
- CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
- CHECK(extended.are_in_range(0, kMaxInt));
- int size = ConstantPoolArray::SizeForExtended(small, extended);
- AllocationSpace space = SelectSpace(size, TENURED);
-
- HeapObject* object = nullptr;
- {
- AllocationResult allocation =
- AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
- if (!allocation.To(&object)) return allocation;
- }
- object->set_map_no_write_barrier(constant_pool_array_map());
-
- ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
- constant_pool->InitExtended(small, extended);
- constant_pool->ClearPtrEntries(isolate());
- return constant_pool;
-}
-
-
-AllocationResult Heap::AllocateEmptyConstantPoolArray() {
- ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0);
- int size = ConstantPoolArray::SizeFor(small);
- HeapObject* result = NULL;
- {
- AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
- if (!allocation.To(&result)) return allocation;
- }
- result->set_map_no_write_barrier(constant_pool_array_map());
- ConstantPoolArray::cast(result)->Init(small);
- return result;
-}
-
-
AllocationResult Heap::AllocateSymbol() {
// Statically ensure that it is safe to allocate symbols in paged spaces.
STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);
V(Map, scope_info_map, ScopeInfoMap) \
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
- V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
V(Map, weak_cell_map, WeakCellMap) \
V(Map, one_byte_string_map, OneByteStringMap) \
V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
- V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
/* The roots above this line should be boring from a GC point of view. */ \
/* This means they are never in new space and never on a page that is */ \
/* being compacted. */ \
V(ScopeInfoMap) \
V(FixedCOWArrayMap) \
V(FixedDoubleArrayMap) \
- V(ConstantPoolArrayMap) \
V(WeakCellMap) \
V(NoInterceptorResultSentinel) \
V(HashTableMap) \
V(EmptyFixedArray) \
V(EmptyByteArray) \
V(EmptyDescriptorArray) \
- V(EmptyConstantPoolArray) \
V(ArgumentsMarker) \
V(SymbolMap) \
V(SloppyArgumentsElementsMap) \
MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
FixedDoubleArray* src);
- // Make a copy of src and return it. Returns
- // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
- MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray(
- ConstantPoolArray* src);
-
-
// Computes a single character string where the character has code.
// A cache is used for one-byte (Latin1) codes.
MUST_USE_RESULT AllocationResult
// Allocate a symbol in old space.
MUST_USE_RESULT AllocationResult AllocateSymbol();
- // Make a copy of src, set the map, and return the copy.
- MUST_USE_RESULT AllocationResult
- CopyConstantPoolArrayWithMap(ConstantPoolArray* src, Map* map);
-
- MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small);
-
- MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
- const ConstantPoolArray::NumberOfEntries& small,
- const ConstantPoolArray::NumberOfEntries& extended);
-
// Allocates an external array of the specified length and type.
MUST_USE_RESULT AllocationResult
AllocateExternalArray(int length, ExternalArrayType array_type,
MUST_USE_RESULT AllocationResult
AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
- // Allocate empty constant pool array.
- MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray();
-
// Allocate a tenured simple cell.
MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
SlotsBuffer::IGNORE_OVERFLOW);
}
- } else if (dst->IsConstantPoolArray()) {
- // We special case ConstantPoolArrays since they could contain integers
- // value entries which look like tagged pointers.
- // TODO(mstarzinger): restructure this code to avoid this special-casing.
- ConstantPoolArray* array = ConstantPoolArray::cast(dst);
- ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
- while (!code_iter.is_finished()) {
- Address code_entry_slot =
- dst_addr + array->OffsetOfElementAt(code_iter.next_index());
- Address code_entry = Memory::Address_at(code_entry_slot);
-
- if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
- SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
- SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
- SlotsBuffer::IGNORE_OVERFLOW);
- }
- }
- ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
- while (!heap_iter.is_finished()) {
- Address heap_slot =
- dst_addr + array->OffsetOfElementAt(heap_iter.next_index());
- Object* value = Memory::Object_at(heap_slot);
- RecordMigratedSlot(value, heap_slot);
- }
}
} else if (dest == CODE_SPACE) {
PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
InstanceType type = object->map()->instance_type();
// Slots in maps and code can't be invalid because they are never shrunk.
if (type == MAP_TYPE || type == CODE_TYPE) return true;
- if (type == CONSTANT_POOL_ARRAY_TYPE) {
- if (FLAG_enable_ool_constant_pool) {
- // TODO(ishell): implement constant pool support if we ever enable it.
- UNIMPLEMENTED();
- } else {
- // This is left here just to make constant pool unit tests work.
- return true;
- }
- }
// Consider slots in objects that contain ONLY raw data as invalid.
if (object->MayContainRawValues()) return false;
if (FLAG_unbox_double_fields) {
rinfo.Visit(isolate, v);
break;
}
+ case SlotsBuffer::OBJECT_SLOT: {
+ v->VisitPointer(reinterpret_cast<Object**>(addr));
+ break;
+ }
default:
UNREACHABLE();
break;
if (target_page->IsEvacuationCandidate() &&
(rinfo->host() == NULL ||
!ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
+ Address addr = rinfo->pc();
+ SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode);
+ if (rinfo->IsInConstantPool()) {
+ addr = rinfo->constant_pool_entry_address();
+ if (RelocInfo::IsCodeTarget(rmode)) {
+ slot_type = SlotsBuffer::CODE_ENTRY_SLOT;
+ } else {
+ DCHECK(RelocInfo::IsEmbeddedObject(rmode));
+ slot_type = SlotsBuffer::OBJECT_SLOT;
+ }
+ }
bool success = SlotsBuffer::AddTo(
&slots_buffer_allocator_, target_page->slots_buffer_address(),
- SlotTypeForRMode(rmode), rinfo->pc(), SlotsBuffer::FAIL_ON_OVERFLOW);
+ slot_type, addr, SlotsBuffer::FAIL_ON_OVERFLOW);
if (!success) {
EvictPopularEvacuationCandidate(target_page);
}
enum SlotType {
EMBEDDED_OBJECT_SLOT,
+ OBJECT_SLOT,
RELOCATED_CODE_OBJECT,
CELL_TARGET_SLOT,
CODE_TARGET_SLOT,
switch (type) {
case EMBEDDED_OBJECT_SLOT:
return "EMBEDDED_OBJECT_SLOT";
+ case OBJECT_SLOT:
+ return "OBJECT_SLOT";
case RELOCATED_CODE_OBJECT:
return "RELOCATED_CODE_OBJECT";
case CELL_TARGET_SLOT:
table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
- table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);
-
table_.Register(kVisitNativeContext, &VisitNativeContext);
table_.Register(kVisitAllocationSite, &VisitAllocationSite);
}
-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- ConstantPoolArray* array = ConstantPoolArray::cast(object);
- ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
- while (!code_iter.is_finished()) {
- Address code_entry = reinterpret_cast<Address>(
- array->RawFieldOfElementAt(code_iter.next_index()));
- StaticVisitor::VisitCodeEntry(heap, code_entry);
- }
-
- ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
- while (!heap_iter.is_finished()) {
- Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
- HeapObject* object = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, object);
- bool is_weak_object =
- (array->get_weak_object_state() ==
- ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
- Code::IsWeakObjectInOptimizedCode(object));
- if (!is_weak_object) {
- StaticVisitor::MarkObject(heap, object);
- }
- }
-}
-
-
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
HeapObject* object) {
IteratePointer(v, kDeoptimizationDataOffset);
IteratePointer(v, kTypeFeedbackInfoOffset);
IterateNextCodeLink(v, kNextCodeLinkOffset);
- IteratePointer(v, kConstantPoolOffset);
RelocIterator it(this, mode_mask);
Isolate* isolate = this->GetIsolate();
reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
StaticVisitor::VisitNextCodeLink(
heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
- StaticVisitor::VisitPointer(
- heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));
RelocIterator it(this, mode_mask);
case FIXED_DOUBLE_ARRAY_TYPE:
return kVisitFixedDoubleArray;
- case CONSTANT_POOL_ARRAY_TYPE:
- return kVisitConstantPoolArray;
-
case ODDBALL_TYPE:
return kVisitOddball;
V(FixedDoubleArray) \
V(FixedTypedArray) \
V(FixedFloat64Array) \
- V(ConstantPoolArray) \
V(NativeContext) \
V(AllocationSite) \
V(DataObject2) \
INLINE(static void VisitMap(Map* map, HeapObject* object));
INLINE(static void VisitCode(Map* map, HeapObject* object));
INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
- INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
// large object space.
CHECK(object->IsCode() || object->IsSeqString() ||
object->IsExternalString() || object->IsFixedArray() ||
- object->IsFixedDoubleArray() || object->IsByteArray() ||
- object->IsConstantPoolArray());
+ object->IsFixedDoubleArray() || object->IsByteArray());
// The object itself should look OK.
object->ObjectVerify();
}
+// Writes a raw 64-bit value at the current buffer position and advances pc_.
+// Performs no buffer-space check itself; callers (e.g. dq below) must
+// guarantee space via EnsureSpace before emitting.
+void Assembler::emit_q(uint64_t x) {
+ *reinterpret_cast<uint64_t*>(pc_) = x;
+ pc_ += sizeof(uint64_t);
+}
+
+
void Assembler::emit(Handle<Object> handle) {
AllowDeferredHandleDereference heap_object_check;
// Verify all Objects referred by code are NOT in new space.
}
-Address Assembler::target_address_at(Address pc,
- ConstantPoolArray* constant_pool) {
+Address Assembler::target_address_at(Address pc, Address constant_pool) {
return pc + sizeof(int32_t) + *reinterpret_cast<int32_t*>(pc);
}
-void Assembler::set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
+void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
int32_t* p = reinterpret_cast<int32_t*>(pc);
}
+// Emits a 64-bit data constant into the code stream (for inline tables),
+// reserving buffer space first via EnsureSpace.
+void Assembler::dq(uint64_t data) {
+ EnsureSpace ensure_space(this);
+ emit_q(data);
+}
+
+
void Assembler::dd(Label* label) {
EnsureSpace ensure_space(this);
RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return;
-}
-
-
#ifdef GENERATED_CODE_COVERAGE
static FILE* coverage_log = NULL;
void GetCode(CodeDesc* desc);
// Read/Modify the code target in the branch/call instruction at pc.
- inline static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool);
- inline static void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED);
+ inline static Address target_address_at(Address pc, Address constant_pool);
+ inline static void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
static inline Address target_address_at(Address pc, Code* code) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
static inline void set_target_address_at(Address pc,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target);
}
// inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
+ void dq(uint64_t data);
+ // Emits a pointer-sized datum; this port assumes 32-bit pointers
+ // (uintptr_t fits dd's uint32_t), so it forwards to dd.
+ void dp(uintptr_t data) { dd(data); }
void dd(Label* label);
// Check if there is less than kGap bytes available in the buffer.
byte byte_at(int pos) { return buffer_[pos]; }
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ // Back-patch hook for embedded-constant-pool load instructions. This
+ // target never emits such instructions, so any call here is a bug.
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ // No embedded constant pool support.
+ UNREACHABLE();
+ }
protected:
void emit_sse_operand(XMMRegister reg, const Operand& adr);
TypeFeedbackId id = TypeFeedbackId::None());
inline void emit(const Immediate& x);
inline void emit_w(const Immediate& x);
+ inline void emit_q(uint64_t x);
// Emit the code-object-relative offset of the label's position
inline void emit_code_relative_offset(Label* label);
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- // No out-of-line constant pool support.
+ // No embedded constant pool support.
UNREACHABLE();
}
}
-Object*& ExitFrame::constant_pool_slot() const {
- UNREACHABLE();
- return Memory::Object_at(NULL);
-}
-
-
} // namespace internal
} // namespace v8
}
-ConstantPoolArray* IC::constant_pool() const {
- if (!FLAG_enable_ool_constant_pool) {
+Address IC::constant_pool() const {
+ if (!FLAG_enable_embedded_constant_pool) {
return NULL;
} else {
- Handle<ConstantPoolArray> result = raw_constant_pool_;
+ Address constant_pool = raw_constant_pool();
Debug* debug = isolate()->debug();
// First check if any break points are active if not just return the
// original constant pool.
- if (!debug->has_break_points()) return *result;
+ if (!debug->has_break_points()) return constant_pool;
// At least one break point is active perform additional test to ensure that
// break point locations are updated correctly.
Address target = Assembler::target_address_from_return_address(pc());
if (debug->IsDebugBreak(
- Assembler::target_address_at(target, raw_constant_pool()))) {
+ Assembler::target_address_at(target, constant_pool))) {
// If the call site is a call to debug break then we want to return the
// constant pool for the original code instead of the breakpointed code.
return GetOriginalCode()->constant_pool();
}
- return *result;
+ return constant_pool;
}
}
-ConstantPoolArray* IC::raw_constant_pool() const {
- if (FLAG_enable_ool_constant_pool) {
- return *raw_constant_pool_;
+Address IC::raw_constant_pool() const {
+ if (FLAG_enable_embedded_constant_pool) {
+ return *constant_pool_address_;
} else {
return NULL;
}
}
-Code* IC::GetTargetAtAddress(Address address,
- ConstantPoolArray* constant_pool) {
+Code* IC::GetTargetAtAddress(Address address, Address constant_pool) {
// Get the target address of the IC.
Address target = Assembler::target_address_at(address, constant_pool);
// Convert target address to the code object. Code::GetCodeFromTargetAddress
void IC::SetTargetAtAddress(Address address, Code* target,
- ConstantPoolArray* constant_pool) {
+ Address constant_pool) {
if (AddressIsDeoptimizedCode(target->GetIsolate(), address)) return;
DCHECK(target->is_inline_cache_stub() || target->is_compare_ic_stub());
// static
void ICUtility::Clear(Isolate* isolate, Address address,
- ConstantPoolArray* constant_pool) {
+ Address constant_pool) {
IC::Clear(isolate, address, constant_pool);
}
class ICUtility : public AllStatic {
public:
// Clear the inline cache to initial state.
- static void Clear(Isolate* isolate, Address address,
- ConstantPoolArray* constant_pool);
+ static void Clear(Isolate* isolate, Address address, Address constant_pool);
};
// levels of the stack frame iteration code. This yields a ~35% speedup when
// running DeltaBlue and a ~25% speedup of gbemu with the '--nouse-ic' flag.
const Address entry = Isolate::c_entry_fp(isolate->thread_local_top());
- Address constant_pool = NULL;
- if (FLAG_enable_ool_constant_pool) {
- constant_pool =
- Memory::Address_at(entry + ExitFrameConstants::kConstantPoolOffset);
+ Address* constant_pool = NULL;
+ if (FLAG_enable_embedded_constant_pool) {
+ constant_pool = reinterpret_cast<Address*>(
+ entry + ExitFrameConstants::kConstantPoolOffset);
}
Address* pc_address =
reinterpret_cast<Address*>(entry + ExitFrameConstants::kCallerPCOffset);
// StubFailureTrampoline, we need to look one frame further down the stack to
// find the frame pointer and the return address stack slot.
if (depth == EXTRA_CALL_FRAME) {
- if (FLAG_enable_ool_constant_pool) {
- constant_pool =
- Memory::Address_at(fp + StandardFrameConstants::kConstantPoolOffset);
+ if (FLAG_enable_embedded_constant_pool) {
+ constant_pool = reinterpret_cast<Address*>(
+ fp + StandardFrameConstants::kConstantPoolOffset);
}
const int kCallerPCOffset = StandardFrameConstants::kCallerPCOffset;
pc_address = reinterpret_cast<Address*>(fp + kCallerPCOffset);
DCHECK(fp == frame->fp() && pc_address == frame->pc_address());
#endif
fp_ = fp;
- if (FLAG_enable_ool_constant_pool) {
- raw_constant_pool_ = handle(
- ConstantPoolArray::cast(reinterpret_cast<Object*>(constant_pool)),
- isolate);
+ if (FLAG_enable_embedded_constant_pool) {
+ constant_pool_address_ = constant_pool;
}
pc_address_ = StackFrame::ResolveReturnAddressLocation(pc_address);
target_ = handle(raw_target(), isolate);
}
-void IC::Clear(Isolate* isolate, Address address,
- ConstantPoolArray* constant_pool) {
+void IC::Clear(Isolate* isolate, Address address, Address constant_pool) {
Code* target = GetTargetAtAddress(address, constant_pool);
// Don't clear debug break inline cache as it will remove the break point.
void StoreIC::Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool) {
+ Address constant_pool) {
if (IsCleared(target)) return;
Code* code = PropertyICCompiler::FindPreMonomorphic(isolate, Code::STORE_IC,
target->extra_ic_state());
void KeyedStoreIC::Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool) {
+ Address constant_pool) {
if (IsCleared(target)) return;
Handle<Code> code = pre_monomorphic_stub(
isolate, StoreICState::GetLanguageMode(target->extra_ic_state()));
void CompareIC::Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool) {
+ Address constant_pool) {
DCHECK(CodeStub::GetMajorKey(target) == CodeStub::CompareIC);
CompareICStub stub(target->stub_key(), isolate);
// Only clear CompareICs that can retain objects.
}
-void CompareNilIC::Clear(Address address, Code* target,
- ConstantPoolArray* constant_pool) {
+void CompareNilIC::Clear(Address address, Code* target, Address constant_pool) {
if (IsCleared(target)) return;
ExtraICState state = target->extra_ic_state();
}
// Clear the inline cache to initial state.
- static void Clear(Isolate* isolate, Address address,
- ConstantPoolArray* constant_pool);
+ static void Clear(Isolate* isolate, Address address, Address constant_pool);
#ifdef DEBUG
bool IsLoadStub() const {
// Access the target code for the given IC address.
static inline Code* GetTargetAtAddress(Address address,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
static inline void SetTargetAtAddress(Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
static void OnTypeFeedbackChanged(Isolate* isolate, Address address,
State old_state, State new_state,
bool target_remains_ic_stub);
private:
inline Code* raw_target() const;
- inline ConstantPoolArray* constant_pool() const;
- inline ConstantPoolArray* raw_constant_pool() const;
+ inline Address constant_pool() const;
+ inline Address raw_constant_pool() const;
void FindTargetMaps() {
if (target_maps_set_) return;
// Frame pointer for the frame that uses (calls) the IC.
Address fp_;
- // All access to the program counter of an IC structure is indirect
- // to make the code GC safe. This feature is crucial since
+ // All access to the program counter and constant pool of an IC structure is
+ // indirect to make the code GC safe. This feature is crucial since
// GetProperty and SetProperty are called and they in turn might
// invoke the garbage collector.
Address* pc_address_;
- Isolate* isolate_;
-
// The constant pool of the code which originally called the IC (which might
// be for the breakpointed copy of the original code).
- Handle<ConstantPoolArray> raw_constant_pool_;
+ Address* constant_pool_address_;
+
+ Isolate* isolate_;
// The original code target that missed.
Handle<Code> target_;
Handle<Code> SimpleFieldLoad(FieldIndex index);
static void Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
friend class IC;
};
private:
static void Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
friend class IC;
};
inline void set_target(Code* code);
static void Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
friend class IC;
};
inline void set_target(Code* code);
static void Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
KeyedAccessStoreMode GetStoreMode(Handle<JSObject> receiver,
Handle<Object> key, Handle<Object> value);
bool strong);
static void Clear(Isolate* isolate, Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ Address constant_pool);
Token::Value op_;
static Handle<Code> GetUninitialized();
- static void Clear(Address address, Code* target,
- ConstantPoolArray* constant_pool);
+ static void Clear(Address address, Code* target, Address constant_pool);
static Handle<Object> DoCompareNilSlow(Isolate* isolate, NilValue nil,
Handle<Object> object);
// -- lr : return address
// -----------------------------------
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
if (accessor_index >= 0) {
DCHECK(!holder.is(scratch));
// -- lr : return address
// -----------------------------------
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Save value register, so we can restore it later.
__ push(value());
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
{
- FrameScope frame_scope(masm(), StackFrame::INTERNAL);
+ FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
if (must_preserve_receiver_reg) {
__ Push(receiver(), holder_reg, this->name());
} else {
for (int i = 0; i < objects.length(); i++) {
AddWeakObjectToCodeDependency(isolate(), objects.at(i), code);
}
- if (FLAG_enable_ool_constant_pool) {
- code->constant_pool()->set_weak_object_state(
- ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE);
- }
code->set_can_have_weak_objects(true);
}
: masm_(masm),
type_(type),
old_has_frame_(masm->has_frame()),
- old_constant_pool_available_(FLAG_enable_ool_constant_pool &&
- masm->is_ool_constant_pool_available()) {
+ old_constant_pool_available_(FLAG_enable_embedded_constant_pool &&
+ masm->is_constant_pool_available()) {
masm->set_has_frame(true);
- if (FLAG_enable_ool_constant_pool) {
- masm->set_ool_constant_pool_available(true);
+ if (FLAG_enable_embedded_constant_pool) {
+ masm->set_constant_pool_available(true);
}
if (type_ != StackFrame::MANUAL && type_ != StackFrame::NONE) {
masm->EnterFrame(type, !old_constant_pool_available_);
~FrameAndConstantPoolScope() {
masm_->LeaveFrame(type_);
masm_->set_has_frame(old_has_frame_);
- if (FLAG_enable_ool_constant_pool) {
- masm_->set_ool_constant_pool_available(old_constant_pool_available_);
+ if (FLAG_enable_embedded_constant_pool) {
+ masm_->set_constant_pool_available(old_constant_pool_available_);
}
}
public:
explicit ConstantPoolUnavailableScope(MacroAssembler* masm)
: masm_(masm),
- old_constant_pool_available_(FLAG_enable_ool_constant_pool &&
- masm->is_ool_constant_pool_available()) {
- if (FLAG_enable_ool_constant_pool) {
- masm_->set_ool_constant_pool_available(false);
+ old_constant_pool_available_(FLAG_enable_embedded_constant_pool &&
+ masm->is_constant_pool_available()) {
+ if (FLAG_enable_embedded_constant_pool) {
+ masm_->set_constant_pool_available(false);
}
}
~ConstantPoolUnavailableScope() {
- if (FLAG_enable_ool_constant_pool) {
- masm_->set_ool_constant_pool_available(old_constant_pool_available_);
+ if (FLAG_enable_embedded_constant_pool) {
+ masm_->set_constant_pool_available(old_constant_pool_available_);
}
}
}
+// Emits a 64-bit data constant into the code stream (for inline tables),
+// checking for buffer space first.
+void Assembler::dq(uint64_t data) {
+ CheckBuffer();
+ *reinterpret_cast<uint64_t*>(pc_) = data;
+ pc_ += sizeof(uint64_t);
+}
+
+
void Assembler::dd(Label* label) {
CheckBuffer();
RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return;
-}
-
-
} // namespace internal
} // namespace v8
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED);
// On MIPS there is no Constant Pool so we skip that parameter.
- INLINE(static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool)) {
+ INLINE(static Address target_address_at(Address pc, Address constant_pool)) {
return target_address_at(pc);
}
- INLINE(static void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED)) {
+ INLINE(static void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) {
set_target_address_at(pc, target, icache_flush_mode);
}
INLINE(static Address target_address_at(Address pc, Code* code)) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
INLINE(static void set_target_address_at(Address pc,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED)) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target, icache_flush_mode);
}
// inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
+ void dq(uint64_t data);
+ // Emits a pointer-sized datum; this port assumes 32-bit pointers
+ // (uintptr_t fits dd's uint32_t), so it forwards to dd.
+ void dp(uintptr_t data) { dd(data); }
void dd(Label* label);
// Emits the address of the code stub's first instruction.
void CheckTrampolinePool();
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ // Back-patch hook for embedded-constant-pool load instructions. This
+ // target never emits such instructions, so any call here is a bug.
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ // No embedded constant pool support.
+ UNREACHABLE();
+ }
protected:
// Relocation for a type-recording IC has the AST id added to it. This
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- // No out-of-line constant pool support.
+ // No embedded constant pool support.
UNREACHABLE();
}
}
-Object*& ExitFrame::constant_pool_slot() const {
- UNREACHABLE();
- return Memory::Object_at(NULL);
-}
-
-
} // namespace internal
} // namespace v8
}
+// Emits a 64-bit data constant into the code stream (for inline tables),
+// checking for buffer space first.
+void Assembler::dq(uint64_t data) {
+ CheckBuffer();
+ *reinterpret_cast<uint64_t*>(pc_) = data;
+ pc_ += sizeof(uint64_t);
+}
+
+
void Assembler::dd(Label* label) {
CheckBuffer();
RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return;
-}
-
-
} // namespace internal
} // namespace v8
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED);
// On MIPS there is no Constant Pool so we skip that parameter.
- INLINE(static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool)) {
+ INLINE(static Address target_address_at(Address pc, Address constant_pool)) {
return target_address_at(pc);
}
- INLINE(static void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED)) {
+ INLINE(static void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) {
set_target_address_at(pc, target, icache_flush_mode);
}
INLINE(static Address target_address_at(Address pc, Code* code)) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
INLINE(static void set_target_address_at(Address pc,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED)) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target, icache_flush_mode);
}
// inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
+ void dq(uint64_t data);
+ // Emits a pointer-sized datum; this port uses 64-bit pointers, so it
+ // forwards to dq.
+ void dp(uintptr_t data) { dq(data); }
void dd(Label* label);
// Emits the address of the code stub's first instruction.
void CheckTrampolinePool();
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ // Back-patch hook for embedded-constant-pool load instructions. This
+ // target never emits such instructions, so any call here is a bug.
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ // No embedded constant pool support.
+ UNREACHABLE();
+ }
protected:
// Relocation for a type-recording IC has the AST id added to it. This
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- // No out-of-line constant pool support.
+ // No embedded constant pool support.
UNREACHABLE();
}
}
-Object*& ExitFrame::constant_pool_slot() const {
- UNREACHABLE();
- return Memory::Object_at(NULL);
-}
-
-
} // namespace internal
} // namespace v8
case FIXED_DOUBLE_ARRAY_TYPE:
FixedDoubleArray::cast(this)->FixedDoubleArrayVerify();
break;
- case CONSTANT_POOL_ARRAY_TYPE:
- ConstantPoolArray::cast(this)->ConstantPoolArrayVerify();
- break;
case BYTE_ARRAY_TYPE:
ByteArray::cast(this)->ByteArrayVerify();
break;
}
-void ConstantPoolArray::ConstantPoolArrayVerify() {
- CHECK(IsConstantPoolArray());
- ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR);
- while (!code_iter.is_finished()) {
- Address code_entry = get_code_ptr_entry(code_iter.next_index());
- VerifyPointer(Code::GetCodeFromTargetAddress(code_entry));
- }
- ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR);
- while (!heap_iter.is_finished()) {
- VerifyObjectField(OffsetOfElementAt(heap_iter.next_index()));
- }
-}
-
-
void JSGeneratorObject::JSGeneratorObjectVerify() {
// In an expression like "new g()", there can be a point where a generator
// object is allocated but its fields are all undefined, as it hasn't yet been
bool Object::IsFixedArrayBase() const {
- return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
- IsFixedTypedArrayBase() || IsExternalArray();
+ return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase() ||
+ IsExternalArray();
}
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
-TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
bool Object::IsJSWeakCollection() const {
// map-word).
return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
}
- // The ConstantPoolArray contains heap pointers, but also raw values.
- if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
return (type <= LAST_DATA_TYPE);
}
}
-void ConstantPoolArray::NumberOfEntries::increment(Type type) {
- DCHECK(type < NUMBER_OF_TYPES);
- element_counts_[type]++;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::equals(
- const ConstantPoolArray::NumberOfEntries& other) const {
- for (int i = 0; i < NUMBER_OF_TYPES; i++) {
- if (element_counts_[i] != other.element_counts_[i]) return false;
- }
- return true;
-}
-
-
-bool ConstantPoolArray::NumberOfEntries::is_empty() const {
- return total_count() == 0;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
- DCHECK(type < NUMBER_OF_TYPES);
- return element_counts_[type];
-}
-
-
-int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
- int base = 0;
- DCHECK(type < NUMBER_OF_TYPES);
- for (int i = 0; i < type; i++) {
- base += element_counts_[i];
- }
- return base;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::total_count() const {
- int count = 0;
- for (int i = 0; i < NUMBER_OF_TYPES; i++) {
- count += element_counts_[i];
- }
- return count;
-}
-
-
-int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
- for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
- if (element_counts_[i] < min || element_counts_[i] > max) {
- return false;
- }
- }
- return true;
-}
-
-
-int ConstantPoolArray::Iterator::next_index() {
- DCHECK(!is_finished());
- int ret = next_index_++;
- update_section();
- return ret;
-}
-
-
-bool ConstantPoolArray::Iterator::is_finished() {
- return next_index_ > array_->last_index(type_, final_section_);
-}
-
-
-void ConstantPoolArray::Iterator::update_section() {
- if (next_index_ > array_->last_index(type_, current_section_) &&
- current_section_ != final_section_) {
- DCHECK(final_section_ == EXTENDED_SECTION);
- current_section_ = EXTENDED_SECTION;
- next_index_ = array_->first_index(type_, EXTENDED_SECTION);
- }
-}
-
-
-bool ConstantPoolArray::is_extended_layout() {
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
- return IsExtendedField::decode(small_layout_1);
-}
-
-
-ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
- return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
-}
-
-
-int ConstantPoolArray::first_extended_section_index() {
- DCHECK(is_extended_layout());
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- return TotalCountField::decode(small_layout_2);
-}
-
-
-int ConstantPoolArray::get_extended_section_header_offset() {
- return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
-}
-
-
-ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- return WeakObjectStateField::decode(small_layout_2);
-}
-
-
-void ConstantPoolArray::set_weak_object_state(
- ConstantPoolArray::WeakObjectState state) {
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
- WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
-}
-
-
-int ConstantPoolArray::first_index(Type type, LayoutSection section) {
- int index = 0;
- if (section == EXTENDED_SECTION) {
- DCHECK(is_extended_layout());
- index += first_extended_section_index();
- }
-
- for (Type type_iter = FIRST_TYPE; type_iter < type;
- type_iter = next_type(type_iter)) {
- index += number_of_entries(type_iter, section);
- }
-
- return index;
-}
-
-
-int ConstantPoolArray::last_index(Type type, LayoutSection section) {
- return first_index(type, section) + number_of_entries(type, section) - 1;
-}
-
-
-int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
- if (section == SMALL_SECTION) {
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- switch (type) {
- case INT64:
- return Int64CountField::decode(small_layout_1);
- case CODE_PTR:
- return CodePtrCountField::decode(small_layout_1);
- case HEAP_PTR:
- return HeapPtrCountField::decode(small_layout_1);
- case INT32:
- return Int32CountField::decode(small_layout_2);
- default:
- UNREACHABLE();
- return 0;
- }
- } else {
- DCHECK(section == EXTENDED_SECTION && is_extended_layout());
- int offset = get_extended_section_header_offset();
- switch (type) {
- case INT64:
- offset += kExtendedInt64CountOffset;
- break;
- case CODE_PTR:
- offset += kExtendedCodePtrCountOffset;
- break;
- case HEAP_PTR:
- offset += kExtendedHeapPtrCountOffset;
- break;
- case INT32:
- offset += kExtendedInt32CountOffset;
- break;
- default:
- UNREACHABLE();
- }
- return READ_INT_FIELD(this, offset);
- }
-}
-
-
-bool ConstantPoolArray::offset_is_type(int offset, Type type) {
- return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
- offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
- (is_extended_layout() &&
- offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
- offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
-}
-
-
-ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
- LayoutSection section;
- if (is_extended_layout() && index >= first_extended_section_index()) {
- section = EXTENDED_SECTION;
- } else {
- section = SMALL_SECTION;
- }
-
- Type type = FIRST_TYPE;
- while (index > last_index(type, section)) {
- type = next_type(type);
- }
- DCHECK(type <= LAST_TYPE);
- return type;
-}
-
-
-int64_t ConstantPoolArray::get_int64_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- return READ_INT64_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-double ConstantPoolArray::get_int64_entry_as_double(int index) {
- STATIC_ASSERT(kDoubleSize == kInt64Size);
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-Address ConstantPoolArray::get_code_ptr_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == CODE_PTR);
- return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
-}
-
-
-Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == HEAP_PTR);
- return READ_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-int32_t ConstantPoolArray::get_int32_entry(int index) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT32);
- return READ_INT32_FIELD(this, OffsetOfElementAt(index));
-}
-
-
-void ConstantPoolArray::set(int index, int64_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set(int index, double value) {
- STATIC_ASSERT(kDoubleSize == kInt64Size);
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT64);
- WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set(int index, Address value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == CODE_PTR);
- WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
-}
-
-
-void ConstantPoolArray::set(int index, Object* value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(!GetHeap()->InNewSpace(value));
- DCHECK(get_type(index) == HEAP_PTR);
- WRITE_FIELD(this, OffsetOfElementAt(index), value);
- WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set(int index, int32_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(get_type(index) == INT32);
- WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, INT32));
- WRITE_INT32_FIELD(this, offset, value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, INT64));
- WRITE_INT64_FIELD(this, offset, value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, double value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, INT64));
- WRITE_DOUBLE_FIELD(this, offset, value);
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, Address value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(offset_is_type(offset, CODE_PTR));
- WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
- WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
-}
-
-
-void ConstantPoolArray::set_at_offset(int offset, Object* value) {
- DCHECK(map() == GetHeap()->constant_pool_array_map());
- DCHECK(!GetHeap()->InNewSpace(value));
- DCHECK(offset_is_type(offset, HEAP_PTR));
- WRITE_FIELD(this, offset, value);
- WRITE_BARRIER(GetHeap(), this, offset, value);
-}
-
-
-void ConstantPoolArray::Init(const NumberOfEntries& small) {
- uint32_t small_layout_1 =
- Int64CountField::encode(small.count_of(INT64)) |
- CodePtrCountField::encode(small.count_of(CODE_PTR)) |
- HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
- IsExtendedField::encode(false);
- uint32_t small_layout_2 =
- Int32CountField::encode(small.count_of(INT32)) |
- TotalCountField::encode(small.total_count()) |
- WeakObjectStateField::encode(NO_WEAK_OBJECTS);
- WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
- WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
- if (kHeaderSize != kFirstEntryOffset) {
- DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
- WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
- }
-}
-
-
-void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
- const NumberOfEntries& extended) {
- // Initialize small layout fields first.
- Init(small);
-
- // Set is_extended_layout field.
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
- small_layout_1 = IsExtendedField::update(small_layout_1, true);
- WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
-
- // Initialize the extended layout fields.
- int extended_header_offset = get_extended_section_header_offset();
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
- extended.count_of(INT64));
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
- extended.count_of(CODE_PTR));
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
- extended.count_of(HEAP_PTR));
- WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
- extended.count_of(INT32));
-}
-
-
-int ConstantPoolArray::size() {
- NumberOfEntries small(this, SMALL_SECTION);
- if (!is_extended_layout()) {
- return SizeFor(small);
- } else {
- NumberOfEntries extended(this, EXTENDED_SECTION);
- return SizeForExtended(small, extended);
- }
-}
-
-
-int ConstantPoolArray::length() {
- uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
- int length = TotalCountField::decode(small_layout_2);
- if (is_extended_layout()) {
- length += number_of_entries(INT64, EXTENDED_SECTION) +
- number_of_entries(CODE_PTR, EXTENDED_SECTION) +
- number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
- number_of_entries(INT32, EXTENDED_SECTION);
- }
- return length;
-}
-
-
WriteBarrierMode HeapObject::GetWriteBarrierMode(
const DisallowHeapAllocation& promise) {
Heap* heap = GetHeap();
AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
- if ((IsFixedFloat64Array() || IsFixedDoubleArray() ||
- IsConstantPoolArray()) &&
+ if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
FixedArrayBase::cast(this)->length() != 0) {
return kDoubleAligned;
}
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
-CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
return FixedDoubleArray::SizeFor(
reinterpret_cast<FixedDoubleArray*>(this)->length());
}
- if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
- return reinterpret_cast<ConstantPoolArray*>(this)->size();
- }
if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
return reinterpret_cast<FixedTypedArrayBase*>(
}
-ConstantPoolArray* Code::constant_pool() {
- return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
-}
-
-
-void Code::set_constant_pool(Object* value) {
- DCHECK(value->IsConstantPoolArray());
- WRITE_FIELD(this, kConstantPoolOffset, value);
- WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
+Address Code::constant_pool() {
+ Address constant_pool = NULL;
+ if (FLAG_enable_embedded_constant_pool) {
+ int offset = constant_pool_offset();
+ if (offset < instruction_size()) {
+ constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
+ }
+ }
+ return constant_pool;
}
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
+INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
WRITE_FIELD(this, kRelocationInfoOffset, NULL);
WRITE_FIELD(this, kHandlerTableOffset, NULL);
WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
- WRITE_FIELD(this, kConstantPoolOffset, NULL);
// Do not wipe out major/minor keys on a code stub or IC
if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
case FIXED_DOUBLE_ARRAY_TYPE:
FixedDoubleArray::cast(this)->FixedDoubleArrayPrint(os);
break;
- case CONSTANT_POOL_ARRAY_TYPE:
- ConstantPoolArray::cast(this)->ConstantPoolArrayPrint(os);
- break;
case FIXED_ARRAY_TYPE:
FixedArray::cast(this)->FixedArrayPrint(os);
break;
}
-void ConstantPoolArray::ConstantPoolArrayPrint(std::ostream& os) { // NOLINT
- HeapObject::PrintHeader(os, "ConstantPoolArray");
- os << " - length: " << length();
- for (int i = 0; i <= last_index(INT32, SMALL_SECTION); i++) {
- if (i < last_index(INT64, SMALL_SECTION)) {
- os << "\n [" << i << "]: double: " << get_int64_entry_as_double(i);
- } else if (i <= last_index(CODE_PTR, SMALL_SECTION)) {
- os << "\n [" << i << "]: code target pointer: "
- << reinterpret_cast<void*>(get_code_ptr_entry(i));
- } else if (i <= last_index(HEAP_PTR, SMALL_SECTION)) {
- os << "\n [" << i << "]: heap pointer: "
- << reinterpret_cast<void*>(get_heap_ptr_entry(i));
- } else if (i <= last_index(INT32, SMALL_SECTION)) {
- os << "\n [" << i << "]: int32: " << get_int32_entry(i);
- }
- }
- if (is_extended_layout()) {
- os << "\n Extended section:";
- for (int i = first_extended_section_index();
- i <= last_index(INT32, EXTENDED_SECTION); i++) {
- if (i < last_index(INT64, EXTENDED_SECTION)) {
- os << "\n [" << i << "]: double: " << get_int64_entry_as_double(i);
- } else if (i <= last_index(CODE_PTR, EXTENDED_SECTION)) {
- os << "\n [" << i << "]: code target pointer: "
- << reinterpret_cast<void*>(get_code_ptr_entry(i));
- } else if (i <= last_index(HEAP_PTR, EXTENDED_SECTION)) {
- os << "\n [" << i << "]: heap pointer: "
- << reinterpret_cast<void*>(get_heap_ptr_entry(i));
- } else if (i <= last_index(INT32, EXTENDED_SECTION)) {
- os << "\n [" << i << "]: int32: " << get_int32_entry(i);
- }
- }
- }
- os << "\n";
-}
-
-
void JSValue::JSValuePrint(std::ostream& os) { // NOLINT
HeapObject::PrintHeader(os, "ValueObject");
value()->Print(os);
case FIXED_ARRAY_TYPE:
FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
break;
- case CONSTANT_POOL_ARRAY_TYPE:
- reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
- break;
case FIXED_DOUBLE_ARRAY_TYPE:
break;
case JS_OBJECT_TYPE:
}
-void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
- // Unfortunately the serializer relies on pointers within an object being
- // visited in-order, so we have to iterate both the code and heap pointers in
- // the small section before doing so in the extended section.
- for (int s = 0; s <= final_section(); ++s) {
- LayoutSection section = static_cast<LayoutSection>(s);
- ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR,
- section);
- while (!code_iter.is_finished()) {
- v->VisitCodeEntry(reinterpret_cast<Address>(
- RawFieldOfElementAt(code_iter.next_index())));
- }
-
- ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR,
- section);
- while (!heap_iter.is_finished()) {
- v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
- }
- }
-}
-
-
-void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
- Type type[] = { CODE_PTR, HEAP_PTR };
- Address default_value[] = {
- isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
- reinterpret_cast<Address>(isolate->heap()->undefined_value()) };
-
- for (int i = 0; i < 2; ++i) {
- for (int s = 0; s <= final_section(); ++s) {
- LayoutSection section = static_cast<LayoutSection>(s);
- if (number_of_entries(type[i], section) > 0) {
- int offset = OffsetOfElementAt(first_index(type[i], section));
- MemsetPointer(
- reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
- default_value[i],
- number_of_entries(type[i], section));
- }
- }
- }
-}
-
-
void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
// Iterate over all fields in the body but take care in dealing with
// the code entry.
os << "Instructions (size = " << instruction_size() << ")\n";
{
Isolate* isolate = GetIsolate();
- int decode_size = is_crankshafted()
- ? static_cast<int>(safepoint_table_offset())
- : instruction_size();
- // If there might be a back edge table, stop before reaching it.
- if (kind() == Code::FUNCTION) {
- decode_size =
- Min(decode_size, static_cast<int>(back_edge_table_offset()));
- }
+ int size = instruction_size();
+ int safepoint_offset =
+ is_crankshafted() ? static_cast<int>(safepoint_table_offset()) : size;
+ int back_edge_offset = (kind() == Code::FUNCTION)
+ ? static_cast<int>(back_edge_table_offset())
+ : size;
+ int constant_pool_offset = FLAG_enable_embedded_constant_pool
+ ? this->constant_pool_offset()
+ : size;
+
+ // Stop before reaching any embedded tables.
+ int code_size = Min(safepoint_offset, back_edge_offset);
+ code_size = Min(code_size, constant_pool_offset);
byte* begin = instruction_start();
- byte* end = begin + decode_size;
+ byte* end = begin + code_size;
Disassembler::Decode(isolate, &os, begin, end, this);
+
+ if (constant_pool_offset < size) {
+ int constant_pool_size = size - constant_pool_offset;
+ DCHECK((constant_pool_size & kPointerAlignmentMask) == 0);
+ os << "\nConstant Pool (size = " << constant_pool_size << ")\n";
+ Vector<char> buf = Vector<char>::New(50);
+ intptr_t* ptr = reinterpret_cast<intptr_t*>(begin + constant_pool_offset);
+ for (int i = 0; i < constant_pool_size; i += kPointerSize, ptr++) {
+ SNPrintF(buf, "%4d %08" V8PRIxPTR, i, *ptr);
+ os << static_cast<const void*>(ptr) << " " << buf.start() << "\n";
+ }
+ }
}
os << "\n";
it.rinfo()->Print(GetIsolate(), os);
}
os << "\n";
-
-#ifdef OBJECT_PRINT
- if (FLAG_enable_ool_constant_pool) {
- ConstantPoolArray* pool = constant_pool();
- if (pool->length()) {
- os << "Constant Pool\n";
- pool->Print(os);
- os << "\n";
- }
- }
-#endif
}
#endif // ENABLE_DISASSEMBLER
\
V(FIXED_ARRAY_TYPE) \
V(FIXED_DOUBLE_ARRAY_TYPE) \
- V(CONSTANT_POOL_ARRAY_TYPE) \
V(SHARED_FUNCTION_INFO_TYPE) \
V(WEAK_CELL_TYPE) \
\
DEBUG_INFO_TYPE,
BREAK_POINT_INFO_TYPE,
FIXED_ARRAY_TYPE,
- CONSTANT_POOL_ARRAY_TYPE,
SHARED_FUNCTION_INFO_TYPE,
CELL_TYPE,
WEAK_CELL_TYPE,
V(FixedDoubleArray) \
V(WeakFixedArray) \
V(ArrayList) \
- V(ConstantPoolArray) \
V(Context) \
V(ScriptContextTable) \
V(NativeContext) \
};
-// ConstantPoolArray describes a fixed-sized array containing constant pool
-// entries.
-//
-// A ConstantPoolArray can be structured in two different ways depending upon
-// whether it is extended or small. The is_extended_layout() method can be used
-// to discover which layout the constant pool has.
-//
-// The format of a small constant pool is:
-// [kSmallLayout1Offset] : Small section layout bitmap 1
-// [kSmallLayout2Offset] : Small section layout bitmap 2
-// [first_index(INT64, SMALL_SECTION)] : 64 bit entries
-// ... : ...
-// [first_index(CODE_PTR, SMALL_SECTION)] : code pointer entries
-// ... : ...
-// [first_index(HEAP_PTR, SMALL_SECTION)] : heap pointer entries
-// ... : ...
-// [first_index(INT32, SMALL_SECTION)] : 32 bit entries
-// ... : ...
-//
-// If the constant pool has an extended layout, the extended section constant
-// pool also contains an extended section, which has the following format at
-// location get_extended_section_header_offset():
-// [kExtendedInt64CountOffset] : count of extended 64 bit entries
-// [kExtendedCodePtrCountOffset] : count of extended code pointers
-// [kExtendedHeapPtrCountOffset] : count of extended heap pointers
-// [kExtendedInt32CountOffset] : count of extended 32 bit entries
-// [first_index(INT64, EXTENDED_SECTION)] : 64 bit entries
-// ... : ...
-// [first_index(CODE_PTR, EXTENDED_SECTION)]: code pointer entries
-// ... : ...
-// [first_index(HEAP_PTR, EXTENDED_SECTION)]: heap pointer entries
-// ... : ...
-// [first_index(INT32, EXTENDED_SECTION)] : 32 bit entries
-// ... : ...
-//
-class ConstantPoolArray: public HeapObject {
- public:
- enum WeakObjectState { NO_WEAK_OBJECTS, WEAK_OBJECTS_IN_OPTIMIZED_CODE };
-
- enum Type {
- INT64 = 0,
- CODE_PTR,
- HEAP_PTR,
- INT32,
- // Number of types stored by the ConstantPoolArrays.
- NUMBER_OF_TYPES,
- FIRST_TYPE = INT64,
- LAST_TYPE = INT32
- };
-
- enum LayoutSection {
- SMALL_SECTION = 0,
- EXTENDED_SECTION,
- NUMBER_OF_LAYOUT_SECTIONS
- };
-
- class NumberOfEntries BASE_EMBEDDED {
- public:
- inline NumberOfEntries() {
- for (int i = 0; i < NUMBER_OF_TYPES; i++) {
- element_counts_[i] = 0;
- }
- }
-
- inline NumberOfEntries(int int64_count, int code_ptr_count,
- int heap_ptr_count, int int32_count) {
- element_counts_[INT64] = int64_count;
- element_counts_[CODE_PTR] = code_ptr_count;
- element_counts_[HEAP_PTR] = heap_ptr_count;
- element_counts_[INT32] = int32_count;
- }
-
- inline NumberOfEntries(ConstantPoolArray* array, LayoutSection section) {
- element_counts_[INT64] = array->number_of_entries(INT64, section);
- element_counts_[CODE_PTR] = array->number_of_entries(CODE_PTR, section);
- element_counts_[HEAP_PTR] = array->number_of_entries(HEAP_PTR, section);
- element_counts_[INT32] = array->number_of_entries(INT32, section);
- }
-
- inline void increment(Type type);
- inline int equals(const NumberOfEntries& other) const;
- inline bool is_empty() const;
- inline int count_of(Type type) const;
- inline int base_of(Type type) const;
- inline int total_count() const;
- inline int are_in_range(int min, int max) const;
-
- private:
- int element_counts_[NUMBER_OF_TYPES];
- };
-
- class Iterator BASE_EMBEDDED {
- public:
- inline Iterator(ConstantPoolArray* array, Type type)
- : array_(array),
- type_(type),
- final_section_(array->final_section()),
- current_section_(SMALL_SECTION),
- next_index_(array->first_index(type, SMALL_SECTION)) {
- update_section();
- }
-
- inline Iterator(ConstantPoolArray* array, Type type, LayoutSection section)
- : array_(array),
- type_(type),
- final_section_(section),
- current_section_(section),
- next_index_(array->first_index(type, section)) {
- update_section();
- }
-
- inline int next_index();
- inline bool is_finished();
-
- private:
- inline void update_section();
- ConstantPoolArray* array_;
- const Type type_;
- const LayoutSection final_section_;
-
- LayoutSection current_section_;
- int next_index_;
- };
-
- // Getters for the first index, the last index and the count of entries of
- // a given type for a given layout section.
- inline int first_index(Type type, LayoutSection layout_section);
- inline int last_index(Type type, LayoutSection layout_section);
- inline int number_of_entries(Type type, LayoutSection layout_section);
-
- // Returns the type of the entry at the given index.
- inline Type get_type(int index);
- inline bool offset_is_type(int offset, Type type);
-
- // Setter and getter for pool elements.
- inline Address get_code_ptr_entry(int index);
- inline Object* get_heap_ptr_entry(int index);
- inline int64_t get_int64_entry(int index);
- inline int32_t get_int32_entry(int index);
- inline double get_int64_entry_as_double(int index);
-
- inline void set(int index, Address value);
- inline void set(int index, Object* value);
- inline void set(int index, int64_t value);
- inline void set(int index, double value);
- inline void set(int index, int32_t value);
-
- // Setters which take a raw offset rather than an index (for code generation).
- inline void set_at_offset(int offset, int32_t value);
- inline void set_at_offset(int offset, int64_t value);
- inline void set_at_offset(int offset, double value);
- inline void set_at_offset(int offset, Address value);
- inline void set_at_offset(int offset, Object* value);
-
- // Setter and getter for weak objects state
- inline void set_weak_object_state(WeakObjectState state);
- inline WeakObjectState get_weak_object_state();
-
- // Returns true if the constant pool has an extended layout, false if it has
- // only the small layout.
- inline bool is_extended_layout();
-
- // Returns the last LayoutSection in this constant pool array.
- inline LayoutSection final_section();
-
- // Set up initial state for a small layout constant pool array.
- inline void Init(const NumberOfEntries& small);
-
- // Set up initial state for an extended layout constant pool array.
- inline void InitExtended(const NumberOfEntries& small,
- const NumberOfEntries& extended);
-
- // Clears the pointer entries with GC safe values.
- void ClearPtrEntries(Isolate* isolate);
-
- // returns the total number of entries in the constant pool array.
- inline int length();
-
- // Garbage collection support.
- inline int size();
-
-
- inline static int MaxInt64Offset(int number_of_int64) {
- return kFirstEntryOffset + (number_of_int64 * kInt64Size);
- }
-
- inline static int SizeFor(const NumberOfEntries& small) {
- int size = kFirstEntryOffset +
- (small.count_of(INT64) * kInt64Size) +
- (small.count_of(CODE_PTR) * kPointerSize) +
- (small.count_of(HEAP_PTR) * kPointerSize) +
- (small.count_of(INT32) * kInt32Size);
- return RoundUp(size, kPointerSize);
- }
-
- inline static int SizeForExtended(const NumberOfEntries& small,
- const NumberOfEntries& extended) {
- int size = SizeFor(small);
- size = RoundUp(size, kInt64Size); // Align extended header to 64 bits.
- size += kExtendedFirstOffset +
- (extended.count_of(INT64) * kInt64Size) +
- (extended.count_of(CODE_PTR) * kPointerSize) +
- (extended.count_of(HEAP_PTR) * kPointerSize) +
- (extended.count_of(INT32) * kInt32Size);
- return RoundUp(size, kPointerSize);
- }
-
- inline static int entry_size(Type type) {
- switch (type) {
- case INT32:
- return kInt32Size;
- case INT64:
- return kInt64Size;
- case CODE_PTR:
- case HEAP_PTR:
- return kPointerSize;
- default:
- UNREACHABLE();
- return 0;
- }
- }
-
- // Code Generation support.
- inline int OffsetOfElementAt(int index) {
- int offset;
- LayoutSection section;
- if (is_extended_layout() && index >= first_extended_section_index()) {
- section = EXTENDED_SECTION;
- offset = get_extended_section_header_offset() + kExtendedFirstOffset;
- } else {
- section = SMALL_SECTION;
- offset = kFirstEntryOffset;
- }
-
- // Add offsets for the preceding type sections.
- DCHECK(index <= last_index(LAST_TYPE, section));
- for (Type type = FIRST_TYPE; index > last_index(type, section);
- type = next_type(type)) {
- offset += entry_size(type) * number_of_entries(type, section);
- }
-
- // Add offset for the index in it's type.
- Type type = get_type(index);
- offset += entry_size(type) * (index - first_index(type, section));
- return offset;
- }
-
- DECLARE_CAST(ConstantPoolArray)
-
- // Garbage collection support.
- Object** RawFieldOfElementAt(int index) {
- return HeapObject::RawField(this, OffsetOfElementAt(index));
- }
-
- // Small Layout description.
- static const int kSmallLayout1Offset = HeapObject::kHeaderSize;
- static const int kSmallLayout2Offset = kSmallLayout1Offset + kInt32Size;
- static const int kHeaderSize = kSmallLayout2Offset + kInt32Size;
- static const int kFirstEntryOffset = ROUND_UP(kHeaderSize, kInt64Size);
-
- static const int kSmallLayoutCountBits = 10;
- static const int kMaxSmallEntriesPerType = (1 << kSmallLayoutCountBits) - 1;
-
- // Fields in kSmallLayout1Offset.
- class Int64CountField: public BitField<int, 1, kSmallLayoutCountBits> {};
- class CodePtrCountField: public BitField<int, 11, kSmallLayoutCountBits> {};
- class HeapPtrCountField: public BitField<int, 21, kSmallLayoutCountBits> {};
- class IsExtendedField: public BitField<bool, 31, 1> {};
-
- // Fields in kSmallLayout2Offset.
- class Int32CountField: public BitField<int, 1, kSmallLayoutCountBits> {};
- class TotalCountField: public BitField<int, 11, 12> {};
- class WeakObjectStateField: public BitField<WeakObjectState, 23, 2> {};
-
- // Extended layout description, which starts at
- // get_extended_section_header_offset().
- static const int kExtendedInt64CountOffset = 0;
- static const int kExtendedCodePtrCountOffset =
- kExtendedInt64CountOffset + kInt32Size;
- static const int kExtendedHeapPtrCountOffset =
- kExtendedCodePtrCountOffset + kInt32Size;
- static const int kExtendedInt32CountOffset =
- kExtendedHeapPtrCountOffset + kInt32Size;
- static const int kExtendedFirstOffset =
- kExtendedInt32CountOffset + kInt32Size;
-
- // Dispatched behavior.
- void ConstantPoolIterateBody(ObjectVisitor* v);
-
- DECLARE_PRINTER(ConstantPoolArray)
- DECLARE_VERIFIER(ConstantPoolArray)
-
- private:
- inline int first_extended_section_index();
- inline int get_extended_section_header_offset();
-
- inline static Type next_type(Type type) {
- DCHECK(type >= FIRST_TYPE && type < NUMBER_OF_TYPES);
- int type_int = static_cast<int>(type);
- return static_cast<Type>(++type_int);
- }
-
- DISALLOW_IMPLICIT_CONSTRUCTORS(ConstantPoolArray);
-};
-
-
// DescriptorArrays are fixed arrays used to hold instance descriptors.
// The format of the these objects is:
// [0]: Number of descriptors
inline int prologue_offset() const;
inline void set_prologue_offset(int offset);
+ // [constant_pool_offset]: Offset of the constant pool.
+ // Valid for FLAG_enable_embedded_constant_pool only.
+ inline int constant_pool_offset() const;
+ inline void set_constant_pool_offset(int offset);
+
// Unchecked accessors to be used during GC.
inline ByteArray* unchecked_relocation_info();
inline void set_marked_for_deoptimization(bool flag);
// [constant_pool]: The constant pool for this function.
- inline ConstantPoolArray* constant_pool();
- inline void set_constant_pool(Object* constant_pool);
+ inline Address constant_pool();
// Get the safepoint entry for the given pc.
SafepointEntry GetSafepointEntry(Address pc);
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;
+ static const int kConstantPoolSize =
+ FLAG_enable_embedded_constant_pool ? kIntSize : 0;
+
// Layout description.
static const int kRelocationInfoOffset = HeapObject::kHeaderSize;
static const int kHandlerTableOffset = kRelocationInfoOffset + kPointerSize;
// Note: We might be able to squeeze this into the flags above.
static const int kPrologueOffset = kKindSpecificFlags2Offset + kIntSize;
static const int kConstantPoolOffset = kPrologueOffset + kIntSize;
-
- static const int kHeaderPaddingStart = kConstantPoolOffset + kPointerSize;
+ static const int kHeaderPaddingStart =
+ kConstantPoolOffset + kConstantPoolSize;
// Add padding to align the instruction start following right after
// the Code object header.
static const int kHeaderSize =
(kHeaderPaddingStart + kCodeAlignmentMask) & ~kCodeAlignmentMask;
- // Ensure that the slot for the constant pool pointer is aligned.
- STATIC_ASSERT((kConstantPoolOffset & kPointerAlignmentMask) == 0);
// Byte offsets within kKindSpecificFlags1Offset.
static const int kFullCodeFlags = kKindSpecificFlags1Offset;
DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) ||
rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE);
+ if (FLAG_enable_embedded_constant_pool &&
+ Assembler::IsConstantPoolLoadStart(pc_)) {
+ // We return the PC for embedded constant pool since this function is used by
+ // the serializer and expects the address to reside within the code object.
+ return reinterpret_cast<Address>(pc_);
+ }
+
// Read the address of the word containing the target_address in an
// instruction stream.
// The only architecture-independent user of this function is the serializer.
Address RelocInfo::constant_pool_entry_address() {
+ if (FLAG_enable_embedded_constant_pool) {
+ Address constant_pool = host_->constant_pool();
+ DCHECK(constant_pool);
+ ConstantPoolEntry::Access access;
+ if (Assembler::IsConstantPoolLoadStart(pc_, &access))
+ return Assembler::target_constant_pool_address_at(
+ pc_, constant_pool, access, ConstantPoolEntry::INTPTR);
+ }
UNREACHABLE();
return NULL;
}
// mtlr ip
// blrl
// @ return address
- return pc - (kMovInstructions + 2) * kInstrSize;
+ int len;
+ ConstantPoolEntry::Access access;
+ if (FLAG_enable_embedded_constant_pool &&
+ IsConstantPoolLoadEnd(pc - 3 * kInstrSize, &access)) {
+ len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
+ } else {
+ len = kMovInstructionsNoConstantPool;
+ }
+ return pc - (len + 2) * kInstrSize;
}
Address Assembler::return_address_from_call_start(Address pc) {
- return pc + (kMovInstructions + 2) * kInstrSize;
+ int len;
+ ConstantPoolEntry::Access access;
+ if (FLAG_enable_embedded_constant_pool &&
+ IsConstantPoolLoadStart(pc, &access)) {
+ len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
+ } else {
+ len = kMovInstructionsNoConstantPool;
+ }
+ return pc + (len + 2) * kInstrSize;
}
}
-static const int kNoCodeAgeInstructions = 6;
-static const int kCodeAgingInstructions = Assembler::kMovInstructions + 3;
+static const int kNoCodeAgeInstructions =
+ FLAG_enable_embedded_constant_pool ? 7 : 6;
+static const int kCodeAgingInstructions =
+ Assembler::kMovInstructionsNoConstantPool + 3;
static const int kNoCodeAgeSequenceInstructions =
((kNoCodeAgeInstructions >= kCodeAgingInstructions)
? kNoCodeAgeInstructions
// Fetch the 32bit value from the FIXED_SEQUENCE lis/ori
-Address Assembler::target_address_at(Address pc,
- ConstantPoolArray* constant_pool) {
+Address Assembler::target_address_at(Address pc, Address constant_pool) {
+ if (FLAG_enable_embedded_constant_pool && constant_pool) {
+ ConstantPoolEntry::Access access;
+ if (IsConstantPoolLoadStart(pc, &access))
+ return Memory::Address_at(target_constant_pool_address_at(
+ pc, constant_pool, access, ConstantPoolEntry::INTPTR));
+ }
+
Instr instr1 = instr_at(pc);
Instr instr2 = instr_at(pc + kInstrSize);
// Interpret 2 instructions generated by lis/ori
}
+#if V8_TARGET_ARCH_PPC64
+const int kLoadIntptrOpcode = LD;
+#else
+const int kLoadIntptrOpcode = LWZ;
+#endif
+
+// Constant pool load sequence detection:
+// 1) REGULAR access:
+// load <dst>, kConstantPoolRegister + <offset>
+//
+// 2) OVERFLOWED access:
+// addis <scratch>, kConstantPoolRegister, <offset_high>
+// load <dst>, <scratch> + <offset_low>
+bool Assembler::IsConstantPoolLoadStart(Address pc,
+ ConstantPoolEntry::Access* access) {
+ Instr instr = instr_at(pc);
+ int opcode = instr & kOpcodeMask;
+ if (!GetRA(instr).is(kConstantPoolRegister)) return false;
+ bool overflowed = (opcode == ADDIS);
+#ifdef DEBUG
+ if (overflowed) {
+ opcode = instr_at(pc + kInstrSize) & kOpcodeMask;
+ }
+ DCHECK(opcode == kLoadIntptrOpcode || opcode == LFD);
+#endif
+ if (access) {
+ *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
+ : ConstantPoolEntry::REGULAR);
+ }
+ return true;
+}
+
+
+bool Assembler::IsConstantPoolLoadEnd(Address pc,
+ ConstantPoolEntry::Access* access) {
+ Instr instr = instr_at(pc);
+ int opcode = instr & kOpcodeMask;
+ bool overflowed = false;
+ if (!(opcode == kLoadIntptrOpcode || opcode == LFD)) return false;
+ if (!GetRA(instr).is(kConstantPoolRegister)) {
+ instr = instr_at(pc - kInstrSize);
+ opcode = instr & kOpcodeMask;
+ if ((opcode != ADDIS) || !GetRA(instr).is(kConstantPoolRegister)) {
+ return false;
+ }
+ overflowed = true;
+ }
+ if (access) {
+ *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
+ : ConstantPoolEntry::REGULAR);
+ }
+ return true;
+}
+
+
+int Assembler::GetConstantPoolOffset(Address pc,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
+#ifdef DEBUG
+ ConstantPoolEntry::Access access_check =
+ static_cast<ConstantPoolEntry::Access>(-1);
+ DCHECK(IsConstantPoolLoadStart(pc, &access_check));
+ DCHECK(access_check == access);
+#endif
+ int offset;
+ if (overflowed) {
+ offset = (instr_at(pc) & kImm16Mask) << 16;
+ offset += SIGN_EXT_IMM16(instr_at(pc + kInstrSize) & kImm16Mask);
+ DCHECK(!is_int16(offset));
+ } else {
+ offset = SIGN_EXT_IMM16((instr_at(pc) & kImm16Mask));
+ }
+ return offset;
+}
+
+
+void Assembler::PatchConstantPoolAccessInstruction(
+ int pc_offset, int offset, ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ Address pc = buffer_ + pc_offset;
+ bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
+#ifdef DEBUG
+ ConstantPoolEntry::Access access_check =
+ static_cast<ConstantPoolEntry::Access>(-1);
+ DCHECK(IsConstantPoolLoadStart(pc, &access_check));
+ DCHECK(access_check == access);
+ DCHECK(overflowed != is_int16(offset));
+#endif
+ if (overflowed) {
+ int hi_word = static_cast<int>(offset >> 16);
+ int lo_word = static_cast<int>(offset & 0xffff);
+ if (lo_word & 0x8000) hi_word++;
+
+ Instr instr1 = instr_at(pc);
+ Instr instr2 = instr_at(pc + kInstrSize);
+ instr1 &= ~kImm16Mask;
+ instr1 |= (hi_word & kImm16Mask);
+ instr2 &= ~kImm16Mask;
+ instr2 |= (lo_word & kImm16Mask);
+ instr_at_put(pc, instr1);
+ instr_at_put(pc + kInstrSize, instr2);
+ } else {
+ Instr instr = instr_at(pc);
+ instr &= ~kImm16Mask;
+ instr |= (offset & kImm16Mask);
+ instr_at_put(pc, instr);
+ }
+}
+
+
+Address Assembler::target_constant_pool_address_at(
+ Address pc, Address constant_pool, ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ Address addr = constant_pool;
+ DCHECK(addr);
+ addr += GetConstantPoolOffset(pc, access, type);
+ return addr;
+}
+
+
// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code. The serializer
// has already deserialized the mov instructions etc.
// This code assumes the FIXED_SEQUENCE of lis/ori
-void Assembler::set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
+void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
+ if (FLAG_enable_embedded_constant_pool && constant_pool) {
+ ConstantPoolEntry::Access access;
+ if (IsConstantPoolLoadStart(pc, &access)) {
+ Memory::Address_at(target_constant_pool_address_at(
+ pc, constant_pool, access, ConstantPoolEntry::INTPTR)) = target;
+ return;
+ }
+ }
+
Instr instr1 = instr_at(pc);
Instr instr2 = instr_at(pc + kInstrSize);
// Interpret 2 instructions generated by lis/ori
bool RelocInfo::IsCodedSpecially() {
// The deserializer needs to know whether a pointer is specially
// coded. Being specially coded on PPC means that it is a lis/ori
- // instruction sequence, and these are always the case inside code
- // objects.
+ // instruction sequence or is a constant pool entry, and these are
+ // always the case inside code objects.
return true;
}
bool RelocInfo::IsInConstantPool() {
+ if (FLAG_enable_embedded_constant_pool) {
+ Address constant_pool = host_->constant_pool();
+ return (constant_pool && Assembler::IsConstantPoolLoadStart(pc_));
+ }
return false;
}
Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
: AssemblerBase(isolate, buffer, buffer_size),
recorded_ast_id_(TypeFeedbackId::None()),
+ constant_pool_builder_(kLoadPtrMaxReachBits, kLoadDoubleMaxReachBits),
positions_recorder_(this) {
reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
no_trampoline_pool_before_ = 0;
trampoline_pool_blocked_nesting_ = 0;
+ constant_pool_entry_sharing_blocked_nesting_ = 0;
// We leave space (kMaxBlockTrampolineSectionSize)
// for BlockTrampolinePoolScope buffer.
next_buffer_check_ =
void Assembler::GetCode(CodeDesc* desc) {
+ // Emit constant pool if necessary.
+ int constant_pool_offset = EmitConstantPool();
+
EmitRelocations();
// Set up code descriptor.
desc->buffer_size = buffer_size_;
desc->instr_size = pc_offset();
desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
+ desc->constant_pool_size =
+ (constant_pool_offset ? desc->instr_size - constant_pool_offset : 0);
desc->origin = this;
}
#else
DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m));
#endif
- while ((pc_offset() & (m - 1)) != 0) {
+ // First ensure instruction alignment
+ while (pc_offset() & (kInstrSize - 1)) {
+ db(0);
+ }
+ // Then pad to requested alignment with nops
+ while (pc_offset() & (m - 1)) {
nop();
}
}
// Load the address of the label in a register.
Register dst = Register::from_code(instr_at(pos + kInstrSize));
CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos),
- kMovInstructions, CodePatcher::DONT_FLUSH);
+ kMovInstructionsNoConstantPool,
+ CodePatcher::DONT_FLUSH);
// Keep internal references relative until EmitRelocations.
patcher.masm()->bitwise_mov(dst, target_pos);
break;
CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos),
kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH);
// Keep internal references relative until EmitRelocations.
- patcher.masm()->emit_ptr(target_pos);
+ patcher.masm()->dp(target_pos);
break;
}
default:
Label instructions;
DCHECK(pc_offset() == 0);
emit_label_addr(&instructions);
- emit_ptr(0);
- emit_ptr(0);
+ dp(0);
+ dp(0);
bind(&instructions);
#endif
}
+int Assembler::instructions_required_for_mov(Register dst,
+ const Operand& src) const {
+ bool canOptimize =
+ !(src.must_output_reloc_info(this) || is_trampoline_pool_blocked());
+ if (use_constant_pool_for_mov(dst, src, canOptimize)) {
+ if (ConstantPoolAccessIsInOverflow()) {
+ return kMovInstructionsConstantPool + 1;
+ }
+ return kMovInstructionsConstantPool;
+ }
+ DCHECK(!canOptimize);
+ return kMovInstructionsNoConstantPool;
+}
+
+
+bool Assembler::use_constant_pool_for_mov(Register dst, const Operand& src,
+ bool canOptimize) const {
+ if (!FLAG_enable_embedded_constant_pool || !is_constant_pool_available()) {
+ // If there is no constant pool available, we must use a mov
+ // immediate sequence.
+ return false;
+ }
+
+ intptr_t value = src.immediate();
+#if V8_TARGET_ARCH_PPC64
+ bool allowOverflow = !((canOptimize && is_int32(value)) || dst.is(r0));
+#else
+ bool allowOverflow = !(canOptimize || dst.is(r0));
+#endif
+ if (canOptimize && is_int16(value)) {
+ // Prefer a single-instruction load-immediate.
+ return false;
+ }
+ if (!allowOverflow && ConstantPoolAccessIsInOverflow()) {
+ // Prefer non-relocatable two-instruction bitwise-mov32 over
+ // overflow sequence.
+ return false;
+ }
+
+ return true;
+}
+
+
void Assembler::EnsureSpaceFor(int space_needed) {
if (buffer_space() <= (kGap + space_needed)) {
GrowBuffer(space_needed);
canOptimize =
!(relocatable || (is_trampoline_pool_blocked() && !is_int16(value)));
+ if (use_constant_pool_for_mov(dst, src, canOptimize)) {
+ DCHECK(is_constant_pool_available());
+ if (relocatable) {
+ RecordRelocInfo(src.rmode_);
+ }
+ ConstantPoolEntry::Access access = ConstantPoolAddEntry(src.rmode_, value);
+#if V8_TARGET_ARCH_PPC64
+ if (access == ConstantPoolEntry::OVERFLOWED) {
+ addis(dst, kConstantPoolRegister, Operand::Zero());
+ ld(dst, MemOperand(dst, 0));
+ } else {
+ ld(dst, MemOperand(kConstantPoolRegister, 0));
+ }
+#else
+ if (access == ConstantPoolEntry::OVERFLOWED) {
+ addis(dst, kConstantPoolRegister, Operand::Zero());
+ lwz(dst, MemOperand(dst, 0));
+ } else {
+ lwz(dst, MemOperand(kConstantPoolRegister, 0));
+ }
+#endif
+ return;
+ }
+
if (canOptimize) {
if (is_int16(value)) {
li(dst, Operand(value));
BlockTrampolinePoolScope block_trampoline_pool(this);
emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask));
emit(dst.code());
- DCHECK(kMovInstructions >= 2);
- for (int i = 0; i < kMovInstructions - 2; i++) nop();
+ DCHECK(kMovInstructionsNoConstantPool >= 2);
+ for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop();
}
}
int position = link(label);
if (label->is_bound()) {
// Keep internal references relative until EmitRelocations.
- emit_ptr(position);
+ dp(position);
} else {
// Encode internal reference to unbound label. We use a dummy opcode
// such that it won't collide with any opcode that might appear in the
void Assembler::lfd(const DoubleRegister frt, const MemOperand& src) {
int offset = src.offset();
Register ra = src.ra();
+ DCHECK(!ra.is(r0));
DCHECK(is_int16(offset));
int imm16 = offset & kImm16Mask;
// could be x_form instruction with some casting magic
void Assembler::lfdu(const DoubleRegister frt, const MemOperand& src) {
int offset = src.offset();
Register ra = src.ra();
+ DCHECK(!ra.is(r0));
DCHECK(is_int16(offset));
int imm16 = offset & kImm16Mask;
// could be x_form instruction with some casting magic
}
-void Assembler::emit_ptr(intptr_t data) {
+void Assembler::dq(uint64_t value) {
CheckBuffer();
- *reinterpret_cast<intptr_t*>(pc_) = data;
- pc_ += sizeof(intptr_t);
+ *reinterpret_cast<uint64_t*>(pc_) = value;
+ pc_ += sizeof(uint64_t);
}
-void Assembler::emit_double(double value) {
+void Assembler::dp(uintptr_t data) {
CheckBuffer();
- *reinterpret_cast<double*>(pc_) = value;
- pc_ += sizeof(double);
+ *reinterpret_cast<uintptr_t*>(pc_) = data;
+ pc_ += sizeof(uintptr_t);
}
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
- DeferredRelocInfo rinfo(pc_offset(), rmode, data);
- RecordRelocInfo(rinfo);
-}
-
-
-void Assembler::RecordRelocInfo(const DeferredRelocInfo& rinfo) {
- if (rinfo.rmode() >= RelocInfo::JS_RETURN &&
- rinfo.rmode() <= RelocInfo::DEBUG_BREAK_SLOT) {
- // Adjust code for new modes.
- DCHECK(RelocInfo::IsDebugBreakSlot(rinfo.rmode()) ||
- RelocInfo::IsJSReturn(rinfo.rmode()) ||
- RelocInfo::IsComment(rinfo.rmode()) ||
- RelocInfo::IsPosition(rinfo.rmode()));
+ if (RelocInfo::IsNone(rmode) ||
+ // Don't record external references unless the heap will be serialized.
+ (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() &&
+ !emit_debug_code())) {
+ return;
}
- if (!RelocInfo::IsNone(rinfo.rmode())) {
- // Don't record external references unless the heap will be serialized.
- if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) {
- if (!serializer_enabled() && !emit_debug_code()) {
- return;
- }
- }
- if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) {
- DeferredRelocInfo reloc_info_with_ast_id(rinfo.position(), rinfo.rmode(),
- RecordedAstId().ToInt());
- ClearRecordedAstId();
- relocations_.push_back(reloc_info_with_ast_id);
- } else {
- relocations_.push_back(rinfo);
- }
+ if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
+ data = RecordedAstId().ToInt();
+ ClearRecordedAstId();
}
+ DeferredRelocInfo rinfo(pc_offset(), rmode, data);
+ relocations_.push_back(rinfo);
}
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- DCHECK(!FLAG_enable_ool_constant_pool);
-}
} // namespace internal
} // namespace v8
static const int kAllocatableLowRangeBegin = 3;
static const int kAllocatableLowRangeEnd = 10;
static const int kAllocatableHighRangeBegin = 14;
- static const int kAllocatableHighRangeEnd = 28;
+ static const int kAllocatableHighRangeEnd =
+ FLAG_enable_embedded_constant_pool ? 27 : 28;
static const int kAllocatableContext = 30;
static const int kNumAllocatableLow =
"r28",
"cp",
};
+ if (FLAG_enable_embedded_constant_pool &&
+ (index == kMaxNumAllocatableRegisters - 2)) {
+ return names[index + 1];
+ }
return names[index];
}
1 << 3 | 1 << 4 | 1 << 5 | 1 << 6 | 1 << 7 | 1 << 8 | 1 << 9 | 1 << 10 |
1 << 14 | 1 << 15 | 1 << 16 | 1 << 17 | 1 << 18 | 1 << 19 | 1 << 20 |
1 << 21 | 1 << 22 | 1 << 23 | 1 << 24 | 1 << 25 | 1 << 26 | 1 << 27 |
- 1 << 28 | 1 << 30;
+ (FLAG_enable_embedded_constant_pool ? 0 : 1 << 28) | 1 << 30;
static Register from_code(int code) {
Register r = {code};
const int kRegister_r25_Code = 25;
const int kRegister_r26_Code = 26;
const int kRegister_r27_Code = 27;
-const int kRegister_r28_Code = 28;
+const int kRegister_r28_Code = 28; // constant pool pointer
const int kRegister_r29_Code = 29; // roots array pointer
const int kRegister_r30_Code = 30; // context pointer
const int kRegister_fp_Code = 31; // frame pointer
// Give alias names to registers
const Register cp = {kRegister_r30_Code}; // JavaScript context pointer
const Register kRootRegister = {kRegister_r29_Code}; // Roots array pointer.
+const Register kConstantPoolRegister = {kRegister_r28_Code}; // Constant pool
// Double word FP register.
struct DoubleRegister {
// The high 8 bits are set to zero.
void label_at_put(Label* L, int at_offset);
+ INLINE(static bool IsConstantPoolLoadStart(
+ Address pc, ConstantPoolEntry::Access* access = nullptr));
+ INLINE(static bool IsConstantPoolLoadEnd(
+ Address pc, ConstantPoolEntry::Access* access = nullptr));
+ INLINE(static int GetConstantPoolOffset(Address pc,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type));
+ INLINE(void PatchConstantPoolAccessInstruction(
+ int pc_offset, int offset, ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type));
+
+ // Return the address in the constant pool of the code target address used by
+ // the branch/call instruction at pc, or the object in a mov.
+ INLINE(static Address target_constant_pool_address_at(
+ Address pc, Address constant_pool, ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type));
+
// Read/Modify the code target address in the branch/call instruction at pc.
- INLINE(static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool));
+ INLINE(static Address target_address_at(Address pc, Address constant_pool));
INLINE(static void set_target_address_at(
- Address pc, ConstantPoolArray* constant_pool, Address target,
+ Address pc, Address constant_pool, Address target,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
INLINE(static Address target_address_at(Address pc, Code* code)) {
- ConstantPoolArray* constant_pool = NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
INLINE(static void set_target_address_at(
Address pc, Code* code, Address target,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) {
- ConstantPoolArray* constant_pool = NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target, icache_flush_mode);
}
// Number of instructions to load an address via a mov sequence.
#if V8_TARGET_ARCH_PPC64
- static const int kMovInstructions = 5;
+ static const int kMovInstructionsConstantPool = 1;
+ static const int kMovInstructionsNoConstantPool = 5;
+#if defined(V8_PPC_TAGGING_OPT)
+ static const int kTaggedLoadInstructions = 1;
+#else
static const int kTaggedLoadInstructions = 2;
+#endif
#else
- static const int kMovInstructions = 2;
+ static const int kMovInstructionsConstantPool = 1;
+ static const int kMovInstructionsNoConstantPool = 2;
static const int kTaggedLoadInstructions = 1;
#endif
+ static const int kMovInstructions = FLAG_enable_embedded_constant_pool
+ ? kMovInstructionsConstantPool
+ : kMovInstructionsNoConstantPool;
// Distance between the instruction referring to the address of the call
// target and the return address.
// This is the length of the BreakLocation::SetDebugBreakAtReturn()
// code patch FIXED_SEQUENCE
- static const int kJSReturnSequenceInstructions = kMovInstructions + 3;
+ static const int kJSReturnSequenceInstructions =
+ kMovInstructionsNoConstantPool + 3;
static const int kJSReturnSequenceLength =
kJSReturnSequenceInstructions * kInstrSize;
// This is the length of the code sequence from SetDebugBreakAtSlot()
// FIXED_SEQUENCE
- static const int kDebugBreakSlotInstructions = kMovInstructions + 2;
+ static const int kDebugBreakSlotInstructions =
+ kMovInstructionsNoConstantPool + 2;
static const int kDebugBreakSlotLength =
kDebugBreakSlotInstructions * kInstrSize;
DISALLOW_IMPLICIT_CONSTRUCTORS(BlockTrampolinePoolScope);
};
+ // Scope class for temporarily disabling constant pool entry merging
+ class BlockConstantPoolEntrySharingScope {
+ public:
+ explicit BlockConstantPoolEntrySharingScope(Assembler* assem)
+ : assem_(assem) {
+ assem_->StartBlockConstantPoolEntrySharing();
+ }
+ ~BlockConstantPoolEntrySharingScope() {
+ assem_->EndBlockConstantPoolEntrySharing();
+ }
+
+ private:
+ Assembler* assem_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstantPoolEntrySharingScope);
+ };
+
// Debugging
// Mark address of the ExitJSFrame code.
// for inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
- void emit_ptr(intptr_t data);
- void emit_double(double data);
+ void dq(uint64_t data);
+ void dp(uintptr_t data);
PositionsRecorder* positions_recorder() { return &positions_recorder_; }
void BlockTrampolinePoolFor(int instructions);
void CheckTrampolinePool();
+ // For mov. Return the number of actual instructions required to
+ // load the operand into a register. This can be anywhere from
+ // one (constant pool small section) to five instructions (full
+ // 64-bit sequence).
+ //
+ // The value returned is only valid as long as no entries are added to the
+ // constant pool between this call and the actual instruction being emitted.
+ int instructions_required_for_mov(Register dst, const Operand& src) const;
+
+ // Decide between using the constant pool vs. a mov immediate sequence.
+ bool use_constant_pool_for_mov(Register dst, const Operand& src,
+ bool canOptimize) const;
+
// The code currently calls CheckBuffer() too often. This has the side
// effect of randomly growing the buffer in the middle of multi-instruction
// sequences.
// This function allows outside callers to check and grow the buffer
void EnsureSpaceFor(int space_needed);
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
+ int EmitConstantPool() { return constant_pool_builder_.Emit(this); }
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ bool ConstantPoolAccessIsInOverflow() const {
+ return constant_pool_builder_.NextAccess(ConstantPoolEntry::INTPTR) ==
+ ConstantPoolEntry::OVERFLOWED;
+ }
+
+ Label* ConstantPoolPosition() {
+ return constant_pool_builder_.EmittedPosition();
+ }
void EmitRelocations();
// Record reloc info for current pc_
void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
- void RecordRelocInfo(const DeferredRelocInfo& rinfo);
+ ConstantPoolEntry::Access ConstantPoolAddEntry(RelocInfo::Mode rmode,
+ intptr_t value) {
+ bool sharing_ok = RelocInfo::IsNone(rmode) ||
+ !(serializer_enabled() || rmode < RelocInfo::CELL ||
+ is_constant_pool_entry_sharing_blocked());
+ return constant_pool_builder_.AddEntry(pc_offset(), value, sharing_ok);
+ }
+ ConstantPoolEntry::Access ConstantPoolAddEntry(double value) {
+ return constant_pool_builder_.AddEntry(pc_offset(), value);
+ }
// Block the emission of the trampoline pool before pc_offset.
void BlockTrampolinePoolBefore(int pc_offset) {
return trampoline_pool_blocked_nesting_ > 0;
}
+ void StartBlockConstantPoolEntrySharing() {
+ constant_pool_entry_sharing_blocked_nesting_++;
+ }
+ void EndBlockConstantPoolEntrySharing() {
+ constant_pool_entry_sharing_blocked_nesting_--;
+ }
+ bool is_constant_pool_entry_sharing_blocked() const {
+ return constant_pool_entry_sharing_blocked_nesting_ > 0;
+ }
+
bool has_exception() const { return internal_trampoline_exception_; }
bool is_trampoline_emitted() const { return trampoline_emitted_; }
int trampoline_pool_blocked_nesting_; // Block emission if this is not zero.
int no_trampoline_pool_before_; // Block emission before this pc offset.
+ // Do not share constant pool entries.
+ int constant_pool_entry_sharing_blocked_nesting_;
+
// Relocation info generation
// Each relocation is encoded as a variable size value
static const int kMaxRelocSize = RelocInfoWriter::kMaxSize;
// The bound position, before this we cannot do instruction elimination.
int last_bound_pos_;
+ ConstantPoolBuilder constant_pool_builder_;
+
// Code emission
inline void CheckBuffer();
void GrowBuffer(int needed = 0);
__ push(function); // Preserve the function.
__ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ push(r3);
__ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
}
__ bind(&gc_required);
__ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ push(argument);
__ CallRuntime(Runtime::kNewStringWrapper, 1);
}
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
// Push function as parameter to the runtime call.
__ Push(r4, r4);
// Enter a construct frame.
{
- FrameScope scope(masm, StackFrame::CONSTRUCT);
+ FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
if (create_memento) {
__ AssertUndefinedOrAllocationSite(r5, r7);
CHECK(!FLAG_pretenuring_call_new);
{
- FrameScope scope(masm, StackFrame::CONSTRUCT);
+ FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
// Smi-tagged arguments count.
__ mr(r7, r3);
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
// Push function as parameter to the runtime call.
__ Push(r4, r4);
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
SaveFPRegsMode save_doubles) {
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Preserve registers across notification, this is important for compiled
// stubs that tail call the runtime on deopts passing their parameters in
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Pass the function and deoptimization type to the runtime system.
__ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
__ push(r3);
// Lookup the function in the JavaScript frame.
__ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Pass function as argument.
__ push(r3);
__ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
__ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
{
+ ConstantPoolUnavailableScope constant_pool_unavailable(masm);
__ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
+ if (FLAG_enable_embedded_constant_pool) {
+ __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
+ }
+
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
__ LoadP(r4, FieldMemOperand(
__ cmpl(sp, ip);
__ bge(&ok);
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kStackGuard, 0);
}
__ Jump(masm->isolate()->builtins()->OnStackReplacement(),
{
// Enter an internal frame in order to preserve argument count.
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r3);
__ Push(r3, r5);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
const int kStackSize = kFormalParameters + 1;
{
- FrameScope frame_scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
const int kReceiverOffset = kArgumentsOffset + kPointerSize;
const int kFunctionOffset = kReceiverOffset + kPointerSize;
const int kStackSize = kFormalParameters + 1;
{
- FrameScope frame_scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
const int kFunctionOffset = kArgumentsOffset + kPointerSize;
__ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ mflr(r0);
__ push(r0);
- __ Push(fp, r7, r4, r3);
+ if (FLAG_enable_embedded_constant_pool) {
+ __ Push(fp, kConstantPoolRegister, r7, r4, r3);
+ } else {
+ __ Push(fp, r7, r4, r3);
+ }
__ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
kPointerSize));
}
int param_count = descriptor.GetEnvironmentParameterCount();
{
// Call the runtime system in a fresh internal frame.
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
DCHECK(param_count == 0 ||
r3.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
// Push arguments
__ bind(&skip);
// Compute the handler entry address and jump to it.
+ ConstantPoolUnavailableScope constant_pool_unavailable(masm);
__ mov(r4, Operand(pending_handler_code_address));
__ LoadP(r4, MemOperand(r4));
__ mov(r5, Operand(pending_handler_offset_address));
__ LoadP(r5, MemOperand(r5));
__ addi(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
+ if (FLAG_enable_embedded_constant_pool) {
+ __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r4);
+ }
__ add(ip, r4, r5);
__ Jump(ip);
}
// r7: argv
__ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ push(r0);
+ if (FLAG_enable_embedded_constant_pool) {
+ __ li(kConstantPoolRegister, Operand::Zero());
+ __ push(kConstantPoolRegister);
+ }
int marker = type();
__ LoadSmiLiteral(r0, Smi::FromInt(marker));
__ push(r0);
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r3, r4);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
// r5 : Feedback vector
// r6 : slot in feedback vector (Smi)
// r4 : the function to call
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Number-of-arguments register must be smi-tagged to call out.
__ SmiTag(r3);
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
// Wrap the receiver and patch it back onto the stack.
{
- FrameScope frame_scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
__ Push(r4, r6);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ pop(r4);
// r6 - slot
// r4 - function
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
__ Push(r4);
__ CallStub(&create_stub);
void CallICStub::GenerateMiss(MacroAssembler* masm) {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Push the function and feedback info.
__ Push(r4, r5, r6);
ExternalReference miss =
ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r4, r3);
__ Push(r4, r3);
__ LoadSmiLiteral(r0, Smi::FromInt(op()));
const int kNoRegister = -1;
+// Used in embedded constant pool builder - max reach in bits for
+// various load instructions (one less due to unsigned)
+const int kLoadPtrMaxReachBits = 15;
+const int kLoadDoubleMaxReachBits = 15;
+
// sign-extend the least significant 16-bits of value <imm>
#define SIGN_EXT_IMM16(imm) ((static_cast<int>(imm) << 16) >> 16)
RegList object_regs,
RegList non_object_regs) {
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Load padding words on stack.
__ LoadSmiLiteral(ip, Smi::FromInt(LiveEdit::kFramePaddingValue));
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
-  // No out-of-line constant pool support.
-  UNREACHABLE();
+  // With embedded constant pools the deoptimizer must preserve the caller's
+  // constant pool pointer; store it into the output frame slot at |offset|.
+  DCHECK(FLAG_enable_embedded_constant_pool);
+  SetFrameSlot(offset, value);
}
Register JavaScriptFrame::fp_register() { return v8::internal::fp; }
Register JavaScriptFrame::context_register() { return cp; }
Register JavaScriptFrame::constant_pool_pointer_register() {
-  UNREACHABLE();
-  return no_reg;
+  // Only meaningful when the embedded constant pool is enabled; the pool
+  // pointer lives in a dedicated register on this architecture.
+  DCHECK(FLAG_enable_embedded_constant_pool);
+  return kConstantPoolRegister;
}
Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; }
Register StubFailureTrampolineFrame::context_register() { return cp; }
Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
-  UNREACHABLE();
-  return no_reg;
-}
-
-
-Object*& ExitFrame::constant_pool_slot() const {
-  UNREACHABLE();
-  return Memory::Object_at(NULL);
+  // Only meaningful when the embedded constant pool is enabled; the pool
+  // pointer lives in a dedicated register on this architecture.
+  DCHECK(FLAG_enable_embedded_constant_pool);
+  return kConstantPoolRegister;
}
} // namespace internal
} // namespace v8
class ExitFrameConstants : public AllStatic {
public:
- static const int kFrameSize = 2 * kPointerSize;
- static const int kConstantPoolOffset = 0; // Not used.
+ static const int kFrameSize =
+ FLAG_enable_embedded_constant_pool ? 3 * kPointerSize : 2 * kPointerSize;
+
+ static const int kConstantPoolOffset =
+ FLAG_enable_embedded_constant_pool ? -3 * kPointerSize : 0;
static const int kCodeOffset = -2 * kPointerSize;
static const int kSPOffset = -1 * kPointerSize;
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
}
EmitReturnSequence();
+
+ if (HasStackOverflow()) {
+ masm_->AbortConstantPoolBuilding();
+ }
}
EmitProfilingCounterDecrement(weight);
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
+ Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
// BackEdgeTable::PatchAt manipulates this sequence.
__ cmpi(r6, Operand::Zero());
__ bc_short(ge, &ok);
// With 64bit we may need nop() instructions to ensure we have
// enough space to SetDebugBreakAtReturn()
if (is_int16(sp_delta)) {
- masm_->nop();
+ if (!FLAG_enable_embedded_constant_pool) masm_->nop();
masm_->nop();
}
#endif
__ bne(&slow_resume, cr0);
__ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
{
+ ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
+ if (FLAG_enable_embedded_constant_pool) {
+ __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
+ }
__ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
__ SmiUntag(r5);
__ add(ip, ip, r5);
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::NONE);
- return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
- GenerateJumpTable() && GenerateSafepointTable();
+ bool rc = GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
+ GenerateJumpTable() && GenerateSafepointTable();
+ if (FLAG_enable_embedded_constant_pool && !rc) {
+ masm()->AbortConstantPoolBuilding();
+ }
+ return rc;
}
int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode,
Condition cond) {
- return (2 + kMovInstructions) * kInstrSize;
+ Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
+ return (2 + instructions_required_for_mov(ip, mov_operand)) * kInstrSize;
}
int MacroAssembler::CallSizeNotPredictableCodeSize(Address target,
RelocInfo::Mode rmode,
Condition cond) {
- return (2 + kMovInstructions) * kInstrSize;
+ return (2 + kMovInstructionsNoConstantPool) * kInstrSize;
}
void MacroAssembler::PushFixedFrame(Register marker_reg) {
  mflr(r0);
-  if (marker_reg.is_valid()) {
-    Push(r0, fp, cp, marker_reg);
+  // With an embedded constant pool the fixed frame additionally saves
+  // kConstantPoolRegister between fp and cp; the optional |marker_reg|
+  // (frame-type marker) is pushed last in either layout.
+  if (FLAG_enable_embedded_constant_pool) {
+    if (marker_reg.is_valid()) {
+      Push(r0, fp, kConstantPoolRegister, cp, marker_reg);
+    } else {
+      Push(r0, fp, kConstantPoolRegister, cp);
+    }
  } else {
-    Push(r0, fp, cp);
+    if (marker_reg.is_valid()) {
+      Push(r0, fp, cp, marker_reg);
+    } else {
+      Push(r0, fp, cp);
+    }
  }
}
void MacroAssembler::PopFixedFrame(Register marker_reg) {
-  if (marker_reg.is_valid()) {
-    Pop(r0, fp, cp, marker_reg);
+  // Mirror of PushFixedFrame: restore kConstantPoolRegister as well when the
+  // embedded constant pool is enabled, then move the saved lr back via r0.
+  if (FLAG_enable_embedded_constant_pool) {
+    if (marker_reg.is_valid()) {
+      Pop(r0, fp, kConstantPoolRegister, cp, marker_reg);
+    } else {
+      Pop(r0, fp, kConstantPoolRegister, cp);
+    }
  } else {
-    Pop(r0, fp, cp);
+    if (marker_reg.is_valid()) {
+      Pop(r0, fp, cp, marker_reg);
+    } else {
+      Pop(r0, fp, cp);
+    }
  }
  mtlr(r0);
}
}
+// Loads kConstantPoolRegister for a Code object whose entry (first
+// instruction) address is in |code_target_address|: the pool offset stored in
+// the Code header is read and added to the entry address.
+void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
+    Register code_target_address) {
+  lwz(kConstantPoolRegister,
+      MemOperand(code_target_address,
+                 Code::kConstantPoolOffset - Code::kHeaderSize));
+  add(kConstantPoolRegister, kConstantPoolRegister, code_target_address);
+}
+
+
+// Loads kConstantPoolRegister relative to |base|, which holds an address
+// |code_start_delta| bytes past the code start (e.g. StubPrologue passes ip,
+// the prologue address, with -prologue_offset).
+void MacroAssembler::LoadConstantPoolPointerRegister(Register base,
+                                                     int code_start_delta) {
+  add_label_offset(kConstantPoolRegister, base, ConstantPoolPosition(),
+                   code_start_delta);
+}
+
+
+// Loads kConstantPoolRegister by materializing the address of the
+// ConstantPoolPosition() label directly; used on paths that must not rely on
+// any register already holding a code address (see EnterFrame).
+void MacroAssembler::LoadConstantPoolPointerRegister() {
+  mov_label_addr(kConstantPoolRegister, ConstantPoolPosition());
+}
+
+
void MacroAssembler::StubPrologue(int prologue_offset) {
LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB));
PushFixedFrame(r11);
// Adjust FP to point to saved FP.
addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
+ if (FLAG_enable_embedded_constant_pool) {
+ // ip contains prologue address
+ LoadConstantPoolPointerRegister(ip, -prologue_offset);
+ set_constant_pool_available(true);
+ }
}
}
}
}
+ if (FLAG_enable_embedded_constant_pool) {
+ // ip contains prologue address
+ LoadConstantPoolPointerRegister(ip, -prologue_offset);
+ set_constant_pool_available(true);
+ }
}
void MacroAssembler::EnterFrame(StackFrame::Type type,
bool load_constant_pool_pointer_reg) {
- LoadSmiLiteral(ip, Smi::FromInt(type));
- PushFixedFrame(ip);
+ if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
+ PushFixedFrame();
+ // This path should not rely on ip containing code entry.
+ LoadConstantPoolPointerRegister();
+ LoadSmiLiteral(ip, Smi::FromInt(type));
+ push(ip);
+ } else {
+ LoadSmiLiteral(ip, Smi::FromInt(type));
+ PushFixedFrame(ip);
+ }
// Adjust FP to point to saved FP.
addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) {
+ ConstantPoolUnavailableScope constant_pool_unavailable(this);
// r3: preserved
// r4: preserved
// r5: preserved
int frame_ends;
LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ if (FLAG_enable_embedded_constant_pool) {
+ const int exitOffset = ExitFrameConstants::kConstantPoolOffset;
+ const int standardOffset = StandardFrameConstants::kConstantPoolOffset;
+ const int offset =
+ ((type == StackFrame::EXIT) ? exitOffset : standardOffset);
+ LoadP(kConstantPoolRegister, MemOperand(fp, offset));
+ }
mtlr(r0);
frame_ends = pc_offset();
Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0);
li(r8, Operand::Zero());
StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
+ if (FLAG_enable_embedded_constant_pool) {
+ StoreP(kConstantPoolRegister,
+ MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
+ }
mov(r8, Operand(CodeObject()));
StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
bool restore_context,
bool argument_count_is_length) {
+ ConstantPoolUnavailableScope constant_pool_unavailable(this);
// Optionally restore all double registers.
if (save_doubles) {
// Calculate the stack location of the saved doubles and restore them.
Register new_value) {
lwz(scratch, MemOperand(location));
+ if (FLAG_enable_embedded_constant_pool) {
+ if (emit_debug_code()) {
+ // Check that the instruction sequence is a load from the constant pool
+ ExtractBitMask(scratch, scratch, 0x1f * B16);
+ cmpi(scratch, Operand(kConstantPoolRegister.code()));
+ Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
+ // Scratch was clobbered. Restore it.
+ lwz(scratch, MemOperand(location));
+ }
+ // Get the address of the constant and patch it.
+ andi(scratch, scratch, Operand(kImm16Mask));
+ StorePX(new_value, MemOperand(kConstantPoolRegister, scratch));
+ return;
+ }
+
// This code assumes a FIXED_SEQUENCE for lis/ori
// At this point scratch is a lis instruction.
Register scratch) {
lwz(result, MemOperand(location));
+ if (FLAG_enable_embedded_constant_pool) {
+ if (emit_debug_code()) {
+ // Check that the instruction sequence is a load from the constant pool
+ ExtractBitMask(result, result, 0x1f * B16);
+ cmpi(result, Operand(kConstantPoolRegister.code()));
+ Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
+ lwz(result, MemOperand(location));
+ }
+ // Get the address of the constant and retrieve it.
+ andi(result, result, Operand(kImm16Mask));
+ LoadPX(result, MemOperand(kConstantPoolRegister, result));
+ return;
+ }
+
// This code assumes a FIXED_SEQUENCE for lis/ori
if (emit_debug_code()) {
And(result, result, Operand(kOpcodeMask | (0x1f * B16)));
void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value,
Register scratch) {
+ if (FLAG_enable_embedded_constant_pool && is_constant_pool_available() &&
+ !(scratch.is(r0) && ConstantPoolAccessIsInOverflow())) {
+ ConstantPoolEntry::Access access = ConstantPoolAddEntry(value);
+ if (access == ConstantPoolEntry::OVERFLOWED) {
+ addis(scratch, kConstantPoolRegister, Operand::Zero());
+ lfd(result, MemOperand(scratch, 0));
+ } else {
+ lfd(result, MemOperand(kConstantPoolRegister, 0));
+ }
+ return;
+ }
+
// avoid gcc strict aliasing error using union cast
union {
double dval;
MacroAssembler(Isolate* isolate, void* buffer, int size);
- // Returns the size of a call in instructions.
+ // Returns the size of a call in instructions. Note, the value returned is
+ // only valid as long as no entries are added to the constant pool between
+ // checking the call size and emitting the actual call.
static int CallSize(Register target);
int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al);
static int CallSizeNotPredictableCodeSize(Address target,
// ---------------------------------------------------------------------------
// Patching helpers.
- // Retrieve/patch the relocated value (lis/ori pair).
+ // Retrieve/patch the relocated value (lis/ori pair or constant pool load).
void GetRelocatedValue(Register location, Register result, Register scratch);
void SetRelocatedValue(Register location, Register scratch,
Register new_value);
void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0,
Register scratch1, Label* found);
+ // Loads the constant pool pointer (kConstantPoolRegister).
+ void LoadConstantPoolPointerRegisterFromCodeTargetAddress(
+ Register code_target_address);
+ void LoadConstantPoolPointerRegister();
+ void LoadConstantPoolPointerRegister(Register base, int code_entry_delta = 0);
+
+  // Abandons constant pool emission when code generation is aborted (e.g.
+  // stack overflow or a failed codegen pass), so the pool-position label is
+  // not left linked.
+  void AbortConstantPoolBuilding() {
+#ifdef DEBUG
+    // Avoid DCHECK(!is_linked()) failure in ~Label()
+    bind(ConstantPoolPosition());
+#endif
+  }
+
private:
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
int offset = generator_object->continuation();
DCHECK(offset > 0);
frame->set_pc(pc + offset);
- if (FLAG_enable_ool_constant_pool) {
+ if (FLAG_enable_embedded_constant_pool) {
frame->set_constant_pool(
generator_object->function()->code()->constant_pool());
}
ALL_SPACES(kBackref, kPlain, kStartOfObject)
ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if defined(V8_TARGET_ARCH_MIPS) || defined(V8_TARGET_ARCH_MIPS64) || \
- defined(V8_TARGET_ARCH_PPC) || V8_OOL_CONSTANT_POOL
+ defined(V8_TARGET_ARCH_PPC)
// Deserialize a new object from pointer found in code and write
-    // a pointer to it to the current object. Required only for MIPS, PPC or
-    // ARM with ool constant pool, and omitted on the other architectures
+    // a pointer to it to the current object. Required only for MIPS or PPC,
+    // and omitted on the other architectures
// current object.
CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
CASE_BODY(kRootArray, kPlain, kStartOfObject, 0)
-#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL || \
- defined(V8_TARGET_ARCH_MIPS64) || defined(V8_TARGET_ARCH_PPC)
+#if defined(V8_TARGET_ARCH_MIPS) || defined(V8_TARGET_ARCH_MIPS64) || \
+ defined(V8_TARGET_ARCH_PPC)
// Find an object in the roots array and write a pointer to it to in code.
CASE_STATEMENT(kRootArray, kFromCode, kStartOfObject, 0)
CASE_BODY(kRootArray, kFromCode, kStartOfObject, 0)
void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
- // Out-of-line constant pool entries will be visited by the ConstantPoolArray.
- if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;
-
int skip = OutputRawData(rinfo->target_address_address(),
kCanReturnSkipInsteadOfSkipping);
HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
- // Out-of-line constant pool entries will be visited by the ConstantPoolArray.
- if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;
-
int skip = OutputRawData(rinfo->target_address_address(),
kCanReturnSkipInsteadOfSkipping);
Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address());
void Serializer::ObjectSerializer::VisitCell(RelocInfo* rinfo) {
- // Out-of-line constant pool entries will be visited by the ConstantPoolArray.
- if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;
-
int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
Cell* object = Cell::cast(rinfo->target_cell());
serializer_->SerializeObject(object, kPlain, kInnerPointer, skip);
RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED);
for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
RelocInfo* rinfo = it.rinfo();
- if (!(FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool())) {
- rinfo->WipeOut();
- }
+ rinfo->WipeOut();
}
// We need to wipe out the header fields *after* wiping out the
// relocations, because some of these fields are needed for the latter.
}
-Address Assembler::target_address_at(Address pc,
- ConstantPoolArray* constant_pool) {
+Address Assembler::target_address_at(Address pc, Address constant_pool) {
return Memory::int32_at(pc) + pc + 4;
}
-void Assembler::set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
+void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
}
+// Writes a 64-bit word of data into the code stream (used for inline tables
+// and, via dp(), pointer-sized data on this 64-bit target).
+void Assembler::dq(uint64_t data) {
+  EnsureSpace ensure_space(this);
+  emitq(data);
+}
+
+
void Assembler::dq(Label* label) {
EnsureSpace ensure_space(this);
if (label->is_bound()) {
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return;
-}
-
-
const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
1 << RelocInfo::RUNTIME_ENTRY |
1 << RelocInfo::INTERNAL_REFERENCE |
// the absolute address of the target.
// These functions convert between absolute Addresses of Code objects and
// the relative displacements stored in the code.
- static inline Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool);
- static inline void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED) ;
+ static inline Address target_address_at(Address pc, Address constant_pool);
+ static inline void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
static inline Address target_address_at(Address pc, Code* code) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
static inline void set_target_address_at(Address pc,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target, icache_flush_mode);
}
// Use --trace-deopt to enable.
void RecordDeoptReason(const int reason, const SourcePosition position);
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ // No embedded constant pool support.
+ UNREACHABLE();
+ }
// Writes a single word of data in the code stream.
// Used for inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
+ void dq(uint64_t data);
+ void dp(uintptr_t data) { dq(data); }
void dq(Label* label);
PositionsRecorder* positions_recorder() { return &positions_recorder_; }
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- // No out-of-line constant pool support.
+ // No embedded constant pool support.
UNREACHABLE();
}
}
-Object*& ExitFrame::constant_pool_slot() const {
- UNREACHABLE();
- return Memory::Object_at(NULL);
-}
-
-
} // namespace internal
} // namespace v8
}
+// Emits a raw 64-bit value at the current pc and advances pc; the caller is
+// responsible for ensuring buffer space (see dq()).
+void Assembler::emit_q(uint64_t x) {
+  *reinterpret_cast<uint64_t*>(pc_) = x;
+  pc_ += sizeof(uint64_t);
+}
+
+
void Assembler::emit(Handle<Object> handle) {
AllowDeferredHandleDereference heap_object_check;
// Verify all Objects referred by code are NOT in new space.
}
-Address Assembler::target_address_at(Address pc,
- ConstantPoolArray* constant_pool) {
+Address Assembler::target_address_at(Address pc, Address constant_pool) {
return pc + sizeof(int32_t) + *reinterpret_cast<int32_t*>(pc);
}
-void Assembler::set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
+void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
int32_t* p = reinterpret_cast<int32_t*>(pc);
}
+// Writes a 64-bit word of data into the code stream (used for inline tables).
+void Assembler::dq(uint64_t data) {
+  EnsureSpace ensure_space(this);
+  emit_q(data);
+}
+
+
void Assembler::dd(Label* label) {
EnsureSpace ensure_space(this);
RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
}
-Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return isolate->factory()->empty_constant_pool_array();
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
- // No out-of-line constant pool support.
- DCHECK(!FLAG_enable_ool_constant_pool);
- return;
-}
-
-
#ifdef GENERATED_CODE_COVERAGE
static FILE* coverage_log = NULL;
void GetCode(CodeDesc* desc);
// Read/Modify the code target in the branch/call instruction at pc.
- inline static Address target_address_at(Address pc,
- ConstantPoolArray* constant_pool);
- inline static void set_target_address_at(Address pc,
- ConstantPoolArray* constant_pool,
- Address target,
- ICacheFlushMode icache_flush_mode =
- FLUSH_ICACHE_IF_NEEDED);
+ inline static Address target_address_at(Address pc, Address constant_pool);
+ inline static void set_target_address_at(
+ Address pc, Address constant_pool, Address target,
+ ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
static inline Address target_address_at(Address pc, Code* code) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
static inline void set_target_address_at(Address pc,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED) {
- ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
+ Address constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target);
}
// inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
+ void dq(uint64_t data);
+ void dp(uintptr_t data) { dd(data); }
void dd(Label* label);
// Check if there is less than kGap bytes available in the buffer.
byte byte_at(int pos) { return buffer_[pos]; }
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
- // Allocate a constant pool of the correct size for the generated code.
- Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
-
- // Generate the constant pool for the generated code.
- void PopulateConstantPool(ConstantPoolArray* constant_pool);
+ void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
+ ConstantPoolEntry::Access access,
+ ConstantPoolEntry::Type type) {
+ // No embedded constant pool support.
+ UNREACHABLE();
+ }
protected:
byte* addr_at(int pos) { return buffer_ + pos; }
TypeFeedbackId id = TypeFeedbackId::None());
inline void emit(const Immediate& x);
inline void emit_w(const Immediate& x);
+ inline void emit_q(uint64_t x);
// Emit the code-object-relative offset of the label's position
inline void emit_code_relative_offset(Label* label);
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
- // No out-of-line constant pool support.
+ // No embedded constant pool support.
UNREACHABLE();
}
}
-Object*& ExitFrame::constant_pool_slot() const {
- UNREACHABLE();
- return Memory::Object_at(NULL);
-}
-
-
} // namespace internal
} // namespace v8
int decode_size =
Min(f->code()->instruction_size(),
static_cast<int>(f->code()->back_edge_table_offset()));
+ if (FLAG_enable_embedded_constant_pool) {
+ decode_size = Min(decode_size, f->code()->constant_pool_offset());
+ }
Address end = pc + decode_size;
v8::internal::EmbeddedVector<char, 128> decode_buffer;
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Test constant pool array code.
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Test embedded constant pool builder code.
#include "src/v8.h"
-#include "src/factory.h"
-#include "src/objects.h"
+#include "src/assembler.h"
#include "test/cctest/cctest.h"
using namespace v8::internal;
-static ConstantPoolArray::Type kTypes[] = { ConstantPoolArray::INT64,
- ConstantPoolArray::CODE_PTR,
- ConstantPoolArray::HEAP_PTR,
- ConstantPoolArray::INT32 };
-static ConstantPoolArray::LayoutSection kSmall =
- ConstantPoolArray::SMALL_SECTION;
-static ConstantPoolArray::LayoutSection kExtended =
- ConstantPoolArray::EXTENDED_SECTION;
-
-Code* DummyCode(LocalContext* context) {
- CompileRun("function foo() {};");
- i::Handle<i::JSFunction> fun = v8::Utils::OpenHandle(
- *v8::Local<v8::Function>::Cast(
- (*context)->Global()->Get(v8_str("foo"))));
- return fun->code();
-}
+const ConstantPoolEntry::Type kPtrType = ConstantPoolEntry::INTPTR;
+const ConstantPoolEntry::Type kDblType = ConstantPoolEntry::DOUBLE;
+const ConstantPoolEntry::Access kRegAccess = ConstantPoolEntry::REGULAR;
+const ConstantPoolEntry::Access kOvflAccess = ConstantPoolEntry::OVERFLOWED;
+const int kReachBits = 6;  // Use a reach of 64 bytes to test overflow.
+const int kReach = 1 << kReachBits;
-TEST(ConstantPoolSmall) {
- LocalContext context;
- Isolate* isolate = CcTest::i_isolate();
- Factory* factory = isolate->factory();
- v8::HandleScope scope(context->GetIsolate());
- // Check construction.
- ConstantPoolArray::NumberOfEntries small(3, 1, 2, 1);
- Handle<ConstantPoolArray> array = factory->NewConstantPoolArray(small);
+TEST(ConstantPoolPointers) {
+ ConstantPoolBuilder builder(kReachBits, kReachBits);
+ const int kRegularCount = kReach / kPointerSize;
+ ConstantPoolEntry::Access access;
+ int pos = 0;
+ intptr_t value = 0;
+ bool sharing_ok = true;
- int expected_counts[] = { 3, 1, 2, 1 };
- int expected_first_idx[] = { 0, 3, 4, 6 };
- int expected_last_idx[] = { 2, 3, 5, 6 };
- for (int i = 0; i < 4; i++) {
- CHECK_EQ(expected_counts[i], array->number_of_entries(kTypes[i], kSmall));
- CHECK_EQ(expected_first_idx[i], array->first_index(kTypes[i], kSmall));
- CHECK_EQ(expected_last_idx[i], array->last_index(kTypes[i], kSmall));
+ CHECK(builder.IsEmpty());
+ while (builder.NextAccess(kPtrType) == kRegAccess) {
+ access = builder.AddEntry(pos++, value++, sharing_ok);
+ CHECK_EQ(access, kRegAccess);
}
- CHECK(!array->is_extended_layout());
-
- // Check getters and setters.
- int64_t big_number = V8_2PART_UINT64_C(0x12345678, 9ABCDEF0);
- Handle<Object> object = factory->NewHeapNumber(4.0, IMMUTABLE, TENURED);
- Code* code = DummyCode(&context);
- array->set(0, big_number);
- array->set(1, 0.5);
- array->set(2, 3e-24);
- array->set(3, code->entry());
- array->set(4, code);
- array->set(5, *object);
- array->set(6, 50);
- CHECK_EQ(big_number, array->get_int64_entry(0));
- CHECK_EQ(0.5, array->get_int64_entry_as_double(1));
- CHECK_EQ(3e-24, array->get_int64_entry_as_double(2));
- CHECK_EQ(code->entry(), array->get_code_ptr_entry(3));
- CHECK_EQ(code, array->get_heap_ptr_entry(4));
- CHECK_EQ(*object, array->get_heap_ptr_entry(5));
- CHECK_EQ(50, array->get_int32_entry(6));
+ CHECK(!builder.IsEmpty());
+ CHECK_EQ(pos, kRegularCount);
+
+ access = builder.AddEntry(pos, value, sharing_ok);
+ CHECK_EQ(access, kOvflAccess);
}
-TEST(ConstantPoolExtended) {
- LocalContext context;
- Isolate* isolate = CcTest::i_isolate();
- Factory* factory = isolate->factory();
- v8::HandleScope scope(context->GetIsolate());
-
- // Check construction.
- ConstantPoolArray::NumberOfEntries small(1, 2, 3, 4);
- ConstantPoolArray::NumberOfEntries extended(5, 6, 7, 8);
- Handle<ConstantPoolArray> array =
- factory->NewExtendedConstantPoolArray(small, extended);
-
- // Check small section.
- int small_counts[] = { 1, 2, 3, 4 };
- int small_first_idx[] = { 0, 1, 3, 6 };
- int small_last_idx[] = { 0, 2, 5, 9 };
- for (int i = 0; i < 4; i++) {
- CHECK_EQ(small_counts[i], array->number_of_entries(kTypes[i], kSmall));
- CHECK_EQ(small_first_idx[i], array->first_index(kTypes[i], kSmall));
- CHECK_EQ(small_last_idx[i], array->last_index(kTypes[i], kSmall));
- }
+TEST(ConstantPoolDoubles) {
+ ConstantPoolBuilder builder(kReachBits, kReachBits);
+ const int kRegularCount = kReach / kDoubleSize;
+ ConstantPoolEntry::Access access;
+ int pos = 0;
+ double value = 0.0;
- // Check extended layout.
- CHECK(array->is_extended_layout());
- int extended_counts[] = { 5, 6, 7, 8 };
- int extended_first_idx[] = { 10, 15, 21, 28 };
- int extended_last_idx[] = { 14, 20, 27, 35 };
- for (int i = 0; i < 4; i++) {
- CHECK_EQ(extended_counts[i],
- array->number_of_entries(kTypes[i], kExtended));
- CHECK_EQ(extended_first_idx[i], array->first_index(kTypes[i], kExtended));
- CHECK_EQ(extended_last_idx[i], array->last_index(kTypes[i], kExtended));
+ CHECK(builder.IsEmpty());
+ while (builder.NextAccess(kDblType) == kRegAccess) {
+ access = builder.AddEntry(pos++, value);
+ value += 0.5;
+ CHECK_EQ(access, kRegAccess);
}
+ CHECK(!builder.IsEmpty());
+ CHECK_EQ(pos, kRegularCount);
- // Check small and large section's don't overlap.
- int64_t small_section_int64 = V8_2PART_UINT64_C(0x56781234, DEF09ABC);
- Code* small_section_code_ptr = DummyCode(&context);
- Handle<Object> small_section_heap_ptr =
- factory->NewHeapNumber(4.0, IMMUTABLE, TENURED);
- int32_t small_section_int32 = 0xab12cd45;
-
- int64_t extended_section_int64 = V8_2PART_UINT64_C(0x12345678, 9ABCDEF0);
- Code* extended_section_code_ptr = DummyCode(&context);
- Handle<Object> extended_section_heap_ptr =
- factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
- int32_t extended_section_int32 = 0xef67ab89;
-
- for (int i = array->first_index(ConstantPoolArray::INT64, kSmall);
- i <= array->last_index(ConstantPoolArray::INT32, kSmall); i++) {
- if (i <= array->last_index(ConstantPoolArray::INT64, kSmall)) {
- array->set(i, small_section_int64);
- } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kSmall)) {
- array->set(i, small_section_code_ptr->entry());
- } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kSmall)) {
- array->set(i, *small_section_heap_ptr);
- } else {
- CHECK(i <= array->last_index(ConstantPoolArray::INT32, kSmall));
- array->set(i, small_section_int32);
- }
- }
- for (int i = array->first_index(ConstantPoolArray::INT64, kExtended);
- i <= array->last_index(ConstantPoolArray::INT32, kExtended); i++) {
- if (i <= array->last_index(ConstantPoolArray::INT64, kExtended)) {
- array->set(i, extended_section_int64);
- } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kExtended)) {
- array->set(i, extended_section_code_ptr->entry());
- } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kExtended)) {
- array->set(i, *extended_section_heap_ptr);
- } else {
- CHECK(i <= array->last_index(ConstantPoolArray::INT32, kExtended));
- array->set(i, extended_section_int32);
- }
- }
+ access = builder.AddEntry(pos, value);
+ CHECK_EQ(access, kOvflAccess);
+}
- for (int i = array->first_index(ConstantPoolArray::INT64, kSmall);
- i <= array->last_index(ConstantPoolArray::INT32, kSmall); i++) {
- if (i <= array->last_index(ConstantPoolArray::INT64, kSmall)) {
- CHECK_EQ(small_section_int64, array->get_int64_entry(i));
- } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kSmall)) {
- CHECK_EQ(small_section_code_ptr->entry(), array->get_code_ptr_entry(i));
- } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kSmall)) {
- CHECK_EQ(*small_section_heap_ptr, array->get_heap_ptr_entry(i));
- } else {
- CHECK(i <= array->last_index(ConstantPoolArray::INT32, kSmall));
- CHECK_EQ(small_section_int32, array->get_int32_entry(i));
- }
- }
- for (int i = array->first_index(ConstantPoolArray::INT64, kExtended);
- i <= array->last_index(ConstantPoolArray::INT32, kExtended); i++) {
- if (i <= array->last_index(ConstantPoolArray::INT64, kExtended)) {
- CHECK_EQ(extended_section_int64, array->get_int64_entry(i));
- } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kExtended)) {
- CHECK_EQ(extended_section_code_ptr->entry(),
- array->get_code_ptr_entry(i));
- } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kExtended)) {
- CHECK_EQ(*extended_section_heap_ptr, array->get_heap_ptr_entry(i));
+
+TEST(ConstantPoolMixedTypes) {
+ ConstantPoolBuilder builder(kReachBits, kReachBits);
+ const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) +
+ ((kPointerSize < kDoubleSize) ? 1 : 0));
+ ConstantPoolEntry::Type type = kPtrType;
+ ConstantPoolEntry::Access access;
+ int pos = 0;
+ intptr_t ptrValue = 0;
+ double dblValue = 0.0;
+ bool sharing_ok = true;
+
+ CHECK(builder.IsEmpty());
+ while (builder.NextAccess(type) == kRegAccess) {
+ if (type == kPtrType) {
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ type = kDblType;
} else {
- CHECK(i <= array->last_index(ConstantPoolArray::INT32, kExtended));
- CHECK_EQ(extended_section_int32, array->get_int32_entry(i));
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ type = kPtrType;
}
+ CHECK_EQ(access, kRegAccess);
}
+ CHECK(!builder.IsEmpty());
+ CHECK_EQ(pos, kRegularCount);
+
+ access = builder.AddEntry(pos++, ptrValue, sharing_ok);
+ CHECK_EQ(access, kOvflAccess);
+ access = builder.AddEntry(pos, dblValue);
+ CHECK_EQ(access, kOvflAccess);
}
-static void CheckIterator(Handle<ConstantPoolArray> array,
- ConstantPoolArray::Type type,
- int expected_indexes[],
- int count) {
- int i = 0;
- ConstantPoolArray::Iterator iter(*array, type);
- while (!iter.is_finished()) {
- CHECK_EQ(expected_indexes[i++], iter.next_index());
+TEST(ConstantPoolMixedReach) {
+ const int ptrReachBits = kReachBits + 2;
+ const int ptrReach = 1 << ptrReachBits;
+ const int dblReachBits = kReachBits;
+ const int dblReach = kReach;
+ const int dblRegularCount =
+ Min(dblReach / kDoubleSize, ptrReach / (kDoubleSize + kPointerSize));
+ const int ptrRegularCount =
+ ((ptrReach - (dblRegularCount * (kDoubleSize + kPointerSize))) /
+ kPointerSize) +
+ dblRegularCount;
+ ConstantPoolBuilder builder(ptrReachBits, dblReachBits);
+ ConstantPoolEntry::Access access;
+ int pos = 0;
+ intptr_t ptrValue = 0;
+ double dblValue = 0.0;
+ bool sharing_ok = true;
+ int ptrCount = 0;
+ int dblCount = 0;
+
+ CHECK(builder.IsEmpty());
+ while (builder.NextAccess(kDblType) == kRegAccess) {
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ dblCount++;
+ CHECK_EQ(access, kRegAccess);
+
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ ptrCount++;
+ CHECK_EQ(access, kRegAccess);
}
- CHECK_EQ(count, i);
-}
+ CHECK(!builder.IsEmpty());
+ CHECK_EQ(dblCount, dblRegularCount);
+ while (ptrCount < ptrRegularCount) {
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ CHECK_EQ(access, kOvflAccess);
-TEST(ConstantPoolIteratorSmall) {
- LocalContext context;
- Isolate* isolate = CcTest::i_isolate();
- Factory* factory = isolate->factory();
- v8::HandleScope scope(context->GetIsolate());
-
- ConstantPoolArray::NumberOfEntries small(1, 5, 2, 0);
- Handle<ConstantPoolArray> array = factory->NewConstantPoolArray(small);
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ ptrCount++;
+ CHECK_EQ(access, kRegAccess);
+ }
+ CHECK_EQ(builder.NextAccess(kPtrType), kOvflAccess);
- int expected_int64_indexs[] = { 0 };
- CheckIterator(array, ConstantPoolArray::INT64, expected_int64_indexs, 1);
- int expected_code_indexs[] = { 1, 2, 3, 4, 5 };
- CheckIterator(array, ConstantPoolArray::CODE_PTR, expected_code_indexs, 5);
- int expected_heap_indexs[] = { 6, 7 };
- CheckIterator(array, ConstantPoolArray::HEAP_PTR, expected_heap_indexs, 2);
- int expected_int32_indexs[1];
- CheckIterator(array, ConstantPoolArray::INT32, expected_int32_indexs, 0);
+ access = builder.AddEntry(pos++, ptrValue, sharing_ok);
+ CHECK_EQ(access, kOvflAccess);
+ access = builder.AddEntry(pos, dblValue);
+ CHECK_EQ(access, kOvflAccess);
}
-TEST(ConstantPoolIteratorExtended) {
- LocalContext context;
- Isolate* isolate = CcTest::i_isolate();
- Factory* factory = isolate->factory();
- v8::HandleScope scope(context->GetIsolate());
-
- ConstantPoolArray::NumberOfEntries small(1, 0, 0, 4);
- ConstantPoolArray::NumberOfEntries extended(5, 0, 3, 0);
- Handle<ConstantPoolArray> array =
- factory->NewExtendedConstantPoolArray(small, extended);
-
- int expected_int64_indexs[] = { 0, 5, 6, 7, 8, 9 };
- CheckIterator(array, ConstantPoolArray::INT64, expected_int64_indexs, 6);
- int expected_code_indexs[1];
- CheckIterator(array, ConstantPoolArray::CODE_PTR, expected_code_indexs, 0);
- int expected_heap_indexs[] = { 10, 11, 12 };
- CheckIterator(array, ConstantPoolArray::HEAP_PTR, expected_heap_indexs, 3);
- int expected_int32_indexs[] = { 1, 2, 3, 4 };
- CheckIterator(array, ConstantPoolArray::INT32, expected_int32_indexs, 4);
-}
+TEST(ConstantPoolSharing) {
+ ConstantPoolBuilder builder(kReachBits, kReachBits);
+ const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) +
+ ((kPointerSize < kDoubleSize) ? 1 : 0));
+ ConstantPoolEntry::Access access;
+ CHECK(builder.IsEmpty());
-TEST(ConstantPoolPreciseGC) {
- LocalContext context;
- Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
- Factory* factory = isolate->factory();
- v8::HandleScope scope(context->GetIsolate());
-
- ConstantPoolArray::NumberOfEntries small(1, 0, 0, 1);
- Handle<ConstantPoolArray> array = factory->NewConstantPoolArray(small);
-
- // Check that the store buffer knows which entries are pointers and which are
- // not. To do this, make non-pointer entries which look like new space
- // pointers but are actually invalid and ensure the GC doesn't try to move
- // them.
- Handle<HeapObject> object = factory->NewHeapNumber(4.0);
- Object* raw_ptr = *object;
- // If interpreted as a pointer, this should be right inside the heap number
- // which will cause a crash when trying to lookup the 'map' pointer.
- intptr_t invalid_ptr = reinterpret_cast<intptr_t>(raw_ptr) + kInt32Size;
- int32_t invalid_ptr_int32 = static_cast<int32_t>(invalid_ptr);
- int64_t invalid_ptr_int64 = static_cast<int64_t>(invalid_ptr);
- array->set(0, invalid_ptr_int64);
- array->set(1, invalid_ptr_int32);
-
- // Ensure we perform a scan on scavenge for the constant pool's page.
- MemoryChunk::FromAddress(array->address())->set_scan_on_scavenge(true);
- heap->CollectGarbage(NEW_SPACE);
-
- // Check the object was moved by GC.
- CHECK_NE(*object, raw_ptr);
-
- // Check the non-pointer entries weren't changed.
- CHECK_EQ(invalid_ptr_int64, array->get_int64_entry(0));
- CHECK_EQ(invalid_ptr_int32, array->get_int32_entry(1));
+ ConstantPoolEntry::Type type = kPtrType;
+ int pos = 0;
+ intptr_t ptrValue = 0;
+ double dblValue = 0.0;
+ bool sharing_ok = true;
+ while (builder.NextAccess(type) == kRegAccess) {
+ if (type == kPtrType) {
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ type = kDblType;
+ } else {
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ type = kPtrType;
+ }
+ CHECK_EQ(access, kRegAccess);
+ }
+ CHECK(!builder.IsEmpty());
+ CHECK_EQ(pos, kRegularCount);
+
+ type = kPtrType;
+ ptrValue = 0;
+ dblValue = 0.0;
+ while (pos < kRegularCount * 2) {
+ if (type == kPtrType) {
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ type = kDblType;
+ } else {
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ type = kPtrType;
+ }
+ CHECK_EQ(access, kRegAccess);
+ }
+
+ access = builder.AddEntry(pos++, ptrValue, sharing_ok);
+ CHECK_EQ(access, kOvflAccess);
+ access = builder.AddEntry(pos, dblValue);
+ CHECK_EQ(access, kOvflAccess);
}
-TEST(ConstantPoolCompacting) {
- if (i::FLAG_never_compact) return;
- i::FLAG_always_compact = true;
- LocalContext context;
- Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
- Factory* factory = isolate->factory();
- v8::HandleScope scope(context->GetIsolate());
-
- ConstantPoolArray::NumberOfEntries small(0, 0, 1, 0);
- ConstantPoolArray::NumberOfEntries extended(0, 0, 1, 0);
- Handle<ConstantPoolArray> array =
- factory->NewExtendedConstantPoolArray(small, extended);
-
- // Start a second old-space page so that the heap pointer added to the
- // constant pool array ends up on the an evacuation candidate page.
- Page* first_page = heap->old_space()->anchor()->next_page();
- {
- HandleScope scope(isolate);
- int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
- Handle<HeapObject> temp =
- factory->NewFixedDoubleArray(dummy_array_size / kDoubleSize, TENURED);
- CHECK(heap->InOldSpace(temp->address()));
- Handle<HeapObject> heap_ptr =
- factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
- CHECK(heap->InOldSpace(heap_ptr->address()));
- CHECK(!first_page->Contains(heap_ptr->address()));
- array->set(0, *heap_ptr);
- array->set(1, *heap_ptr);
- }
+TEST(ConstantPoolNoSharing) {
+ ConstantPoolBuilder builder(kReachBits, kReachBits);
+ const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) +
+ ((kPointerSize < kDoubleSize) ? 1 : 0));
+ ConstantPoolEntry::Access access;
- // Check heap pointers are correctly updated on GC.
- Object* old_ptr = array->get_heap_ptr_entry(0);
- Handle<Object> object(old_ptr, isolate);
- CHECK_EQ(old_ptr, *object);
- CHECK_EQ(old_ptr, array->get_heap_ptr_entry(1));
+ CHECK(builder.IsEmpty());
- // Force compacting garbage collection.
- CHECK(FLAG_always_compact);
- heap->CollectAllGarbage();
+ ConstantPoolEntry::Type type = kPtrType;
+ int pos = 0;
+ intptr_t ptrValue = 0;
+ double dblValue = 0.0;
+ bool sharing_ok = false;
+ while (builder.NextAccess(type) == kRegAccess) {
+ if (type == kPtrType) {
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ type = kDblType;
+ } else {
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ type = kPtrType;
+ }
+ CHECK_EQ(access, kRegAccess);
+ }
+ CHECK(!builder.IsEmpty());
+ CHECK_EQ(pos, kRegularCount);
+
+ type = kPtrType;
+ ptrValue = 0;
+ dblValue = 0.0;
+ sharing_ok = true;
+ while (pos < kRegularCount * 2) {
+ if (type == kPtrType) {
+ access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
+ type = kDblType;
+ CHECK_EQ(access, kOvflAccess);
+ } else {
+ access = builder.AddEntry(pos++, dblValue);
+ dblValue += 0.5;
+ type = kPtrType;
+ CHECK_EQ(access, kRegAccess);
+ }
+ }
- CHECK_NE(old_ptr, *object);
- CHECK_EQ(*object, array->get_heap_ptr_entry(0));
- CHECK_EQ(*object, array->get_heap_ptr_entry(1));
+ access = builder.AddEntry(pos++, ptrValue, sharing_ok);
+ CHECK_EQ(access, kOvflAccess);
+ access = builder.AddEntry(pos, dblValue);
+ CHECK_EQ(access, kOvflAccess);
}
writer.Finish();
relocation_info_size = static_cast<int>(buffer_end - writer.pos());
- CodeDesc desc = { buffer.get(), buffer_size, code_size,
- relocation_info_size, NULL };
+ CodeDesc desc = {buffer.get(), buffer_size, code_size, relocation_info_size,
+ 0, NULL};
// Read only (non-statement) positions.
{