: Assembler(NULL,
reinterpret_cast<byte*>(start),
count * kInstructionSize + kGap) {
- // Block constant pool emission.
- StartBlockConstPool();
+ StartBlockPools();
}
PatchingAssembler(byte* start, unsigned count)
: Assembler(NULL, start, count * kInstructionSize + kGap) {
// Block pool emission while patching. NOTE(review): StartBlockPools()
// presumably blocks both constant-pool and veneer-pool emission -- confirm
// against the Assembler definition; the matching ~PatchingAssembler() only
// asserts is_const_pool_blocked() before calling EndBlockPools().
- StartBlockConstPool();
+ StartBlockPools();
}
~PatchingAssembler() {
// Const pool should still be blocked.
ASSERT(is_const_pool_blocked());
- EndBlockConstPool();
+ EndBlockPools();
// Verify we have generated the number of instruction we expected.
ASSERT((pc_offset() + kGap) == buffer_size_);
// Verify no relocation information has been emitted.
__ bind(&table_start);
Label needs_frame;
for (int i = 0; i < deopt_jump_table_.length(); i++) {
- __ Bind(&deopt_jump_table_[i].label);
- Address entry = deopt_jump_table_[i].address;
- Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
+ __ Bind(&deopt_jump_table_[i]->label);
+ Address entry = deopt_jump_table_[i]->address;
+ Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
- if (deopt_jump_table_[i].needs_frame) {
+ if (deopt_jump_table_[i]->needs_frame) {
ASSERT(!info()->saves_caller_doubles());
UseScratchRegisterScope temps(masm());
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last().address != entry) ||
- (deopt_jump_table_.last().bailout_type != bailout_type) ||
- (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ (deopt_jump_table_.last()->address != entry) ||
+ (deopt_jump_table_.last()->bailout_type != bailout_type) ||
+ (deopt_jump_table_.last()->needs_frame != !frame_is_built_)) {
+ Deoptimizer::JumpTableEntry* table_entry =
+ new(zone()) Deoptimizer::JumpTableEntry(entry,
+ bailout_type,
+ !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
- __ B(&deopt_jump_table_.last().label,
+ __ B(&deopt_jump_table_.last()->label,
branch_type, reg, bit);
}
}