static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptyStringRootIndex = 134;
+ static const int kEmptyStringRootIndex = 135;
static const int kNodeClassIdOffset = 1 * kApiPointerSize;
static const int kNodeFlagsOffset = 1 * kApiPointerSize + 3;
}
+// Check whether an immediate can be encoded as the 12-bit offset of an ARM
+// addressing mode 2 (single word load/store) instruction.  Mode 2 encodes the
+// magnitude in 12 bits with a separate U (add/subtract) bit, so both signs of
+// imm32 are accepted.
+// NOTE(review): abs(kMinInt) is undefined behavior in C; presumably callers
+// never pass INT32_MIN here -- confirm.
+bool Assembler::ImmediateFitsAddrMode2Instruction(int32_t imm32) {
+ return is_uint12(abs(imm32));
+}
+
+
// Debugging.
void Assembler::RecordJSReturn() {
positions_recorder()->WriteRecordedPositions();
// Check whether an immediate fits an addressing mode 1 instruction.
bool ImmediateFitsAddrMode1Instruction(int32_t imm32);
+ // Check whether an immediate fits an addressing mode 2 instruction.
+ bool ImmediateFitsAddrMode2Instruction(int32_t imm32);
+
// Class for scoping postponing the constant pool generation.
class BlockConstPoolScope {
public:
__ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
// Perform prologue operations usually performed by the young code stub.
- __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ __ PushFixedFrame(r1);
__ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
// Jump to point after the code-age stub.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ SmiTag(r0);
__ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
+ __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
+ (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
+ fp.bit() | lr.bit());
__ add(fp, sp,
Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
// r4: argv
Isolate* isolate = masm->isolate();
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
- __ mov(r8, Operand(Smi::FromInt(marker)));
+ if (FLAG_enable_ool_constant_pool) {
+ __ mov(r8, Operand(Smi::FromInt(marker)));
+ }
+ __ mov(r7, Operand(Smi::FromInt(marker)));
__ mov(r6, Operand(Smi::FromInt(marker)));
__ mov(r5,
Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
__ ldr(r5, MemOperand(r5));
__ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
- __ Push(ip, r8, r6, r5);
+ __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
+ (FLAG_enable_ool_constant_pool ? r8.bit() : 0) |
+ ip.bit());
// Set up frame pointer for the frame to be pushed.
__ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
if (!initialized) {
CodePatcher patcher(byte_sequence, kNoCodeAgeSequenceLength);
PredictableCodeSizeScope scope(patcher.masm(), *length);
- patcher.masm()->stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ patcher.masm()->PushFixedFrame(r1);
patcher.masm()->nop(ip.code());
patcher.masm()->add(fp, sp,
Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
Register JavaScriptFrame::fp_register() { return v8::internal::fp; }
Register JavaScriptFrame::context_register() { return cp; }
+// On ARM the out-of-line constant pool pointer lives in pp; querying it is
+// only valid when FLAG_enable_ool_constant_pool is set.
+Register JavaScriptFrame::constant_pool_pointer_register() {
+ ASSERT(FLAG_enable_ool_constant_pool);
+ return pp;
+}
Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; }
Register StubFailureTrampolineFrame::context_register() { return cp; }
+// Same register (pp) for stub failure trampoline frames; also gated on the
+// out-of-line constant pool flag.
+Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
+ ASSERT(FLAG_enable_ool_constant_pool);
+ return pp;
+}
} } // namespace v8::internal
// The live registers are:
// o r1: the JS function object being called (i.e., ourselves)
// o cp: our context
+// o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
// o fp: our caller's frame pointer
// o sp: stack pointer
// o lr: return address
info->set_prologue_offset(masm_->pc_offset());
__ Prologue(BUILD_FUNCTION_FRAME);
info->AddNoFrameRange(0, masm_->pc_offset());
+ __ LoadConstantPoolPointerRegister();
{ Comment cmnt(masm_, "[ Allocate locals");
int locals_count = info->scope()->num_stack_slots();
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
- masm_->bind(&check_exit_codesize);
+ __ bind(&check_exit_codesize);
#endif
// Make sure that the constant pool is not emitted inside of the return
// sequence.
{ Assembler::BlockConstPoolScope block_const_pool(masm_);
- // Here we use masm_-> instead of the __ macro to avoid the code coverage
- // tool from instrumenting as we rely on the code size here.
int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
// TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
PredictableCodeSizeScope predictable(masm_, -1);
__ RecordJSReturn();
- masm_->mov(sp, fp);
- int no_frame_start = masm_->pc_offset();
- masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
- masm_->add(sp, sp, Operand(sp_delta));
- masm_->Jump(lr);
+ int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
+ __ add(sp, sp, Operand(sp_delta));
+ __ Jump(lr);
info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
}
__ bind(&resume_frame);
// lr = return address.
// fp = caller's frame pointer.
+ // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
// cp = callee's context,
// r4 = callee's JS function.
- __ Push(lr, fp, cp, r4);
+ __ PushFixedFrame(r4);
// Adjust FP to point to saved FP.
- __ add(fp, sp, Operand(2 * kPointerSize));
+ __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
// Load the operand stack size.
__ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
// r1: Callee's JS function.
// cp: Callee's context.
+ // pp: Callee's constant pool pointer (if FLAG_enable_ool_constant_pool)
// fp: Caller's frame pointer.
// lr: Caller's pc.
__ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME);
frame_is_built_ = true;
info_->AddNoFrameRange(0, masm_->pc_offset());
+ __ LoadConstantPoolPointerRegister();
}
// Reserve space for the stack slots needed by the code.
ASSERT(!frame_is_built_);
ASSERT(info()->IsStub());
frame_is_built_ = true;
- __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
+ __ PushFixedFrame();
__ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
__ push(scratch0());
__ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
Comment(";;; Destroy frame");
ASSERT(frame_is_built_);
__ pop(ip);
- __ ldm(ia_w, sp, cp.bit() | fp.bit() | lr.bit());
+ __ PopFixedFrame();
frame_is_built_ = false;
}
__ jmp(code->exit());
__ b(&needs_frame);
} else {
__ bind(&needs_frame);
- __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
+ __ PushFixedFrame();
// This variant of deopt can only be used with stubs. Since we don't
// have a function pointer to install in the stack frame that we're
// building, install a special marker there instead.
}
int no_frame_start = -1;
if (NeedsEagerFrame()) {
- __ mov(sp, fp);
- no_frame_start = masm_->pc_offset();
- __ ldm(ia_w, sp, fp.bit() | lr.bit());
+ no_frame_start = masm_->LeaveFrame(StackFrame::JAVA_SCRIPT);
}
if (instr->has_constant_parameter_count()) {
int parameter_count = ToInteger32(instr->constant_parameter_count());
}
+// Push the fixed part of a stack frame: lr, fp, pp (only when
+// FLAG_enable_ool_constant_pool), cp, and optionally a marker/function
+// register.  A single STM is used; STM stores the register list in ascending
+// register-number order, so the assert guarantees marker_reg lands below cp
+// (i.e. on top of the stack) regardless of which register the caller passes.
+void MacroAssembler::PushFixedFrame(Register marker_reg) {
+ ASSERT(!marker_reg.is_valid() || marker_reg.code() < cp.code());
+ stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
+ cp.bit() |
+ (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
+ fp.bit() |
+ lr.bit());
+}
+
+
+// Inverse of PushFixedFrame: pop the fixed frame back into the same register
+// set with one LDM.  The same code-ordering constraint on marker_reg applies
+// so the slots are matched up with what PushFixedFrame stored.
+void MacroAssembler::PopFixedFrame(Register marker_reg) {
+ ASSERT(!marker_reg.is_valid() || marker_reg.code() < cp.code());
+ ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
+ cp.bit() |
+ (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
+ fp.bit() |
+ lr.bit());
+}
+
+
// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
// Safepoints expect a block of contiguous register values starting with r0:
void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
if (frame_mode == BUILD_STUB_FRAME) {
- stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
+ PushFixedFrame();
Push(Smi::FromInt(StackFrame::STUB));
// Adjust FP to point to saved FP.
add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
ldr(pc, MemOperand(pc, -4));
emit_code_stub_address(stub);
} else {
- stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ PushFixedFrame(r1);
nop(ip.code());
// Adjust FP to point to saved FP.
add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
}
+// Load pp with the ConstantPoolArray pointer stored in this Code object's
+// kConstantPoolOffset field, using a pc-relative ldr.  The "- 8" corrects for
+// the ARM pipeline: in ARM state, reading pc yields the address of the
+// current instruction plus 8.  The assert checks the pc-relative offset is
+// still within the 12-bit reach of an addressing mode 2 load.
+void MacroAssembler::LoadConstantPoolPointerRegister() {
+ if (FLAG_enable_ool_constant_pool) {
+ int constant_pool_offset =
+ Code::kConstantPoolOffset - Code::kHeaderSize - pc_offset() - 8;
+ ASSERT(ImmediateFitsAddrMode2Instruction(constant_pool_offset));
+ ldr(pp, MemOperand(pc, constant_pool_offset));
+ }
+}
+
+
void MacroAssembler::EnterFrame(StackFrame::Type type) {
// r0-r3: preserved
- stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
+ PushFixedFrame();
mov(ip, Operand(Smi::FromInt(type)));
push(ip);
mov(ip, Operand(CodeObject()));
}
-void MacroAssembler::LeaveFrame(StackFrame::Type type) {
+// Tear down the current frame.  Now returns the pc offset at which the frame
+// is gone; callers record it as the start of a "no frame" code range (see the
+// AddNoFrameRange call sites).
+int MacroAssembler::LeaveFrame(StackFrame::Type type) {
// r0: preserved
// r1: preserved
// r2: preserved
// Drop the execution stack down to the frame pointer and restore
-// the caller frame pointer and return address.
-mov(sp, fp);
-ldm(ia_w, sp, fp.bit() | lr.bit());
+// the caller frame pointer, return address and constant pool pointer
+// (if FLAG_enable_ool_constant_pool).
+int frame_ends;
+if (FLAG_enable_ool_constant_pool) {
+ // The fixed frame has an extra pp slot directly below fp
+ // (StandardFrameConstants::kConstantPoolOffset); point sp at it so the
+ // single ldm below restores pp, fp and lr together.
+ add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
+ frame_ends = pc_offset();
+ ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
+} else {
+ mov(sp, fp);
+ frame_ends = pc_offset();
+ ldm(ia_w, sp, fp.bit() | lr.bit());
+}
+return frame_ends;
}
}
}
+ // Push a fixed frame, consisting of lr, fp, constant pool (if
+ // FLAG_enable_ool_constant_pool), context and JS function / marker id if
+ // marker_reg is a valid register.
+ void PushFixedFrame(Register marker_reg = no_reg);
+ void PopFixedFrame(Register marker_reg = no_reg);
+
// Push and pop the registers that can hold pointers, as defined by the
// RegList constant kSafepointSavedRegisters.
void PushSafepointRegisters();
// Generates function and stub prologue code.
void Prologue(PrologueFrameMode frame_mode);
+ // Loads the constant pool pointer (pp) register.
+ void LoadConstantPoolPointerRegister();
+
// Enter exit frame.
// stack_space - extra stack space, used for alignment before call to C.
void EnterExitFrame(bool save_doubles, int stack_space = 0);
// Activation support.
void EnterFrame(StackFrame::Type type);
- void LeaveFrame(StackFrame::Type type);
+ // Returns the pc offset at which the frame ends.
+ int LeaveFrame(StackFrame::Type type);
// Expects object in r0 and returns map with validated enum cache
// in r0. Assumes that any other register can be used as a scratch.
ASSERT(!is_bottommost || !has_alignment_padding_ ||
(fp_value & kPointerSize) != 0);
+ if (FLAG_enable_ool_constant_pool) {
+ // For the bottommost output frame the constant pool pointer can be gotten
+ // from the input frame. For subsequent output frames, it can be gotten from
+ // the function's code.
+ Register constant_pool_reg =
+ JavaScriptFrame::constant_pool_pointer_register();
+ output_offset -= kPointerSize;
+ input_offset -= kPointerSize;
+ if (is_bottommost) {
+ value = input_->GetFrameSlot(input_offset);
+ } else {
+ value = reinterpret_cast<intptr_t>(
+ function->shared()->code()->constant_pool());
+ }
+ output_frame->SetFrameSlot(output_offset, value);
+ output_frame->SetConstantPool(value);
+ if (is_topmost) output_frame->SetRegister(constant_pool_reg.code(), value);
+ if (trace_scope_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR "; constant_pool\n",
+ top_address + output_offset, output_offset, value);
+ }
+ }
+
// For the bottommost output frame the context can be gotten from the input
// frame. For all subsequent output frames it can be gotten from the function
// so long as we don't inline functions that need local contexts.
fp_value, output_offset, value);
}
+ if (FLAG_enable_ool_constant_pool) {
+ // A marker value is used in place of the constant pool.
+ output_offset -= kPointerSize;
+ intptr_t constant_pool = reinterpret_cast<intptr_t>(
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ output_frame->SetFrameSlot(output_offset, constant_pool);
+ if (trace_scope_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; constant_pool (adaptor sentinel)\n",
+ top_address + output_offset, output_offset, constant_pool);
+ }
+ }
+
// A marker value is used in place of the context.
output_offset -= kPointerSize;
intptr_t context = reinterpret_cast<intptr_t>(
fp_value, output_offset, value);
}
+ if (FLAG_enable_ool_constant_pool) {
+ // The constant pool pointer can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetConstantPool();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (trace_scope_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; constant pool\n",
+ top_address + output_offset, output_offset, value);
+ }
+ }
+
// The context can be gotten from the previous frame.
output_offset -= kPointerSize;
value = output_[frame_index - 1]->GetContext();
}
// We need 1 stack entry for the return address and enough entries for the
- // StackFrame::INTERNAL (FP, context, frame type and code object - see
- // MacroAssembler::EnterFrame). For a setter stub frame we need one additional
- // entry for the implicit return value, see
- // StoreStubCompiler::CompileStoreViaSetter.
+ // StackFrame::INTERNAL (FP, context, frame type, code object and constant
+ // pool (if FLAG_enable_ool_constant_pool) - see MacroAssembler::EnterFrame).
+ // For a setter stub frame we need one additional entry for the implicit
+ // return value, see StoreStubCompiler::CompileStoreViaSetter.
unsigned fixed_frame_entries =
(StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
(is_setter_stub_frame ? 1 : 0);
fp_value, output_offset, value);
}
+ if (FLAG_enable_ool_constant_pool) {
+ // The constant pool pointer can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetConstantPool();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (trace_scope_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; constant pool\n",
+ top_address + output_offset, output_offset, value);
+ }
+ }
+
// The context can be gotten from the previous frame.
output_offset -= kPointerSize;
value = output_[frame_index - 1]->GetContext();
// +-------------------------+ +-------------------------+
// | | saved frame (FP) | | saved frame (FP) |
// | +=========================+<-fpreg +=========================+<-fpreg
+ // | |constant pool (if ool_cp)| |constant pool (if ool_cp)|
+ // | +-------------------------+ +-------------------------|
// | | JSFunction context | | JSFunction context |
// v +-------------------------+ +-------------------------|
// | COMPILED_STUB marker | | STUB_FAILURE marker |
top_address + output_frame_offset, output_frame_offset, value);
}
+ if (FLAG_enable_ool_constant_pool) {
+ // The constant pool pointer can be gotten from the input frame.
+ Register constant_pool_pointer_register =
+ StubFailureTrampolineFrame::constant_pool_pointer_register();
+ input_frame_offset -= kPointerSize;
+ value = input_->GetFrameSlot(input_frame_offset);
+ output_frame->SetRegister(constant_pool_pointer_register.code(), value);
+ output_frame_offset -= kPointerSize;
+ output_frame->SetFrameSlot(output_frame_offset, value);
+ if (trace_scope_) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; constant_pool_pointer\n",
+ top_address + output_frame_offset, output_frame_offset, value);
+ }
+ }
+
// The context can be gotten from the input frame.
Register context_reg = StubFailureTrampolineFrame::context_register();
input_frame_offset -= kPointerSize;
unsigned Deoptimizer::ComputeInputFrameSize() const {
unsigned fixed_size = ComputeFixedSize(function_);
- // The fp-to-sp delta already takes the context and the function
- // into account so we have to avoid double counting them.
+ // The fp-to-sp delta already takes the context, constant pool pointer and the
+ // function into account so we have to avoid double counting them.
unsigned result = fixed_size + fp_to_sp_delta_ -
StandardFrameConstants::kFixedFrameSizeFromFp;
#ifdef DEBUG
top_(kZapUint32),
pc_(kZapUint32),
fp_(kZapUint32),
- context_(kZapUint32) {
+ context_(kZapUint32),
+ constant_pool_(kZapUint32) {
// Zap all the registers.
for (int r = 0; r < Register::kNumRegisters; r++) {
SetRegister(r, kZapUint32);
intptr_t GetContext() const { return context_; }
void SetContext(intptr_t context) { context_ = context; }
+ intptr_t GetConstantPool() const { return constant_pool_; }
+ void SetConstantPool(intptr_t constant_pool) {
+ constant_pool_ = constant_pool;
+ }
+
Smi* GetState() const { return state_; }
void SetState(Smi* state) { state_ = state; }
intptr_t pc_;
intptr_t fp_;
intptr_t context_;
+ intptr_t constant_pool_;
StackFrame::Type type_;
Smi* state_;
class StandardFrameConstants : public AllStatic {
public:
// Fixed part of the frame consists of return address, caller fp,
- // context and function.
+ // constant pool (if FLAG_enable_ool_constant_pool), context, and function.
// StandardFrame::IterateExpressions assumes that kContextOffset is the last
// object pointer.
- static const int kFixedFrameSizeFromFp = 2 * kPointerSize;
+ static const int kCPSlotSize =
+ FLAG_enable_ool_constant_pool ? kPointerSize : 0;
+ static const int kFixedFrameSizeFromFp = 2 * kPointerSize + kCPSlotSize;
static const int kFixedFrameSize = kPCOnStackSize + kFPOnStackSize +
kFixedFrameSizeFromFp;
- static const int kExpressionsOffset = -3 * kPointerSize;
- static const int kMarkerOffset = -2 * kPointerSize;
- static const int kContextOffset = -1 * kPointerSize;
+ static const int kExpressionsOffset = -3 * kPointerSize - kCPSlotSize;
+ static const int kMarkerOffset = -2 * kPointerSize - kCPSlotSize;
+ static const int kContextOffset = -1 * kPointerSize - kCPSlotSize;
+ static const int kConstantPoolOffset = FLAG_enable_ool_constant_pool ?
+ -1 * kPointerSize : 0;
static const int kCallerFPOffset = 0 * kPointerSize;
static const int kCallerPCOffset = +1 * kFPOnStackSize;
static const int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize;
// Architecture-specific register description.
static Register fp_register();
static Register context_register();
+ static Register constant_pool_pointer_register();
static JavaScriptFrame* cast(StackFrame* frame) {
ASSERT(frame->is_java_script());
// Architecture-specific register description.
static Register fp_register();
static Register context_register();
+ static Register constant_pool_pointer_register();
protected:
inline explicit StubFailureTrampolineFrame(
SetInternalReference(code, entry,
"gc_metadata", code->gc_metadata(),
Code::kGCMetadataOffset);
+ SetInternalReference(code, entry,
+ "constant_pool", code->constant_pool(),
+ Code::kConstantPoolOffset);
}
}
set_oddball_map(Map::cast(obj));
+ { MaybeObject* maybe_obj =
+ AllocatePartialMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_constant_pool_array_map(Map::cast(obj));
+
// Allocate the empty array.
{ MaybeObject* maybe_obj = AllocateEmptyFixedArray();
if (!maybe_obj->ToObject(&obj)) return false;
}
set_empty_descriptor_array(DescriptorArray::cast(obj));
+ // Allocate the constant pool array.
+ { MaybeObject* maybe_obj = AllocateEmptyConstantPoolArray();
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_empty_constant_pool_array(ConstantPoolArray::cast(obj));
+
// Fix the instance_descriptors for the existing maps.
meta_map()->set_code_cache(empty_fixed_array());
meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
oddball_map()->init_back_pointer(undefined_value());
oddball_map()->set_instance_descriptors(empty_descriptor_array());
+ constant_pool_array_map()->set_code_cache(empty_fixed_array());
+ constant_pool_array_map()->set_dependent_code(
+ DependentCode::cast(empty_fixed_array()));
+ constant_pool_array_map()->init_back_pointer(undefined_value());
+ constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array());
+
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
meta_map()->set_constructor(null_value());
oddball_map()->set_prototype(null_value());
oddball_map()->set_constructor(null_value());
+ constant_pool_array_map()->set_prototype(null_value());
+ constant_pool_array_map()->set_constructor(null_value());
+
{ MaybeObject* maybe_obj =
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
set_fixed_double_array_map(Map::cast(obj));
{ MaybeObject* maybe_obj =
- AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel);
- if (!maybe_obj->ToObject(&obj)) return false;
- }
- set_constant_pool_array_map(Map::cast(obj));
-
- { MaybeObject* maybe_obj =
AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
if (code->kind() == Code::OPTIMIZED_FUNCTION) {
code->set_marked_for_deoptimization(false);
}
+ code->set_constant_pool(empty_constant_pool_array());
#ifdef ENABLE_DEBUGGER_SUPPORT
if (code->kind() == Code::FUNCTION) {
constant_pool->SetEntryCounts(number_of_int64_entries,
number_of_ptr_entries,
number_of_int32_entries);
- MemsetPointer(
- HeapObject::RawField(
- constant_pool,
- constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())),
- undefined_value(),
- number_of_ptr_entries);
+ if (number_of_ptr_entries > 0) {
+ MemsetPointer(
+ HeapObject::RawField(
+ constant_pool,
+ constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())),
+ undefined_value(),
+ number_of_ptr_entries);
+ }
return constant_pool;
}
+// Allocate the canonical empty ConstantPoolArray (0 int64, 0 ptr and 0 int32
+// entries) in old data space.  It contains no heap pointers, so setting the
+// map without a write barrier is safe.
+MaybeObject* Heap::AllocateEmptyConstantPoolArray() {
+ int size = ConstantPoolArray::SizeFor(0, 0, 0);
+ Object* result;
+ { MaybeObject* maybe_result =
+ AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ HeapObject::cast(result)->set_map_no_write_barrier(constant_pool_array_map());
+ ConstantPoolArray::cast(result)->SetEntryCounts(0, 0, 0);
+ return result;
+}
+
+
MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
Object* result;
{ MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
+ V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
V(Smi, stack_limit, StackLimit) \
V(Oddball, arguments_marker, ArgumentsMarker) \
/* The roots above this line should be boring from a GC point of view. */ \
// Allocate empty fixed double array.
MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
+ // Allocate empty constant pool array.
+ MUST_USE_RESULT MaybeObject* AllocateEmptyConstantPoolArray();
+
// Allocate a tenured simple cell.
MUST_USE_RESULT MaybeObject* AllocateCell(Object* value);
Register JavaScriptFrame::fp_register() { return ebp; }
Register JavaScriptFrame::context_register() { return esi; }
+// This architecture (fp is ebp/esi, i.e. ia32) has no out-of-line constant
+// pool, so there is no constant pool pointer register; this query must never
+// be reached.
+Register JavaScriptFrame::constant_pool_pointer_register() {
+ UNREACHABLE();
+ return no_reg;
+}
Register StubFailureTrampolineFrame::fp_register() { return ebp; }
Register StubFailureTrampolineFrame::context_register() { return esi; }
+// See above: no constant pool pointer register on this architecture.
+Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
+ UNREACHABLE();
+ return no_reg;
+}
} } // namespace v8::internal
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
#include "v8.h"
#if V8_TARGET_ARCH_MIPS
Register JavaScriptFrame::fp_register() { return v8::internal::fp; }
Register JavaScriptFrame::context_register() { return cp; }
+// No out-of-line constant pool on this architecture; there is no constant
+// pool pointer register and this query must never be reached.
+Register JavaScriptFrame::constant_pool_pointer_register() {
+ UNREACHABLE();
+ return no_reg;
+}
Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; }
Register StubFailureTrampolineFrame::context_register() { return cp; }
+// See above: no constant pool pointer register on this architecture.
+Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
+ UNREACHABLE();
+ return no_reg;
+}
} } // namespace v8::internal
}
+// Accessor for the Code object's constant pool field (kConstantPoolOffset).
+ConstantPoolArray* Code::constant_pool() {
+ return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
+}
+
+
+// Store a new constant pool and record it with the write barrier, since the
+// field holds a heap pointer the GC must see.
+void Code::set_constant_pool(Object* value) {
+ ASSERT(value->IsConstantPoolArray());
+ WRITE_FIELD(this, kConstantPoolOffset, value);
+ WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
+}
+
+
Code::Flags Code::ComputeFlags(Kind kind,
InlineCacheState ic_state,
ExtraICState extra_ic_state,
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
- int first_ptr_offset = constant_pool->OffsetOfElementAt(
- constant_pool->first_ptr_index());
- int last_ptr_offset = constant_pool->OffsetOfElementAt(
- constant_pool->first_ptr_index() + constant_pool->count_of_ptr_entries());
- StaticVisitor::VisitPointers(
- heap,
- HeapObject::RawField(object, first_ptr_offset),
- HeapObject::RawField(object, last_ptr_offset));
+ if (constant_pool->count_of_ptr_entries() > 0) {
+ int first_ptr_offset = constant_pool->OffsetOfElementAt(
+ constant_pool->first_ptr_index());
+ int last_ptr_offset = constant_pool->OffsetOfElementAt(
+ constant_pool->first_ptr_index() +
+ constant_pool->count_of_ptr_entries());
+ StaticVisitor::VisitPointers(
+ heap,
+ HeapObject::RawField(object, first_ptr_offset),
+ HeapObject::RawField(object, last_ptr_offset));
+ }
}
IteratePointer(v, kHandlerTableOffset);
IteratePointer(v, kDeoptimizationDataOffset);
IteratePointer(v, kTypeFeedbackInfoOffset);
+ IteratePointer(v, kConstantPoolOffset);
RelocIterator it(this, mode_mask);
Isolate* isolate = this->GetIsolate();
StaticVisitor::VisitPointer(
heap,
reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
+ StaticVisitor::VisitPointer(
+ heap,
+ reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));
+
RelocIterator it(this, mode_mask);
for (; !it.done(); it.next()) {
void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
- int first_ptr_offset = OffsetOfElementAt(first_ptr_index());
- int last_ptr_offset =
- OffsetOfElementAt(first_ptr_index() + count_of_ptr_entries());
- v->VisitPointers(
- HeapObject::RawField(this, first_ptr_offset),
- HeapObject::RawField(this, last_ptr_offset));
+ if (count_of_ptr_entries() > 0) {
+ // Guard for pools with no pointer entries (the zero-entry empty constant
+ // pool array makes this reachable); presumably computing RawField offsets
+ // for the empty pointer section would be invalid -- TODO confirm.
+ int first_ptr_offset = OffsetOfElementAt(first_ptr_index());
+ int last_ptr_offset =
+ OffsetOfElementAt(first_ptr_index() + count_of_ptr_entries());
+ v->VisitPointers(
+ HeapObject::RawField(this, first_ptr_offset),
+ HeapObject::RawField(this, last_ptr_offset));
+ }
}
inline bool marked_for_deoptimization();
inline void set_marked_for_deoptimization(bool flag);
+ // [constant_pool]: The constant pool for this function.
+ inline ConstantPoolArray* constant_pool();
+ inline void set_constant_pool(Object* constant_pool);
+
// Get the safepoint entry for the given pc.
SafepointEntry GetSafepointEntry(Address pc);
kKindSpecificFlags1Offset + kIntSize;
// Note: We might be able to squeeze this into the flags above.
static const int kPrologueOffset = kKindSpecificFlags2Offset + kIntSize;
+ static const int kConstantPoolOffset = kPrologueOffset + kPointerSize;
- static const int kHeaderPaddingStart = kPrologueOffset + kIntSize;
+ static const int kHeaderPaddingStart = kConstantPoolOffset + kIntSize;
// Add padding to align the instruction start following right after
// the Code object header.
Register JavaScriptFrame::fp_register() { return rbp; }
Register JavaScriptFrame::context_register() { return rsi; }
+// This architecture (fp is rbp/rsi, i.e. x64) has no out-of-line constant
+// pool, so there is no constant pool pointer register; this query must never
+// be reached.
+Register JavaScriptFrame::constant_pool_pointer_register() {
+ UNREACHABLE();
+ return no_reg;
+}
Register StubFailureTrampolineFrame::fp_register() { return rbp; }
Register StubFailureTrampolineFrame::context_register() { return rsi; }
+// See above: no constant pool pointer register on this architecture.
+Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
+ UNREACHABLE();
+ return no_reg;
+}
} } // namespace v8::internal
{ 'name': 'off_fp_context',
'value': 'StandardFrameConstants::kContextOffset' },
+ { 'name': 'off_fp_constant_pool',
+ 'value': 'StandardFrameConstants::kConstantPoolOffset' },
{ 'name': 'off_fp_marker',
'value': 'StandardFrameConstants::kMarkerOffset' },
{ 'name': 'off_fp_function',