}
-Handle<BytecodeArray> Factory::NewBytecodeArray(int length,
- const byte* raw_bytecodes,
- int frame_size,
- int parameter_count) {
+Handle<BytecodeArray> Factory::NewBytecodeArray(
+ int length, const byte* raw_bytecodes, int frame_size, int parameter_count,
+ Handle<FixedArray> constant_pool) {
DCHECK(0 <= length);
- CALL_HEAP_FUNCTION(isolate(),
- isolate()->heap()->AllocateBytecodeArray(
- length, raw_bytecodes, frame_size, parameter_count),
+ CALL_HEAP_FUNCTION(isolate(), isolate()->heap()->AllocateBytecodeArray(
+ length, raw_bytecodes, frame_size,
+ parameter_count, *constant_pool),
BytecodeArray);
}
PretenureFlag pretenure = NOT_TENURED);
Handle<BytecodeArray> NewBytecodeArray(int length, const byte* raw_bytecodes,
- int frame_size, int parameter_count);
+ int frame_size, int parameter_count,
+ Handle<FixedArray> constant_pool);
Handle<FixedTypedArrayBase> NewFixedTypedArrayWithExternalPointer(
int length, ExternalArrayType array_type, void* external_pointer,
set_empty_byte_array(byte_array);
BytecodeArray* bytecode_array;
- AllocationResult allocation = AllocateBytecodeArray(0, nullptr, 0, 0);
+ AllocationResult allocation =
+ AllocateBytecodeArray(0, nullptr, 0, 0, empty_fixed_array());
if (!allocation.To(&bytecode_array)) {
return false;
}
AllocationResult Heap::AllocateBytecodeArray(int length,
const byte* const raw_bytecodes,
int frame_size,
- int parameter_count) {
+ int parameter_count,
+ FixedArray* constant_pool) {
if (length < 0 || length > BytecodeArray::kMaxLength) {
v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
}
+ // Bytecode array is pretenured, so constant pool array should be too.
+ DCHECK(!InNewSpace(constant_pool));
int size = BytecodeArray::SizeFor(length);
HeapObject* result;
instance->set_length(length);
instance->set_frame_size(frame_size);
instance->set_parameter_count(parameter_count);
+ instance->set_constant_pool(constant_pool);
CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length);
return result;
// Allocates a bytecode array with given contents.
MUST_USE_RESULT AllocationResult
AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
- int parameter_count);
+ int parameter_count, FixedArray* constant_pool);
// Copy the code and scope info part of the code object, but insert
// the provided data as the relocation information.
Address base_pointer_slot =
dst->address() + FixedTypedArrayBase::kBasePointerOffset;
RecordMigratedSlot(Memory::Object_at(base_pointer_slot), base_pointer_slot);
+ } else if (src->IsBytecodeArray()) {
+ heap()->MoveBlock(dst->address(), src->address(), size);
+ Address constant_pool_slot =
+ dst->address() + BytecodeArray::kConstantPoolOffset;
+ RecordMigratedSlot(Memory::Object_at(constant_pool_slot),
+ constant_pool_slot);
} else if (FLAG_unbox_double_fields) {
Address dst_addr = dst->address();
Address src_addr = src->address();
if (object->IsFixedTypedArrayBase()) {
return static_cast<int>(slot - object->address()) ==
FixedTypedArrayBase::kBasePointerOffset;
+ } else if (object->IsBytecodeArray()) {
+ return static_cast<int>(slot - object->address()) ==
+ BytecodeArray::kConstantPoolOffset;
} else if (FLAG_unbox_double_fields) {
// Filter out slots that happen to point to unboxed double fields.
LayoutDescriptorHelper helper(object->map());
}
+// New-space visitor for BytecodeArray: visits the constant pool slot — the
+// only tagged field, spanning kConstantPoolOffset up to kHeaderSize — and
+// returns the object's total size so iteration can advance past it. The
+// bytecodes themselves are raw data and carry no heap references.
+template <typename StaticVisitor>
+int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
+ Map* map, HeapObject* object) {
+ VisitPointers(
+ map->GetHeap(), object,
+ HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
+ HeapObject::RawField(object, BytecodeArray::kHeaderSize));
+ return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
+}
+
+
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitShortcutCandidate,
table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
- table_.Register(kVisitBytecodeArray, &DataObjectVisitor::Visit);
+ table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);
table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
}
+// Marking visitor for BytecodeArray: visits only the constant pool pointer
+// (the single tagged field between kConstantPoolOffset and kHeaderSize);
+// the raw bytecodes contain no heap references. Registered for
+// kVisitBytecodeArray in Initialize(), replacing the plain data-object visit.
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
+ Map* map, HeapObject* object) {
+ StaticVisitor::VisitPointers(
+ map->GetHeap(), object,
+ HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
+ HeapObject::RawField(object, BytecodeArray::kHeaderSize));
+}
+
+
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
Map* map) {
case SYMBOL_TYPE:
Symbol::BodyDescriptor::IterateBody(this, v);
break;
+ case BYTECODE_ARRAY_TYPE:
+ reinterpret_cast<BytecodeArray*>(this)->BytecodeArrayIterateBody(v);
+ break;
case HEAP_NUMBER_TYPE:
case MUTABLE_HEAP_NUMBER_TYPE:
case SIMD128_VALUE_TYPE:
case FILLER_TYPE:
case BYTE_ARRAY_TYPE:
- case BYTECODE_ARRAY_TYPE:
case FREE_SPACE_TYPE:
break;
return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
}
- INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object)) {
- return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
- }
-
INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
return FixedDoubleArray::SizeFor(length);
INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
+ INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));
class DataObjectVisitor {
public:
INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
+ INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
// Mark pointers in a Map and its TransitionArray together, possibly
// treating transitions or back pointers weak.
obj_address + FixedTypedArrayBase::kBasePointerOffset,
obj_address + FixedTypedArrayBase::kHeaderSize,
slot_callback);
+ } else if (heap_object->IsBytecodeArray()) {
+ FindPointersToNewSpaceInRegion(
+ obj_address + BytecodeArray::kConstantPoolOffset,
+ obj_address + BytecodeArray::kHeaderSize,
+ slot_callback);
} else if (FLAG_unbox_double_fields) {
LayoutDescriptorHelper helper(heap_object->map());
DCHECK(!helper.all_fields_tagged());
int bytecode_size = static_cast<int>(bytecodes_.size());
int register_count = local_register_count_ + temporary_register_count_;
int frame_size = register_count * kPointerSize;
- Handle<BytecodeArray> output = isolate_->factory()->NewBytecodeArray(
- bytecode_size, &bytecodes_.front(), frame_size, parameter_count_);
+ Factory* factory = isolate_->factory();
+ Handle<BytecodeArray> output =
+ factory->NewBytecodeArray(bytecode_size, &bytecodes_.front(), frame_size,
+ parameter_count_, factory->empty_fixed_array());
bytecode_generated_ = true;
return output;
}
void BytecodeArray::BytecodeArrayVerify() {
// TODO(oth): Walk bytecodes and immediate values to validate sanity.
CHECK(IsBytecodeArray());
+ CHECK(constant_pool()->IsFixedArray());
+ VerifyHeapPointer(constant_pool());
}
} else if (type == JS_FUNCTION_TYPE) {
return HeapObjectContents::kMixedValues;
#endif
+ } else if (type == BYTECODE_ARRAY_TYPE) {
+ return HeapObjectContents::kMixedValues;
} else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
return HeapObjectContents::kMixedValues;
}
+// Body iteration for BytecodeArray: the constant pool is its only pointer
+// field, so it is the only offset handed to the visitor; the bytecode bytes
+// that follow the header are skipped as raw data.
+void BytecodeArray::BytecodeArrayIterateBody(ObjectVisitor* v) {
+ IteratePointer(v, kConstantPoolOffset);
+}
+
+
byte BytecodeArray::get(int index) {
DCHECK(index >= 0 && index < this->length());
return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
+ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
+
+
Address BytecodeArray::GetFirstBytecodeAddress() {
return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
interpreter::Bytecodes::Decode(os, bytecode_start);
os << "\n";
}
+
+ os << "Constant pool (size = " << constant_pool()->length() << ")\n";
+ constant_pool()->Print();
}
inline int parameter_count() const;
inline void set_parameter_count(int number_of_parameters);
+ // Accessors for the constant pool.
+ DECL_ACCESSORS(constant_pool, FixedArray)
+
DECLARE_CAST(BytecodeArray)
// Dispatched behavior.
inline int BytecodeArraySize();
+ inline void BytecodeArrayIterateBody(ObjectVisitor* v);
DECLARE_PRINTER(BytecodeArray)
DECLARE_VERIFIER(BytecodeArray)
// Layout description.
static const int kFrameSizeOffset = FixedArrayBase::kHeaderSize;
static const int kParameterSizeOffset = kFrameSizeOffset + kIntSize;
- static const int kHeaderSize = kParameterSizeOffset + kIntSize;
+ static const int kConstantPoolOffset = kParameterSizeOffset + kIntSize;
+ static const int kHeaderSize = kConstantPoolOffset + kPointerSize;
static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
static const int kFrameSize = 32;
static const int kParameterCount = 2;
+ i::FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
HandleScope scope(isolate);
+ SimulateFullSpace(heap->old_space());
+ Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
+ for (int i = 0; i < 5; i++) {
+ constant_pool->set(i, *factory->NewHeapNumber(i));
+ }
+
// Allocate and initialize BytecodeArray
Handle<BytecodeArray> array = factory->NewBytecodeArray(
- kRawBytesSize, kRawBytes, kFrameSize, kParameterCount);
+ kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
CHECK(array->IsBytecodeArray());
CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
CHECK_EQ(array->frame_size(), kFrameSize);
CHECK_EQ(array->parameter_count(), kParameterCount);
+ CHECK_EQ(array->constant_pool(), *constant_pool);
CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
CHECK_GE(array->address() + array->BytecodeArraySize(),
array->GetFirstBytecodeAddress() + array->length());
CHECK_EQ(array->get(i), kRawBytes[i]);
}
- // Full garbage collection
+ FixedArray* old_constant_pool_address = *constant_pool;
+
+ // Perform a full garbage collection and force the constant pool to be on an
+ // evacuation candidate.
+ Page* evac_page = Page::FromAddress(constant_pool->address());
+ evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
heap->CollectAllGarbage();
- // BytecodeArray should survive
+ // BytecodeArray should survive.
CHECK_EQ(array->length(), kRawBytesSize);
CHECK_EQ(array->frame_size(), kFrameSize);
-
for (int i = 0; i < kRawBytesSize; i++) {
CHECK_EQ(array->get(i), kRawBytes[i]);
CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
}
+
+ // Constant pool should have been migrated.
+ CHECK_EQ(array->constant_pool(), *constant_pool);
+ CHECK_NE(array->constant_pool(), old_constant_pool_address);
}