}
+// Root getters: one accessor per ROOT_LIST entry, reading the Object* slot
+// at k<CamelName>RootIndex in roots_[] and casting it (checked cast) to the
+// declared root type.
+#define ROOT_ACCESSOR(type, name, camel_name) \
+ type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
+ROOT_LIST(ROOT_ACCESSOR)
+#undef ROOT_ACCESSOR
+
+// Map accessors for the utility struct types: one <name>_map() per
+// STRUCT_LIST entry, reading the k<Name>MapRootIndex slot of roots_[].
+#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
+ Map* Heap::name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
+STRUCT_LIST(STRUCT_MAP_ACCESSOR)
+#undef STRUCT_MAP_ACCESSOR
+
+// Accessors for the internalized strings in the root list. The second macro
+// parameter (the literal string contents) is unused here; only the root
+// index derived from the name is needed.
+#define STRING_ACCESSOR(name, str) \
+ String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
+INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
+#undef STRING_ACCESSOR
+
+// Accessors for the private symbols in the root list, one per
+// PRIVATE_SYMBOL_LIST entry.
+#define SYMBOL_ACCESSOR(name) \
+ Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
+PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
+#undef SYMBOL_ACCESSOR
+
+// Accessors for the public symbols. varname/description are unused in the
+// expansion; the root index is derived from the name alone, same as above.
+#define SYMBOL_ACCESSOR(name, varname, description) \
+ Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
+PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
+#undef SYMBOL_ACCESSOR
+
+// Root setters. The two DCHECKs encode invariants the deserializer relies
+// on: after deserialization completes, only roots explicitly marked writable
+// (RootCanBeWrittenAfterInitialization) may change, and roots below
+// kOldSpaceRoots must never point into new space.
+#define ROOT_ACCESSOR(type, name, camel_name) \
+ void Heap::set_##name(type* value) { \
+ /* The deserializer makes use of the fact that these common roots are */ \
+ /* never in new space and never on a page that is being compacted. */ \
+ DCHECK(!deserialization_complete() || \
+ RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex)); \
+ DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
+ roots_[k##camel_name##RootIndex] = value; \
+ }
+ROOT_LIST(ROOT_ACCESSOR)
+#undef ROOT_ACCESSOR
+
+
template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
// TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
}
+// Returns the hash seed stored in the hash_seed root (a Smi), used for
+// string hashing. The DCHECK asserts the seed is zero unless hash
+// randomization (FLAG_randomize_hashes) is enabled.
+uint32_t Heap::HashSeed() {
+ uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
+ DCHECK(FLAG_randomize_hashes || seed == 0);
+ return seed;
+}
+
+
+// Hands out the next script id: increments the last_script_id root,
+// wrapping back to 1 if the increment would leave the valid Smi range or
+// go negative, then stores the new value back into the root and returns it.
+Smi* Heap::NextScriptId() {
+ int next_id = last_script_id()->value() + 1;
+ if (!Smi::IsValid(next_id) || next_id < 0) next_id = 1;
+ Smi* next_id_smi = Smi::FromInt(next_id);
+ set_last_script_id(next_id_smi);
+ return next_id_smi;
+}
+
+
+// Records the deopt PC offset for the arguments adaptor stub in its root
+// slot. The DCHECK enforces write-once semantics: the slot must still hold
+// its initial Smi zero.
+void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
+ DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
+ set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+
+// Records the deopt PC offset for the construct stub (write-once; the slot
+// must still be its initial Smi zero).
+void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
+ DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+
+// Records the deopt PC offset for the getter stub (write-once; the slot
+// must still be its initial Smi zero).
+void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
+ DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+
+// Records the deopt PC offset for the setter stub (write-once; the slot
+// must still be its initial Smi zero).
+void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
+ DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+
AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
: heap_(isolate->heap()), daf_(isolate) {
heap_->always_allocate_scope_depth_++;
// You can't use type::cast during GC because the assert fails.
// TODO(1490): Try removing the unchecked accessors, now that GC marking does
// not corrupt the map.
-#define ROOT_ACCESSOR(type, name, camel_name) \
- type* name() { return type::cast(roots_[k##camel_name##RootIndex]); } \
- type* raw_unchecked_##name() { \
- return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
+#define ROOT_ACCESSOR(type, name, camel_name) \
+ inline type* name(); \
+ type* raw_unchecked_##name() { \
+ return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
}
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
// Utility type maps
-#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
- Map* name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
+#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR
-#define STRING_ACCESSOR(name, str) \
- String* name() { return String::cast(roots_[k##name##RootIndex]); }
+#define STRING_ACCESSOR(name, str) inline String* name();
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR
-#define SYMBOL_ACCESSOR(name) \
- Symbol* name() { return Symbol::cast(roots_[k##name##RootIndex]); }
+#define SYMBOL_ACCESSOR(name) inline Symbol* name();
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
-#define SYMBOL_ACCESSOR(name, varname, description) \
- Symbol* name() { return Symbol::cast(roots_[k##name##RootIndex]); }
+#define SYMBOL_ACCESSOR(name, varname, description) inline Symbol* name();
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
static inline void ScavengePointer(HeapObject** p);
static inline void ScavengeObject(HeapObject** p, HeapObject* object);
+ // Slow part of scavenge object.
+ static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
+
enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };
// If an object has an AllocationMemento trailing it, return it, otherwise
inline bool OldGenerationAllocationLimitReached();
- inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
- scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
- }
-
void QueueMemoryChunkForFree(MemoryChunk* chunk);
void FreeQueuedChunks();
// The roots that have an index less than this are always in old space.
static const int kOldSpaceRoots = 0x20;
- uint32_t HashSeed() {
- uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
- DCHECK(FLAG_randomize_hashes || seed == 0);
- return seed;
- }
-
- Smi* NextScriptId() {
- int next_id = last_script_id()->value() + 1;
- if (!Smi::IsValid(next_id) || next_id < 0) next_id = 1;
- Smi* next_id_smi = Smi::FromInt(next_id);
- set_last_script_id(next_id_smi);
- return next_id_smi;
- }
-
- void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
- DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
- set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
- }
+ inline uint32_t HashSeed();
- void SetConstructStubDeoptPCOffset(int pc_offset) {
- DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
- set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
- }
+ inline Smi* NextScriptId();
- void SetGetterStubDeoptPCOffset(int pc_offset) {
- DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
- set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
- }
-
- void SetSetterStubDeoptPCOffset(int pc_offset) {
- DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
- set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
- }
+ inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
+ inline void SetConstructStubDeoptPCOffset(int pc_offset);
+ inline void SetGetterStubDeoptPCOffset(int pc_offset);
+ inline void SetSetterStubDeoptPCOffset(int pc_offset);
// For post mortem debugging.
void RememberUnmappedPage(Address page, bool compacted);
// Total length of the strings we failed to flatten since the last GC.
int unflattened_strings_length_;
-#define ROOT_ACCESSOR(type, name, camel_name) \
- inline void set_##name(type* value) { \
- /* The deserializer makes use of the fact that these common roots are */ \
- /* never in new space and never on a page that is being compacted. */ \
- DCHECK(!deserialization_complete() || \
- RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex)); \
- DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
- roots_[k##camel_name##RootIndex] = value; \
- }
+#define ROOT_ACCESSOR(type, name, camel_name) \
+ inline void set_##name(type* value);
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
- // Slow part of scavenge object.
- static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
-
// Total RegExp code ever generated
double total_regexp_code_generated_;