if (!allocation.To(&result)) return allocation;
// Map::cast cannot be used due to uninitialized map field.
- reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
+ reinterpret_cast<Map*>(result)->set_map(
+ reinterpret_cast<Map*>(root(kMetaMapRootIndex)));
reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
// Initialize to only containing tagged fields.
if (size == 0) return;
HeapObject* filler = HeapObject::FromAddress(addr);
if (size == kPointerSize) {
- filler->set_map_no_write_barrier(raw_unchecked_one_pointer_filler_map());
+ filler->set_map_no_write_barrier(
+ reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex)));
} else if (size == 2 * kPointerSize) {
- filler->set_map_no_write_barrier(raw_unchecked_two_pointer_filler_map());
+ filler->set_map_no_write_barrier(
+ reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex)));
} else {
- filler->set_map_no_write_barrier(raw_unchecked_free_space_map());
+ filler->set_map_no_write_barrier(
+ reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)));
FreeSpace::cast(filler)->nobarrier_set_size(size);
}
// At this point, we may be deserializing the heap from a snapshot, and
// Diff hunk: swaps the removed raw_unchecked_native_context_map() accessor
// for a direct root-table lookup via Heap::root(kNativeContextMapRootIndex).
// Per the comment deleted elsewhere in this patch, type::cast() asserts can
// fire during GC, so this check deliberately compares raw map pointers
// instead of using Context::cast(); the new root() call preserves that
// cast-free behavior while eliminating the duplicate unchecked accessor.
static bool SafeIsNativeContext(HeapObject* obj) {
- return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
+ return obj->map() == obj->GetHeap()->root(Heap::kNativeContextMapRootIndex);
}
// Root set access. ==========================================================
// ===========================================================================
- // Heap root getters. We have versions with and without type::cast() here.
- // You can't use type::cast during GC because the assert fails.
- // TODO(1490): Try removing the unchecked accessors, now that GC marking does
- // not corrupt the map.
-#define ROOT_ACCESSOR(type, name, camel_name) \
- inline type* name(); \
- type* raw_unchecked_##name() { \
- return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
- }
+ // Heap root getters.
+#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
intptr_t sum = 0;
FreeSpace* cur = top();
while (cur != NULL) {
- DCHECK(cur->map() == cur->GetHeap()->raw_unchecked_free_space_map());
+ DCHECK(cur->map() == cur->GetHeap()->root(Heap::kFreeSpaceMapRootIndex));
sum += cur->nobarrier_size();
cur = cur->next();
}
Object* element = KeyAt(entry);
// Empty entry. Uses raw unchecked accessors because it is called by the
// string table during bootstrapping.
- if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
- if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
+ if (element == isolate->heap()->root(Heap::kUndefinedValueRootIndex)) break;
+ if (element != isolate->heap()->root(Heap::kTheHoleValueRootIndex) &&
Shape::IsMatch(key, element)) return entry;
entry = NextProbe(entry, count++, capacity);
}
FreeSpace* FreeSpace::next() {
- DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
+ DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
(!GetHeap()->deserialization_complete() && map() == NULL));
DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
return reinterpret_cast<FreeSpace*>(
FreeSpace** FreeSpace::next_address() {
- DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
+ DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
(!GetHeap()->deserialization_complete() && map() == NULL));
DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
void FreeSpace::set_next(FreeSpace* next) {
- DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
+ DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
(!GetHeap()->deserialization_complete() && map() == NULL));
DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
base::NoBarrier_Store(