-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
#include "spaces.h"
#include "store-buffer.h"
#include "v8memory.h"
-
+#include "factory.h"
#include "incremental-marking.h"
namespace v8 {
}
+// Getter that returns a tagged Smi and setter that writes a tagged Smi.
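+// Smis are immediates, so the store needs no write barrier and the mode
+// argument is ignored; e.g. ACCESSORS_TO_SMI(Script, line_offset,
+// kLineOffsetOffset) below defines Script::line_offset() and
+// Script::set_line_offset(Smi* value, WriteBarrierMode mode).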
+#define ACCESSORS_TO_SMI(holder, name, offset) \
+ Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
+ void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
+ WRITE_FIELD(this, offset, value); \
+ }
+
+
+// Getter that returns a Smi as an int and setter that writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset) \
  int holder::name() { \
    Object* value = READ_FIELD(this, offset); \
    return Smi::cast(value)->value(); \
  } \
  void holder::set_##name(int value) { \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
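+// The fast elements kinds form a chain from most to least specific:
+// FAST_SMI_ONLY_ELEMENTS -> FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS. For example,
+// storing a heap number into a smi-only backing store needs at least the
+// double kind, and storing any other heap object needs FAST_ELEMENTS.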
+bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind) {
+ if (to_kind == FAST_ELEMENTS) {
+ return from_kind == FAST_SMI_ONLY_ELEMENTS ||
+ from_kind == FAST_DOUBLE_ELEMENTS;
+ } else {
+ return to_kind == FAST_DOUBLE_ELEMENTS &&
+ from_kind == FAST_SMI_ONLY_ELEMENTS;
+ }
+}
+
+
bool Object::IsFixedArrayBase() {
return IsFixedArray() || IsFixedDoubleArray();
}
}
+bool Object::IsTypeFeedbackCells() {
+ if (!IsFixedArray()) return false;
+ // There's actually no way to tell the difference between a fixed array and
+ // a type feedback cells array. Since this is only used in asserts, we at
+ // least check that the length is plausible.
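+ // (The cells are stored as (cell, ast id) pairs, hence the even length.)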
+ if (FixedArray::cast(this)->length() % 2 != 0) return false;
+ return true;
+}
+
+
bool Object::IsContext() {
if (Object::IsHeapObject()) {
Map* map = HeapObject::cast(this)->map();
map == heap->catch_context_map() ||
map == heap->with_context_map() ||
map == heap->global_context_map() ||
- map == heap->block_context_map());
+ map == heap->block_context_map() ||
+ map == heap->module_context_map());
}
return false;
}
}
-bool Object::IsSerializedScopeInfo() {
+bool Object::IsModuleContext() {
return Object::IsHeapObject() &&
HeapObject::cast(this)->map() ==
- HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
+ HeapObject::cast(this)->GetHeap()->module_context_map();
+}
+
+
+bool Object::IsScopeInfo() {
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map() ==
+ HeapObject::cast(this)->GetHeap()->scope_info_map();
}
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
+TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
+TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
}
+bool Object::IsNaN() {
+ return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
+}
+
+
MaybeObject* Object::ToSmi() {
if (IsSmi()) return this;
if (IsHeapNumber()) {
#define WRITE_UINT32_FIELD(p, offset, value) \
(*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
+#define READ_INT64_FIELD(p, offset) \
+ (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
+
+#define WRITE_INT64_FIELD(p, offset, value) \
+ (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
+
#define READ_SHORT_FIELD(p, offset) \
(*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
// Unsafe accessor omitting write barrier.
-void HeapObject::set_map_unsafe(Map* value) {
+void HeapObject::set_map_no_write_barrier(Map* value) {
set_map_word(MapWord::FromMap(value));
}
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
+Object** FixedArray::GetFirstElementAddress() {
+ return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
+}
+
+
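+// True when every element is a Smi or the hole, i.e. the array is still a
+// valid FAST_SMI_ONLY_ELEMENTS backing store (see JSArray::SetContent below).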
+bool FixedArray::ContainsOnlySmisOrHoles() {
+ Object* the_hole = GetHeap()->the_hole_value();
+ Object** current = GetFirstElementAddress();
+ for (int i = 0; i < length(); ++i) {
+ Object* candidate = *current++;
+ if (!candidate->IsSmi() && candidate != the_hole) return false;
+ }
+ return true;
+}
+
+
FixedArrayBase* JSObject::elements() {
Object* array = READ_FIELD(this, kElementsOffset);
return static_cast<FixedArrayBase*>(array);
map != heap->free_space_map()) {
for (int i = 0; i < fixed_array->length(); i++) {
Object* current = fixed_array->get(i);
- ASSERT(current->IsSmi() || current == heap->the_hole_value());
+ ASSERT(current->IsSmi() || current->IsTheHole());
}
}
}
}
-MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
+MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
#if DEBUG
ValidateSmiOnlyElements();
#endif
- if ((map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
- Object* obj;
- MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- set_map(Map::cast(obj));
+ if ((map()->elements_kind() != FAST_ELEMENTS)) {
+ return TransitionElementsKind(FAST_ELEMENTS);
}
return this;
}
MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
- uint32_t count) {
- if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
- for (uint32_t i = 0; i < count; ++i) {
- Object* current = *objects++;
- if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
- return EnsureCanContainNonSmiElements();
+ uint32_t count,
+ EnsureElementsMode mode) {
+ ElementsKind current_kind = map()->elements_kind();
+ ElementsKind target_kind = current_kind;
+ ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
+ if (current_kind == FAST_ELEMENTS) return this;
+
+ Heap* heap = GetHeap();
+ Object* the_hole = heap->the_hole_value();
+ Object* heap_number_map = heap->heap_number_map();
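+ // Smis and holes never force a transition; a heap number may bump the kind
+ // to FAST_DOUBLE_ELEMENTS when conversion is allowed; any other heap object
+ // forces FAST_ELEMENTS.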
+ for (uint32_t i = 0; i < count; ++i) {
+ Object* current = *objects++;
+ if (!current->IsSmi() && current != the_hole) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
+ HeapObject::cast(current)->map() == heap_number_map) {
+ target_kind = FAST_DOUBLE_ELEMENTS;
+ } else {
+ target_kind = FAST_ELEMENTS;
+ break;
}
}
}
+
+ if (target_kind != current_kind) {
+ return TransitionElementsKind(target_kind);
+ }
return this;
}
-MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
- Object** objects = reinterpret_cast<Object**>(
- FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
- return EnsureCanContainElements(objects, elements->length());
+MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
+ EnsureElementsMode mode) {
+ if (elements->map() != GetHeap()->fixed_double_array_map()) {
+ ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
+ elements->map() == GetHeap()->fixed_cow_array_map());
+ if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
+ mode = DONT_ALLOW_DOUBLE_ELEMENTS;
+ }
+ Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
+ return EnsureCanContainElements(objects, elements->length(), mode);
+ }
+
+ ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
+ if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
+ return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
+ }
+
+ return this;
}
-void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
- ASSERT((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements()) ==
- (value->map() == GetHeap()->fixed_array_map() ||
- value->map() == GetHeap()->fixed_cow_array_map()));
- ASSERT(map()->has_fast_double_elements() ==
- value->IsFixedDoubleArray());
+MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
+ ElementsKind to_kind) {
+ Map* current_map = map();
+ ElementsKind from_kind = current_map->elements_kind();
+
+ if (from_kind == to_kind) return current_map;
+
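+ // Fast path: the global context caches the JSArray maps for the smi-only,
+ // double and object elements kinds (see set_initial_map_and_cache_transitions
+ // below), so transitions from the initial (smi-only) JSArray map avoid
+ // GetElementsTransitionMapSlow().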
+ Context* global_context = isolate->context()->global_context();
+ if (current_map == global_context->smi_js_array_map()) {
+ if (to_kind == FAST_ELEMENTS) {
+ return global_context->object_js_array_map();
+ } else {
+ if (to_kind == FAST_DOUBLE_ELEMENTS) {
+ return global_context->double_js_array_map();
+ } else {
+ ASSERT(to_kind == DICTIONARY_ELEMENTS);
+ }
+ }
+ }
+ return GetElementsTransitionMapSlow(to_kind);
+}
+
+
+void JSObject::set_map_and_elements(Map* new_map,
+ FixedArrayBase* value,
+ WriteBarrierMode mode) {
ASSERT(value->HasValidElements());
#ifdef DEBUG
ValidateSmiOnlyElements();
#endif
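+ // Passing NULL as new_map keeps the current map; set_elements() below relies
+ // on this.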
+ if (new_map != NULL) {
+ if (mode == UPDATE_WRITE_BARRIER) {
+ set_map(new_map);
+ } else {
+ ASSERT(mode == SKIP_WRITE_BARRIER);
+ set_map_no_write_barrier(new_map);
+ }
+ }
+ ASSERT((map()->has_fast_elements() ||
+ map()->has_fast_smi_only_elements() ||
+ (value == GetHeap()->empty_fixed_array())) ==
+ (value->map() == GetHeap()->fixed_array_map() ||
+ value->map() == GetHeap()->fixed_cow_array_map()));
+ ASSERT((value == GetHeap()->empty_fixed_array()) ||
+ (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
WRITE_FIELD(this, kElementsOffset, value);
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
+void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
+ set_map_and_elements(NULL, value, mode);
+}
+
+
void JSObject::initialize_properties() {
ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
void JSObject::initialize_elements() {
- ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
+ ASSERT(map()->has_fast_elements() ||
+ map()->has_fast_smi_only_elements() ||
+ map()->has_fast_double_elements());
ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}
ElementsKind elements_kind = FLAG_smi_only_arrays
? FAST_SMI_ONLY_ELEMENTS
: FAST_ELEMENTS;
- MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
+ MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
+ elements_kind);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
set_map(Map::cast(obj));
initialize_elements();
// The write barrier is not used for global property cells.
ASSERT(!val->IsJSGlobalPropertyCell());
WRITE_FIELD(this, kValueOffset, val);
- GetHeap()->incremental_marking()->RecordWrite(
- this, HeapObject::RawField(this, kValueOffset), val);
}
// field operations considerably on average.
if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
switch (type) {
+ case JS_MODULE_TYPE:
+ return JSModule::kSize;
case JS_GLOBAL_PROXY_TYPE:
return JSGlobalProxy::kSize;
case JS_GLOBAL_OBJECT_TYPE:
return JSFunction::kSize;
case JS_VALUE_TYPE:
return JSValue::kSize;
+ case JS_DATE_TYPE:
+ return JSDate::kSize;
case JS_ARRAY_TYPE:
- return JSValue::kSize;
+ return JSArray::kSize;
case JS_WEAK_MAP_TYPE:
return JSWeakMap::kSize;
case JS_REGEXP_TYPE:
- return JSValue::kSize;
+ return JSRegExp::kSize;
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
return JSObject::kHeaderSize;
case JS_MESSAGE_OBJECT_TYPE:
// Make sure to adjust for the number of in-object properties. These
// properties do contribute to the size, but are not internal fields.
return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
- map()->inobject_properties() - (map()->has_external_resource()?1:0);
+ map()->inobject_properties();
}
}
-void JSObject::SetExternalResourceObject(Object *value) {
- ASSERT(map()->has_external_resource());
- int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
+void JSObject::SetInternalField(int index, Smi* value) {
+ ASSERT(index < GetInternalFieldCount() && index >= 0);
+ // Internal fields are stored immediately after the header, whereas in-object
+ // properties are at the end of the object. Therefore there is no need
+ // to adjust the index here.
+ int offset = GetHeaderSize() + (kPointerSize * index);
WRITE_FIELD(this, offset, value);
- WRITE_BARRIER(GetHeap(), this, offset, value);
-}
-
-
-Object *JSObject::GetExternalResourceObject() {
- if (map()->has_external_resource()) {
- return READ_FIELD(this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
- } else {
- return GetHeap()->undefined_value();
- }
}
return result;
}
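+// Reads the raw 64-bit bit pattern of the double at the given index; this
+// allows exact comparison and copying of special values such as the hole NaN.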
+int64_t FixedDoubleArray::get_representation(int index) {
+ ASSERT(map() != HEAP->fixed_cow_array_map() &&
+ map() != HEAP->fixed_array_map());
+ ASSERT(index >= 0 && index < this->length());
+ return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
+}
MaybeObject* FixedDoubleArray::get(int index) {
if (is_the_hole(index)) {
}
-void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
- int old_length = from->length();
- ASSERT(old_length < length());
- if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
- OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
- FIELD_ADDR(from, kHeaderSize),
- old_length * kDoubleSize);
- } else {
- for (int i = 0; i < old_length; ++i) {
- if (from->is_the_hole(i)) {
- set_the_hole(i);
- } else {
- set(i, from->get_scalar(i));
- }
- }
- }
- int offset = kHeaderSize + old_length * kDoubleSize;
- for (int current = from->length(); current < length(); ++current) {
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
- offset += kDoubleSize;
- }
-}
-
-
-void FixedDoubleArray::Initialize(FixedArray* from) {
- int old_length = from->length();
- ASSERT(old_length <= length());
- for (int i = 0; i < old_length; i++) {
- Object* hole_or_object = from->get(i);
- if (hole_or_object->IsTheHole()) {
- set_the_hole(i);
- } else {
- set(i, hole_or_object->Number());
- }
- }
- int offset = kHeaderSize + old_length * kDoubleSize;
- for (int current = from->length(); current < length(); ++current) {
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
- offset += kDoubleSize;
- }
-}
-
-
-void FixedDoubleArray::Initialize(NumberDictionary* from) {
- int offset = kHeaderSize;
- for (int current = 0; current < length(); ++current) {
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
- offset += kDoubleSize;
- }
- for (int i = 0; i < from->Capacity(); i++) {
- Object* key = from->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t entry = static_cast<uint32_t>(key->Number());
- set(entry, from->ValueAt(i)->Number());
- }
- }
-}
-
-
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
Heap* heap = GetHeap();
if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
}
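+// Unlike NoWriteBarrierSet below, only the incremental-marking barrier is
+// skipped here: old-to-new-space stores are still recorded so the store
+// buffer stays consistent.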
+void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
+ int index,
+ Object* value) {
+ ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
+ ASSERT(index >= 0 && index < array->length());
+ int offset = kHeaderSize + index * kPointerSize;
+ WRITE_FIELD(array, offset, value);
+ Heap* heap = array->GetHeap();
+ if (heap->InNewSpace(value)) {
+ heap->RecordWrite(array->address(), offset);
+ }
+}
+
+
void FixedArray::NoWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
}
-void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
- int first,
- int second) {
+void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
+ int first,
+ int second) {
Object* tmp = array->get(first);
- NoWriteBarrierSet(array, first, array->get(second));
- NoWriteBarrierSet(array, second, tmp);
+ NoIncrementalWriteBarrierSet(array, first, array->get(second));
+ NoIncrementalWriteBarrierSet(array, second, tmp);
}
}
-Smi* DescriptorArray::GetDetails(int descriptor_number) {
+PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
- return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
+ Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
+ return PropertyDetails(Smi::cast(details));
}
PropertyType DescriptorArray::GetType(int descriptor_number) {
- ASSERT(descriptor_number < number_of_descriptors());
- return PropertyDetails(GetDetails(descriptor_number)).type();
+ return GetDetails(descriptor_number).type();
}
bool DescriptorArray::IsProperty(int descriptor_number) {
- return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
+ Entry entry(this, descriptor_number);
+ return IsPropertyDescriptor(&entry);
}
-bool DescriptorArray::IsTransition(int descriptor_number) {
- PropertyType t = GetType(descriptor_number);
- return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
- t == ELEMENTS_TRANSITION;
+bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
+ switch (GetType(descriptor_number)) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ return true;
+ case CALLBACKS: {
+ Object* value = GetValue(descriptor_number);
+ if (!value->IsAccessorPair()) return false;
+ AccessorPair* accessors = AccessorPair::cast(value);
+ return accessors->getter()->IsMap() && accessors->setter()->IsMap();
+ }
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ return false;
+ }
+ UNREACHABLE(); // Keep the compiler happy.
+ return false;
}
}
-bool DescriptorArray::IsDontEnum(int descriptor_number) {
- return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
-}
-
-
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
desc->Init(GetKey(descriptor_number),
GetValue(descriptor_number),
- PropertyDetails(GetDetails(descriptor_number)));
+ GetDetails(descriptor_number));
}
// Range check.
ASSERT(descriptor_number < number_of_descriptors());
- // Make sure none of the elements in desc are in new space.
- ASSERT(!HEAP->InNewSpace(desc->GetKey()));
- ASSERT(!HEAP->InNewSpace(desc->GetValue()));
-
- NoWriteBarrierSet(this,
- ToKeyIndex(descriptor_number),
- desc->GetKey());
+ NoIncrementalWriteBarrierSet(this,
+ ToKeyIndex(descriptor_number),
+ desc->GetKey());
FixedArray* content_array = GetContentArray();
- NoWriteBarrierSet(content_array,
- ToValueIndex(descriptor_number),
- desc->GetValue());
- NoWriteBarrierSet(content_array,
- ToDetailsIndex(descriptor_number),
- desc->GetDetails().AsSmi());
+ NoIncrementalWriteBarrierSet(content_array,
+ ToValueIndex(descriptor_number),
+ desc->GetValue());
+ NoIncrementalWriteBarrierSet(content_array,
+ ToDetailsIndex(descriptor_number),
+ desc->GetDetails().AsSmi());
}
-void DescriptorArray::CopyFrom(int index,
- DescriptorArray* src,
- int src_index,
- const WhitenessWitness& witness) {
- Descriptor desc;
- src->Get(src_index, &desc);
- Set(index, &desc, witness);
-}
-
-
-void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
- NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
+void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
+ int first, int second) {
+ NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
FixedArray* content_array = GetContentArray();
- NoWriteBarrierSwap(content_array,
- ToValueIndex(first),
- ToValueIndex(second));
- NoWriteBarrierSwap(content_array,
- ToDetailsIndex(first),
- ToDetailsIndex(second));
+ NoIncrementalWriteBarrierSwap(content_array,
+ ToValueIndex(first),
+ ToValueIndex(second));
+ NoIncrementalWriteBarrierSwap(content_array,
+ ToDetailsIndex(first),
+ ToDetailsIndex(second));
}
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
uint32_t capacity = Capacity();
- uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
+ uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
while (true) {
Object* element = KeyAt(entry);
- if (element == isolate->heap()->undefined_value()) break; // Empty entry.
- if (element != isolate->heap()->null_value() &&
+ // Empty entry.
+ if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
+ if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
Shape::IsMatch(key, element)) return entry;
entry = NextProbe(entry, count++, capacity);
}
}
-bool NumberDictionary::requires_slow_elements() {
+bool SeededNumberDictionary::requires_slow_elements() {
Object* max_index_object = get(kMaxNumberKeyIndex);
if (!max_index_object->IsSmi()) return false;
return 0 !=
(Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
-uint32_t NumberDictionary::max_number_key() {
+uint32_t SeededNumberDictionary::max_number_key() {
ASSERT(!requires_slow_elements());
Object* max_index_object = get(kMaxNumberKeyIndex);
if (!max_index_object->IsSmi()) return 0;
return value >> kRequiresSlowElementsTagSize;
}
-void NumberDictionary::set_requires_slow_elements() {
+void SeededNumberDictionary::set_requires_slow_elements() {
set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
+CAST_ACCESSOR(TypeFeedbackCells)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
+CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
}
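+// Short external strings have no field for the cached resource data pointer,
+// so the update_data_cache() functions below are no-ops for them.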
+bool ExternalString::is_short() {
+ InstanceType type = map()->instance_type();
+ return (type & kShortExternalStringMask) == kShortExternalStringTag;
+}
+
+
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
+void ExternalAsciiString::update_data_cache() {
+ if (is_short()) return;
+ const char** data_field =
+ reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
+ *data_field = resource()->data();
+}
+
+
void ExternalAsciiString::set_resource(
const ExternalAsciiString::Resource* resource) {
*reinterpret_cast<const Resource**>(
FIELD_ADDR(this, kResourceOffset)) = resource;
+ if (resource != NULL) update_data_cache();
+}
+
+
+const char* ExternalAsciiString::GetChars() {
+ return resource()->data();
+}
+
+
+uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
+ ASSERT(index >= 0 && index < length());
+ return GetChars()[index];
}
}
+void ExternalTwoByteString::update_data_cache() {
+ if (is_short()) return;
+ const uint16_t** data_field =
+ reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
+ *data_field = resource()->data();
+}
+
+
void ExternalTwoByteString::set_resource(
const ExternalTwoByteString::Resource* resource) {
*reinterpret_cast<const Resource**>(
FIELD_ADDR(this, kResourceOffset)) = resource;
+ if (resource != NULL) update_data_cache();
+}
+
+
+const uint16_t* ExternalTwoByteString::GetChars() {
+ return resource()->data();
+}
+
+
+uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
+ ASSERT(index >= 0 && index < length());
+ return GetChars()[index];
+}
+
+
+const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
+ unsigned start) {
+ return GetChars() + start;
}
bool Map::is_shared() {
return ((1 << kIsShared) & bit_field3()) != 0;
}
-
-void Map::set_has_external_resource(bool value) {
- if (value) {
- set_bit_field(bit_field() | (1 << kHasExternalResource));
- } else {
- set_bit_field(bit_field() & ~(1 << kHasExternalResource));
- }
-}
-
-bool Map::has_external_resource()
-{
- return ((1 << kHasExternalResource) & bit_field()) != 0;
-}
-
-
-void Map::set_named_interceptor_is_fallback(bool value)
-{
- if (value) {
- set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
- }
-}
-
-bool Map::named_interceptor_is_fallback()
-{
- return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
-}
JSFunction* Map::unchecked_constructor() {
bool Code::optimizable() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}
void Code::set_optimizable(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
bool Code::has_deoptimization_support() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}
void Code::set_has_deoptimization_support(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
bool Code::has_debug_break_slots() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}
void Code::set_has_debug_break_slots(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
bool Code::is_compiled_optimizable() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}
void Code::set_compiled_optimizable(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
int Code::allow_osr_at_loop_nesting_level() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}
void Code::set_allow_osr_at_loop_nesting_level(int level) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}
+int Code::profiler_ticks() {
+ ASSERT_EQ(FUNCTION, kind());
+ return READ_BYTE_FIELD(this, kProfilerTicksOffset);
+}
+
+
+void Code::set_profiler_ticks(int ticks) {
+ ASSERT_EQ(FUNCTION, kind());
+ ASSERT(ticks < 256);
+ WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
+}
+
+
unsigned Code::stack_slots() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
return READ_UINT32_FIELD(this, kStackSlotsOffset);
unsigned Code::stack_check_table_offset() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}
void Code::set_stack_check_table_offset(unsigned offset) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
}
+byte Code::compare_operation() {
+ ASSERT(is_compare_ic_stub());
+ return READ_BYTE_FIELD(this, kCompareOperationOffset);
+}
+
+
+void Code::set_compare_operation(byte value) {
+ ASSERT(is_compare_ic_stub());
+ WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
+}
+
+
byte Code::to_boolean_state() {
ASSERT(is_to_boolean_ic_stub());
return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
DescriptorArray* Map::instance_descriptors() {
Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
if (object->IsSmi()) {
- return HEAP->empty_descriptor_array();
+ return GetHeap()->empty_descriptor_array();
} else {
return DescriptorArray::cast(object);
}
Object* object = READ_FIELD(this,
kInstanceDescriptorsOrBitField3Offset);
if (!object->IsSmi()) {
+#ifdef DEBUG
+ ZapInstanceDescriptors();
+#endif
WRITE_FIELD(
this,
kInstanceDescriptorsOrBitField3Offset,
}
}
ASSERT(!is_shared());
+#ifdef DEBUG
+ if (value != instance_descriptors()) {
+ ZapInstanceDescriptors();
+ }
+#endif
WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
CONDITIONAL_WRITE_BARRIER(
heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
}
-FixedArray* Map::unchecked_prototype_transitions() {
- return reinterpret_cast<FixedArray*>(
- READ_FIELD(this, kPrototypeTransitionsOffset));
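+// The kPrototypeTransitionsOrBackPointerOffset slot is overloaded: it holds
+// either the back pointer itself, or the prototype transitions FixedArray, in
+// which case the back pointer is stored in that array at
+// kProtoTransitionBackPointerOffset.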
+Object* Map::GetBackPointer() {
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ if (object->IsFixedArray()) {
+ return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
+ } else {
+ return object;
+ }
+}
+
+
+void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
+ ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
+ (value->IsMap() && GetBackPointer()->IsUndefined()));
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ if (object->IsFixedArray()) {
+ FixedArray::cast(object)->set(
+ kProtoTransitionBackPointerOffset, value, mode);
+ } else {
+ WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+ }
+}
+
+
+FixedArray* Map::prototype_transitions() {
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ if (object->IsFixedArray()) {
+ return FixedArray::cast(object);
+ } else {
+ return GetHeap()->empty_fixed_array();
+ }
+}
+
+
+void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ ASSERT(value != heap->empty_fixed_array());
+ value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
+#ifdef DEBUG
+ if (value != prototype_transitions()) {
+ ZapPrototypeTransitions();
+ }
+#endif
+ WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+}
+
+
+void Map::init_prototype_transitions(Object* undefined) {
+ ASSERT(undefined->IsUndefined());
+ WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
+}
+
+
+HeapObject* Map::unchecked_prototype_transitions() {
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ return reinterpret_cast<HeapObject*>(object);
}
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
-ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
-ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
+ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
+
+ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
+ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
-ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
kAccessCheckInfoOffset)
-ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
+ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
kInternalFieldCountOffset)
-ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
- kHasExternalResourceOffset)
ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
-ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
-ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
+ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
+ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
-ACCESSORS(Script, type, Smi, kTypeOffset)
-ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
+ACCESSORS_TO_SMI(Script, type, kTypeOffset)
+ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
+ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
-ACCESSORS(Script, eval_from_instructions_offset, Smi,
- kEvalFrominstructionsOffsetOffset)
+ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
+ kEvalFrominstructionsOffsetOffset)
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
-ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
-ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
-ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
+ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
+ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
+ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
+
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
kHiddenPrototypeBit)
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
+
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#endif
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
- return initial_map() != HEAP->undefined_value();
+ return initial_map() != GetHeap()->undefined_value();
}
}
-StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
- return BooleanBit::get(compiler_hints(), kStrictModeFunction)
- ? kStrictMode : kNonStrictMode;
+int SharedFunctionInfo::profiler_ticks() {
+ if (code()->kind() != Code::FUNCTION) return 0;
+ return code()->profiler_ticks();
}
-void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
- ASSERT(strict_mode_flag == kStrictMode ||
- strict_mode_flag == kNonStrictMode);
- bool value = strict_mode_flag == kStrictMode;
- set_compiler_hints(
- BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
+LanguageMode SharedFunctionInfo::language_mode() {
+ int hints = compiler_hints();
+ if (BooleanBit::get(hints, kExtendedModeFunction)) {
+ ASSERT(BooleanBit::get(hints, kStrictModeFunction));
+ return EXTENDED_MODE;
+ }
+ return BooleanBit::get(hints, kStrictModeFunction)
+ ? STRICT_MODE : CLASSIC_MODE;
}
-BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
- kStrictModeFunction)
+void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
+ // We only allow language mode transitions that set the same language mode
+ // again or go up in the chain:
+ //   CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
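+ // Encoding in compiler_hints: CLASSIC_MODE sets neither bit, STRICT_MODE sets
+ // only kStrictModeFunction, and EXTENDED_MODE sets both bits.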
+ ASSERT(this->language_mode() == CLASSIC_MODE ||
+ this->language_mode() == language_mode ||
+ language_mode == EXTENDED_MODE);
+ int hints = compiler_hints();
+ hints = BooleanBit::set(
+ hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
+ hints = BooleanBit::set(
+ hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
+ set_compiler_hints(hints);
+}
+
+
+bool SharedFunctionInfo::is_classic_mode() {
+ return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
+}
+
+BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
+ kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
name_should_print_as_anonymous,
kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
+ kDontOptimize)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
}
-SerializedScopeInfo* SharedFunctionInfo::scope_info() {
- return reinterpret_cast<SerializedScopeInfo*>(
- READ_FIELD(this, kScopeInfoOffset));
+ScopeInfo* SharedFunctionInfo::scope_info() {
+ return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}
-void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
+void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
WriteBarrierMode mode) {
WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
CONDITIONAL_WRITE_BARRIER(GetHeap(),
}
-Smi* SharedFunctionInfo::deopt_counter() {
- return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
-}
-
-
-void SharedFunctionInfo::set_deopt_counter(Smi* value) {
- WRITE_FIELD(this, kDeoptCounterOffset, value);
-}
-
-
bool SharedFunctionInfo::is_compiled() {
return code() !=
Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
void SharedFunctionInfo::set_code_age(int code_age) {
- set_compiler_hints(compiler_hints() |
- ((code_age & kCodeAgeMask) << kCodeAgeShift));
+ int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
+ set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
}
}
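+// When this function is the global Array constructor, pre-create the
+// elements-kind transition maps (smi-only -> double -> object) and cache them
+// on the global context so that GetElementsTransitionMap above can use them.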
+MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
+ Map* initial_map) {
+ Context* global_context = context()->global_context();
+ Object* array_function =
+ global_context->get(Context::ARRAY_FUNCTION_INDEX);
+ if (array_function->IsJSFunction() &&
+ this == JSFunction::cast(array_function)) {
+ ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+
+ MaybeObject* maybe_map = initial_map->CopyDropTransitions();
+ Map* new_double_map = NULL;
+ if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
+ new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
+ maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
+ new_double_map);
+ if (maybe_map->IsFailure()) return maybe_map;
+
+ maybe_map = new_double_map->CopyDropTransitions();
+ Map* new_object_map = NULL;
+ if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
+ new_object_map->set_elements_kind(FAST_ELEMENTS);
+ maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
+ new_object_map);
+ if (maybe_map->IsFailure()) return maybe_map;
+
+ global_context->set_smi_js_array_map(initial_map);
+ global_context->set_double_js_array_map(new_double_map);
+ global_context->set_object_js_array_map(new_object_map);
+ }
+ set_initial_map(initial_map);
+ return this;
+}
+
+
bool JSFunction::has_initial_map() {
return prototype_or_initial_map()->IsMap();
}
ACCESSORS(JSWeakMap, next, Object, kNextOffset)
-ObjectHashTable* JSWeakMap::unchecked_table() {
- return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
-}
-
-
Address Foreign::foreign_address() {
return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}
}
+ACCESSORS(JSModule, context, Object, kContextOffset)
+
+
+JSModule* JSModule::cast(Object* obj) {
+ ASSERT(obj->IsJSModule());
+ ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
+ return reinterpret_cast<JSModule*>(obj);
+}
+
+
ACCESSORS(JSValue, value, Object, kValueOffset)
}
+ACCESSORS(JSDate, value, Object, kValueOffset)
+ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
+ACCESSORS(JSDate, year, Object, kYearOffset)
+ACCESSORS(JSDate, month, Object, kMonthOffset)
+ACCESSORS(JSDate, day, Object, kDayOffset)
+ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
+ACCESSORS(JSDate, hour, Object, kHourOffset)
+ACCESSORS(JSDate, min, Object, kMinOffset)
+ACCESSORS(JSDate, sec, Object, kSecOffset)
+
+
+JSDate* JSDate::cast(Object* obj) {
+ ASSERT(obj->IsJSDate());
+ ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
+ return reinterpret_cast<JSDate*>(obj);
+}
+
+
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
+ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
-ACCESSORS(Code, next_code_flushing_candidate,
- Object, kNextCodeFlushingCandidateOffset)
-
+ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
+ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
+INT_ACCESSORS(Code, ic_age, kICAgeOffset)
byte* Code::instruction_start() {
return FIELD_ADDR(this, kHeaderSize);
(map == GetHeap()->fixed_array_map() ||
map == GetHeap()->fixed_cow_array_map())) ||
(kind == FAST_DOUBLE_ELEMENTS &&
- fixed_array->IsFixedDoubleArray()) ||
+ (fixed_array->IsFixedDoubleArray() ||
+ fixed_array == GetHeap()->empty_fixed_array())) ||
(kind == DICTIONARY_ELEMENTS &&
fixed_array->IsFixedArray() &&
fixed_array->IsDictionary()) ||
}
-bool JSObject::AllowsSetElementsLength() {
- bool result = elements()->IsFixedArray() ||
- elements()->IsFixedDoubleArray();
- ASSERT(result == !HasExternalArrayElements());
- return result;
-}
-
-
MaybeObject* JSObject::EnsureWritableFastElements() {
ASSERT(HasFastTypeElements());
FixedArray* elems = FixedArray::cast(elements());
}
-NumberDictionary* JSObject::element_dictionary() {
+SeededNumberDictionary* JSObject::element_dictionary() {
ASSERT(HasDictionaryElements());
- return NumberDictionary::cast(elements());
+ return SeededNumberDictionary::cast(elements());
}
}
-StringHasher::StringHasher(int length)
+StringHasher::StringHasher(int length, uint32_t seed)
: length_(length),
- raw_running_hash_(0),
+ raw_running_hash_(seed),
array_index_(0),
is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
is_first_char_(true),
- is_valid_(true) { }
+ is_valid_(true) {
+ ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
+}
bool StringHasher::has_trivial_hash() {
}
-void StringHasher::AddCharacter(uc32 c) {
+void StringHasher::AddCharacter(uint32_t c) {
+ if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ AddSurrogatePair(c); // Not inlined.
+ return;
+ }
// Use the Jenkins one-at-a-time hash function to update the hash
// for the given character.
raw_running_hash_ += c;
}
-void StringHasher::AddCharacterNoIndex(uc32 c) {
+void StringHasher::AddCharacterNoIndex(uint32_t c) {
ASSERT(!is_array_index());
+ if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ AddSurrogatePairNoIndex(c); // Not inlined.
+ return;
+ }
raw_running_hash_ += c;
raw_running_hash_ += (raw_running_hash_ << 10);
raw_running_hash_ ^= (raw_running_hash_ >> 6);
result += (result << 3);
result ^= (result >> 11);
result += (result << 15);
- if (result == 0) {
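+ // Only the low String::kHashBitMask bits of the hash are kept, so it is the
+ // masked value that must be non-zero.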
+ if ((result & String::kHashBitMask) == 0) {
result = 27;
}
return result;
template <typename schar>
-uint32_t HashSequentialString(const schar* chars, int length) {
- StringHasher hasher(length);
+uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
+ StringHasher hasher(length, seed);
if (!hasher.has_trivial_hash()) {
int i;
for (i = 0; hasher.is_array_index() && (i < length); i++) {
}
-uint32_t NumberDictionaryShape::Hash(uint32_t key) {
- return ComputeIntegerHash(key);
+uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
+ return ComputeIntegerHash(key, 0);
}
-uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
+uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
+ Object* other) {
ASSERT(other->IsNumber());
- return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
+ return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}
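+// The seeded variants mix a hash seed (the heap's, in practice) into the
+// integer hash, matching the seeding of string hashes above
+// (cf. FLAG_randomize_hashes).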
+uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
+ return ComputeIntegerHash(key, seed);
+}
+
+uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
+ uint32_t seed,
+ Object* other) {
+ ASSERT(other->IsNumber());
+ return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
+}
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
return Isolate::Current()->heap()->NumberFromUint32(key);
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
- ASSERT(!key->IsUndefined() && !key->IsNull());
MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
return Smi::cast(maybe_hash->ToObjectChecked())->value();
}
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
Object* other) {
- ASSERT(!other->IsUndefined() && !other->IsNull());
MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
return Smi::cast(maybe_hash->ToObjectChecked())->value();
}
}
-void ObjectHashTable::RemoveEntry(int entry) {
- RemoveEntry(entry, GetHeap());
-}
-
-
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
+ // - IncrementalMarking::Step
ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
}
-MaybeObject* JSArray::SetContent(FixedArray* storage) {
- MaybeObject* maybe_object = EnsureCanContainElements(storage);
- if (maybe_object->IsFailure()) return maybe_object;
- set_length(Smi::FromInt(storage->length()));
+bool JSArray::AllowsSetElementsLength() {
+ bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
+ ASSERT(result == !HasExternalArrayElements());
+ return result;
+}
+
+
+MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
+ MaybeObject* maybe_result = EnsureCanContainElements(
+ storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
+ if (maybe_result->IsFailure()) return maybe_result;
+ ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
+ GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
+ ((storage->map() != GetHeap()->fixed_double_array_map()) &&
+ ((GetElementsKind() == FAST_ELEMENTS) ||
+ (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
+ FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
set_elements(storage);
+ set_length(Smi::FromInt(storage->length()));
return this;
}
}
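+// A TypeFeedbackCells array stores (cell, ast id) pairs: the property cell for
+// entry i is at index i * 2 and the corresponding AST id at index i * 2 + 1.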
+void TypeFeedbackCells::SetAstId(int index, Smi* id) {
+ set(1 + index * 2, id);
+}
+
+
+Smi* TypeFeedbackCells::AstId(int index) {
+ return Smi::cast(get(1 + index * 2));
+}
+
+
+void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
+ set(index * 2, cell);
+}
+
+
+JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
+ return JSGlobalPropertyCell::cast(get(index * 2));
+}
+
+
+Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
+ return isolate->factory()->the_hole_value();
+}
+
+
+Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
+ return isolate->factory()->undefined_value();
+}
+
+
+Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
+ return heap->raw_unchecked_the_hole_value();
+}
+
+
+SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
+SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
+ kIcWithTypeinfoCountOffset)
+ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
+ kTypeFeedbackCellsOffset)
+
+
+SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
+
+
Relocatable::Relocatable(Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
isolate_ = isolate;
#undef SLOT_ADDR
-
+#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
-#undef SMI_ACCESSORS
#undef ACCESSORS
+#undef ACCESSORS_TO_SMI
+#undef SMI_ACCESSORS
+#undef BOOL_GETTER
+#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
-#undef READ_MEMADDR_FIELD
-#undef WRITE_MEMADDR_FIELD
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
+#undef READ_INTPTR_FIELD
+#undef WRITE_INTPTR_FIELD
+#undef READ_UINT32_FIELD
+#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD