1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
18 #include "conversions-inl.h"
24 #include "store-buffer.h"
27 #include "incremental-marking.h"
28 #include "transitions-inl.h"
29 #include "objects-visiting.h"
34 PropertyDetails::PropertyDetails(Smi* smi) {
35 value_ = smi->value();
39 Smi* PropertyDetails::AsSmi() const {
40 // Ensure the upper 2 bits have the same value by sign extending it. This is
41 // necessary to be able to use the 31st bit of the property details.
42 int value = value_ << 1;
43 return Smi::FromInt(value >> 1);
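46 // Returns a copy of these property details with the "deleted" bit set.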
47 PropertyDetails PropertyDetails::AsDeleted() const {
48 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
49 return PropertyDetails(smi);
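51 // TYPE_CHECKER(type, instancetype) defines Object::Is##type(), which checks
52 // the heap object's map instance type.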
53 #define TYPE_CHECKER(type, instancetype) \
54 bool Object::Is##type() { \
55 return Object::IsHeapObject() && \
56 HeapObject::cast(this)->map()->instance_type() == instancetype; \
60 #define CAST_ACCESSOR(type) \
61 type* type::cast(Object* object) { \
62 SLOW_ASSERT(object->Is##type()); \
63 return reinterpret_cast<type*>(object); \
67 #define INT_ACCESSORS(holder, name, offset) \
68 int holder::name() { return READ_INT_FIELD(this, offset); } \
69 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
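70 // Getter/setter pair for a tagged object field; the setter applies a
71 // conditional write barrier.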
72 #define ACCESSORS(holder, name, type, offset) \
73 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
74 void holder::set_##name(type* value, WriteBarrierMode mode) { \
75 WRITE_FIELD(this, offset, value); \
76 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
80 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
81 #define ACCESSORS_TO_SMI(holder, name, offset) \
82 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
83 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
84 WRITE_FIELD(this, offset, value); \
88 // Getter that returns a Smi as an int and writes an int as a Smi.
89 #define SMI_ACCESSORS(holder, name, offset) \
90 int holder::name() { \
91 Object* value = READ_FIELD(this, offset); \
92 return Smi::cast(value)->value(); \
94 void holder::set_##name(int value) { \
95 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
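97 // Like SMI_ACCESSORS, but with acquire loads and release stores.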
98 #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \
99 int holder::synchronized_##name() { \
100 Object* value = ACQUIRE_READ_FIELD(this, offset); \
101 return Smi::cast(value)->value(); \
103 void holder::synchronized_set_##name(int value) { \
104 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
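106 // Like SMI_ACCESSORS, but with relaxed (non-barriered) atomic accesses.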
107 #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \
108 int holder::nobarrier_##name() { \
109 Object* value = NOBARRIER_READ_FIELD(this, offset); \
110 return Smi::cast(value)->value(); \
112 void holder::nobarrier_set_##name(int value) { \
113 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define BOOL_GETTER(holder, field, name, offset) \
117 bool holder::name() { \
118 return BooleanBit::get(field(), offset); \
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123 bool holder::name() { \
124 return BooleanBit::get(field(), offset); \
126 void holder::set_##name(bool value) { \
127 set_##field(BooleanBit::set(field(), offset, value)); \
131 bool Object::IsFixedArrayBase() {
132 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
133 IsFixedTypedArrayBase() || IsExternalArray();
137 // External objects are not extensible, so the map check is enough.
138 bool Object::IsExternal() {
139 return Object::IsHeapObject() &&
140 HeapObject::cast(this)->map() ==
141 HeapObject::cast(this)->GetHeap()->external_map();
145 bool Object::IsAccessorInfo() {
146 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
150 bool Object::IsSmi() {
151 return HAS_SMI_TAG(this);
155 bool Object::IsHeapObject() {
156 return Internals::HasHeapObjectTag(this);
160 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
161 TYPE_CHECKER(Float32x4, FLOAT32x4_TYPE)
162 TYPE_CHECKER(Float64x2, FLOAT64x2_TYPE)
163 TYPE_CHECKER(Int32x4, INT32x4_TYPE)
164 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
167 bool Object::IsString() {
168 return Object::IsHeapObject()
169 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
173 bool Object::IsName() {
174 return IsString() || IsSymbol();
178 bool Object::IsUniqueName() {
179 return IsInternalizedString() || IsSymbol();
183 bool Object::IsSpecObject() {
184 return Object::IsHeapObject()
185 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
189 bool Object::IsSpecFunction() {
190 if (!Object::IsHeapObject()) return false;
191 InstanceType type = HeapObject::cast(this)->map()->instance_type();
192 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
196 bool Object::IsTemplateInfo() {
197 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
201 bool Object::IsInternalizedString() {
202 if (!this->IsHeapObject()) return false;
203 uint32_t type = HeapObject::cast(this)->map()->instance_type();
204 STATIC_ASSERT(kNotInternalizedTag != 0);
205 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
206 (kStringTag | kInternalizedTag);
210 bool Object::IsConsString() {
211 if (!IsString()) return false;
212 return StringShape(String::cast(this)).IsCons();
216 bool Object::IsSlicedString() {
217 if (!IsString()) return false;
218 return StringShape(String::cast(this)).IsSliced();
222 bool Object::IsSeqString() {
223 if (!IsString()) return false;
224 return StringShape(String::cast(this)).IsSequential();
228 bool Object::IsSeqOneByteString() {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsSequential() &&
231 String::cast(this)->IsOneByteRepresentation();
235 bool Object::IsSeqTwoByteString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsTwoByteRepresentation();
242 bool Object::IsExternalString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsExternal();
248 bool Object::IsExternalAsciiString() {
249 if (!IsString()) return false;
250 return StringShape(String::cast(this)).IsExternal() &&
251 String::cast(this)->IsOneByteRepresentation();
255 bool Object::IsExternalTwoByteString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsTwoByteRepresentation();
262 bool Object::HasValidElements() {
263 // Dictionary is covered under FixedArray.
264 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
265 IsFixedTypedArrayBase();
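266 // Returns storage for |object| under the given representation: Smi zero for
267 // an uninitialized Smi field, a (fresh) HeapNumber box for a double field,
268 // and the object itself otherwise.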
269 Handle<Object> Object::NewStorageFor(Isolate* isolate,
270 Handle<Object> object,
271 Representation representation) {
272 if (representation.IsSmi() && object->IsUninitialized()) {
273 return handle(Smi::FromInt(0), isolate);
275 if (!representation.IsDouble()) return object;
276 if (object->IsUninitialized()) {
277 return isolate->factory()->NewHeapNumber(0);
279 return isolate->factory()->NewHeapNumber(object->Number());
283 StringShape::StringShape(String* str)
284 : type_(str->map()->instance_type()) {
286 ASSERT((type_ & kIsNotStringMask) == kStringTag);
290 StringShape::StringShape(Map* map)
291 : type_(map->instance_type()) {
293 ASSERT((type_ & kIsNotStringMask) == kStringTag);
297 StringShape::StringShape(InstanceType t)
298 : type_(static_cast<uint32_t>(t)) {
300 ASSERT((type_ & kIsNotStringMask) == kStringTag);
304 bool StringShape::IsInternalized() {
306 STATIC_ASSERT(kNotInternalizedTag != 0);
307 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
308 (kStringTag | kInternalizedTag);
312 bool String::IsOneByteRepresentation() {
313 uint32_t type = map()->instance_type();
314 return (type & kStringEncodingMask) == kOneByteStringTag;
318 bool String::IsTwoByteRepresentation() {
319 uint32_t type = map()->instance_type();
320 return (type & kStringEncodingMask) == kTwoByteStringTag;
324 bool String::IsOneByteRepresentationUnderneath() {
325 uint32_t type = map()->instance_type();
326 STATIC_ASSERT(kIsIndirectStringTag != 0);
327 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
329 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
330 case kOneByteStringTag:
331 return true;
332 case kTwoByteStringTag:
333 return false;
334 default: // Cons or sliced string. Need to go deeper.
335 return GetUnderlying()->IsOneByteRepresentation();
340 bool String::IsTwoByteRepresentationUnderneath() {
341 uint32_t type = map()->instance_type();
342 STATIC_ASSERT(kIsIndirectStringTag != 0);
343 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
345 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
346 case kOneByteStringTag:
347 return false;
348 case kTwoByteStringTag:
349 return true;
350 default: // Cons or sliced string. Need to go deeper.
351 return GetUnderlying()->IsTwoByteRepresentation();
356 bool String::HasOnlyOneByteChars() {
357 uint32_t type = map()->instance_type();
358 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
359 IsOneByteRepresentation();
363 bool StringShape::IsCons() {
364 return (type_ & kStringRepresentationMask) == kConsStringTag;
368 bool StringShape::IsSliced() {
369 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
373 bool StringShape::IsIndirect() {
374 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
378 bool StringShape::IsExternal() {
379 return (type_ & kStringRepresentationMask) == kExternalStringTag;
383 bool StringShape::IsSequential() {
384 return (type_ & kStringRepresentationMask) == kSeqStringTag;
388 StringRepresentationTag StringShape::representation_tag() {
389 uint32_t tag = (type_ & kStringRepresentationMask);
390 return static_cast<StringRepresentationTag>(tag);
394 uint32_t StringShape::encoding_tag() {
395 return type_ & kStringEncodingMask;
399 uint32_t StringShape::full_representation_tag() {
400 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
404 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
405 Internals::kFullStringRepresentationMask);
407 STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
408 Internals::kStringEncodingMask);
411 bool StringShape::IsSequentialAscii() {
412 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
416 bool StringShape::IsSequentialTwoByte() {
417 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
421 bool StringShape::IsExternalAscii() {
422 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
426 STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
427 Internals::kExternalAsciiRepresentationTag);
429 STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);
432 bool StringShape::IsExternalTwoByte() {
433 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
437 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
438 Internals::kExternalTwoByteRepresentationTag);
440 STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
442 uc32 FlatStringReader::Get(int index) {
443 ASSERT(0 <= index && index <= length_);
444 if (is_ascii_) {
445 return static_cast<const byte*>(start_)[index];
446 } else {
447 return static_cast<const uc16*>(start_)[index];
448 }
452 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
453 return key->AsHandle(isolate);
457 Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
458 return key->AsHandle(isolate);
462 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
463 HashTableKey* key) {
464 return key->AsHandle(isolate);
468 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
469 HashTableKey* key) {
470 return key->AsHandle(isolate);
473 template <typename Char>
474 class SequentialStringKey : public HashTableKey {
476 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
477 : string_(string), hash_field_(0), seed_(seed) { }
479 virtual uint32_t Hash() V8_OVERRIDE {
480 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
481 string_.length(),
482 seed_);
484 uint32_t result = hash_field_ >> String::kHashShift;
485 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
486 return result;
490 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
491 return String::cast(other)->Hash();
494 Vector<const Char> string_;
495 uint32_t hash_field_;
500 class OneByteStringKey : public SequentialStringKey<uint8_t> {
502 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
503 : SequentialStringKey<uint8_t>(str, seed) { }
505 virtual bool IsMatch(Object* string) V8_OVERRIDE {
506 return String::cast(string)->IsOneByteEqualTo(string_);
509 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
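511 // HashTableKey over a substring [from_, from_ + length_) of an existing
512 // string; sliced strings are unwrapped to their parent string first.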
513 template<class Char>
514 class SubStringKey : public HashTableKey {
516 SubStringKey(Handle<String> string, int from, int length)
517 : string_(string), from_(from), length_(length) {
518 if (string_->IsSlicedString()) {
519 string_ = Handle<String>(Unslice(*string_, &from_));
521 ASSERT(string_->IsSeqString() || string->IsExternalString());
524 virtual uint32_t Hash() V8_OVERRIDE {
525 ASSERT(length_ >= 0);
526 ASSERT(from_ + length_ <= string_->length());
527 const Char* chars = GetChars() + from_;
528 hash_field_ = StringHasher::HashSequentialString(
529 chars, length_, string_->GetHeap()->HashSeed());
530 uint32_t result = hash_field_ >> String::kHashShift;
531 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
532 return result;
535 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
536 return String::cast(other)->Hash();
539 virtual bool IsMatch(Object* string) V8_OVERRIDE;
540 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
543 const Char* GetChars();
544 String* Unslice(String* string, int* offset) {
545 while (string->IsSlicedString()) {
546 SlicedString* sliced = SlicedString::cast(string);
547 *offset += sliced->offset();
548 string = sliced->parent();
549 }
550 return string;
551 }
553 Handle<String> string_;
554 int from_;
555 int length_;
556 uint32_t hash_field_;
560 class TwoByteStringKey : public SequentialStringKey<uc16> {
562 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
563 : SequentialStringKey<uc16>(str, seed) { }
565 virtual bool IsMatch(Object* string) V8_OVERRIDE {
566 return String::cast(string)->IsTwoByteEqualTo(string_);
569 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
573 // Utf8StringKey carries a vector of chars as key.
574 class Utf8StringKey : public HashTableKey {
576 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
577 : string_(string), hash_field_(0), seed_(seed) { }
579 virtual bool IsMatch(Object* string) V8_OVERRIDE {
580 return String::cast(string)->IsUtf8EqualTo(string_);
583 virtual uint32_t Hash() V8_OVERRIDE {
584 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
585 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
586 uint32_t result = hash_field_ >> String::kHashShift;
587 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
588 return result;
591 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
592 return String::cast(other)->Hash();
595 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
596 if (hash_field_ == 0) Hash();
597 return isolate->factory()->NewInternalizedStringFromUtf8(
598 string_, chars_, hash_field_);
601 Vector<const char> string_;
602 uint32_t hash_field_;
603 int chars_; // Caches the number of characters when computing the hash code.
608 bool Object::IsNumber() {
609 return IsSmi() || IsHeapNumber();
613 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
614 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
617 bool Object::IsFiller() {
618 if (!Object::IsHeapObject()) return false;
619 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
620 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
624 bool Object::IsExternalArray() {
625 if (!Object::IsHeapObject())
626 return false;
627 InstanceType instance_type =
628 HeapObject::cast(this)->map()->instance_type();
629 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
630 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
634 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
635 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
636 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
638 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
639 #undef TYPED_ARRAY_TYPE_CHECKER
642 bool Object::IsFixedTypedArrayBase() {
643 if (!Object::IsHeapObject()) return false;
645 InstanceType instance_type =
646 HeapObject::cast(this)->map()->instance_type();
647 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
648 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
652 bool Object::IsJSReceiver() {
653 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
654 return IsHeapObject() &&
655 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
659 bool Object::IsJSObject() {
660 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
661 return IsHeapObject() &&
662 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
666 bool Object::IsJSProxy() {
667 if (!Object::IsHeapObject()) return false;
668 InstanceType type = HeapObject::cast(this)->map()->instance_type();
669 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
673 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
674 TYPE_CHECKER(JSSet, JS_SET_TYPE)
675 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
676 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
677 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
678 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
679 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
680 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
681 TYPE_CHECKER(Map, MAP_TYPE)
682 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
683 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
684 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
687 bool Object::IsJSWeakCollection() {
688 return IsJSWeakMap() || IsJSWeakSet();
692 bool Object::IsDescriptorArray() {
693 return IsFixedArray();
697 bool Object::IsTransitionArray() {
698 return IsFixedArray();
702 bool Object::IsDeoptimizationInputData() {
703 // Must be a fixed array.
704 if (!IsFixedArray()) return false;
706 // There's no sure way to detect the difference between a fixed array and
707 // a deoptimization data array. Since this is used for asserts we can
708 // check that the length is zero or else the fixed size plus a multiple of
709 // the entry size.
710 int length = FixedArray::cast(this)->length();
711 if (length == 0) return true;
713 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
714 return length >= 0 &&
715 length % DeoptimizationInputData::kDeoptEntrySize == 0;
719 bool Object::IsDeoptimizationOutputData() {
720 if (!IsFixedArray()) return false;
721 // There's actually no way to see the difference between a fixed array and
722 // a deoptimization data array. Since this is used for asserts we can check
723 // that the length is plausible though.
724 if (FixedArray::cast(this)->length() % 2 != 0) return false;
725 return true;
729 bool Object::IsDependentCode() {
730 if (!IsFixedArray()) return false;
731 // There's actually no way to see the difference between a fixed array and
732 // a dependent codes array.
733 return true;
737 bool Object::IsContext() {
738 if (!Object::IsHeapObject()) return false;
739 Map* map = HeapObject::cast(this)->map();
740 Heap* heap = map->GetHeap();
741 return (map == heap->function_context_map() ||
742 map == heap->catch_context_map() ||
743 map == heap->with_context_map() ||
744 map == heap->native_context_map() ||
745 map == heap->block_context_map() ||
746 map == heap->module_context_map() ||
747 map == heap->global_context_map());
751 bool Object::IsNativeContext() {
752 return Object::IsHeapObject() &&
753 HeapObject::cast(this)->map() ==
754 HeapObject::cast(this)->GetHeap()->native_context_map();
758 bool Object::IsScopeInfo() {
759 return Object::IsHeapObject() &&
760 HeapObject::cast(this)->map() ==
761 HeapObject::cast(this)->GetHeap()->scope_info_map();
765 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
768 template <> inline bool Is<JSFunction>(Object* obj) {
769 return obj->IsJSFunction();
773 TYPE_CHECKER(Code, CODE_TYPE)
774 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
775 TYPE_CHECKER(Cell, CELL_TYPE)
776 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
777 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
778 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
779 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
780 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
781 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
782 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
785 bool Object::IsStringWrapper() {
786 return IsJSValue() && JSValue::cast(this)->value()->IsString();
790 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
793 bool Object::IsBoolean() {
794 return IsOddball() &&
795 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
799 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
800 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
801 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
802 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
805 bool Object::IsJSArrayBufferView() {
806 return IsJSDataView() || IsJSTypedArray();
810 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
813 template <> inline bool Is<JSArray>(Object* obj) {
814 return obj->IsJSArray();
818 bool Object::IsHashTable() {
819 return Object::IsHeapObject() &&
820 HeapObject::cast(this)->map() ==
821 HeapObject::cast(this)->GetHeap()->hash_table_map();
825 bool Object::IsDictionary() {
826 return IsHashTable() &&
827 this != HeapObject::cast(this)->GetHeap()->string_table();
831 bool Object::IsStringTable() {
832 return IsHashTable();
836 bool Object::IsJSFunctionResultCache() {
837 if (!IsFixedArray()) return false;
838 FixedArray* self = FixedArray::cast(this);
839 int length = self->length();
840 if (length < JSFunctionResultCache::kEntriesIndex) return false;
841 if ((length - JSFunctionResultCache::kEntriesIndex)
842 % JSFunctionResultCache::kEntrySize != 0) {
843 return false;
844 }
845 #ifdef VERIFY_HEAP
846 if (FLAG_verify_heap) {
847 reinterpret_cast<JSFunctionResultCache*>(this)->
848 JSFunctionResultCacheVerify();
849 }
850 #endif
851 return true;
855 bool Object::IsNormalizedMapCache() {
856 return NormalizedMapCache::IsNormalizedMapCache(this);
860 int NormalizedMapCache::GetIndex(Handle<Map> map) {
861 return map->Hash() % NormalizedMapCache::kEntries;
865 bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) {
866 if (!obj->IsFixedArray()) return false;
867 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
868 return false;
869 }
870 #ifdef VERIFY_HEAP
871 if (FLAG_verify_heap) {
872 reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify();
873 }
874 #endif
875 return true;
879 bool Object::IsCompilationCacheTable() {
880 return IsHashTable();
884 bool Object::IsCodeCacheHashTable() {
885 return IsHashTable();
889 bool Object::IsPolymorphicCodeCacheHashTable() {
890 return IsHashTable();
894 bool Object::IsMapCache() {
895 return IsHashTable();
899 bool Object::IsObjectHashTable() {
900 return IsHashTable();
904 bool Object::IsOrderedHashTable() {
905 return IsHeapObject() &&
906 HeapObject::cast(this)->map() ==
907 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
911 bool Object::IsPrimitive() {
912 return IsOddball() || IsNumber() || IsString();
916 bool Object::IsJSGlobalProxy() {
917 bool result = IsHeapObject() &&
918 (HeapObject::cast(this)->map()->instance_type() ==
919 JS_GLOBAL_PROXY_TYPE);
920 ASSERT(!result ||
921 HeapObject::cast(this)->map()->is_access_check_needed());
922 return result;
926 bool Object::IsGlobalObject() {
927 if (!IsHeapObject()) return false;
929 InstanceType type = HeapObject::cast(this)->map()->instance_type();
930 return type == JS_GLOBAL_OBJECT_TYPE ||
931 type == JS_BUILTINS_OBJECT_TYPE;
935 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
936 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
939 bool Object::IsUndetectableObject() {
940 return IsHeapObject()
941 && HeapObject::cast(this)->map()->is_undetectable();
945 bool Object::IsAccessCheckNeeded() {
946 if (!IsHeapObject()) return false;
947 if (IsJSGlobalProxy()) {
948 JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
949 GlobalObject* global =
950 proxy->GetIsolate()->context()->global_object();
951 return proxy->IsDetachedFrom(global);
953 return HeapObject::cast(this)->map()->is_access_check_needed();
957 bool Object::IsStruct() {
958 if (!IsHeapObject()) return false;
959 switch (HeapObject::cast(this)->map()->instance_type()) {
960 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
961 STRUCT_LIST(MAKE_STRUCT_CASE)
962 #undef MAKE_STRUCT_CASE
963 default: return false;
968 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
969 bool Object::Is##Name() { \
970 return Object::IsHeapObject() \
971 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
973 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
974 #undef MAKE_STRUCT_PREDICATE
977 bool Object::IsUndefined() {
978 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
982 bool Object::IsNull() {
983 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
987 bool Object::IsTheHole() {
988 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
992 bool Object::IsException() {
993 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
997 bool Object::IsUninitialized() {
998 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1002 bool Object::IsTrue() {
1003 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1007 bool Object::IsFalse() {
1008 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1012 bool Object::IsArgumentsMarker() {
1013 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1017 double Object::Number() {
1018 ASSERT(IsNumber());
1019 return IsSmi()
1020 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1021 : reinterpret_cast<HeapNumber*>(this)->value();
1025 bool Object::IsNaN() {
1026 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
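1028 // Returns the value as a Smi if it is a Smi or a HeapNumber holding an exact
1029 // Smi value; otherwise returns an empty handle.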
1030 MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
1031 if (object->IsSmi()) return Handle<Smi>::cast(object);
1032 if (object->IsHeapNumber()) {
1033 double value = Handle<HeapNumber>::cast(object)->value();
1034 int int_value = FastD2I(value);
1035 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1036 return handle(Smi::FromInt(int_value), isolate);
1039 return Handle<Smi>();
1043 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1044 Handle<Object> object) {
1045 return ToObject(
1046 isolate, object, handle(isolate->context()->native_context(), isolate));
1050 bool Object::HasSpecificClassOf(String* name) {
1051 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1055 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1056 Handle<Name> name) {
1057 PropertyAttributes attributes;
1058 return GetPropertyWithReceiver(object, object, name, &attributes);
1062 MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1063 Handle<Object> object,
1064 uint32_t index) {
1065 // GetElement can trigger a getter which can cause allocation.
1066 // This was not always the case. This ASSERT is here to catch
1067 // leftover incorrect uses.
1068 ASSERT(AllowHeapAllocation::IsAllowed());
1069 return Object::GetElementWithReceiver(isolate, object, object, index);
1073 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1074 Handle<Name> name) {
1075 uint32_t index;
1076 Isolate* isolate = name->GetIsolate();
1077 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1078 return GetProperty(object, name);
1082 MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1083 Handle<Object> object,
1084 const char* name) {
1085 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1086 ASSERT(!str.is_null());
1088 uint32_t index; // Assert that the name is not an array index.
1089 ASSERT(!str->AsArrayIndex(&index));
1091 return GetProperty(object, str);
1095 MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
1096 Handle<Object> receiver,
1097 uint32_t index) {
1098 return GetPropertyWithHandler(
1099 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1103 MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
1104 Handle<JSReceiver> receiver,
1105 uint32_t index,
1106 Handle<Object> value,
1107 StrictMode strict_mode) {
1108 Isolate* isolate = proxy->GetIsolate();
1109 Handle<String> name = isolate->factory()->Uint32ToString(index);
1110 return SetPropertyWithHandler(
1111 proxy, receiver, name, value, NONE, strict_mode);
1115 bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
1116 Isolate* isolate = proxy->GetIsolate();
1117 Handle<String> name = isolate->factory()->Uint32ToString(index);
1118 return HasPropertyWithHandler(proxy, name);
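1120 // Raw field access macros. FIELD_ADDR strips the heap-object tag from the
1121 // tagged pointer to obtain the real address of a field.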
1122 #define FIELD_ADDR(p, offset) \
1123 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1125 #define READ_FIELD(p, offset) \
1126 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
1128 #define ACQUIRE_READ_FIELD(p, offset) \
1129 reinterpret_cast<Object*>( \
1130 Acquire_Load(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset))))
1132 #define NOBARRIER_READ_FIELD(p, offset) \
1133 reinterpret_cast<Object*>( \
1134 NoBarrier_Load(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset))))
1136 #define WRITE_FIELD(p, offset, value) \
1137 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1139 #define RELEASE_WRITE_FIELD(p, offset, value) \
1140 Release_Store(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)), \
1141 reinterpret_cast<AtomicWord>(value));
1143 #define NOBARRIER_WRITE_FIELD(p, offset, value) \
1144 NoBarrier_Store(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)), \
1145 reinterpret_cast<AtomicWord>(value));
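1146 // Write barriers: record stores for incremental marking and old-to-new pointers.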
1147 #define WRITE_BARRIER(heap, object, offset, value) \
1148 heap->incremental_marking()->RecordWrite( \
1149 object, HeapObject::RawField(object, offset), value); \
1150 if (heap->InNewSpace(value)) { \
1151 heap->RecordWrite(object->address(), offset); \
1154 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1155 if (mode == UPDATE_WRITE_BARRIER) { \
1156 heap->incremental_marking()->RecordWrite( \
1157 object, HeapObject::RawField(object, offset), value); \
1158 if (heap->InNewSpace(value)) { \
1159 heap->RecordWrite(object->address(), offset); \
1163 #ifndef V8_TARGET_ARCH_MIPS
1164 #define READ_DOUBLE_FIELD(p, offset) \
1165 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
1166 #else // V8_TARGET_ARCH_MIPS
1167 // Prevent gcc from using load-double (mips ldc1) on (possibly)
1168 // non-64-bit aligned HeapNumber::value.
1169 static inline double read_double_field(void* p, int offset) {
1170 union conversion {
1171 double d;
1172 uint32_t u[2];
1173 } c;
1174 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
1175 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
1176 return c.d;
1177 }
1178 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1179 #endif // V8_TARGET_ARCH_MIPS
1181 #ifndef V8_TARGET_ARCH_MIPS
1182 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1183 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1184 #else // V8_TARGET_ARCH_MIPS
1185 // Prevent gcc from using store-double (mips sdc1) on (possibly)
1186 // non-64-bit aligned HeapNumber::value.
1187 static inline void write_double_field(void* p, int offset,
1188 double value) {
1189 union conversion {
1190 double d;
1191 uint32_t u[2];
1192 } c;
1193 c.d = value;
1194 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1195 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1197 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1198 write_double_field(p, offset, value)
1199 #endif // V8_TARGET_ARCH_MIPS
1201 #define READ_FLOAT32x4_FIELD(p, offset) \
1202 (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)))
1204 #define WRITE_FLOAT32x4_FIELD(p, offset, value) \
1205 (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)) = value)
1207 #define READ_FLOAT64x2_FIELD(p, offset) \
1208 (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)))
1210 #define WRITE_FLOAT64x2_FIELD(p, offset, value) \
1211 (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)) = value)
1213 #define READ_INT32x4_FIELD(p, offset) \
1214 (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)))
1216 #define WRITE_INT32x4_FIELD(p, offset, value) \
1217 (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)) = value)
1219 #define READ_FLOAT_FIELD(p, offset) \
1220 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)))
1222 #define WRITE_FLOAT_FIELD(p, offset, value) \
1223 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
1225 #define READ_INT_FIELD(p, offset) \
1226 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
1228 #define WRITE_INT_FIELD(p, offset, value) \
1229 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1231 #define READ_INTPTR_FIELD(p, offset) \
1232 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
1234 #define WRITE_INTPTR_FIELD(p, offset, value) \
1235 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1237 #define READ_UINT32_FIELD(p, offset) \
1238 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
1240 #define WRITE_UINT32_FIELD(p, offset, value) \
1241 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1243 #define READ_INT32_FIELD(p, offset) \
1244 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))
1246 #define WRITE_INT32_FIELD(p, offset, value) \
1247 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1249 #define READ_INT64_FIELD(p, offset) \
1250 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
1252 #define WRITE_INT64_FIELD(p, offset, value) \
1253 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1255 #define READ_SHORT_FIELD(p, offset) \
1256 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
1258 #define WRITE_SHORT_FIELD(p, offset, value) \
1259 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1261 #define READ_BYTE_FIELD(p, offset) \
1262 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
1264 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1265 static_cast<byte>(NoBarrier_Load( \
1266 reinterpret_cast<Atomic8*>(FIELD_ADDR(p, offset))) )
1268 #define WRITE_BYTE_FIELD(p, offset, value) \
1269 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1271 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1272 NoBarrier_Store(reinterpret_cast<Atomic8*>(FIELD_ADDR(p, offset)), \
1273 static_cast<Atomic8>(value));
1275 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1276 return &READ_FIELD(obj, byte_offset);
1280 int Smi::value() {
1281 return Internals::SmiValue(this);
1285 Smi* Smi::FromInt(int value) {
1286 ASSERT(Smi::IsValid(value));
1287 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1291 Smi* Smi::FromIntptr(intptr_t value) {
1292 ASSERT(Smi::IsValid(value));
1293 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1294 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1298 bool Smi::IsValid(intptr_t value) {
1299 bool result = Internals::IsValidSmi(value);
1300 ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
1301 return result;
1305 MapWord MapWord::FromMap(Map* map) {
1306 return MapWord(reinterpret_cast<uintptr_t>(map));
1310 Map* MapWord::ToMap() {
1311 return reinterpret_cast<Map*>(value_);
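1313 // During GC the map word may hold a forwarding pointer; its address is stored
1314 // untagged, so it looks like a Smi, which is what IsForwardingAddress checks.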
1315 bool MapWord::IsForwardingAddress() {
1316 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1320 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1321 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1322 return MapWord(reinterpret_cast<uintptr_t>(raw));
1326 HeapObject* MapWord::ToForwardingAddress() {
1327 ASSERT(IsForwardingAddress());
1328 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1333 void HeapObject::VerifyObjectField(int offset) {
1334 VerifyPointer(READ_FIELD(this, offset));
1337 void HeapObject::VerifySmiField(int offset) {
1338 CHECK(READ_FIELD(this, offset)->IsSmi());
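1341 // The owning heap is recovered from the MemoryChunk (page) that contains this
1342 // object.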
1343 Heap* HeapObject::GetHeap() {
1344 Heap* heap =
1345 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1346 SLOW_ASSERT(heap != NULL);
1347 return heap;
1351 Isolate* HeapObject::GetIsolate() {
1352 return GetHeap()->isolate();
1356 Map* HeapObject::map() {
1357 return map_word().ToMap();
1361 void HeapObject::set_map(Map* value) {
1362 set_map_word(MapWord::FromMap(value));
1363 if (value != NULL) {
1364 // TODO(1600) We are passing NULL as a slot because maps can never be on an
1365 // evacuation candidate.
1366 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1371 Map* HeapObject::synchronized_map() {
1372 return synchronized_map_word().ToMap();
1376 void HeapObject::synchronized_set_map(Map* value) {
1377 synchronized_set_map_word(MapWord::FromMap(value));
1378 if (value != NULL) {
1379 // TODO(1600) We are passing NULL as a slot because maps can never be on an
1380 // evacuation candidate.
1381 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1386 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1387 synchronized_set_map_word(MapWord::FromMap(value));
1391 // Unsafe accessor omitting write barrier.
1392 void HeapObject::set_map_no_write_barrier(Map* value) {
1393 set_map_word(MapWord::FromMap(value));
1397 MapWord HeapObject::map_word() {
1398 return MapWord(
1399 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1403 void HeapObject::set_map_word(MapWord map_word) {
1404 NOBARRIER_WRITE_FIELD(
1405 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1409 MapWord HeapObject::synchronized_map_word() {
1410 return MapWord(
1411 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1415 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1416 RELEASE_WRITE_FIELD(
1417 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1421 HeapObject* HeapObject::FromAddress(Address address) {
1422 ASSERT_TAG_ALIGNED(address);
1423 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1427 Address HeapObject::address() {
1428 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1432 int HeapObject::Size() {
1433 return SizeFromMap(map());
1437 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1438 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1439 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1443 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1444 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1448 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1449 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1453 double HeapNumber::value() {
1454 return READ_DOUBLE_FIELD(this, kValueOffset);
1458 void HeapNumber::set_value(double value) {
1459 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1463 int HeapNumber::get_exponent() {
1464 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1465 kExponentShift) - kExponentBias;
1469 int HeapNumber::get_sign() {
1470 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1474 const char* Float32x4::Name() {
1479 int Float32x4::kRuntimeAllocatorId() {
1480 return Runtime::kAllocateFloat32x4;
1484 int Float32x4::kMapRootIndex() {
1485 return Heap::kFloat32x4MapRootIndex;
1489 float32x4_value_t Float32x4::value() {
1490 return READ_FLOAT32x4_FIELD(this, kValueOffset);
1494 void Float32x4::set_value(float32x4_value_t value) {
1495 WRITE_FLOAT32x4_FIELD(this, kValueOffset, value);
1499 float Float32x4::getAt(int index) {
1500 ASSERT(index >= 0 && index < kLanes);
1501 return READ_FLOAT_FIELD(this, kValueOffset + index * kFloatSize);
1505 const char* Float64x2::Name() {
1510 int Float64x2::kRuntimeAllocatorId() {
1511 return Runtime::kAllocateFloat64x2;
1515 int Float64x2::kMapRootIndex() {
1516 return Heap::kFloat64x2MapRootIndex;
1520 float64x2_value_t Float64x2::value() {
1521 return READ_FLOAT64x2_FIELD(this, kValueOffset);
1525 void Float64x2::set_value(float64x2_value_t value) {
1526 WRITE_FLOAT64x2_FIELD(this, kValueOffset, value);
1530 double Float64x2::getAt(int index) {
1531 ASSERT(index >= 0 && index < kLanes);
1532 return READ_DOUBLE_FIELD(this, kValueOffset + index * kDoubleSize);
1536 const char* Int32x4::Name() {
1541 int Int32x4::kRuntimeAllocatorId() {
1542 return Runtime::kAllocateInt32x4;
1546 int Int32x4::kMapRootIndex() {
1547 return Heap::kInt32x4MapRootIndex;
1551 int32x4_value_t Int32x4::value() {
1552 return READ_INT32x4_FIELD(this, kValueOffset);
1556 void Int32x4::set_value(int32x4_value_t value) {
1557 WRITE_INT32x4_FIELD(this, kValueOffset, value);
1561 int32_t Int32x4::getAt(int index) {
1562 ASSERT(index >= 0 && index < kLanes);
1563 return READ_INT32_FIELD(this, kValueOffset + index * kInt32Size);
1567 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1570 Object** FixedArray::GetFirstElementAddress() {
1571 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1575 bool FixedArray::ContainsOnlySmisOrHoles() {
1576 Object* the_hole = GetHeap()->the_hole_value();
1577 Object** current = GetFirstElementAddress();
1578 for (int i = 0; i < length(); ++i) {
1579 Object* candidate = *current++;
1580 if (!candidate->IsSmi() && candidate != the_hole) return false;
1581 }
1582 return true;
1586 FixedArrayBase* JSObject::elements() {
1587 Object* array = READ_FIELD(this, kElementsOffset);
1588 return static_cast<FixedArrayBase*>(array);
1592 void JSObject::ValidateElements(Handle<JSObject> object) {
1593 #ifdef ENABLE_SLOW_ASSERTS
1594 if (FLAG_enable_slow_asserts) {
1595 ElementsAccessor* accessor = object->GetElementsAccessor();
1596 accessor->Validate(object);
1597 }
1598 #endif
1602 void AllocationSite::Initialize() {
1603 set_transition_info(Smi::FromInt(0));
1604 SetElementsKind(GetInitialFastElementsKind());
1605 set_nested_site(Smi::FromInt(0));
1606 set_pretenure_data(Smi::FromInt(0));
1607 set_pretenure_create_count(Smi::FromInt(0));
1608 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1609 SKIP_WRITE_BARRIER);
1613 void AllocationSite::MarkZombie() {
1614 ASSERT(!IsZombie());
1616 set_pretenure_decision(kZombie);
1620 // Heuristic: We only need to create allocation site info if the boilerplate
1621 // elements kind is the initial elements kind.
1622 AllocationSiteMode AllocationSite::GetMode(
1623 ElementsKind boilerplate_elements_kind) {
1624 if (FLAG_pretenuring_call_new ||
1625 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1626 return TRACK_ALLOCATION_SITE;
1629 return DONT_TRACK_ALLOCATION_SITE;
1633 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1634 ElementsKind to) {
1635 if (FLAG_pretenuring_call_new ||
1636 (IsFastSmiElementsKind(from) &&
1637 IsMoreGeneralElementsKindTransition(from, to))) {
1638 return TRACK_ALLOCATION_SITE;
1641 return DONT_TRACK_ALLOCATION_SITE;
1645 inline bool AllocationSite::CanTrack(InstanceType type) {
1646 if (FLAG_allocation_site_pretenuring) {
1647 return type == JS_ARRAY_TYPE ||
1648 type == JS_OBJECT_TYPE ||
1649 type < FIRST_NONSTRING_TYPE;
1651 return type == JS_ARRAY_TYPE;
1655 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1656 Reason reason) {
1657 switch (reason) {
1658 case TENURING:
1659 return DependentCode::kAllocationSiteTenuringChangedGroup;
1661 case TRANSITIONS:
1662 return DependentCode::kAllocationSiteTransitionChangedGroup;
1664 }
1665 UNREACHABLE();
1666 return DependentCode::kAllocationSiteTransitionChangedGroup;
1670 inline void AllocationSite::set_memento_found_count(int count) {
1671 int value = pretenure_data()->value();
1672 // Verify that we can count more mementos than we can possibly find in one
1673 // new space collection.
1674 ASSERT((GetHeap()->MaxSemiSpaceSize() /
1675 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1676 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1677 ASSERT(count < MementoFoundCountBits::kMax);
1678 set_pretenure_data(
1679 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1680 SKIP_WRITE_BARRIER);
1683 inline bool AllocationSite::IncrementMementoFoundCount() {
1684 if (IsZombie()) return false;
1686 int value = memento_found_count();
1687 set_memento_found_count(value + 1);
1692 inline void AllocationSite::IncrementMementoCreateCount() {
1693 ASSERT(FLAG_allocation_site_pretenuring);
1694 int value = memento_create_count();
1695 set_memento_create_count(value + 1);
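1697 // Decides, from the ratio of found to created mementos, whether allocations
1698 // from this site should be pretenured; returns true if the decision changed.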
1699 inline bool AllocationSite::DigestPretenuringFeedback() {
1700 bool decision_changed = false;
1701 int create_count = memento_create_count();
1702 int found_count = memento_found_count();
1703 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1704 double ratio =
1705 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1706 static_cast<double>(found_count) / create_count : 0.0;
1707 PretenureFlag current_mode = GetPretenureMode();
1709 if (minimum_mementos_created) {
1710 PretenureDecision result = ratio >= kPretenureRatio
1711 ? kTenure
1712 : kDontTenure;
1713 set_pretenure_decision(result);
1714 if (current_mode != GetPretenureMode()) {
1715 decision_changed = true;
1716 set_deopt_dependent_code(true);
1720 if (FLAG_trace_pretenuring_statistics) {
1722 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1723 static_cast<void*>(this), create_count, found_count, ratio,
1724 current_mode == TENURED ? "tenured" : "not tenured",
1725 GetPretenureMode() == TENURED ? "tenured" : "not tenured");
1728 // Clear feedback calculation fields until the next gc.
1729 set_memento_found_count(0);
1730 set_memento_create_count(0);
1731 return decision_changed;
1735 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1736 JSObject::ValidateElements(object);
1737 ElementsKind elements_kind = object->map()->elements_kind();
1738 if (!IsFastObjectElementsKind(elements_kind)) {
1739 if (IsFastHoleyElementsKind(elements_kind)) {
1740 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1741 } else {
1742 TransitionElementsKind(object, FAST_ELEMENTS);
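1745 // Widens the target elements kind just enough that every object in |objects|
1746 // can be stored without a further transition, then transitions if the kind
1747 // actually changed.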
1748 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1749 Object** objects,
1750 uint32_t count,
1751 EnsureElementsMode mode) {
1752 ElementsKind current_kind = object->map()->elements_kind();
1753 ElementsKind target_kind = current_kind;
1755 DisallowHeapAllocation no_allocation;
1756 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1757 bool is_holey = IsFastHoleyElementsKind(current_kind);
1758 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1759 Heap* heap = object->GetHeap();
1760 Object* the_hole = heap->the_hole_value();
1761 for (uint32_t i = 0; i < count; ++i) {
1762 Object* current = *objects++;
1763 if (current == the_hole) {
1764 is_holey = true;
1765 target_kind = GetHoleyElementsKind(target_kind);
1766 } else if (!current->IsSmi()) {
1767 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1768 if (IsFastSmiElementsKind(target_kind)) {
1769 if (is_holey) {
1770 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1771 } else {
1772 target_kind = FAST_DOUBLE_ELEMENTS;
1773 }
1774 }
1775 } else if (is_holey) {
1776 target_kind = FAST_HOLEY_ELEMENTS;
1777 break;
1778 } else {
1779 target_kind = FAST_ELEMENTS;
1780 }
1781 }
1782 }
1784 if (target_kind != current_kind) {
1785 TransitionElementsKind(object, target_kind);
1790 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1791 Handle<FixedArrayBase> elements,
1792 uint32_t length,
1793 EnsureElementsMode mode) {
1794 Heap* heap = object->GetHeap();
1795 if (elements->map() != heap->fixed_double_array_map()) {
1796 ASSERT(elements->map() == heap->fixed_array_map() ||
1797 elements->map() == heap->fixed_cow_array_map());
1798 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1799 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1801 Object** objects =
1802 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1803 EnsureCanContainElements(object, objects, length, mode);
1804 return;
1805 }
1807 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1808 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1809 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1810 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1811 Handle<FixedDoubleArray> double_array =
1812 Handle<FixedDoubleArray>::cast(elements);
1813 for (uint32_t i = 0; i < length; ++i) {
1814 if (double_array->is_the_hole(i)) {
1815 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1816 return;
1817 }
1818 }
1819 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1824 void JSObject::SetMapAndElements(Handle<JSObject> object,
1825 Handle<Map> new_map,
1826 Handle<FixedArrayBase> value) {
1827 JSObject::MigrateToMap(object, new_map);
1828 ASSERT((object->map()->has_fast_smi_or_object_elements() ||
1829 (*value == object->GetHeap()->empty_fixed_array())) ==
1830 (value->map() == object->GetHeap()->fixed_array_map() ||
1831 value->map() == object->GetHeap()->fixed_cow_array_map()));
1832 ASSERT((*value == object->GetHeap()->empty_fixed_array()) ||
1833 (object->map()->has_fast_double_elements() ==
1834 value->IsFixedDoubleArray()));
1835 object->set_elements(*value);
1839 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1840 WRITE_FIELD(this, kElementsOffset, value);
1841 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1845 void JSObject::initialize_properties() {
1846 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1847 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1851 void JSObject::initialize_elements() {
1852 FixedArrayBase* elements = map()->GetInitialElements();
1853 WRITE_FIELD(this, kElementsOffset, elements);
1857 Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
1858 DisallowHeapAllocation no_gc;
1859 if (!map->HasTransitionArray()) return Handle<String>::null();
1860 TransitionArray* transitions = map->transitions();
1861 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1862 int transition = TransitionArray::kSimpleTransitionIndex;
1863 PropertyDetails details = transitions->GetTargetDetails(transition);
1864 Name* name = transitions->GetKey(transition);
1865 if (details.type() != FIELD) return Handle<String>::null();
1866 if (details.attributes() != NONE) return Handle<String>::null();
1867 if (!name->IsString()) return Handle<String>::null();
1868 return Handle<String>(String::cast(name));
1872 Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
1873 ASSERT(!ExpectedTransitionKey(map).is_null());
1874 return Handle<Map>(map->transitions()->GetTarget(
1875 TransitionArray::kSimpleTransitionIndex));
1879 Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1880 DisallowHeapAllocation no_allocation;
1881 if (!map->HasTransitionArray()) return Handle<Map>::null();
1882 TransitionArray* transitions = map->transitions();
1883 int transition = transitions->Search(*key);
1884 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1885 PropertyDetails target_details = transitions->GetTargetDetails(transition);
1886 if (target_details.type() != FIELD) return Handle<Map>::null();
1887 if (target_details.attributes() != NONE) return Handle<Map>::null();
1888 return Handle<Map>(transitions->GetTarget(transition));
1892 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1893 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1896 byte Oddball::kind() {
1897 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1901 void Oddball::set_kind(byte value) {
1902 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1906 Object* Cell::value() {
1907 return READ_FIELD(this, kValueOffset);
1911 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
1912 // The write barrier is not used for global property cells.
1913 ASSERT(!val->IsPropertyCell() && !val->IsCell());
1914 WRITE_FIELD(this, kValueOffset, val);
1917 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1919 Object* PropertyCell::type_raw() {
1920 return READ_FIELD(this, kTypeOffset);
1924 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
1925 WRITE_FIELD(this, kTypeOffset, val);
1929 int JSObject::GetHeaderSize() {
1930 InstanceType type = map()->instance_type();
1931 // Check for the most common kind of JavaScript object before
1932 // falling into the generic switch. This speeds up the internal
1933 // field operations considerably on average.
1934 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1935 switch (type) {
1936 case JS_GENERATOR_OBJECT_TYPE:
1937 return JSGeneratorObject::kSize;
1938 case JS_MODULE_TYPE:
1939 return JSModule::kSize;
1940 case JS_GLOBAL_PROXY_TYPE:
1941 return JSGlobalProxy::kSize;
1942 case JS_GLOBAL_OBJECT_TYPE:
1943 return JSGlobalObject::kSize;
1944 case JS_BUILTINS_OBJECT_TYPE:
1945 return JSBuiltinsObject::kSize;
1946 case JS_FUNCTION_TYPE:
1947 return JSFunction::kSize;
1948 case JS_VALUE_TYPE:
1949 return JSValue::kSize;
1950 case JS_DATE_TYPE:
1951 return JSDate::kSize;
1952 case JS_ARRAY_TYPE:
1953 return JSArray::kSize;
1954 case JS_ARRAY_BUFFER_TYPE:
1955 return JSArrayBuffer::kSize;
1956 case JS_TYPED_ARRAY_TYPE:
1957 return JSTypedArray::kSize;
1958 case JS_DATA_VIEW_TYPE:
1959 return JSDataView::kSize;
1960 case JS_SET_TYPE:
1961 return JSSet::kSize;
1962 case JS_MAP_TYPE:
1963 return JSMap::kSize;
1964 case JS_SET_ITERATOR_TYPE:
1965 return JSSetIterator::kSize;
1966 case JS_MAP_ITERATOR_TYPE:
1967 return JSMapIterator::kSize;
1968 case JS_WEAK_MAP_TYPE:
1969 return JSWeakMap::kSize;
1970 case JS_WEAK_SET_TYPE:
1971 return JSWeakSet::kSize;
1972 case JS_REGEXP_TYPE:
1973 return JSRegExp::kSize;
1974 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1975 return JSObject::kHeaderSize;
1976 case JS_MESSAGE_OBJECT_TYPE:
1977 return JSMessageObject::kSize;
1978 default:
1979 // TODO(jkummerow): Re-enable this. Blink currently hits this
1980 // from its CustomElementConstructorBuilder.
1981 // UNREACHABLE();
1982 return 0;
1983 }
1987 int JSObject::GetInternalFieldCount() {
1988 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1989 // Make sure to adjust for the number of in-object properties. These
1990 // properties do contribute to the size, but are not internal fields.
1991 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1992 map()->inobject_properties();
1996 int JSObject::GetInternalFieldOffset(int index) {
1997 ASSERT(index < GetInternalFieldCount() && index >= 0);
1998 return GetHeaderSize() + (kPointerSize * index);
2002 Object* JSObject::GetInternalField(int index) {
2003 ASSERT(index < GetInternalFieldCount() && index >= 0);
2004 // Internal objects do follow immediately after the header, whereas in-object
2005 // properties are at the end of the object. Therefore there is no need
2006 // to adjust the index here.
2007 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2011 void JSObject::SetInternalField(int index, Object* value) {
2012 ASSERT(index < GetInternalFieldCount() && index >= 0);
2013 // Internal objects do follow immediately after the header, whereas in-object
2014 // properties are at the end of the object. Therefore there is no need
2015 // to adjust the index here.
2016 int offset = GetHeaderSize() + (kPointerSize * index);
2017 WRITE_FIELD(this, offset, value);
2018 WRITE_BARRIER(GetHeap(), this, offset, value);
2022 void JSObject::SetInternalField(int index, Smi* value) {
2023 ASSERT(index < GetInternalFieldCount() && index >= 0);
2024 // Internal objects do follow immediately after the header, whereas in-object
2025 // properties are at the end of the object. Therefore there is no need
2026 // to adjust the index here.
2027 int offset = GetHeaderSize() + (kPointerSize * index);
2028 WRITE_FIELD(this, offset, value);
2032 // Access fast-case object properties at index. The use of these routines
2033 // is needed to correctly distinguish between properties stored in-object and
2034 // properties stored in the properties array.
2035 Object* JSObject::RawFastPropertyAt(int index) {
2036 // Adjust for the number of properties stored in the object.
2037 index -= map()->inobject_properties();
2038 if (index < 0) {
2039 int offset = map()->instance_size() + (index * kPointerSize);
2040 return READ_FIELD(this, offset);
2041 } else {
2042 ASSERT(index < properties()->length());
2043 return properties()->get(index);
2044 }
2048 void JSObject::FastPropertyAtPut(int index, Object* value) {
2049 // Adjust for the number of properties stored in the object.
2050 index -= map()->inobject_properties();
2051 if (index < 0) {
2052 int offset = map()->instance_size() + (index * kPointerSize);
2053 WRITE_FIELD(this, offset, value);
2054 WRITE_BARRIER(GetHeap(), this, offset, value);
2055 } else {
2056 ASSERT(index < properties()->length());
2057 properties()->set(index, value);
2058 }
2062 int JSObject::GetInObjectPropertyOffset(int index) {
2063 return map()->GetInObjectPropertyOffset(index);
2067 Object* JSObject::InObjectPropertyAt(int index) {
2068 int offset = GetInObjectPropertyOffset(index);
2069 return READ_FIELD(this, offset);
2073 Object* JSObject::InObjectPropertyAtPut(int index,
2074 Object* value,
2075 WriteBarrierMode mode) {
2076 // Adjust for the number of properties stored in the object.
2077 int offset = GetInObjectPropertyOffset(index);
2078 WRITE_FIELD(this, offset, value);
2079 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2080 return value;
2085 void JSObject::InitializeBody(Map* map,
2086 Object* pre_allocated_value,
2087 Object* filler_value) {
2088 ASSERT(!filler_value->IsHeapObject() ||
2089 !GetHeap()->InNewSpace(filler_value));
2090 ASSERT(!pre_allocated_value->IsHeapObject() ||
2091 !GetHeap()->InNewSpace(pre_allocated_value));
2092 int size = map->instance_size();
2093 int offset = kHeaderSize;
2094 if (filler_value != pre_allocated_value) {
2095 int pre_allocated = map->pre_allocated_property_fields();
2096 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
2097 for (int i = 0; i < pre_allocated; i++) {
2098 WRITE_FIELD(this, offset, pre_allocated_value);
2099 offset += kPointerSize;
2102 while (offset < size) {
2103 WRITE_FIELD(this, offset, filler_value);
2104 offset += kPointerSize;
2109 bool JSObject::HasFastProperties() {
2110 ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
2111 return !properties()->IsDictionary();
2115 bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
2116 // Allow extra fast properties if the object has more than
2117 // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
2118 // very unlikely that the object is being used as a dictionary and there is a
2119 // good chance that allowing more map transitions will be worth it.
2120 Map* map = this->map();
2121 if (map->unused_property_fields() != 0) return false;
2123 int inobject = map->inobject_properties();
2126 if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
2127 limit = Max(inobject, kMaxFastProperties);
2129 limit = Max(inobject, kFastPropertiesSoftLimit);
2131 return properties()->length() > limit;
2135 void Struct::InitializeBody(int object_size) {
2136 Object* value = GetHeap()->undefined_value();
2137 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2138 WRITE_FIELD(this, offset, value);
2143 bool Object::ToArrayIndex(uint32_t* index) {
2145 int value = Smi::cast(this)->value();
2146 if (value < 0) return false;
2150 if (IsHeapNumber()) {
2151 double value = HeapNumber::cast(this)->value();
2152 uint32_t uint_value = static_cast<uint32_t>(value);
2153 if (value == static_cast<double>(uint_value)) {
2154 *index = uint_value;
2162 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2163 if (!this->IsJSValue()) return false;
2165 JSValue* js_value = JSValue::cast(this);
2166 if (!js_value->value()->IsString()) return false;
2168 String* str = String::cast(js_value->value());
2169 if (index >= static_cast<uint32_t>(str->length())) return false;
2175 void Object::VerifyApiCallResultType() {
2176 #if ENABLE_EXTRA_CHECKS
2186 FATAL("API call returned invalid object");
2188 #endif // ENABLE_EXTRA_CHECKS
2192 FixedArrayBase* FixedArrayBase::cast(Object* object) {
2193 ASSERT(object->IsFixedArrayBase());
2194 return reinterpret_cast<FixedArrayBase*>(object);
2198 Object* FixedArray::get(int index) {
2199 SLOW_ASSERT(index >= 0 && index < this->length());
2200 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2204 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2205 return handle(array->get(index), array->GetIsolate());
2209 bool FixedArray::is_the_hole(int index) {
2210 return get(index) == GetHeap()->the_hole_value();
2214 void FixedArray::set(int index, Smi* value) {
2215 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2216 ASSERT(index >= 0 && index < this->length());
2217 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
2218 int offset = kHeaderSize + index * kPointerSize;
2219 WRITE_FIELD(this, offset, value);
2223 void FixedArray::set(int index, Object* value) {
2224 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2225 ASSERT(index >= 0 && index < this->length());
2226 int offset = kHeaderSize + index * kPointerSize;
2227 WRITE_FIELD(this, offset, value);
2228 WRITE_BARRIER(GetHeap(), this, offset, value);
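// Illustrative sketch (not part of the original source): the Smi overload of
// set() above never needs a write barrier, while the Object* overload records
// the store. ExampleInitPair is a hypothetical helper.
inline void ExampleInitPair(FixedArray* array, Object* heap_value) {
  ASSERT(array->length() >= 2);      // Assumption for this sketch.
  array->set(0, Smi::FromInt(42));   // Smi store, no barrier emitted.
  array->set(1, heap_value);         // Pointer store, WRITE_BARRIER recorded.
}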
2232 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2233 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
2237 inline double FixedDoubleArray::hole_nan_as_double() {
2238 return BitCast<double, uint64_t>(kHoleNanInt64);
2242 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2243 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
2244 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
2245 return OS::nan_value();
2249 double FixedDoubleArray::get_scalar(int index) {
2250 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2251 map() != GetHeap()->fixed_array_map());
2252 ASSERT(index >= 0 && index < this->length());
2253 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2254 ASSERT(!is_the_hole_nan(result));
2258 int64_t FixedDoubleArray::get_representation(int index) {
2259 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2260 map() != GetHeap()->fixed_array_map());
2261 ASSERT(index >= 0 && index < this->length());
2262 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2266 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2268 if (array->is_the_hole(index)) {
2269 return array->GetIsolate()->factory()->the_hole_value();
2271 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2276 void FixedDoubleArray::set(int index, double value) {
2277 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2278 map() != GetHeap()->fixed_array_map());
2279 int offset = kHeaderSize + index * kDoubleSize;
2280 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2281 WRITE_DOUBLE_FIELD(this, offset, value);
2285 void FixedDoubleArray::set_the_hole(int index) {
2286 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2287 map() != GetHeap()->fixed_array_map());
2288 int offset = kHeaderSize + index * kDoubleSize;
2289 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2293 bool FixedDoubleArray::is_the_hole(int index) {
2294 int offset = kHeaderSize + index * kDoubleSize;
2295 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
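// Illustrative sketch (assumption): the hole is a single fixed NaN bit
// pattern, and set() above canonicalizes user-supplied NaNs, so a stored NaN
// can never be mistaken for a hole. ExampleHoleVsNaN is a hypothetical helper.
inline void ExampleHoleVsNaN(FixedDoubleArray* array) {
  ASSERT(array->length() >= 2);    // Assumption for this sketch.
  array->set_the_hole(0);
  array->set(1, OS::nan_value());  // Stored as the canonical non-hole NaN.
  ASSERT(array->is_the_hole(0));
  ASSERT(!array->is_the_hole(1));
}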
2299 double* FixedDoubleArray::data_start() {
2300 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2304 void FixedDoubleArray::FillWithHoles(int from, int to) {
2305 for (int i = from; i < to; i++) {
2311 void ConstantPoolArray::set_weak_object_state(
2312 ConstantPoolArray::WeakObjectState state) {
2313 int old_layout_field = READ_INT_FIELD(this, kArrayLayoutOffset);
2314 int new_layout_field = WeakObjectStateField::update(old_layout_field, state);
2315 WRITE_INT_FIELD(this, kArrayLayoutOffset, new_layout_field);
2319 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2320 int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset);
2321 return WeakObjectStateField::decode(layout_field);
2325 int ConstantPoolArray::first_int64_index() {
2330 int ConstantPoolArray::first_code_ptr_index() {
2331 int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset);
2332 return first_int64_index() +
2333 NumberOfInt64EntriesField::decode(layout_field);
2337 int ConstantPoolArray::first_heap_ptr_index() {
2338 int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset);
2339 return first_code_ptr_index() +
2340 NumberOfCodePtrEntriesField::decode(layout_field);
2344 int ConstantPoolArray::first_int32_index() {
2345 int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset);
2346 return first_heap_ptr_index() +
2347 NumberOfHeapPtrEntriesField::decode(layout_field);
2351 int ConstantPoolArray::count_of_int64_entries() {
2352 return first_code_ptr_index();
2356 int ConstantPoolArray::count_of_code_ptr_entries() {
2357 return first_heap_ptr_index() - first_code_ptr_index();
2361 int ConstantPoolArray::count_of_heap_ptr_entries() {
2362 return first_int32_index() - first_heap_ptr_index();
2366 int ConstantPoolArray::count_of_int32_entries() {
2367 return length() - first_int32_index();
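// Illustrative sketch (assumption: first_int64_index() is 0, as the layout
// suggests): the four entry regions are laid out back to back (int64, code
// pointer, heap pointer, int32), so the counts above add up to length().
// ExampleTotalEntries is a hypothetical helper.
inline int ExampleTotalEntries(ConstantPoolArray* pool) {
  int total = pool->count_of_int64_entries() +
              pool->count_of_code_ptr_entries() +
              pool->count_of_heap_ptr_entries() +
              pool->count_of_int32_entries();
  ASSERT(total == pool->length());
  return total;
}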
2371 void ConstantPoolArray::Init(int number_of_int64_entries,
2372 int number_of_code_ptr_entries,
2373 int number_of_heap_ptr_entries,
2374 int number_of_int32_entries) {
2375 set_length(number_of_int64_entries +
2376 number_of_code_ptr_entries +
2377 number_of_heap_ptr_entries +
2378 number_of_int32_entries);
2380 NumberOfInt64EntriesField::encode(number_of_int64_entries) |
2381 NumberOfCodePtrEntriesField::encode(number_of_code_ptr_entries) |
2382 NumberOfHeapPtrEntriesField::encode(number_of_heap_ptr_entries) |
2383 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2384 WRITE_INT_FIELD(this, kArrayLayoutOffset, layout_field);
2388 int64_t ConstantPoolArray::get_int64_entry(int index) {
2389 ASSERT(map() == GetHeap()->constant_pool_array_map());
2390 ASSERT(index >= 0 && index < first_code_ptr_index());
2391 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2394 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2395 STATIC_ASSERT(kDoubleSize == kInt64Size);
2396 ASSERT(map() == GetHeap()->constant_pool_array_map());
2397 ASSERT(index >= 0 && index < first_code_ptr_index());
2398 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2402 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2403 ASSERT(map() == GetHeap()->constant_pool_array_map());
2404 ASSERT(index >= first_code_ptr_index() && index < first_heap_ptr_index());
2405 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2409 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2410 ASSERT(map() == GetHeap()->constant_pool_array_map());
2411 ASSERT(index >= first_heap_ptr_index() && index < first_int32_index());
2412 return READ_FIELD(this, OffsetOfElementAt(index));
2416 int32_t ConstantPoolArray::get_int32_entry(int index) {
2417 ASSERT(map() == GetHeap()->constant_pool_array_map());
2418 ASSERT(index >= first_int32_index() && index < length());
2419 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2423 void ConstantPoolArray::set(int index, Address value) {
2424 ASSERT(map() == GetHeap()->constant_pool_array_map());
2425 ASSERT(index >= first_code_ptr_index() && index < first_heap_ptr_index());
2426 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2430 void ConstantPoolArray::set(int index, Object* value) {
2431 ASSERT(map() == GetHeap()->constant_pool_array_map());
2432 ASSERT(index >= first_code_ptr_index() && index < first_int32_index());
2433 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2434 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2438 void ConstantPoolArray::set(int index, int64_t value) {
2439 ASSERT(map() == GetHeap()->constant_pool_array_map());
2440 ASSERT(index >= first_int64_index() && index < first_code_ptr_index());
2441 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2445 void ConstantPoolArray::set(int index, double value) {
2446 STATIC_ASSERT(kDoubleSize == kInt64Size);
2447 ASSERT(map() == GetHeap()->constant_pool_array_map());
2448 ASSERT(index >= first_int64_index() && index < first_code_ptr_index());
2449 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2453 void ConstantPoolArray::set(int index, int32_t value) {
2454 ASSERT(map() == GetHeap()->constant_pool_array_map());
2455 ASSERT(index >= this->first_int32_index() && index < length());
2456 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2460 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2461 const DisallowHeapAllocation& promise) {
2462 Heap* heap = GetHeap();
2463 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2464 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2465 return UPDATE_WRITE_BARRIER;
2469 void FixedArray::set(int index,
2471 WriteBarrierMode mode) {
2472 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2473 ASSERT(index >= 0 && index < this->length());
2474 int offset = kHeaderSize + index * kPointerSize;
2475 WRITE_FIELD(this, offset, value);
2476 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
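// Illustrative sketch (assumption): callers that cannot allocate typically
// compute the barrier mode once and reuse it for a batch of stores through
// the three-argument set() above. ExampleFill is a hypothetical helper.
inline void ExampleFill(FixedArray* array, Object* value) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < array->length(); i++) {
    array->set(i, value, mode);
  }
}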
2480 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2483 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2484 ASSERT(index >= 0 && index < array->length());
2485 int offset = kHeaderSize + index * kPointerSize;
2486 WRITE_FIELD(array, offset, value);
2487 Heap* heap = array->GetHeap();
2488 if (heap->InNewSpace(value)) {
2489 heap->RecordWrite(array->address(), offset);
2494 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2497 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2498 ASSERT(index >= 0 && index < array->length());
2499 ASSERT(!array->GetHeap()->InNewSpace(value));
2500 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2504 void FixedArray::set_undefined(int index) {
2505 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2506 ASSERT(index >= 0 && index < this->length());
2507 ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2509 kHeaderSize + index * kPointerSize,
2510 GetHeap()->undefined_value());
2514 void FixedArray::set_null(int index) {
2515 ASSERT(index >= 0 && index < this->length());
2516 ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2518 kHeaderSize + index * kPointerSize,
2519 GetHeap()->null_value());
2523 void FixedArray::set_the_hole(int index) {
2524 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2525 ASSERT(index >= 0 && index < this->length());
2526 ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2528 kHeaderSize + index * kPointerSize,
2529 GetHeap()->the_hole_value());
2533 void FixedArray::FillWithHoles(int from, int to) {
2534 for (int i = from; i < to; i++) {
2540 Object** FixedArray::data_start() {
2541 return HeapObject::RawField(this, kHeaderSize);
2545 bool DescriptorArray::IsEmpty() {
2546 ASSERT(length() >= kFirstIndex ||
2547 this == GetHeap()->empty_descriptor_array());
2548 return length() < kFirstIndex;
2552 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2554 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2558 // Perform a binary search in a fixed array. Low and high are entry indices. If
2559 // there are three entries in this array it should be called with low=0 and
2560 // high=2.
2561 template<SearchMode search_mode, typename T>
2562 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2563 uint32_t hash = name->Hash();
2566 ASSERT(low <= high);
2568 while (low != high) {
2569 int mid = (low + high) / 2;
2570 Name* mid_name = array->GetSortedKey(mid);
2571 uint32_t mid_hash = mid_name->Hash();
2573 if (mid_hash >= hash) {
2580 for (; low <= limit; ++low) {
2581 int sort_index = array->GetSortedKeyIndex(low);
2582 Name* entry = array->GetKey(sort_index);
2583 if (entry->Hash() != hash) break;
2584 if (entry->Equals(name)) {
2585 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2588 return T::kNotFound;
2592 return T::kNotFound;
2596 // Perform a linear search in this fixed array. len is the number of entry
2597 // indices that are valid.
2598 template<SearchMode search_mode, typename T>
2599 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2600 uint32_t hash = name->Hash();
2601 if (search_mode == ALL_ENTRIES) {
2602 for (int number = 0; number < len; number++) {
2603 int sorted_index = array->GetSortedKeyIndex(number);
2604 Name* entry = array->GetKey(sorted_index);
2605 uint32_t current_hash = entry->Hash();
2606 if (current_hash > hash) break;
2607 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2610 ASSERT(len >= valid_entries);
2611 for (int number = 0; number < valid_entries; number++) {
2612 Name* entry = array->GetKey(number);
2613 uint32_t current_hash = entry->Hash();
2614 if (current_hash == hash && entry->Equals(name)) return number;
2617 return T::kNotFound;
2621 template<SearchMode search_mode, typename T>
2622 int Search(T* array, Name* name, int valid_entries) {
2623 if (search_mode == VALID_ENTRIES) {
2624 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2626 SLOW_ASSERT(array->IsSortedNoDuplicates());
2629 int nof = array->number_of_entries();
2630 if (nof == 0) return T::kNotFound;
2632 // Fast case: do linear search for small arrays.
2633 const int kMaxElementsForLinearSearch = 8;
2634 if ((search_mode == ALL_ENTRIES &&
2635 nof <= kMaxElementsForLinearSearch) ||
2636 (search_mode == VALID_ENTRIES &&
2637 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2638 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2641 // Slow case: perform binary search.
2642 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
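// Illustrative sketch (assumption): with the cutoff of 8 above, an array
// holding 6 entries is scanned linearly while a larger one is binary searched
// over its hash-sorted key order. ExampleSearchAll is a hypothetical helper;
// valid_entries is unused in ALL_ENTRIES mode, so 0 is passed.
inline int ExampleSearchAll(DescriptorArray* array, Name* name) {
  return internal::Search<ALL_ENTRIES>(array, name, 0);
}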
2646 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2647 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
2651 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2652 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2653 if (number_of_own_descriptors == 0) return kNotFound;
2655 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2656 int number = cache->Lookup(map, name);
2658 if (number == DescriptorLookupCache::kAbsent) {
2659 number = Search(name, number_of_own_descriptors);
2660 cache->Update(map, name, number);
2667 PropertyDetails Map::GetLastDescriptorDetails() {
2668 return instance_descriptors()->GetDetails(LastAdded());
2672 void Map::LookupDescriptor(JSObject* holder,
2674 LookupResult* result) {
2675 DescriptorArray* descriptors = this->instance_descriptors();
2676 int number = descriptors->SearchWithCache(name, this);
2677 if (number == DescriptorArray::kNotFound) return result->NotFound();
2678 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2682 void Map::LookupTransition(JSObject* holder,
2684 LookupResult* result) {
2685 int transition_index = this->SearchTransition(name);
2686 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
2687 result->TransitionResult(holder, this->GetTransition(transition_index));
2691 FixedArrayBase* Map::GetInitialElements() {
2692 if (has_fast_smi_or_object_elements() ||
2693 has_fast_double_elements()) {
2694 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2695 return GetHeap()->empty_fixed_array();
2696 } else if (has_external_array_elements()) {
2697 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
2698 ASSERT(!GetHeap()->InNewSpace(empty_array));
2700 } else if (has_fixed_typed_array_elements()) {
2701 FixedTypedArrayBase* empty_array =
2702 GetHeap()->EmptyFixedTypedArrayForMap(this);
2703 ASSERT(!GetHeap()->InNewSpace(empty_array));
2712 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2713 ASSERT(descriptor_number < number_of_descriptors());
2714 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2718 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2719 return GetKeySlot(descriptor_number);
2723 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2724 return GetValueSlot(descriptor_number - 1) + 1;
2728 Name* DescriptorArray::GetKey(int descriptor_number) {
2729 ASSERT(descriptor_number < number_of_descriptors());
2730 return Name::cast(get(ToKeyIndex(descriptor_number)));
2734 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2735 return GetDetails(descriptor_number).pointer();
2739 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2740 return GetKey(GetSortedKeyIndex(descriptor_number));
2744 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2745 PropertyDetails details = GetDetails(descriptor_index);
2746 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2750 void DescriptorArray::SetRepresentation(int descriptor_index,
2751 Representation representation) {
2752 ASSERT(!representation.IsNone());
2753 PropertyDetails details = GetDetails(descriptor_index);
2754 set(ToDetailsIndex(descriptor_index),
2755 details.CopyWithRepresentation(representation).AsSmi());
2759 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2760 ASSERT(descriptor_number < number_of_descriptors());
2761 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2765 Object* DescriptorArray::GetValue(int descriptor_number) {
2766 ASSERT(descriptor_number < number_of_descriptors());
2767 return get(ToValueIndex(descriptor_number));
2771 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
2772 set(ToValueIndex(descriptor_index), value);
2776 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2777 ASSERT(descriptor_number < number_of_descriptors());
2778 Object* details = get(ToDetailsIndex(descriptor_number));
2779 return PropertyDetails(Smi::cast(details));
2783 PropertyType DescriptorArray::GetType(int descriptor_number) {
2784 return GetDetails(descriptor_number).type();
2788 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2789 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2790 return GetDetails(descriptor_number).field_index();
2794 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
2795 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2796 return HeapType::cast(GetValue(descriptor_number));
2800 Object* DescriptorArray::GetConstant(int descriptor_number) {
2801 return GetValue(descriptor_number);
2805 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2806 ASSERT(GetType(descriptor_number) == CALLBACKS);
2807 return GetValue(descriptor_number);
2811 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
2812 ASSERT(GetType(descriptor_number) == CALLBACKS);
2813 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2814 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2818 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2819 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
2820 handle(GetValue(descriptor_number), GetIsolate()),
2821 GetDetails(descriptor_number));
2825 void DescriptorArray::Set(int descriptor_number,
2827 const WhitenessWitness&) {
2829 ASSERT(descriptor_number < number_of_descriptors());
2831 NoIncrementalWriteBarrierSet(this,
2832 ToKeyIndex(descriptor_number),
2834 NoIncrementalWriteBarrierSet(this,
2835 ToValueIndex(descriptor_number),
2837 NoIncrementalWriteBarrierSet(this,
2838 ToDetailsIndex(descriptor_number),
2839 desc->GetDetails().AsSmi());
2843 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2845 ASSERT(descriptor_number < number_of_descriptors());
2847 set(ToKeyIndex(descriptor_number), *desc->GetKey());
2848 set(ToValueIndex(descriptor_number), *desc->GetValue());
2849 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2853 void DescriptorArray::Append(Descriptor* desc,
2854 const WhitenessWitness& witness) {
2855 DisallowHeapAllocation no_gc;
2856 int descriptor_number = number_of_descriptors();
2857 SetNumberOfDescriptors(descriptor_number + 1);
2858 Set(descriptor_number, desc, witness);
2860 uint32_t hash = desc->GetKey()->Hash();
2864 for (insertion = descriptor_number; insertion > 0; --insertion) {
2865 Name* key = GetSortedKey(insertion - 1);
2866 if (key->Hash() <= hash) break;
2867 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2870 SetSortedKey(insertion, descriptor_number);
2874 void DescriptorArray::Append(Descriptor* desc) {
2875 DisallowHeapAllocation no_gc;
2876 int descriptor_number = number_of_descriptors();
2877 SetNumberOfDescriptors(descriptor_number + 1);
2878 Set(descriptor_number, desc);
2880 uint32_t hash = desc->GetKey()->Hash();
2884 for (insertion = descriptor_number; insertion > 0; --insertion) {
2885 Name* key = GetSortedKey(insertion - 1);
2886 if (key->Hash() <= hash) break;
2887 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2890 SetSortedKey(insertion, descriptor_number);
2894 void DescriptorArray::SwapSortedKeys(int first, int second) {
2895 int first_key = GetSortedKeyIndex(first);
2896 SetSortedKey(first, GetSortedKeyIndex(second));
2897 SetSortedKey(second, first_key);
2901 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2902 : marking_(array->GetHeap()->incremental_marking()) {
2903 marking_->EnterNoMarkingScope();
2904 ASSERT(!marking_->IsMarking() ||
2905 Marking::Color(array) == Marking::WHITE_OBJECT);
2909 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2910 marking_->LeaveNoMarkingScope();
2914 template<typename Derived, typename Shape, typename Key>
2915 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
2916 const int kMinCapacity = 32;
2917 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2918 if (capacity < kMinCapacity) {
2919 capacity = kMinCapacity; // Guarantee min capacity.
2925 template<typename Derived, typename Shape, typename Key>
2926 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
2927 return FindEntry(GetIsolate(), key);
2931 // Find the entry for key; otherwise return kNotFound.
2932 template<typename Derived, typename Shape, typename Key>
2933 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2934 uint32_t capacity = Capacity();
2935 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
2937 // EnsureCapacity will guarantee the hash table is never full.
2939 Object* element = KeyAt(entry);
2940 // Empty entry. Uses raw unchecked accessors because it is called by the
2941 // string table during bootstrapping.
2942 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2943 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2944 Shape::IsMatch(key, element)) return entry;
2945 entry = NextProbe(entry, count++, capacity);
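// Illustrative sketch (assumption: kNotFound is the sentinel FindEntry()
// returns on a miss): the probe sequence above stops at the first undefined
// (never used) slot but keeps walking past the-hole slots left by deletions.
// ExampleContainsKey is a hypothetical helper.
template<typename Derived, typename Shape, typename Key>
inline bool ExampleContainsKey(HashTable<Derived, Shape, Key>* table, Key key) {
  return table->FindEntry(key) != HashTable<Derived, Shape, Key>::kNotFound;
}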
2951 bool SeededNumberDictionary::requires_slow_elements() {
2952 Object* max_index_object = get(kMaxNumberKeyIndex);
2953 if (!max_index_object->IsSmi()) return false;
2955 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2958 uint32_t SeededNumberDictionary::max_number_key() {
2959 ASSERT(!requires_slow_elements());
2960 Object* max_index_object = get(kMaxNumberKeyIndex);
2961 if (!max_index_object->IsSmi()) return 0;
2962 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2963 return value >> kRequiresSlowElementsTagSize;
2966 void SeededNumberDictionary::set_requires_slow_elements() {
2967 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2971 // ------------------------------------
2975 CAST_ACCESSOR(FixedArray)
2976 CAST_ACCESSOR(FixedDoubleArray)
2977 CAST_ACCESSOR(FixedTypedArrayBase)
2978 CAST_ACCESSOR(ConstantPoolArray)
2979 CAST_ACCESSOR(DescriptorArray)
2980 CAST_ACCESSOR(DeoptimizationInputData)
2981 CAST_ACCESSOR(DeoptimizationOutputData)
2982 CAST_ACCESSOR(DependentCode)
2983 CAST_ACCESSOR(StringTable)
2984 CAST_ACCESSOR(JSFunctionResultCache)
2985 CAST_ACCESSOR(NormalizedMapCache)
2986 CAST_ACCESSOR(ScopeInfo)
2987 CAST_ACCESSOR(CompilationCacheTable)
2988 CAST_ACCESSOR(CodeCacheHashTable)
2989 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2990 CAST_ACCESSOR(MapCache)
2991 CAST_ACCESSOR(String)
2992 CAST_ACCESSOR(SeqString)
2993 CAST_ACCESSOR(SeqOneByteString)
2994 CAST_ACCESSOR(SeqTwoByteString)
2995 CAST_ACCESSOR(SlicedString)
2996 CAST_ACCESSOR(ConsString)
2997 CAST_ACCESSOR(ExternalString)
2998 CAST_ACCESSOR(ExternalAsciiString)
2999 CAST_ACCESSOR(ExternalTwoByteString)
3000 CAST_ACCESSOR(Symbol)
3002 CAST_ACCESSOR(JSReceiver)
3003 CAST_ACCESSOR(JSObject)
3005 CAST_ACCESSOR(HeapObject)
3006 CAST_ACCESSOR(HeapNumber)
3007 CAST_ACCESSOR(Float32x4)
3008 CAST_ACCESSOR(Float64x2)
3009 CAST_ACCESSOR(Int32x4)
3010 CAST_ACCESSOR(Oddball)
3012 CAST_ACCESSOR(PropertyCell)
3013 CAST_ACCESSOR(SharedFunctionInfo)
3015 CAST_ACCESSOR(JSFunction)
3016 CAST_ACCESSOR(GlobalObject)
3017 CAST_ACCESSOR(JSGlobalProxy)
3018 CAST_ACCESSOR(JSGlobalObject)
3019 CAST_ACCESSOR(JSBuiltinsObject)
3021 CAST_ACCESSOR(JSArray)
3022 CAST_ACCESSOR(JSArrayBuffer)
3023 CAST_ACCESSOR(JSArrayBufferView)
3024 CAST_ACCESSOR(JSTypedArray)
3025 CAST_ACCESSOR(JSDataView)
3026 CAST_ACCESSOR(JSRegExp)
3027 CAST_ACCESSOR(JSProxy)
3028 CAST_ACCESSOR(JSFunctionProxy)
3029 CAST_ACCESSOR(JSSet)
3030 CAST_ACCESSOR(JSMap)
3031 CAST_ACCESSOR(JSSetIterator)
3032 CAST_ACCESSOR(JSMapIterator)
3033 CAST_ACCESSOR(JSWeakMap)
3034 CAST_ACCESSOR(JSWeakSet)
3035 CAST_ACCESSOR(Foreign)
3036 CAST_ACCESSOR(ByteArray)
3037 CAST_ACCESSOR(FreeSpace)
3038 CAST_ACCESSOR(ExternalArray)
3039 CAST_ACCESSOR(ExternalInt8Array)
3040 CAST_ACCESSOR(ExternalUint8Array)
3041 CAST_ACCESSOR(ExternalInt16Array)
3042 CAST_ACCESSOR(ExternalUint16Array)
3043 CAST_ACCESSOR(ExternalInt32Array)
3044 CAST_ACCESSOR(ExternalInt32x4Array)
3045 CAST_ACCESSOR(ExternalUint32Array)
3046 CAST_ACCESSOR(ExternalFloat32Array)
3047 CAST_ACCESSOR(ExternalFloat32x4Array)
3048 CAST_ACCESSOR(ExternalFloat64Array)
3049 CAST_ACCESSOR(ExternalFloat64x2Array)
3050 CAST_ACCESSOR(ExternalUint8ClampedArray)
3051 CAST_ACCESSOR(Struct)
3052 CAST_ACCESSOR(AccessorInfo)
3054 template <class Traits>
3055 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3056 SLOW_ASSERT(object->IsHeapObject() &&
3057 HeapObject::cast(object)->map()->instance_type() ==
3058 Traits::kInstanceType);
3059 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3063 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3064 STRUCT_LIST(MAKE_STRUCT_CAST)
3065 #undef MAKE_STRUCT_CAST
3068 template <typename Derived, typename Shape, typename Key>
3069 HashTable<Derived, Shape, Key>*
3070 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3071 ASSERT(obj->IsHashTable());
3072 return reinterpret_cast<HashTable*>(obj);
3076 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3077 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3079 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3080 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3082 SMI_ACCESSORS(String, length, kLengthOffset)
3083 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3086 uint32_t Name::hash_field() {
3087 return READ_UINT32_FIELD(this, kHashFieldOffset);
3091 void Name::set_hash_field(uint32_t value) {
3092 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3093 #if V8_HOST_ARCH_64_BIT
3094 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
3099 bool Name::Equals(Name* other) {
3100 if (other == this) return true;
3101 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3102 this->IsSymbol() || other->IsSymbol()) {
3105 return String::cast(this)->SlowEquals(String::cast(other));
3109 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3110 if (one.is_identical_to(two)) return true;
3111 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3112 one->IsSymbol() || two->IsSymbol()) {
3115 return String::SlowEquals(Handle<String>::cast(one),
3116 Handle<String>::cast(two));
3120 ACCESSORS(Symbol, name, Object, kNameOffset)
3121 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3122 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3125 bool String::Equals(String* other) {
3126 if (other == this) return true;
3127 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3130 return SlowEquals(other);
3134 bool String::Equals(Handle<String> one, Handle<String> two) {
3135 if (one.is_identical_to(two)) return true;
3136 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3139 return SlowEquals(one, two);
3143 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3144 if (!string->IsConsString()) return string;
3145 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3146 if (cons->IsFlat()) return handle(cons->first());
3147 return SlowFlatten(cons, pretenure);
3151 uint16_t String::Get(int index) {
3152 ASSERT(index >= 0 && index < length());
3153 switch (StringShape(this).full_representation_tag()) {
3154 case kSeqStringTag | kOneByteStringTag:
3155 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3156 case kSeqStringTag | kTwoByteStringTag:
3157 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3158 case kConsStringTag | kOneByteStringTag:
3159 case kConsStringTag | kTwoByteStringTag:
3160 return ConsString::cast(this)->ConsStringGet(index);
3161 case kExternalStringTag | kOneByteStringTag:
3162 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
3163 case kExternalStringTag | kTwoByteStringTag:
3164 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3165 case kSlicedStringTag | kOneByteStringTag:
3166 case kSlicedStringTag | kTwoByteStringTag:
3167 return SlicedString::cast(this)->SlicedStringGet(index);
3177 void String::Set(int index, uint16_t value) {
3178 ASSERT(index >= 0 && index < length());
3179 ASSERT(StringShape(this).IsSequential());
3181 return this->IsOneByteRepresentation()
3182 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3183 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3187 bool String::IsFlat() {
3188 if (!StringShape(this).IsCons()) return true;
3189 return ConsString::cast(this)->second()->length() == 0;
3193 String* String::GetUnderlying() {
3194 // Giving direct access to the underlying string only makes sense if the
3195 // wrapping string is already flattened.
3196 ASSERT(this->IsFlat());
3197 ASSERT(StringShape(this).IsIndirect());
3198 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3199 const int kUnderlyingOffset = SlicedString::kParentOffset;
3200 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3204 template<class Visitor>
3205 ConsString* String::VisitFlat(Visitor* visitor,
3208 int slice_offset = offset;
3209 const int length = string->length();
3210 ASSERT(offset <= length);
3212 int32_t type = string->map()->instance_type();
3213 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3214 case kSeqStringTag | kOneByteStringTag:
3215 visitor->VisitOneByteString(
3216 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3220 case kSeqStringTag | kTwoByteStringTag:
3221 visitor->VisitTwoByteString(
3222 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3226 case kExternalStringTag | kOneByteStringTag:
3227 visitor->VisitOneByteString(
3228 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3232 case kExternalStringTag | kTwoByteStringTag:
3233 visitor->VisitTwoByteString(
3234 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3238 case kSlicedStringTag | kOneByteStringTag:
3239 case kSlicedStringTag | kTwoByteStringTag: {
3240 SlicedString* slicedString = SlicedString::cast(string);
3241 slice_offset += slicedString->offset();
3242 string = slicedString->parent();
3246 case kConsStringTag | kOneByteStringTag:
3247 case kConsStringTag | kTwoByteStringTag:
3248 return ConsString::cast(string);
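// Illustrative sketch (not part of the original source): a minimal visitor
// accepted by VisitFlat() above; it only records which encoding the flat
// content uses. ExampleEncodingVisitor is hypothetical.
class ExampleEncodingVisitor {
 public:
  ExampleEncodingVisitor() : saw_one_byte_(false), saw_two_byte_(false) {}
  void VisitOneByteString(const uint8_t*, int) { saw_one_byte_ = true; }
  void VisitTwoByteString(const uint16_t*, int) { saw_two_byte_ = true; }
  bool saw_one_byte() const { return saw_one_byte_; }
  bool saw_two_byte() const { return saw_two_byte_; }
 private:
  bool saw_one_byte_;
  bool saw_two_byte_;
};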
3258 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3259 ASSERT(index >= 0 && index < length());
3260 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3264 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3265 ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3266 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3267 static_cast<byte>(value));
3271 Address SeqOneByteString::GetCharsAddress() {
3272 return FIELD_ADDR(this, kHeaderSize);
3276 uint8_t* SeqOneByteString::GetChars() {
3277 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3281 Address SeqTwoByteString::GetCharsAddress() {
3282 return FIELD_ADDR(this, kHeaderSize);
3286 uc16* SeqTwoByteString::GetChars() {
3287 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3291 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3292 ASSERT(index >= 0 && index < length());
3293 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3297 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3298 ASSERT(index >= 0 && index < length());
3299 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3303 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3304 return SizeFor(length());
3308 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3309 return SizeFor(length());
3313 String* SlicedString::parent() {
3314 return String::cast(READ_FIELD(this, kParentOffset));
3318 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3319 ASSERT(parent->IsSeqString() || parent->IsExternalString());
3320 WRITE_FIELD(this, kParentOffset, parent);
3321 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3325 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3328 String* ConsString::first() {
3329 return String::cast(READ_FIELD(this, kFirstOffset));
3333 Object* ConsString::unchecked_first() {
3334 return READ_FIELD(this, kFirstOffset);
3338 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3339 WRITE_FIELD(this, kFirstOffset, value);
3340 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3344 String* ConsString::second() {
3345 return String::cast(READ_FIELD(this, kSecondOffset));
3349 Object* ConsString::unchecked_second() {
3350 return READ_FIELD(this, kSecondOffset);
3354 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3355 WRITE_FIELD(this, kSecondOffset, value);
3356 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3360 bool ExternalString::is_short() {
3361 InstanceType type = map()->instance_type();
3362 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3366 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3367 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3371 void ExternalAsciiString::update_data_cache() {
3372 if (is_short()) return;
3373 const char** data_field =
3374 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3375 *data_field = resource()->data();
3379 void ExternalAsciiString::set_resource(
3380 const ExternalAsciiString::Resource* resource) {
3381 ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3382 *reinterpret_cast<const Resource**>(
3383 FIELD_ADDR(this, kResourceOffset)) = resource;
3384 if (resource != NULL) update_data_cache();
3388 const uint8_t* ExternalAsciiString::GetChars() {
3389 return reinterpret_cast<const uint8_t*>(resource()->data());
3393 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3394 ASSERT(index >= 0 && index < length());
3395 return GetChars()[index];
3399 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3400 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3404 void ExternalTwoByteString::update_data_cache() {
3405 if (is_short()) return;
3406 const uint16_t** data_field =
3407 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3408 *data_field = resource()->data();
3412 void ExternalTwoByteString::set_resource(
3413 const ExternalTwoByteString::Resource* resource) {
3414 *reinterpret_cast<const Resource**>(
3415 FIELD_ADDR(this, kResourceOffset)) = resource;
3416 if (resource != NULL) update_data_cache();
3420 const uint16_t* ExternalTwoByteString::GetChars() {
3421 return resource()->data();
3425 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3426 ASSERT(index >= 0 && index < length());
3427 return GetChars()[index];
3431 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3433 return GetChars() + start;
3437 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3438 return depth & kDepthMask;
3442 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3443 frames_[depth_++ & kDepthMask] = string;
3447 void ConsStringIteratorOp::PushRight(ConsString* string) {
3449 frames_[(depth_-1) & kDepthMask] = string;
3453 void ConsStringIteratorOp::AdjustMaximumDepth() {
3454 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3458 void ConsStringIteratorOp::Pop() {
3460 ASSERT(depth_ <= maximum_depth_);
3465 uint16_t StringCharacterStream::GetNext() {
3466 ASSERT(buffer8_ != NULL && end_ != NULL);
3467 // Advance cursor if needed.
3468 if (buffer8_ == end_) HasMore();
3469 ASSERT(buffer8_ < end_);
3470 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3474 StringCharacterStream::StringCharacterStream(String* string,
3475 ConsStringIteratorOp* op,
3477 : is_one_byte_(false),
3479 Reset(string, offset);
3483 void StringCharacterStream::Reset(String* string, int offset) {
3486 ConsString* cons_string = String::VisitFlat(this, string, offset);
3487 op_->Reset(cons_string, offset);
3488 if (cons_string != NULL) {
3489 string = op_->Next(&offset);
3490 if (string != NULL) String::VisitFlat(this, string, offset);
3495 bool StringCharacterStream::HasMore() {
3496 if (buffer8_ != end_) return true;
3498 String* string = op_->Next(&offset);
3499 ASSERT_EQ(offset, 0);
3500 if (string == NULL) return false;
3501 String::VisitFlat(this, string);
3502 ASSERT(buffer8_ != end_);
3507 void StringCharacterStream::VisitOneByteString(
3508 const uint8_t* chars, int length) {
3509 is_one_byte_ = true;
3511 end_ = chars + length;
3515 void StringCharacterStream::VisitTwoByteString(
3516 const uint16_t* chars, int length) {
3517 is_one_byte_ = false;
3519 end_ = reinterpret_cast<const uint8_t*>(chars + length);
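// Illustrative sketch (assumption: the constructor above defaults the start
// offset to 0): walking every character of a possibly non-flat string with
// the stream. ExampleCountCharacters is a hypothetical helper.
inline int ExampleCountCharacters(String* string, ConsStringIteratorOp* op) {
  StringCharacterStream stream(string, op);
  int count = 0;
  while (stream.HasMore()) {
    stream.GetNext();
    count++;
  }
  return count;
}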
3523 void JSFunctionResultCache::MakeZeroSize() {
3524 set_finger_index(kEntriesIndex);
3525 set_size(kEntriesIndex);
3529 void JSFunctionResultCache::Clear() {
3530 int cache_size = size();
3531 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3532 MemsetPointer(entries_start,
3533 GetHeap()->the_hole_value(),
3534 cache_size - kEntriesIndex);
3539 int JSFunctionResultCache::size() {
3540 return Smi::cast(get(kCacheSizeIndex))->value();
3544 void JSFunctionResultCache::set_size(int size) {
3545 set(kCacheSizeIndex, Smi::FromInt(size));
3549 int JSFunctionResultCache::finger_index() {
3550 return Smi::cast(get(kFingerIndex))->value();
3554 void JSFunctionResultCache::set_finger_index(int finger_index) {
3555 set(kFingerIndex, Smi::FromInt(finger_index));
3559 byte ByteArray::get(int index) {
3560 ASSERT(index >= 0 && index < this->length());
3561 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3565 void ByteArray::set(int index, byte value) {
3566 ASSERT(index >= 0 && index < this->length());
3567 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3571 int ByteArray::get_int(int index) {
3572 ASSERT(index >= 0 && (index * kIntSize) < this->length());
3573 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3577 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3578 ASSERT_TAG_ALIGNED(address);
3579 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3583 Address ByteArray::GetDataStartAddress() {
3584 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3588 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3589 return reinterpret_cast<uint8_t*>(external_pointer());
3593 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3594 ASSERT((index >= 0) && (index < this->length()));
3595 uint8_t* ptr = external_uint8_clamped_pointer();
3600 Handle<Object> ExternalUint8ClampedArray::get(
3601 Handle<ExternalUint8ClampedArray> array,
3603 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3604 array->GetIsolate());
3608 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3609 ASSERT((index >= 0) && (index < this->length()));
3610 uint8_t* ptr = external_uint8_clamped_pointer();
3615 void* ExternalArray::external_pointer() {
3616 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3617 return reinterpret_cast<void*>(ptr);
3621 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3622 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3623 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3627 int8_t ExternalInt8Array::get_scalar(int index) {
3628 ASSERT((index >= 0) && (index < this->length()));
3629 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3634 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3636 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3637 array->GetIsolate());
3641 void ExternalInt8Array::set(int index, int8_t value) {
3642 ASSERT((index >= 0) && (index < this->length()));
3643 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3648 uint8_t ExternalUint8Array::get_scalar(int index) {
3649 ASSERT((index >= 0) && (index < this->length()));
3650 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3655 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3657 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3658 array->GetIsolate());
3662 void ExternalUint8Array::set(int index, uint8_t value) {
3663 ASSERT((index >= 0) && (index < this->length()));
3664 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3669 int16_t ExternalInt16Array::get_scalar(int index) {
3670 ASSERT((index >= 0) && (index < this->length()));
3671 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3676 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
3678 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3679 array->GetIsolate());
3683 void ExternalInt16Array::set(int index, int16_t value) {
3684 ASSERT((index >= 0) && (index < this->length()));
3685 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3690 uint16_t ExternalUint16Array::get_scalar(int index) {
3691 ASSERT((index >= 0) && (index < this->length()));
3692 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3697 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
3699 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3700 array->GetIsolate());
3704 void ExternalUint16Array::set(int index, uint16_t value) {
3705 ASSERT((index >= 0) && (index < this->length()));
3706 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3711 int32_t ExternalInt32Array::get_scalar(int index) {
3712 ASSERT((index >= 0) && (index < this->length()));
3713 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3718 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
3720 return array->GetIsolate()->factory()->
3721 NewNumberFromInt(array->get_scalar(index));
3725 void ExternalInt32Array::set(int index, int32_t value) {
3726 ASSERT((index >= 0) && (index < this->length()));
3727 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3732 uint32_t ExternalUint32Array::get_scalar(int index) {
3733 ASSERT((index >= 0) && (index < this->length()));
3734 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3739 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
3741 return array->GetIsolate()->factory()->
3742 NewNumberFromUint(array->get_scalar(index));
3746 void ExternalUint32Array::set(int index, uint32_t value) {
3747 ASSERT((index >= 0) && (index < this->length()));
3748 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3753 float ExternalFloat32Array::get_scalar(int index) {
3754 ASSERT((index >= 0) && (index < this->length()));
3755 float* ptr = static_cast<float*>(external_pointer());
3760 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
3762 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3766 void ExternalFloat32Array::set(int index, float value) {
3767 ASSERT((index >= 0) && (index < this->length()));
3768 float* ptr = static_cast<float*>(external_pointer());
3773 float32x4_value_t ExternalFloat32x4Array::get_scalar(int index) {
3774 ASSERT((index >= 0) && (index < this->length()));
3775 float* ptr = static_cast<float*>(external_pointer());
3776 float32x4_value_t value;
3777 value.storage[0] = ptr[index * 4 + 0];
3778 value.storage[1] = ptr[index * 4 + 1];
3779 value.storage[2] = ptr[index * 4 + 2];
3780 value.storage[3] = ptr[index * 4 + 3];
3785 Handle<Object> ExternalFloat32x4Array::get(Handle<ExternalFloat32x4Array> array,
3787 float32x4_value_t value = array->get_scalar(index);
3788 return array->GetIsolate()->factory()->NewFloat32x4(value);
3792 void ExternalFloat32x4Array::set(int index, const float32x4_value_t& value) {
3793 ASSERT((index >= 0) && (index < this->length()));
3794 float* ptr = static_cast<float*>(external_pointer());
3795 ptr[index * 4 + 0] = value.storage[0];
3796 ptr[index * 4 + 1] = value.storage[1];
3797 ptr[index * 4 + 2] = value.storage[2];
3798 ptr[index * 4 + 3] = value.storage[3];
3802 float64x2_value_t ExternalFloat64x2Array::get_scalar(int index) {
3803 ASSERT((index >= 0) && (index < this->length()));
3804 double* ptr = static_cast<double*>(external_pointer());
3805 float64x2_value_t value;
3806 value.storage[0] = ptr[index * 2 + 0];
3807 value.storage[1] = ptr[index * 2 + 1];
3812 Handle<Object> ExternalFloat64x2Array::get(Handle<ExternalFloat64x2Array> array,
3814 float64x2_value_t value = array->get_scalar(index);
3815 return array->GetIsolate()->factory()->NewFloat64x2(value);
3819 void ExternalFloat64x2Array::set(int index, const float64x2_value_t& value) {
3820 ASSERT((index >= 0) && (index < this->length()));
3821 double* ptr = static_cast<double*>(external_pointer());
3822 ptr[index * 2 + 0] = value.storage[0];
3823 ptr[index * 2 + 1] = value.storage[1];
3827 int32x4_value_t ExternalInt32x4Array::get_scalar(int index) {
3828 ASSERT((index >= 0) && (index < this->length()));
3829 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3830 int32x4_value_t value;
3831 value.storage[0] = ptr[index * 4 + 0];
3832 value.storage[1] = ptr[index * 4 + 1];
3833 value.storage[2] = ptr[index * 4 + 2];
3834 value.storage[3] = ptr[index * 4 + 3];
3839 Handle<Object> ExternalInt32x4Array::get(Handle<ExternalInt32x4Array> array,
3841 int32x4_value_t value = array->get_scalar(index);
3842 return array->GetIsolate()->factory()->NewInt32x4(value);
3846 void ExternalInt32x4Array::set(int index, const int32x4_value_t& value) {
3847 ASSERT((index >= 0) && (index < this->length()));
3848 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3849 ptr[index * 4 + 0] = value.storage[0];
3850 ptr[index * 4 + 1] = value.storage[1];
3851 ptr[index * 4 + 2] = value.storage[2];
3852 ptr[index * 4 + 3] = value.storage[3];
3856 double ExternalFloat64Array::get_scalar(int index) {
3857 ASSERT((index >= 0) && (index < this->length()));
3858 double* ptr = static_cast<double*>(external_pointer());
3863 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
3865 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3869 void ExternalFloat64Array::set(int index, double value) {
3870 ASSERT((index >= 0) && (index < this->length()));
3871 double* ptr = static_cast<double*>(external_pointer());
3876 void* FixedTypedArrayBase::DataPtr() {
3877 return FIELD_ADDR(this, kDataOffset);
3881 int FixedTypedArrayBase::DataSize() {
3882 InstanceType instance_type = map()->instance_type();
3884 switch (instance_type) {
3885 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3886 case FIXED_##TYPE##_ARRAY_TYPE: \
3887 element_size = size; \
3890 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3891 #undef TYPED_ARRAY_CASE
3896 return length() * element_size;
3900 int FixedTypedArrayBase::size() {
3901 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
3905 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
3908 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
3911 int8_t Int8ArrayTraits::defaultValue() { return 0; }
3914 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
3917 int16_t Int16ArrayTraits::defaultValue() { return 0; }
3920 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
3923 int32_t Int32ArrayTraits::defaultValue() { return 0; }
3926 float Float32ArrayTraits::defaultValue() {
3927 return static_cast<float>(OS::nan_value());
3931 double Float64ArrayTraits::defaultValue() { return OS::nan_value(); }
3934 template <class Traits>
3935 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3936 ASSERT((index >= 0) && (index < this->length()));
3937 ElementType* ptr = reinterpret_cast<ElementType*>(
3938 FIELD_ADDR(this, kDataOffset));
3944 FixedTypedArray<Float64ArrayTraits>::ElementType
3945 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
3946 ASSERT((index >= 0) && (index < this->length()));
3947 return READ_DOUBLE_FIELD(this, ElementOffset(index));
3951 template <class Traits>
3952 void FixedTypedArray<Traits>::set(int index, ElementType value) {
3953 ASSERT((index >= 0) && (index < this->length()));
3954 ElementType* ptr = reinterpret_cast<ElementType*>(
3955 FIELD_ADDR(this, kDataOffset));
3961 void FixedTypedArray<Float64ArrayTraits>::set(
3962 int index, Float64ArrayTraits::ElementType value) {
3963 ASSERT((index >= 0) && (index < this->length()));
3964 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
3968 template <class Traits>
3969 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
3970 return static_cast<ElementType>(value);
3975 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
3976 if (value < 0) return 0;
3977 if (value > 0xFF) return 0xFF;
3978 return static_cast<uint8_t>(value);
3982 template <class Traits>
3983 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
3985 return static_cast<ElementType>(DoubleToInt32(value));
3990 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
3991 if (value < 0) return 0;
3992 if (value > 0xFF) return 0xFF;
3993 return static_cast<uint8_t>(lrint(value));
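// Illustrative sketch (not part of the original source): the clamped
// conversions above saturate instead of wrapping, and from_double() rounds to
// the nearest integer, so 300 and -3 clamp to 255 and 0 while 254.6 rounds up
// to 255. ExampleClampedConversion is a hypothetical helper.
inline uint8_t ExampleClampedConversion() {
  uint8_t a = FixedTypedArray<Uint8ClampedArrayTraits>::from_int(300);
  uint8_t b = FixedTypedArray<Uint8ClampedArrayTraits>::from_int(-3);
  uint8_t c = FixedTypedArray<Uint8ClampedArrayTraits>::from_double(254.6);
  ASSERT(a == 0xFF && b == 0 && c == 0xFF);
  return static_cast<uint8_t>(a + b + c);
}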
3998 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
3999 return static_cast<float>(value);
4004 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4009 template <class Traits>
4010 Handle<Object> FixedTypedArray<Traits>::get(
4011 Handle<FixedTypedArray<Traits> > array,
4013 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4017 template <class Traits>
4018 Handle<Object> FixedTypedArray<Traits>::SetValue(
4019 Handle<FixedTypedArray<Traits> > array,
4021 Handle<Object> value) {
4022 ElementType cast_value = Traits::defaultValue();
4023 if (index < static_cast<uint32_t>(array->length())) {
4024 if (value->IsSmi()) {
4025 int int_value = Handle<Smi>::cast(value)->value();
4026 cast_value = from_int(int_value);
4027 } else if (value->IsHeapNumber()) {
4028 double double_value = Handle<HeapNumber>::cast(value)->value();
4029 cast_value = from_double(double_value);
4031 // Clamp undefined to the default value. All other types have been
4032 // converted to a number type further up in the call chain.
4033 ASSERT(value->IsUndefined());
4035 array->set(index, cast_value);
4037 return Traits::ToHandle(array->GetIsolate(), cast_value);
4041 Handle<Object> FixedTypedArray<Float32x4ArrayTraits>::SetValue(
4042 Handle<FixedTypedArray<Float32x4ArrayTraits> > array,
4043 uint32_t index, Handle<Object> value) {
4044 float32x4_value_t cast_value;
4045 cast_value.storage[0] = static_cast<float>(OS::nan_value());
4046 cast_value.storage[1] = static_cast<float>(OS::nan_value());
4047 cast_value.storage[2] = static_cast<float>(OS::nan_value());
4048 cast_value.storage[3] = static_cast<float>(OS::nan_value());
4049 if (index < static_cast<uint32_t>(array->length())) {
4050 if (value->IsFloat32x4()) {
4051 cast_value = Handle<Float32x4>::cast(value)->value();
4053 // Clamp undefined to NaN (default). All other types have been
4054 // converted to a number type further up in the call chain.
4055 ASSERT(value->IsUndefined());
4057 array->set(index, cast_value);
4059 return Float32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
4064 Handle<Object> FixedTypedArray<Float64x2ArrayTraits>::SetValue(
4065 Handle<FixedTypedArray<Float64x2ArrayTraits> > array,
4066 uint32_t index, Handle<Object> value) {
4067 float64x2_value_t cast_value;
4068 cast_value.storage[0] = OS::nan_value();
4069 cast_value.storage[1] = OS::nan_value();
4070 if (index < static_cast<uint32_t>(array->length())) {
4071 if (value->IsFloat64x2()) {
4072 cast_value = Handle<Float64x2>::cast(value)->value();
4074 // Clamp undefined to NaN (default). All other types have been
4075 // converted to a number type further up in the call chain.
4076 ASSERT(value->IsUndefined());
4078 array->set(index, cast_value);
4080 return Float64x2ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
4085 Handle<Object> FixedTypedArray<Int32x4ArrayTraits>::SetValue(
4086 Handle<FixedTypedArray<Int32x4ArrayTraits> > array,
4087 uint32_t index, Handle<Object> value) {
4088 int32x4_value_t cast_value;
4089 cast_value.storage[0] = 0;
4090 cast_value.storage[1] = 0;
4091 cast_value.storage[2] = 0;
4092 cast_value.storage[3] = 0;
4093 if (index < static_cast<uint32_t>(array->length())) {
4094 if (value->IsInt32x4()) {
4095 cast_value = Handle<Int32x4>::cast(value)->value();
4097 // Clamp undefined to zero (default). All other types have been
4098 // converted to a number type further up in the call chain.
4099 ASSERT(value->IsUndefined());
4101 array->set(index, cast_value);
4103 return Int32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
4107 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4108 return handle(Smi::FromInt(scalar), isolate);
4112 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4114 return handle(Smi::FromInt(scalar), isolate);
4118 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4119 return handle(Smi::FromInt(scalar), isolate);
4123 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4124 return handle(Smi::FromInt(scalar), isolate);
4128 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4129 return handle(Smi::FromInt(scalar), isolate);
4133 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4134 return isolate->factory()->NewNumberFromUint(scalar);
4138 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4139 return isolate->factory()->NewNumberFromInt(scalar);
4143 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4144 return isolate->factory()->NewNumber(scalar);
4148 Handle<Object> Int32x4ArrayTraits::ToHandle(
4149 Isolate* isolate, int32x4_value_t scalar) {
4150 return isolate->factory()->NewInt32x4(scalar);
4154 Handle<Object> Float32x4ArrayTraits::ToHandle(
4155 Isolate* isolate, float32x4_value_t scalar) {
4156 return isolate->factory()->NewFloat32x4(scalar);
4160 Handle<Object> Float64x2ArrayTraits::ToHandle(
4161 Isolate* isolate, float64x2_value_t scalar) {
4162 return isolate->factory()->NewFloat64x2(scalar);
4166 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4167 return isolate->factory()->NewNumber(scalar);
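// Illustrative note (not part of this header): the ToHandle overloads box each
// element type in the cheapest representation that can hold it. 8- and 16-bit
// integers always fit in a Smi, so they bypass the factory; 32-bit integers can
// exceed the Smi range and go through NewNumberFromInt/NewNumberFromUint; float
// and double elements always produce a number object. Hedged sketch (assumes an
// 'isolate' in scope):
//
//   Handle<Object> a = Uint16ArrayTraits::ToHandle(isolate, 65535);     // Smi
//   Handle<Object> b = Uint32ArrayTraits::ToHandle(isolate, 1u << 31);  // heap number (exceeds Smi range)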
4171 int Map::visitor_id() {
4172 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4176 void Map::set_visitor_id(int id) {
4177 ASSERT(0 <= id && id < 256);
4178 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4182 int Map::instance_size() {
4183 return NOBARRIER_READ_BYTE_FIELD(
4184 this, kInstanceSizeOffset) << kPointerSizeLog2;
4188 int Map::inobject_properties() {
4189 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4193 int Map::pre_allocated_property_fields() {
4194 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
4198 int Map::GetInObjectPropertyOffset(int index) {
4199 // Adjust for the number of properties stored in the object.
4200 index -= inobject_properties();
4202 return instance_size() + (index * kPointerSize);
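// Illustrative note (not part of this header): after the adjustment above the
// index is negative (or zero), so in-object properties occupy the last
// inobject_properties() pointer-sized slots of the instance. With hypothetical
// numbers instance_size() == 64, inobject_properties() == 4 and
// kPointerSize == 8, property 0 lands at 64 + (0 - 4) * 8 == 32 and property 3
// at 64 - 8 == 56, just before the end of the object.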
4206 int HeapObject::SizeFromMap(Map* map) {
4207 int instance_size = map->instance_size();
4208 if (instance_size != kVariableSizeSentinel) return instance_size;
4209 // Only inline the most frequent cases.
4210 int instance_type = static_cast<int>(map->instance_type());
4211 if (instance_type == FIXED_ARRAY_TYPE) {
4212 return FixedArray::BodyDescriptor::SizeOf(map, this);
4214 if (instance_type == ASCII_STRING_TYPE ||
4215 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
4216 return SeqOneByteString::SizeFor(
4217 reinterpret_cast<SeqOneByteString*>(this)->length());
4219 if (instance_type == BYTE_ARRAY_TYPE) {
4220 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4222 if (instance_type == FREE_SPACE_TYPE) {
4223 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4225 if (instance_type == STRING_TYPE ||
4226 instance_type == INTERNALIZED_STRING_TYPE) {
4227 return SeqTwoByteString::SizeFor(
4228 reinterpret_cast<SeqTwoByteString*>(this)->length());
4230 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4231 return FixedDoubleArray::SizeFor(
4232 reinterpret_cast<FixedDoubleArray*>(this)->length());
4234 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4235 return ConstantPoolArray::SizeFor(
4236 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
4237 reinterpret_cast<ConstantPoolArray*>(this)->count_of_code_ptr_entries(),
4238 reinterpret_cast<ConstantPoolArray*>(this)->count_of_heap_ptr_entries(),
4239 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
4241 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4242 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4243 return reinterpret_cast<FixedTypedArrayBase*>(this)->size();
4245 ASSERT(instance_type == CODE_TYPE);
4246 return reinterpret_cast<Code*>(this)->CodeSize();
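// Illustrative sketch (not part of this header): kVariableSizeSentinel marks
// maps whose instances have no fixed size, so the size must be recomputed from
// the object's own length via the per-type helpers used above, e.g. for a
// one-byte string (hypothetical helper name):
//
//   int SizeOfOneByteString(HeapObject* obj) {
//     return SeqOneByteString::SizeFor(
//         reinterpret_cast<SeqOneByteString*>(obj)->length());
//   }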
4250 void Map::set_instance_size(int value) {
4251 ASSERT_EQ(0, value & (kPointerSize - 1));
4252 value >>= kPointerSizeLog2;
4253 ASSERT(0 <= value && value < 256);
4254 NOBARRIER_WRITE_BYTE_FIELD(
4255 this, kInstanceSizeOffset, static_cast<byte>(value));
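// Illustrative note (not part of this header): the instance size is stored in
// words so that it fits in a single byte. Assuming kPointerSizeLog2 == 3 on a
// 64-bit build, set_instance_size(96) stores 96 >> 3 == 12 and instance_size()
// returns 12 << 3 == 96, so sizes up to 255 words round-trip exactly.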
4259 void Map::set_inobject_properties(int value) {
4260 ASSERT(0 <= value && value < 256);
4261 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4265 void Map::set_pre_allocated_property_fields(int value) {
4266 ASSERT(0 <= value && value < 256);
4267 WRITE_BYTE_FIELD(this,
4268 kPreAllocatedPropertyFieldsOffset,
4269 static_cast<byte>(value));
4273 InstanceType Map::instance_type() {
4274 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4278 void Map::set_instance_type(InstanceType value) {
4279 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4283 int Map::unused_property_fields() {
4284 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4288 void Map::set_unused_property_fields(int value) {
4289 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4293 byte Map::bit_field() {
4294 return READ_BYTE_FIELD(this, kBitFieldOffset);
4298 void Map::set_bit_field(byte value) {
4299 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4303 byte Map::bit_field2() {
4304 return READ_BYTE_FIELD(this, kBitField2Offset);
4308 void Map::set_bit_field2(byte value) {
4309 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4313 void Map::set_non_instance_prototype(bool value) {
4315 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4317 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4322 bool Map::has_non_instance_prototype() {
4323 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4327 void Map::set_function_with_prototype(bool value) {
4328 set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
4332 bool Map::function_with_prototype() {
4333 return FunctionWithPrototype::decode(bit_field3());
4337 void Map::set_is_access_check_needed(bool access_check_needed) {
4338 if (access_check_needed) {
4339 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4341 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4346 bool Map::is_access_check_needed() {
4347 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4351 void Map::set_is_extensible(bool value) {
4353 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4355 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4359 bool Map::is_extensible() {
4360 return ((1 << kIsExtensible) & bit_field2()) != 0;
4364 void Map::set_attached_to_shared_function_info(bool value) {
4366 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
4368 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
4372 bool Map::attached_to_shared_function_info() {
4373 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
4377 void Map::set_is_shared(bool value) {
4378 set_bit_field3(IsShared::update(bit_field3(), value));
4382 bool Map::is_shared() {
4383 return IsShared::decode(bit_field3()); }
4386 void Map::set_dictionary_map(bool value) {
4387 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4388 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4389 set_bit_field3(new_bit_field3);
4393 bool Map::is_dictionary_map() {
4394 return DictionaryMap::decode(bit_field3());
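// Illustrative sketch (not part of this header): all bit_field3 flags use the
// BitField pattern above, so setters are read-modify-write on one packed word.
// Inside a Map method one might write ('map' is an assumed Map*):
//
//   uint32_t bits = map->bit_field3();
//   bits = DictionaryMap::update(bits, true);  // set one flag
//   bits = IsUnstable::update(bits, true);     // set another without clobbering it
//   map->set_bit_field3(bits);
//   ASSERT(DictionaryMap::decode(map->bit_field3()));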
4398 Code::Flags Code::flags() {
4399 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4403 void Map::set_owns_descriptors(bool is_shared) {
4404 set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
4408 bool Map::owns_descriptors() {
4409 return OwnsDescriptors::decode(bit_field3());
4413 void Map::set_has_instance_call_handler() {
4414 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4418 bool Map::has_instance_call_handler() {
4419 return HasInstanceCallHandler::decode(bit_field3());
4423 void Map::deprecate() {
4424 set_bit_field3(Deprecated::update(bit_field3(), true));
4428 bool Map::is_deprecated() {
4429 return Deprecated::decode(bit_field3());
4433 void Map::set_migration_target(bool value) {
4434 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4438 bool Map::is_migration_target() {
4439 return IsMigrationTarget::decode(bit_field3());
4443 void Map::freeze() {
4444 set_bit_field3(IsFrozen::update(bit_field3(), true));
4448 bool Map::is_frozen() {
4449 return IsFrozen::decode(bit_field3());
4453 void Map::mark_unstable() {
4454 set_bit_field3(IsUnstable::update(bit_field3(), true));
4458 bool Map::is_stable() {
4459 return !IsUnstable::decode(bit_field3());
4463 bool Map::has_code_cache() {
4464 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4468 bool Map::CanBeDeprecated() {
4469 int descriptor = LastAdded();
4470 for (int i = 0; i <= descriptor; i++) {
4471 PropertyDetails details = instance_descriptors()->GetDetails(i);
4472 if (details.representation().IsNone()) return true;
4473 if (details.representation().IsSmi()) return true;
4474 if (details.representation().IsDouble()) return true;
4475 if (details.representation().IsHeapObject()) return true;
4476 if (details.type() == CONSTANT) return true;
4482 void Map::NotifyLeafMapLayoutChange() {
4485 dependent_code()->DeoptimizeDependentCodeGroup(
4487 DependentCode::kPrototypeCheckGroup);
4492 bool Map::CanOmitMapChecks() {
4493 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4497 int DependentCode::number_of_entries(DependencyGroup group) {
4498 if (length() == 0) return 0;
4499 return Smi::cast(get(group))->value();
4503 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4504 set(group, Smi::FromInt(value));
4508 bool DependentCode::is_code_at(int i) {
4509 return get(kCodesStartIndex + i)->IsCode();
4512 Code* DependentCode::code_at(int i) {
4513 return Code::cast(get(kCodesStartIndex + i));
4517 CompilationInfo* DependentCode::compilation_info_at(int i) {
4518 return reinterpret_cast<CompilationInfo*>(
4519 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4523 void DependentCode::set_object_at(int i, Object* object) {
4524 set(kCodesStartIndex + i, object);
4528 Object* DependentCode::object_at(int i) {
4529 return get(kCodesStartIndex + i);
4533 Object** DependentCode::slot_at(int i) {
4534 return RawFieldOfElementAt(kCodesStartIndex + i);
4538 void DependentCode::clear_at(int i) {
4539 set_undefined(kCodesStartIndex + i);
4543 void DependentCode::copy(int from, int to) {
4544 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4548 void DependentCode::ExtendGroup(DependencyGroup group) {
4549 GroupStartIndexes starts(this);
4550 for (int g = kGroupCount - 1; g > group; g--) {
4551 if (starts.at(g) < starts.at(g + 1)) {
4552 copy(starts.at(g), starts.at(g + 1));
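// Illustrative sketch (not part of this header): a DependentCode array keeps
// one Smi count per dependency group in its leading slots and the dependent
// entries themselves from kCodesStartIndex onwards, grouped contiguously in
// group order; ExtendGroup above frees one slot for 'group' by bubbling the
// first element of each later group up by one. Iterating one group ('deps' is
// an assumed DependentCode*):
//
//   DependentCode::GroupStartIndexes starts(deps);
//   int g = DependentCode::kPrototypeCheckGroup;
//   for (int i = starts.at(g); i < starts.at(g + 1); i++) {
//     if (deps->is_code_at(i)) {
//       Code* code = deps->code_at(i);
//       // ... e.g. inspect code->marked_for_deoptimization()
//     }
//   }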
4558 void Code::set_flags(Code::Flags flags) {
4559 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4560 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4564 Code::Kind Code::kind() {
4565 return ExtractKindFromFlags(flags());
4569 InlineCacheState Code::ic_state() {
4570 InlineCacheState result = ExtractICStateFromFlags(flags());
4571 // Only allow uninitialized or debugger states for non-IC code
4572 // objects. This is used in the debugger to determine whether or not
4573 // a call to a code object has been replaced with a debug break call.
4574 ASSERT(is_inline_cache_stub() ||
4575 result == UNINITIALIZED ||
4576 result == DEBUG_STUB);
4581 ExtraICState Code::extra_ic_state() {
4582 ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4583 return ExtractExtraICStateFromFlags(flags());
4587 Code::StubType Code::type() {
4588 return ExtractTypeFromFlags(flags());
4592 // For initialization.
4593 void Code::set_raw_kind_specific_flags1(int value) {
4594 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4598 void Code::set_raw_kind_specific_flags2(int value) {
4599 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4603 inline bool Code::is_crankshafted() {
4604 return IsCrankshaftedField::decode(
4605 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4609 inline void Code::set_is_crankshafted(bool value) {
4610 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4611 int updated = IsCrankshaftedField::update(previous, value);
4612 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4616 int Code::major_key() {
4617 ASSERT(has_major_key());
4618 return StubMajorKeyField::decode(
4619 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4623 void Code::set_major_key(int major) {
4624 ASSERT(has_major_key());
4625 ASSERT(0 <= major && major < 256);
4626 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4627 int updated = StubMajorKeyField::update(previous, major);
4628 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4632 bool Code::has_major_key() {
4633 return kind() == STUB ||
4634 kind() == HANDLER ||
4635 kind() == BINARY_OP_IC ||
4636 kind() == COMPARE_IC ||
4637 kind() == COMPARE_NIL_IC ||
4638 kind() == LOAD_IC ||
4639 kind() == KEYED_LOAD_IC ||
4640 kind() == STORE_IC ||
4641 kind() == CALL_IC ||
4642 kind() == KEYED_STORE_IC ||
4643 kind() == TO_BOOLEAN_IC;
4647 bool Code::optimizable() {
4648 ASSERT_EQ(FUNCTION, kind());
4649 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4653 void Code::set_optimizable(bool value) {
4654 ASSERT_EQ(FUNCTION, kind());
4655 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4659 bool Code::has_deoptimization_support() {
4660 ASSERT_EQ(FUNCTION, kind());
4661 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4662 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4666 void Code::set_has_deoptimization_support(bool value) {
4667 ASSERT_EQ(FUNCTION, kind());
4668 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4669 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4670 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4674 bool Code::has_debug_break_slots() {
4675 ASSERT_EQ(FUNCTION, kind());
4676 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4677 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4681 void Code::set_has_debug_break_slots(bool value) {
4682 ASSERT_EQ(FUNCTION, kind());
4683 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4684 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4685 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4689 bool Code::is_compiled_optimizable() {
4690 ASSERT_EQ(FUNCTION, kind());
4691 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4692 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4696 void Code::set_compiled_optimizable(bool value) {
4697 ASSERT_EQ(FUNCTION, kind());
4698 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4699 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4700 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4704 int Code::allow_osr_at_loop_nesting_level() {
4705 ASSERT_EQ(FUNCTION, kind());
4706 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
4710 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4711 ASSERT_EQ(FUNCTION, kind());
4712 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
4713 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
4717 int Code::profiler_ticks() {
4718 ASSERT_EQ(FUNCTION, kind());
4719 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4723 void Code::set_profiler_ticks(int ticks) {
4724 ASSERT_EQ(FUNCTION, kind());
4725 ASSERT(ticks < 256);
4726 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4730 unsigned Code::stack_slots() {
4731 ASSERT(is_crankshafted());
4732 return StackSlotsField::decode(
4733 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4737 void Code::set_stack_slots(unsigned slots) {
4738 CHECK(slots <= (1 << kStackSlotsBitCount));
4739 ASSERT(is_crankshafted());
4740 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4741 int updated = StackSlotsField::update(previous, slots);
4742 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4746 unsigned Code::safepoint_table_offset() {
4747 ASSERT(is_crankshafted());
4748 return SafepointTableOffsetField::decode(
4749 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4753 void Code::set_safepoint_table_offset(unsigned offset) {
4754 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4755 ASSERT(is_crankshafted());
4756 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4757 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4758 int updated = SafepointTableOffsetField::update(previous, offset);
4759 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4763 unsigned Code::back_edge_table_offset() {
4764 ASSERT_EQ(FUNCTION, kind());
4765 return BackEdgeTableOffsetField::decode(
4766 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4770 void Code::set_back_edge_table_offset(unsigned offset) {
4771 ASSERT_EQ(FUNCTION, kind());
4772 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4773 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4774 int updated = BackEdgeTableOffsetField::update(previous, offset);
4775 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4779 bool Code::back_edges_patched_for_osr() {
4780 ASSERT_EQ(FUNCTION, kind());
4781 return BackEdgesPatchedForOSRField::decode(
4782 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4786 void Code::set_back_edges_patched_for_osr(bool value) {
4787 ASSERT_EQ(FUNCTION, kind());
4788 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4789 int updated = BackEdgesPatchedForOSRField::update(previous, value);
4790 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4795 byte Code::to_boolean_state() {
4796 return extra_ic_state();
4800 bool Code::has_function_cache() {
4801 ASSERT(kind() == STUB);
4802 return HasFunctionCacheField::decode(
4803 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4807 void Code::set_has_function_cache(bool flag) {
4808 ASSERT(kind() == STUB);
4809 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4810 int updated = HasFunctionCacheField::update(previous, flag);
4811 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4815 bool Code::marked_for_deoptimization() {
4816 ASSERT(kind() == OPTIMIZED_FUNCTION);
4817 return MarkedForDeoptimizationField::decode(
4818 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4822 void Code::set_marked_for_deoptimization(bool flag) {
4823 ASSERT(kind() == OPTIMIZED_FUNCTION);
4824 ASSERT(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
4825 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4826 int updated = MarkedForDeoptimizationField::update(previous, flag);
4827 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4831 bool Code::is_weak_stub() {
4832 return CanBeWeakStub() && WeakStubField::decode(
4833 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4837 void Code::mark_as_weak_stub() {
4838 ASSERT(CanBeWeakStub());
4839 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4840 int updated = WeakStubField::update(previous, true);
4841 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4845 bool Code::is_invalidated_weak_stub() {
4846 return is_weak_stub() && InvalidatedWeakStubField::decode(
4847 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4851 void Code::mark_as_invalidated_weak_stub() {
4852 ASSERT(is_inline_cache_stub());
4853 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4854 int updated = InvalidatedWeakStubField::update(previous, true);
4855 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4859 bool Code::is_inline_cache_stub() {
4860 Kind kind = this->kind();
4862 #define CASE(name) case name: return true;
4865 default: return false;
4870 bool Code::is_keyed_stub() {
4871 return is_keyed_load_stub() || is_keyed_store_stub();
4875 bool Code::is_debug_stub() {
4876 return ic_state() == DEBUG_STUB;
4880 ConstantPoolArray* Code::constant_pool() {
4881 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
4885 void Code::set_constant_pool(Object* value) {
4886 ASSERT(value->IsConstantPoolArray());
4887 WRITE_FIELD(this, kConstantPoolOffset, value);
4888 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
4892 Code::Flags Code::ComputeFlags(Kind kind,
4893 InlineCacheState ic_state,
4894 ExtraICState extra_ic_state,
4896 InlineCacheHolderFlag holder) {
4897 // Compute the bit mask.
4898 unsigned int bits = KindField::encode(kind)
4899 | ICStateField::encode(ic_state)
4900 | TypeField::encode(type)
4901 | ExtraICStateField::encode(extra_ic_state)
4902 | CacheHolderField::encode(holder);
4903 return static_cast<Flags>(bits);
4907 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
4908 ExtraICState extra_ic_state,
4909 InlineCacheHolderFlag holder,
4911 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
4915 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
4917 InlineCacheHolderFlag holder) {
4918 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
4922 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4923 return KindField::decode(flags);
4927 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4928 return ICStateField::decode(flags);
4932 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4933 return ExtraICStateField::decode(flags);
4937 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4938 return TypeField::decode(flags);
4942 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
4943 return CacheHolderField::decode(flags);
4947 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
4948 int bits = flags & ~TypeField::kMask;
4949 return static_cast<Flags>(bits);
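// Illustrative note (not part of this header): Code::Flags is a plain int with
// the kind, IC state, stub type, extra IC state and cache holder packed into
// disjoint bit fields, so ComputeFlags and the Extract*FromFlags helpers above
// are exact inverses. Hedged sketch ('code' is an assumed Code*):
//
//   Code::Flags f = code->flags();
//   Code::Kind kind = Code::ExtractKindFromFlags(f);            // same as code->kind()
//   InlineCacheState state = Code::ExtractICStateFromFlags(f);  // same as code->ic_state(), modulo its ASSERT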
4953 Code* Code::GetCodeFromTargetAddress(Address address) {
4954 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
4955 // GetCodeFromTargetAddress might be called when marking objects during mark
4956 // sweep. reinterpret_cast is therefore used instead of the more appropriate
4957 // Code::cast. Code::cast does not work when the object's map is marked.
4959 Code* result = reinterpret_cast<Code*>(code);
4964 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
4966 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
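// Illustrative note (not part of this header): both helpers above rely on the
// invariant that a Code object's instructions begin exactly Code::kHeaderSize
// bytes past the object's address, so subtracting the header size from a
// target or entry address recovers the HeapObject. Hedged round trip ('code'
// is an assumed Code*, with instruction_start() as its entry address):
//
//   Address target = code->instruction_start();
//   ASSERT(Code::GetCodeFromTargetAddress(target) == code);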
4970 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
4971 if (!FLAG_collect_maps) return false;
4972 if (object->IsMap()) {
4973 return Map::cast(object)->CanTransition() &&
4974 FLAG_weak_embedded_maps_in_optimized_code;
4976 if (object->IsJSObject() ||
4977 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
4978 return FLAG_weak_embedded_objects_in_optimized_code;
4984 class Code::FindAndReplacePattern {
4986 FindAndReplacePattern() : count_(0) { }
4987 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
4988 ASSERT(count_ < kMaxCount);
4989 find_[count_] = map_to_find;
4990 replace_[count_] = obj_to_replace;
4994 static const int kMaxCount = 4;
4996 Handle<Map> find_[kMaxCount];
4997 Handle<Object> replace_[kMaxCount];
5002 bool Code::IsWeakObjectInIC(Object* object) {
5003 return object->IsMap() && Map::cast(object)->CanTransition() &&
5004 FLAG_collect_maps &&
5005 FLAG_weak_embedded_maps_in_ic;
5009 Object* Map::prototype() {
5010 return READ_FIELD(this, kPrototypeOffset);
5014 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5015 ASSERT(value->IsNull() || value->IsJSReceiver());
5016 WRITE_FIELD(this, kPrototypeOffset, value);
5017 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5021 // If the map is using the empty transition array, install a new empty
5022 // transition array that has room for an element transition.
5023 static void EnsureHasTransitionArray(Handle<Map> map) {
5024 Handle<TransitionArray> transitions;
5025 if (!map->HasTransitionArray()) {
5026 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
5027 transitions->set_back_pointer_storage(map->GetBackPointer());
5028 } else if (!map->transitions()->IsFullTransitionArray()) {
5029 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5033 map->set_transitions(*transitions);
5037 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
5038 int len = descriptors->number_of_descriptors();
5039 set_instance_descriptors(descriptors);
5040 SetNumberOfOwnDescriptors(len);
5044 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5047 void Map::set_bit_field3(uint32_t bits) {
5048 // Ensure the upper 2 bits have the same value by sign extending it. This is
5049 // necessary to be able to use the 31st bit.
5050 int value = bits << 1;
5051 WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
5055 uint32_t Map::bit_field3() {
5056 Object* value = READ_FIELD(this, kBitField3Offset);
5057 return Smi::cast(value)->value();
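// Illustrative note (not part of this header): bit_field3 is stored as a Smi,
// which only has 31 payload bits on 32-bit targets. Shifting left and then
// arithmetically right by one copies bit 30 into bit 31, so e.g. bits of
// 0x7FFFFFFF are written as the int -1, read back as 0xFFFFFFFF, and their low
// 31 bits -- the ones the BitFields actually use -- survive unchanged.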
5061 void Map::AppendDescriptor(Descriptor* desc) {
5062 DescriptorArray* descriptors = instance_descriptors();
5063 int number_of_own_descriptors = NumberOfOwnDescriptors();
5064 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
5065 descriptors->Append(desc);
5066 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5070 Object* Map::GetBackPointer() {
5071 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5072 if (object->IsDescriptorArray()) {
5073 return TransitionArray::cast(object)->back_pointer_storage();
5075 ASSERT(object->IsMap() || object->IsUndefined());
5081 bool Map::HasElementsTransition() {
5082 return HasTransitionArray() && transitions()->HasElementsTransition();
5086 bool Map::HasTransitionArray() {
5087 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5088 return object->IsTransitionArray();
5092 Map* Map::elements_transition_map() {
5093 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
5094 return transitions()->GetTarget(index);
5098 bool Map::CanHaveMoreTransitions() {
5099 if (!HasTransitionArray()) return true;
5100 return FixedArray::SizeFor(transitions()->length() +
5101 TransitionArray::kTransitionSize)
5102 <= Page::kMaxRegularHeapObjectSize;
5106 Map* Map::GetTransition(int transition_index) {
5107 return transitions()->GetTarget(transition_index);
5111 int Map::SearchTransition(Name* name) {
5112 if (HasTransitionArray()) return transitions()->Search(name);
5113 return TransitionArray::kNotFound;
5117 FixedArray* Map::GetPrototypeTransitions() {
5118 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5119 if (!transitions()->HasPrototypeTransitions()) {
5120 return GetHeap()->empty_fixed_array();
5122 return transitions()->GetPrototypeTransitions();
5126 void Map::SetPrototypeTransitions(
5127 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5128 EnsureHasTransitionArray(map);
5129 int old_number_of_transitions = map->NumberOfProtoTransitions();
5131 if (map->HasPrototypeTransitions()) {
5132 ASSERT(map->GetPrototypeTransitions() != *proto_transitions);
5133 map->ZapPrototypeTransitions();
5136 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5137 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5141 bool Map::HasPrototypeTransitions() {
5142 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
5146 TransitionArray* Map::transitions() {
5147 ASSERT(HasTransitionArray());
5148 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5149 return TransitionArray::cast(object);
5153 void Map::set_transitions(TransitionArray* transition_array,
5154 WriteBarrierMode mode) {
5155 // Transition arrays are not shared. When one is replaced, it should not
5156 // keep referenced objects alive, so we zap it.
5157 // When there is another reference to the array somewhere (e.g. a handle),
5158 // not zapping turns from a waste of memory into a source of crashes.
5159 if (HasTransitionArray()) {
5161 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
5162 Map* target = transitions()->GetTarget(i);
5163 if (target->instance_descriptors() == instance_descriptors()) {
5164 Name* key = transitions()->GetKey(i);
5165 int new_target_index = transition_array->Search(key);
5166 ASSERT(new_target_index != TransitionArray::kNotFound);
5167 ASSERT(transition_array->GetTarget(new_target_index) == target);
5171 ASSERT(transitions() != transition_array);
5175 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5176 CONDITIONAL_WRITE_BARRIER(
5177 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
5181 void Map::init_back_pointer(Object* undefined) {
5182 ASSERT(undefined->IsUndefined());
5183 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
5187 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5188 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5189 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5190 (value->IsMap() && GetBackPointer()->IsUndefined()));
5191 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5192 if (object->IsTransitionArray()) {
5193 TransitionArray::cast(object)->set_back_pointer_storage(value);
5195 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5196 CONDITIONAL_WRITE_BARRIER(
5197 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
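// Illustrative note (not part of this header): kTransitionsOrBackPointerOffset
// is an overloaded slot -- it holds either the back pointer directly (a Map or
// undefined) or a TransitionArray, in which case the back pointer lives in the
// array's back_pointer_storage(). GetBackPointer, SetBackPointer and
// HasTransitionArray above all dispatch on which of the two is present.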
5202 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5203 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5204 ACCESSORS(Map, constructor, Object, kConstructorOffset)
5206 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5207 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5208 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5210 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5211 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5212 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5213 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
5215 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5217 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5218 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5219 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5220 kExpectedReceiverTypeOffset)
5222 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5223 kSerializedDataOffset)
5225 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5228 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5229 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5230 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5232 ACCESSORS(Box, value, Object, kValueOffset)
5234 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5235 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5236 ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)
5238 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5239 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5240 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5242 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5243 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5244 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5245 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5246 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5247 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5249 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5250 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5252 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5253 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5254 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5256 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5257 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5258 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5259 kPrototypeTemplateOffset)
5260 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5261 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5262 kNamedPropertyHandlerOffset)
5263 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5264 kIndexedPropertyHandlerOffset)
5265 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5266 kInstanceTemplateOffset)
5267 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5268 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5269 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5270 kInstanceCallHandlerOffset)
5271 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5272 kAccessCheckInfoOffset)
5273 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5275 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5276 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5277 kInternalFieldCountOffset)
5279 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5280 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5282 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5284 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5285 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5286 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5287 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5288 kPretenureCreateCountOffset)
5289 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5290 kDependentCodeOffset)
5291 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5292 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5294 ACCESSORS(Script, source, Object, kSourceOffset)
5295 ACCESSORS(Script, name, Object, kNameOffset)
5296 ACCESSORS(Script, id, Smi, kIdOffset)
5297 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5298 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5299 ACCESSORS(Script, context_data, Object, kContextOffset)
5300 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
5301 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5302 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5303 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5304 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5305 kEvalFrominstructionsOffsetOffset)
5306 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5307 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5309 Script::CompilationType Script::compilation_type() {
5310 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5311 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5313 void Script::set_compilation_type(CompilationType type) {
5314 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5315 type == COMPILATION_TYPE_EVAL));
5317 Script::CompilationState Script::compilation_state() {
5318 return BooleanBit::get(flags(), kCompilationStateBit) ?
5319 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5321 void Script::set_compilation_state(CompilationState state) {
5322 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5323 state == COMPILATION_STATE_COMPILED));
5327 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5328 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5329 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5330 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5332 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5333 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5334 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5335 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5337 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5338 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5339 kOptimizedCodeMapOffset)
5340 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5341 ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
5342 kFeedbackVectorOffset)
5343 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
5344 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5345 kInstanceClassNameOffset)
5346 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5347 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5348 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5349 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5352 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5353 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5354 kHiddenPrototypeBit)
5355 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5356 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5357 kNeedsAccessCheckBit)
5358 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5359 kReadOnlyPrototypeBit)
5360 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5361 kRemovePrototypeBit)
5362 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5364 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5366 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5369 BOOL_ACCESSORS(SharedFunctionInfo,
5371 allows_lazy_compilation,
5372 kAllowLazyCompilation)
5373 BOOL_ACCESSORS(SharedFunctionInfo,
5375 allows_lazy_compilation_without_context,
5376 kAllowLazyCompilationWithoutContext)
5377 BOOL_ACCESSORS(SharedFunctionInfo,
5381 BOOL_ACCESSORS(SharedFunctionInfo,
5383 has_duplicate_parameters,
5384 kHasDuplicateParameters)
5387 #if V8_HOST_ARCH_32_BIT
5388 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5389 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5390 kFormalParameterCountOffset)
5391 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5392 kExpectedNofPropertiesOffset)
5393 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5394 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5395 kStartPositionAndTypeOffset)
5396 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5397 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5398 kFunctionTokenPositionOffset)
5399 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5400 kCompilerHintsOffset)
5401 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5402 kOptCountAndBailoutReasonOffset)
5403 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5404 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5405 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5409 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5410 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5411 int holder::name() { \
5412 int value = READ_INT_FIELD(this, offset); \
5413 ASSERT(kHeapObjectTag == 1); \
5414 ASSERT((value & kHeapObjectTag) == 0); \
5415 return value >> 1; \
5417 void holder::set_##name(int value) { \
5418 ASSERT(kHeapObjectTag == 1); \
5419 ASSERT((value & 0xC0000000) == 0xC0000000 || \
5420 (value & 0xC0000000) == 0x00000000); \
5421 WRITE_INT_FIELD(this, \
5423 (value << 1) & ~kHeapObjectTag); \
5426 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5427 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5428 INT_ACCESSORS(holder, name, offset)
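// Illustrative note (not part of this header): on 64-bit targets two of these
// int fields share one pointer-sized slot. The LO half sits in the low 32 bits,
// where the heap-object tag bit of the containing word lives, so it is stored
// shifted left by one to keep that bit clear; the HI half is a plain int in the
// upper 32 bits. Round trip for the LO half: set_foo(42) writes 42 << 1 == 84,
// and foo() returns 84 >> 1 == 42 ('foo' is a placeholder name).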
5431 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5432 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5433 formal_parameter_count,
5434 kFormalParameterCountOffset)
5436 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5437 expected_nof_properties,
5438 kExpectedNofPropertiesOffset)
5439 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5441 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5442 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5443 start_position_and_type,
5444 kStartPositionAndTypeOffset)
5446 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5447 function_token_position,
5448 kFunctionTokenPositionOffset)
5449 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5451 kCompilerHintsOffset)
5453 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5454 opt_count_and_bailout_reason,
5455 kOptCountAndBailoutReasonOffset)
5456 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5458 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5460 kAstNodeCountOffset)
5461 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5463 kProfilerTicksOffset)
5468 int SharedFunctionInfo::construction_count() {
5469 return READ_BYTE_FIELD(this, kConstructionCountOffset);
5473 void SharedFunctionInfo::set_construction_count(int value) {
5474 ASSERT(0 <= value && value < 256);
5475 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
5479 BOOL_ACCESSORS(SharedFunctionInfo,
5481 live_objects_may_exist,
5482 kLiveObjectsMayExist)
5485 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
5486 return initial_map() != GetHeap()->undefined_value();
5490 BOOL_GETTER(SharedFunctionInfo,
5492 optimization_disabled,
5493 kOptimizationDisabled)
5496 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5497 set_compiler_hints(BooleanBit::set(compiler_hints(),
5498 kOptimizationDisabled,
5500 // If we are disabling optimization, reflect that in the code object so
5501 // it will not be counted as optimizable code.
5502 if ((code()->kind() == Code::FUNCTION) && disable) {
5503 code()->set_optimizable(false);
5508 StrictMode SharedFunctionInfo::strict_mode() {
5509 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5514 void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5515 // We only allow mode transitions from sloppy to strict.
5516 ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5517 int hints = compiler_hints();
5518 hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5519 set_compiler_hints(hints);
5523 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5524 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5526 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5527 name_should_print_as_anonymous,
5528 kNameShouldPrintAsAnonymous)
5529 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5530 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5531 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5532 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
5534 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
5535 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5536 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5537 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5539 void SharedFunctionInfo::BeforeVisitingPointers() {
5540 if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
5544 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5545 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5547 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5549 bool Script::HasValidSource() {
5550 Object* src = this->source();
5551 if (!src->IsString()) return true;
5552 String* src_str = String::cast(src);
5553 if (!StringShape(src_str).IsExternal()) return true;
5554 if (src_str->IsOneByteRepresentation()) {
5555 return ExternalAsciiString::cast(src)->resource() != NULL;
5556 } else if (src_str->IsTwoByteRepresentation()) {
5557 return ExternalTwoByteString::cast(src)->resource() != NULL;
5563 void SharedFunctionInfo::DontAdaptArguments() {
5564 ASSERT(code()->kind() == Code::BUILTIN);
5565 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
5569 int SharedFunctionInfo::start_position() {
5570 return start_position_and_type() >> kStartPositionShift;
5574 void SharedFunctionInfo::set_start_position(int start_position) {
5575 set_start_position_and_type((start_position << kStartPositionShift)
5576 | (start_position_and_type() & ~kStartPositionMask));
5580 Code* SharedFunctionInfo::code() {
5581 return Code::cast(READ_FIELD(this, kCodeOffset));
5585 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5586 ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
5587 WRITE_FIELD(this, kCodeOffset, value);
5588 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5592 void SharedFunctionInfo::ReplaceCode(Code* value) {
5593 // If the GC metadata field is already used then the function was
5594 // enqueued as a code flushing candidate and we remove it now.
5595 if (code()->gc_metadata() != NULL) {
5596 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5597 flusher->EvictCandidate(this);
5600 ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5606 ScopeInfo* SharedFunctionInfo::scope_info() {
5607 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5611 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5612 WriteBarrierMode mode) {
5613 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5614 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5617 reinterpret_cast<Object*>(value),
5622 bool SharedFunctionInfo::is_compiled() {
5624 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5628 bool SharedFunctionInfo::IsApiFunction() {
5629 return function_data()->IsFunctionTemplateInfo();
5633 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5634 ASSERT(IsApiFunction());
5635 return FunctionTemplateInfo::cast(function_data());
5639 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5640 return function_data()->IsSmi();
5644 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5645 ASSERT(HasBuiltinFunctionId());
5646 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5650 int SharedFunctionInfo::ic_age() {
5651 return ICAgeBits::decode(counters());
5655 void SharedFunctionInfo::set_ic_age(int ic_age) {
5656 set_counters(ICAgeBits::update(counters(), ic_age));
5660 int SharedFunctionInfo::deopt_count() {
5661 return DeoptCountBits::decode(counters());
5665 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5666 set_counters(DeoptCountBits::update(counters(), deopt_count));
5670 void SharedFunctionInfo::increment_deopt_count() {
5671 int value = counters();
5672 int deopt_count = DeoptCountBits::decode(value);
5673 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5674 set_counters(DeoptCountBits::update(value, deopt_count));
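// Illustrative note (not part of this header): ic_age, deopt_count and
// opt_reenable_tries all share the single 'counters' Smi via BitFields, and
// the masking above makes the increment wrap inside its own field. If
// DeoptCountBits::kMax were 15, a count of 15 would increment to
// (15 + 1) & 15 == 0 without disturbing the neighbouring fields.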
5678 int SharedFunctionInfo::opt_reenable_tries() {
5679 return OptReenableTriesBits::decode(counters());
5683 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5684 set_counters(OptReenableTriesBits::update(counters(), tries));
5688 int SharedFunctionInfo::opt_count() {
5689 return OptCountBits::decode(opt_count_and_bailout_reason());
5693 void SharedFunctionInfo::set_opt_count(int opt_count) {
5694 set_opt_count_and_bailout_reason(
5695 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
5699 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
5700 BailoutReason reason = static_cast<BailoutReason>(
5701 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5706 bool SharedFunctionInfo::has_deoptimization_support() {
5707 Code* code = this->code();
5708 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
5712 void SharedFunctionInfo::TryReenableOptimization() {
5713 int tries = opt_reenable_tries();
5714 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
5715 // We reenable optimization whenever the number of tries is a large
5716 // enough power of 2.
5717 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5718 set_optimization_disabled(false);
5721 code()->set_optimizable(true);
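// Illustrative note (not part of this header): '((tries - 1) & tries) == 0' is
// the usual power-of-two test, so together with the 'tries >= 16' guard above,
// optimization is re-enabled only after 16, 32, 64, ... attempts -- an
// exponential back-off. For example 31 & 32 == 0 (retry at 32 tries), while
// 32 & 33 != 0 (keep waiting at 33).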
5726 bool JSFunction::IsBuiltin() {
5727 return context()->global_object()->IsJSBuiltinsObject();
5731 bool JSFunction::NeedsArgumentsAdaption() {
5732 return shared()->formal_parameter_count() !=
5733 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5737 bool JSFunction::IsOptimized() {
5738 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5742 bool JSFunction::IsOptimizable() {
5743 return code()->kind() == Code::FUNCTION && code()->optimizable();
5747 bool JSFunction::IsMarkedForOptimization() {
5748 return code() == GetIsolate()->builtins()->builtin(
5749 Builtins::kCompileOptimized);
5753 bool JSFunction::IsMarkedForConcurrentOptimization() {
5754 return code() == GetIsolate()->builtins()->builtin(
5755 Builtins::kCompileOptimizedConcurrent);
5759 bool JSFunction::IsInOptimizationQueue() {
5760 return code() == GetIsolate()->builtins()->builtin(
5761 Builtins::kInOptimizationQueue);
5765 Code* JSFunction::code() {
5767 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
5771 void JSFunction::set_code(Code* value) {
5772 ASSERT(!GetHeap()->InNewSpace(value));
5773 Address entry = value->entry();
5774 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5775 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5777 HeapObject::RawField(this, kCodeEntryOffset),
5782 void JSFunction::set_code_no_write_barrier(Code* value) {
5783 ASSERT(!GetHeap()->InNewSpace(value));
5784 Address entry = value->entry();
5785 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5789 void JSFunction::ReplaceCode(Code* code) {
5790 bool was_optimized = IsOptimized();
5791 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5793 if (was_optimized && is_optimized) {
5794 shared()->EvictFromOptimizedCodeMap(this->code(),
5795 "Replacing with another optimized code");
5800 // Add/remove the function from the list of optimized functions for this
5801 // context based on the state change.
5802 if (!was_optimized && is_optimized) {
5803 context()->native_context()->AddOptimizedFunction(this);
5805 if (was_optimized && !is_optimized) {
5806 // TODO(titzer): linear in the number of optimized functions; fix!
5807 context()->native_context()->RemoveOptimizedFunction(this);
5812 Context* JSFunction::context() {
5813 return Context::cast(READ_FIELD(this, kContextOffset));
5817 void JSFunction::set_context(Object* value) {
5818 ASSERT(value->IsUndefined() || value->IsContext());
5819 WRITE_FIELD(this, kContextOffset, value);
5820 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5823 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5824 kPrototypeOrInitialMapOffset)
5827 Map* JSFunction::initial_map() {
5828 return Map::cast(prototype_or_initial_map());
5832 void JSFunction::set_initial_map(Map* value) {
5833 set_prototype_or_initial_map(value);
5837 bool JSFunction::has_initial_map() {
5838 return prototype_or_initial_map()->IsMap();
5842 bool JSFunction::has_instance_prototype() {
5843 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5847 bool JSFunction::has_prototype() {
5848 return map()->has_non_instance_prototype() || has_instance_prototype();
5852 Object* JSFunction::instance_prototype() {
5853 ASSERT(has_instance_prototype());
5854 if (has_initial_map()) return initial_map()->prototype();
5855 // When there is no initial map and the prototype is a JSObject, the
5856 // initial map field is used for the prototype field.
5857 return prototype_or_initial_map();
5861 Object* JSFunction::prototype() {
5862 ASSERT(has_prototype());
5863 // If the function's prototype property has been set to a non-JSObject
5864 // value, that value is stored in the constructor field of the map.
5865 if (map()->has_non_instance_prototype()) return map()->constructor();
5866 return instance_prototype();
5870 bool JSFunction::should_have_prototype() {
5871 return map()->function_with_prototype();
5875 bool JSFunction::is_compiled() {
5877 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5881 FixedArray* JSFunction::literals() {
5882 ASSERT(!shared()->bound());
5883 return literals_or_bindings();
5887 void JSFunction::set_literals(FixedArray* literals) {
5888 ASSERT(!shared()->bound());
5889 set_literals_or_bindings(literals);
5893 FixedArray* JSFunction::function_bindings() {
5894 ASSERT(shared()->bound());
5895 return literals_or_bindings();
5899 void JSFunction::set_function_bindings(FixedArray* bindings) {
5900 ASSERT(shared()->bound());
5901 // Bound function literal may be initialized to the empty fixed array
5902 // before the bindings are set.
5903 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
5904 bindings->map() == GetHeap()->fixed_cow_array_map());
5905 set_literals_or_bindings(bindings);
5909 int JSFunction::NumberOfLiterals() {
5910 ASSERT(!shared()->bound());
5911 return literals()->length();
5915 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
5916 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5917 return READ_FIELD(this, OffsetOfFunctionWithId(id));
5921 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
5923 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5924 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
5925 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
5929 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
5930 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5931 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
5935 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
5937 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5938 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
5939 ASSERT(!GetHeap()->InNewSpace(value));
5943 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
5944 ACCESSORS(JSProxy, hash, Object, kHashOffset)
5945 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
5946 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
5949 void JSProxy::InitializeBody(int object_size, Object* value) {
5950 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5951 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
5952 WRITE_FIELD(this, offset, value);
5957 ACCESSORS(JSSet, table, Object, kTableOffset)
5958 ACCESSORS(JSMap, table, Object, kTableOffset)
5961 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
5962 template<class Derived, class TableType> \
5963 type* OrderedHashTableIterator<Derived, TableType>::name() { \
5964 return type::cast(READ_FIELD(this, offset)); \
5966 template<class Derived, class TableType> \
5967 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
5968 type* value, WriteBarrierMode mode) { \
5969 WRITE_FIELD(this, offset, value); \
5970 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
5973 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
5974 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
5975 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(count, Smi, kCountOffset)
5976 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)
5977 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(next_iterator, Object,
5978 kNextIteratorOffset)
5979 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(previous_iterator, Object,
5980 kPreviousIteratorOffset)
5982 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
5985 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
5986 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
5989 Address Foreign::foreign_address() {
5990 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
5994 void Foreign::set_foreign_address(Address value) {
5995 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
5999 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6000 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6001 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6002 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6003 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6004 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
6006 bool JSGeneratorObject::is_suspended() {
6007 ASSERT_LT(kGeneratorExecuting, kGeneratorClosed);
6008 ASSERT_EQ(kGeneratorClosed, 0);
6009 return continuation() > 0;
6012 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
6013 ASSERT(obj->IsJSGeneratorObject());
6014 ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
6015 return reinterpret_cast<JSGeneratorObject*>(obj);
6019 ACCESSORS(JSModule, context, Object, kContextOffset)
6020 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6023 JSModule* JSModule::cast(Object* obj) {
6024 ASSERT(obj->IsJSModule());
6025 ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
6026 return reinterpret_cast<JSModule*>(obj);
6030 ACCESSORS(JSValue, value, Object, kValueOffset)
6033 JSValue* JSValue::cast(Object* obj) {
6034 ASSERT(obj->IsJSValue());
6035 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
6036 return reinterpret_cast<JSValue*>(obj);
6040 ACCESSORS(JSDate, value, Object, kValueOffset)
6041 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6042 ACCESSORS(JSDate, year, Object, kYearOffset)
6043 ACCESSORS(JSDate, month, Object, kMonthOffset)
6044 ACCESSORS(JSDate, day, Object, kDayOffset)
6045 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6046 ACCESSORS(JSDate, hour, Object, kHourOffset)
6047 ACCESSORS(JSDate, min, Object, kMinOffset)
6048 ACCESSORS(JSDate, sec, Object, kSecOffset)
6051 JSDate* JSDate::cast(Object* obj) {
6052 ASSERT(obj->IsJSDate());
6053 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
6054 return reinterpret_cast<JSDate*>(obj);
6058 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6059 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6060 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6061 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6062 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6063 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6066 JSMessageObject* JSMessageObject::cast(Object* obj) {
6067 ASSERT(obj->IsJSMessageObject());
6068 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
6069 return reinterpret_cast<JSMessageObject*>(obj);
6073 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6074 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6075 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6076 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6077 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6078 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6079 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6082 void Code::WipeOutHeader() {
6083 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6084 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6085 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6086 WRITE_FIELD(this, kConstantPoolOffset, NULL);
6087 // Do not wipe out e.g. a minor key.
6088 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6089 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6094 Object* Code::type_feedback_info() {
6095 ASSERT(kind() == FUNCTION);
6096 return raw_type_feedback_info();
6100 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6101 ASSERT(kind() == FUNCTION);
6102 set_raw_type_feedback_info(value, mode);
6103 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6104 value, mode);
6108 int Code::stub_info() {
6109 ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
6110 kind() == BINARY_OP_IC || kind() == LOAD_IC || kind() == CALL_IC);
6111 return Smi::cast(raw_type_feedback_info())->value();
6115 void Code::set_stub_info(int value) {
6116 ASSERT(kind() == COMPARE_IC ||
6117 kind() == COMPARE_NIL_IC ||
6118 kind() == BINARY_OP_IC ||
6120 kind() == LOAD_IC ||
6121 kind() == CALL_IC ||
6122 kind() == KEYED_LOAD_IC ||
6123 kind() == STORE_IC ||
6124 kind() == KEYED_STORE_IC);
6125 set_raw_type_feedback_info(Smi::FromInt(value));
6129 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6130 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
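// The instruction stream is stored inline, directly after the fixed-size Code
// header (kHeaderSize); relocation info lives in a separate ByteArray, and
// body_size() rounds the instruction area up to object alignment.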
6133 byte* Code::instruction_start() {
6134 return FIELD_ADDR(this, kHeaderSize);
6138 byte* Code::instruction_end() {
6139 return instruction_start() + instruction_size();
6143 int Code::body_size() {
6144 return RoundUp(instruction_size(), kObjectAlignment);
6148 ByteArray* Code::unchecked_relocation_info() {
6149 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6153 byte* Code::relocation_start() {
6154 return unchecked_relocation_info()->GetDataStartAddress();
6158 int Code::relocation_size() {
6159 return unchecked_relocation_info()->length();
6163 byte* Code::entry() {
6164 return instruction_start();
6168 bool Code::contains(byte* inner_pointer) {
6169 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6173 ACCESSORS(JSArray, length, Object, kLengthOffset)
6176 void* JSArrayBuffer::backing_store() {
6177 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6178 return reinterpret_cast<void*>(ptr);
6182 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6183 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6184 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6188 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6189 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6192 bool JSArrayBuffer::is_external() {
6193 return BooleanBit::get(flag(), kIsExternalBit);
6197 void JSArrayBuffer::set_is_external(bool value) {
6198 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6202 bool JSArrayBuffer::should_be_freed() {
6203 return BooleanBit::get(flag(), kShouldBeFreed);
6207 void JSArrayBuffer::set_should_be_freed(bool value) {
6208 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6212 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6213 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6216 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6217 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6218 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6219 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6220 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
6222 ACCESSORS(JSRegExp, data, Object, kDataOffset)
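// The JSRegExp data field is either undefined (not yet compiled) or a
// FixedArray whose fixed slots hold the tag (kTagIndex), the source pattern
// (kSourceIndex), the flags (kFlagsIndex), and implementation-specific
// entries from kDataIndex onward, as the helpers below assume.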
6225 JSRegExp::Type JSRegExp::TypeTag() {
6226 Object* data = this->data();
6227 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6228 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6229 return static_cast<JSRegExp::Type>(smi->value());
6233 int JSRegExp::CaptureCount() {
6234 switch (TypeTag()) {
6235 case ATOM:
6236 return 0;
6237 case IRREGEXP:
6238 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6246 JSRegExp::Flags JSRegExp::GetFlags() {
6247 ASSERT(this->data()->IsFixedArray());
6248 Object* data = this->data();
6249 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6250 return Flags(smi->value());
6254 String* JSRegExp::Pattern() {
6255 ASSERT(this->data()->IsFixedArray());
6256 Object* data = this->data();
6257 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6258 return pattern;
6262 Object* JSRegExp::DataAt(int index) {
6263 ASSERT(TypeTag() != NOT_COMPILED);
6264 return FixedArray::cast(data())->get(index);
6268 void JSRegExp::SetDataAt(int index, Object* value) {
6269 ASSERT(TypeTag() != NOT_COMPILED);
6270 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
6271 FixedArray::cast(data())->set(index, value);
6275 ElementsKind JSObject::GetElementsKind() {
6276 ElementsKind kind = map()->elements_kind();
6278 FixedArrayBase* fixed_array =
6279 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6281 // If a GC was caused while constructing this object, the elements
6282 // pointer may point to a one pointer filler map.
6283 if (ElementsAreSafeToExamine()) {
6284 Map* map = fixed_array->map();
6285 ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
6286 (map == GetHeap()->fixed_array_map() ||
6287 map == GetHeap()->fixed_cow_array_map())) ||
6288 (IsFastDoubleElementsKind(kind) &&
6289 (fixed_array->IsFixedDoubleArray() ||
6290 fixed_array == GetHeap()->empty_fixed_array())) ||
6291 (kind == DICTIONARY_ELEMENTS &&
6292 fixed_array->IsFixedArray() &&
6293 fixed_array->IsDictionary()) ||
6294 (kind > DICTIONARY_ELEMENTS));
6295 ASSERT((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6296 (elements()->IsFixedArray() && elements()->length() >= 2));
6303 ElementsAccessor* JSObject::GetElementsAccessor() {
6304 return ElementsAccessor::ForKind(GetElementsKind());
6308 bool JSObject::HasFastObjectElements() {
6309 return IsFastObjectElementsKind(GetElementsKind());
6313 bool JSObject::HasFastSmiElements() {
6314 return IsFastSmiElementsKind(GetElementsKind());
6318 bool JSObject::HasFastSmiOrObjectElements() {
6319 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6323 bool JSObject::HasFastDoubleElements() {
6324 return IsFastDoubleElementsKind(GetElementsKind());
6328 bool JSObject::HasFastHoleyElements() {
6329 return IsFastHoleyElementsKind(GetElementsKind());
6333 bool JSObject::HasFastElements() {
6334 return IsFastElementsKind(GetElementsKind());
6338 bool JSObject::HasDictionaryElements() {
6339 return GetElementsKind() == DICTIONARY_ELEMENTS;
6343 bool JSObject::HasSloppyArgumentsElements() {
6344 return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
6348 bool JSObject::HasExternalArrayElements() {
6349 HeapObject* array = elements();
6350 ASSERT(array != NULL);
6351 return array->IsExternalArray();
6355 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6356 bool JSObject::HasExternal##Type##Elements() { \
6357 HeapObject* array = elements(); \
6358 ASSERT(array != NULL); \
6359 if (!array->IsHeapObject()) \
6360 return false; \
6361 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6364 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6366 #undef EXTERNAL_ELEMENTS_CHECK
6369 bool JSObject::HasFixedTypedArrayElements() {
6370 HeapObject* array = elements();
6371 ASSERT(array != NULL);
6372 return array->IsFixedTypedArrayBase();
6376 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6377 bool JSObject::HasFixed##Type##Elements() { \
6378 HeapObject* array = elements(); \
6379 ASSERT(array != NULL); \
6380 if (!array->IsHeapObject()) \
6381 return false; \
6382 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6385 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6387 #undef FIXED_TYPED_ELEMENTS_CHECK
6390 bool JSObject::HasNamedInterceptor() {
6391 return map()->has_named_interceptor();
6395 bool JSObject::HasIndexedInterceptor() {
6396 return map()->has_indexed_interceptor();
6400 NameDictionary* JSObject::property_dictionary() {
6401 ASSERT(!HasFastProperties());
6402 return NameDictionary::cast(properties());
6406 SeededNumberDictionary* JSObject::element_dictionary() {
6407 ASSERT(HasDictionaryElements());
6408 return SeededNumberDictionary::cast(elements());
6412 Handle<JSSetIterator> JSSetIterator::Create(
6413 Handle<OrderedHashSet> table,
6415 return CreateInternal(table->GetIsolate()->set_iterator_map(), table, kind);
6419 Handle<JSMapIterator> JSMapIterator::Create(
6420 Handle<OrderedHashMap> table,
6422 return CreateInternal(table->GetIsolate()->map_iterator_map(), table, kind);
6426 bool Name::IsHashFieldComputed(uint32_t field) {
6427 return (field & kHashNotComputedMask) == 0;
6431 bool Name::HasHashCode() {
6432 return IsHashFieldComputed(hash_field());
6436 uint32_t Name::Hash() {
6437 // Fast case: has hash code already been computed?
6438 uint32_t field = hash_field();
6439 if (IsHashFieldComputed(field)) return field >> kHashShift;
6440 // Slow case: compute hash code and set it. Has to be a string.
6441 return String::cast(this)->ComputeAndSetHash();
6445 StringHasher::StringHasher(int length, uint32_t seed)
6446 : length_(length),
6447 raw_running_hash_(seed),
6448 array_index_(0),
6449 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6450 is_first_char_(true) {
6451 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
6455 bool StringHasher::has_trivial_hash() {
6456 return length_ > String::kMaxHashCalcLength;
6460 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6461 running_hash += c;
6462 running_hash += (running_hash << 10);
6463 running_hash ^= (running_hash >> 6);
6464 return running_hash;
6468 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6469 running_hash += (running_hash << 3);
6470 running_hash ^= (running_hash >> 11);
6471 running_hash += (running_hash << 15);
6472 if ((running_hash & String::kHashBitMask) == 0) {
6473 return kZeroHash;
6474 }
6475 return running_hash;
6479 void StringHasher::AddCharacter(uint16_t c) {
6480 // Use the Jenkins one-at-a-time hash function to update the hash
6481 // for the given character.
6482 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
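// UpdateIndex incrementally parses the string as a decimal array index while
// it is being hashed. The 429496729U bound is roughly 2^32 / 10, so the
// check below guarantees that array_index_ * 10 + d cannot overflow 32 bits;
// the (d + 2) >> 3 term tightens the bound by one for digits >= 6.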
6486 bool StringHasher::UpdateIndex(uint16_t c) {
6487 ASSERT(is_array_index_);
6488 if (c < '0' || c > '9') {
6489 is_array_index_ = false;
6490 return false;
6491 }
6492 int d = c - '0';
6493 if (is_first_char_) {
6494 is_first_char_ = false;
6495 if (c == '0' && length_ > 1) {
6496 is_array_index_ = false;
6497 return false;
6498 }
6499 }
6500 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6501 is_array_index_ = false;
6502 return false;
6503 }
6504 array_index_ = array_index_ * 10 + d;
6505 return true;
6509 template<typename Char>
6510 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6511 ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
6512 int i = 0;
6513 if (is_array_index_) {
6514 for (; i < length; i++) {
6515 AddCharacter(chars[i]);
6516 if (!UpdateIndex(chars[i])) {
6522 for (; i < length; i++) {
6523 ASSERT(!is_array_index_);
6524 AddCharacter(chars[i]);
6529 template <typename schar>
6530 uint32_t StringHasher::HashSequentialString(const schar* chars,
6531 int length,
6532 uint32_t seed) {
6533 StringHasher hasher(length, seed);
6534 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6535 return hasher.GetHashField();
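// The hash field caches both the hash value and whether the name is a valid
// array index; kIsNotArrayIndexMask lets AsArrayIndex fail fast without
// reparsing the string.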
6539 bool Name::AsArrayIndex(uint32_t* index) {
6540 return IsString() && String::cast(this)->AsArrayIndex(index);
6544 bool String::AsArrayIndex(uint32_t* index) {
6545 uint32_t field = hash_field();
6546 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6547 return false;
6548 }
6549 return SlowAsArrayIndex(index);
6553 Object* JSReceiver::GetPrototype() {
6554 return map()->prototype();
6558 Object* JSReceiver::GetConstructor() {
6559 return map()->constructor();
6563 bool JSReceiver::HasProperty(Handle<JSReceiver> object,
6564 Handle<Name> name) {
6565 if (object->IsJSProxy()) {
6566 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6567 return JSProxy::HasPropertyWithHandler(proxy, name);
6569 return GetPropertyAttribute(object, name) != ABSENT;
6573 bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
6574 Handle<Name> name) {
6575 if (object->IsJSProxy()) {
6576 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6577 return JSProxy::HasPropertyWithHandler(proxy, name);
6579 return GetLocalPropertyAttribute(object, name) != ABSENT;
6583 PropertyAttributes JSReceiver::GetPropertyAttribute(Handle<JSReceiver> object,
6584 Handle<Name> key) {
6585 uint32_t index;
6586 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6587 return GetElementAttribute(object, index);
6589 return GetPropertyAttributeWithReceiver(object, object, key);
6593 PropertyAttributes JSReceiver::GetElementAttribute(Handle<JSReceiver> object,
6594 uint32_t index) {
6595 if (object->IsJSProxy()) {
6596 return JSProxy::GetElementAttributeWithHandler(
6597 Handle<JSProxy>::cast(object), object, index);
6599 return JSObject::GetElementAttributeWithReceiver(
6600 Handle<JSObject>::cast(object), object, index, true);
6604 bool JSGlobalObject::IsDetached() {
6605 return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
6609 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
6610 return GetPrototype() != global;
6614 Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6615 return object->IsJSProxy()
6616 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6617 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6621 Object* JSReceiver::GetIdentityHash() {
6622 return IsJSProxy()
6623 ? JSProxy::cast(this)->GetIdentityHash()
6624 : JSObject::cast(this)->GetIdentityHash();
6628 bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6629 if (object->IsJSProxy()) {
6630 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6631 return JSProxy::HasElementWithHandler(proxy, index);
6633 return JSObject::GetElementAttributeWithReceiver(
6634 Handle<JSObject>::cast(object), object, index, true) != ABSENT;
6638 bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
6639 if (object->IsJSProxy()) {
6640 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6641 return JSProxy::HasElementWithHandler(proxy, index);
6643 return JSObject::GetElementAttributeWithReceiver(
6644 Handle<JSObject>::cast(object), object, index, false) != ABSENT;
6648 PropertyAttributes JSReceiver::GetLocalElementAttribute(
6649 Handle<JSReceiver> object, uint32_t index) {
6650 if (object->IsJSProxy()) {
6651 return JSProxy::GetElementAttributeWithHandler(
6652 Handle<JSProxy>::cast(object), object, index);
6654 return JSObject::GetElementAttributeWithReceiver(
6655 Handle<JSObject>::cast(object), object, index, false);
6659 bool AccessorInfo::all_can_read() {
6660 return BooleanBit::get(flag(), kAllCanReadBit);
6664 void AccessorInfo::set_all_can_read(bool value) {
6665 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6669 bool AccessorInfo::all_can_write() {
6670 return BooleanBit::get(flag(), kAllCanWriteBit);
6674 void AccessorInfo::set_all_can_write(bool value) {
6675 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6679 bool AccessorInfo::prohibits_overwriting() {
6680 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
6684 void AccessorInfo::set_prohibits_overwriting(bool value) {
6685 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
6689 PropertyAttributes AccessorInfo::property_attributes() {
6690 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6694 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6695 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6699 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6700 Object* function_template = expected_receiver_type();
6701 if (!function_template->IsFunctionTemplateInfo()) return true;
6702 return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
6706 void AccessorPair::set_access_flags(v8::AccessControl access_control) {
6707 int current = access_flags()->value();
6708 current = BooleanBit::set(current,
6709 kProhibitsOverwritingBit,
6710 access_control & PROHIBITS_OVERWRITING);
6711 current = BooleanBit::set(current,
6712 kAllCanReadBit,
6713 access_control & ALL_CAN_READ);
6714 current = BooleanBit::set(current,
6715 kAllCanWriteBit,
6716 access_control & ALL_CAN_WRITE);
6717 set_access_flags(Smi::FromInt(current));
6721 bool AccessorPair::all_can_read() {
6722 return BooleanBit::get(access_flags(), kAllCanReadBit);
6726 bool AccessorPair::all_can_write() {
6727 return BooleanBit::get(access_flags(), kAllCanWriteBit);
6731 bool AccessorPair::prohibits_overwriting() {
6732 return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
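// Each dictionary entry occupies three consecutive FixedArray slots: key,
// value, and property details (stored as a Smi). The first SetEntry overload
// below writes empty details (PropertyDetails(Smi::FromInt(0))).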
6736 template<typename Derived, typename Shape, typename Key>
6737 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6738 Handle<Object> key,
6739 Handle<Object> value) {
6740 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6744 template<typename Derived, typename Shape, typename Key>
6745 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6746 Handle<Object> key,
6747 Handle<Object> value,
6748 PropertyDetails details) {
6749 ASSERT(!key->IsName() ||
6750 details.IsDeleted() ||
6751 details.dictionary_index() > 0);
6752 int index = DerivedHashTable::EntryToIndex(entry);
6753 DisallowHeapAllocation no_gc;
6754 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
6755 FixedArray::set(index, *key, mode);
6756 FixedArray::set(index+1, *value, mode);
6757 FixedArray::set(index+2, details.AsSmi());
6761 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6762 ASSERT(other->IsNumber());
6763 return key == static_cast<uint32_t>(other->Number());
6767 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
6768 return ComputeIntegerHash(key, 0);
6772 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6773 Object* other) {
6774 ASSERT(other->IsNumber());
6775 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
6779 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
6780 return ComputeIntegerHash(key, seed);
6784 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
6785 uint32_t seed,
6786 Object* other) {
6787 ASSERT(other->IsNumber());
6788 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6792 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
6793 return isolate->factory()->NewNumberFromUint(key);
6797 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
6798 // We know that all entries in a hash table had their hash keys created.
6799 // Use that knowledge to have fast failure.
6800 if (key->Hash() != Name::cast(other)->Hash()) return false;
6801 return key->Equals(Name::cast(other));
6805 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
6806 return key->Hash();
6810 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
6811 return Name::cast(other)->Hash();
6815 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
6816 Handle<Name> key) {
6817 ASSERT(key->IsUniqueName());
6818 return key;
6822 void NameDictionary::DoGenerateNewEnumerationIndices(
6823 Handle<NameDictionary> dictionary) {
6824 DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
6828 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
6829 return key->SameValue(other);
6833 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
6834 return Smi::cast(key->GetHash())->value();
6838 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
6840 return Smi::cast(other->GetHash())->value();
6844 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
6845 Handle<Object> key) {
6846 return key;
6850 Handle<ObjectHashTable> ObjectHashTable::Shrink(
6851 Handle<ObjectHashTable> table, Handle<Object> key) {
6852 return DerivedHashTable::Shrink(table, key);
6856 template <int entrysize>
6857 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
6858 return key->SameValue(other);
6862 template <int entrysize>
6863 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
6864 intptr_t hash = reinterpret_cast<intptr_t>(*key);
6865 return (uint32_t)(hash & 0xFFFFFFFF);
6869 template <int entrysize>
6870 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
6872 intptr_t hash = reinterpret_cast<intptr_t>(other);
6873 return (uint32_t)(hash & 0xFFFFFFFF);
6877 template <int entrysize>
6878 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
6879 Handle<Object> key) {
6880 return key;
6884 void Map::ClearCodeCache(Heap* heap) {
6885 // No write barrier is needed since empty_fixed_array is not in new space.
6886 // Please note this function is used during marking:
6887 // - MarkCompactCollector::MarkUnmarkedObject
6888 // - IncrementalMarking::Step
6889 ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
6890 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6894 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
6895 ASSERT(array->HasFastSmiOrObjectElements());
6896 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
6897 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6898 if (elts->length() < required_size) {
6899 // Doubling in size would be overkill, but leave some slack to avoid
6900 // constantly growing.
6901 Expand(array, required_size + (required_size >> 3));
6902 // It's a performance benefit to keep a frequently used array in new-space.
6903 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6904 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6905 // Expand will allocate a new backing store in new space even if the size
6906 // we asked for isn't larger than what we had before.
6907 Expand(array, required_size);
6912 void JSArray::set_length(Smi* length) {
6913 // Don't need a write barrier for a Smi.
6914 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
6918 bool JSArray::AllowsSetElementsLength() {
6919 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6920 ASSERT(result == !HasExternalArrayElements());
6921 return result;
6925 void JSArray::SetContent(Handle<JSArray> array,
6926 Handle<FixedArrayBase> storage) {
6927 EnsureCanContainElements(array, storage, storage->length(),
6928 ALLOW_COPIED_DOUBLE_ELEMENTS);
6930 ASSERT((storage->map() == array->GetHeap()->fixed_double_array_map() &&
6931 IsFastDoubleElementsKind(array->GetElementsKind())) ||
6932 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
6933 (IsFastObjectElementsKind(array->GetElementsKind()) ||
6934 (IsFastSmiElementsKind(array->GetElementsKind()) &&
6935 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
6936 array->set_elements(*storage);
6937 array->set_length(Smi::FromInt(storage->length()));
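// Type feedback uses distinguished sentinel values: symbols for the
// uninitialized and megamorphic states, and a Smi-encoded ElementsKind for
// the monomorphic-array state, as the helpers below show.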
6941 Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
6942 return isolate->factory()->uninitialized_symbol();
6946 Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
6947 return isolate->factory()->megamorphic_symbol();
6951 Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
6952 ElementsKind elements_kind) {
6953 return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
6957 Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
6958 return heap->uninitialized_symbol();
6962 int TypeFeedbackInfo::ic_total_count() {
6963 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6964 return ICTotalCountField::decode(current);
6968 void TypeFeedbackInfo::set_ic_total_count(int count) {
6969 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6970 value = ICTotalCountField::update(value,
6971 ICTotalCountField::decode(count));
6972 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6976 int TypeFeedbackInfo::ic_with_type_info_count() {
6977 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6978 return ICsWithTypeInfoCountField::decode(current);
6982 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
6983 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6984 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
6985 // We can get negative count here when the type-feedback info is
6986 // shared between two code objects. This can only happen when
6987 // the debugger has made a shallow copy of the code object (see Heap::CopyCode).
6988 // Since we do not optimize when the debugger is active, we can skip
6989 // this counter update.
6990 if (new_count >= 0) {
6991 new_count &= ICsWithTypeInfoCountField::kMask;
6992 value = ICsWithTypeInfoCountField::update(value, new_count);
6993 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6998 void TypeFeedbackInfo::initialize_storage() {
6999 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7000 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7004 void TypeFeedbackInfo::change_own_type_change_checksum() {
7005 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7006 int checksum = OwnTypeChangeChecksum::decode(value);
7007 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7008 value = OwnTypeChangeChecksum::update(value, checksum);
7009 // Ensure packed bit field is in Smi range.
7010 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7011 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7012 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7016 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7017 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7018 int mask = (1 << kTypeChangeChecksumBits) - 1;
7019 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7020 // Ensure packed bit field is in Smi range.
7021 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7022 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7023 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7027 int TypeFeedbackInfo::own_type_change_checksum() {
7028 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7029 return OwnTypeChangeChecksum::decode(value);
7033 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7034 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7035 int mask = (1 << kTypeChangeChecksumBits) - 1;
7036 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7040 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
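// Relocatable objects register themselves in a per-isolate linked list
// (relocatable_top) for the duration of their scope: the constructor pushes
// and the destructor pops, as the ASSERT in the destructor checks.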
7043 Relocatable::Relocatable(Isolate* isolate) {
7044 isolate_ = isolate;
7045 prev_ = isolate->relocatable_top();
7046 isolate->set_relocatable_top(this);
7050 Relocatable::~Relocatable() {
7051 ASSERT_EQ(isolate_->relocatable_top(), this);
7052 isolate_->set_relocatable_top(prev_);
7056 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7057 return map->instance_size();
7061 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7062 v->VisitExternalReference(
7063 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7067 template<typename StaticVisitor>
7068 void Foreign::ForeignIterateBody() {
7069 StaticVisitor::VisitExternalReference(
7070 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7074 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
7075 typedef v8::String::ExternalAsciiStringResource Resource;
7076 v->VisitExternalAsciiString(
7077 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7081 template<typename StaticVisitor>
7082 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
7083 typedef v8::String::ExternalAsciiStringResource Resource;
7084 StaticVisitor::VisitExternalAsciiString(
7085 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7089 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7090 typedef v8::String::ExternalStringResource Resource;
7091 v->VisitExternalTwoByteString(
7092 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7096 template<typename StaticVisitor>
7097 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7098 typedef v8::String::ExternalStringResource Resource;
7099 StaticVisitor::VisitExternalTwoByteString(
7100 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7104 template<int start_offset, int end_offset, int size>
7105 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7106 HeapObject* obj,
7107 ObjectVisitor* v) {
7108 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7109 HeapObject::RawField(obj, end_offset));
7113 template<int start_offset>
7114 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7115 int object_size,
7116 ObjectVisitor* v) {
7117 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7118 HeapObject::RawField(obj, object_size));
7123 #undef CAST_ACCESSOR
7124 #undef INT_ACCESSORS
7126 #undef ACCESSORS_TO_SMI
7127 #undef SMI_ACCESSORS
7128 #undef SYNCHRONIZED_SMI_ACCESSORS
7129 #undef NOBARRIER_SMI_ACCESSORS
7131 #undef BOOL_ACCESSORS
7134 #undef NOBARRIER_READ_FIELD
7136 #undef NOBARRIER_WRITE_FIELD
7137 #undef WRITE_BARRIER
7138 #undef CONDITIONAL_WRITE_BARRIER
7139 #undef READ_DOUBLE_FIELD
7140 #undef WRITE_DOUBLE_FIELD
7141 #undef READ_INT_FIELD
7142 #undef WRITE_INT_FIELD
7143 #undef READ_INTPTR_FIELD
7144 #undef WRITE_INTPTR_FIELD
7145 #undef READ_UINT32_FIELD
7146 #undef WRITE_UINT32_FIELD
7147 #undef READ_SHORT_FIELD
7148 #undef WRITE_SHORT_FIELD
7149 #undef READ_BYTE_FIELD
7150 #undef WRITE_BYTE_FIELD
7151 #undef NOBARRIER_READ_BYTE_FIELD
7152 #undef NOBARRIER_WRITE_BYTE_FIELD
7154 } } // namespace v8::internal
7156 #endif // V8_OBJECTS_INL_H_