// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Review notes:
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts.h"
18 #include "src/conversions-inl.h"
19 #include "src/elements.h"
20 #include "src/factory.h"
21 #include "src/field-index-inl.h"
22 #include "src/heap/heap-inl.h"
23 #include "src/heap/heap.h"
24 #include "src/heap/incremental-marking.h"
25 #include "src/heap/objects-visiting.h"
26 #include "src/heap/spaces.h"
27 #include "src/heap/store-buffer.h"
28 #include "src/isolate.h"
29 #include "src/lookup.h"
30 #include "src/objects.h"
31 #include "src/property.h"
32 #include "src/prototype.h"
33 #include "src/transitions-inl.h"
34 #include "src/type-feedback-vector-inl.h"
35 #include "src/v8memory.h"
40 PropertyDetails::PropertyDetails(Smi* smi) {
41 value_ = smi->value();
45 Smi* PropertyDetails::AsSmi() const {
46 // Ensure the upper 2 bits have the same value by sign extending it. This is
47 // necessary to be able to use the 31st bit of the property details.
48 int value = value_ << 1;
49 return Smi::FromInt(value >> 1);
53 PropertyDetails PropertyDetails::AsDeleted() const {
54 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
55 return PropertyDetails(smi);
// Expands to a predicate that checks the heap object's exact instance type.
#define TYPE_CHECKER(type, instancetype)                                   \
  bool Object::Is##type() const {                                          \
    return Object::IsHeapObject() &&                                       \
           HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


// Expands to checked down-casts (const and non-const) for a heap type; the
// check is only performed in slow-DCHECK builds.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }


// Raw (untagged) int field accessors; no write barrier is needed because the
// value is not a heap reference.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// Tagged-pointer field accessors; the setter emits a conditional write
// barrier so the GC sees the stored reference.
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// Smis are immediates, so no write barrier is required.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }
// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)          \
  int holder::name() const {                         \
    Object* value = READ_FIELD(this, offset);        \
    return Smi::cast(value)->value();                \
  }                                                  \
  void holder::set_##name(int value) {               \
    WRITE_FIELD(this, offset, Smi::FromInt(value));  \
  }


// As SMI_ACCESSORS, but with acquire/release semantics for fields that are
// read and written concurrently.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


// As SMI_ACCESSORS, but using relaxed (no-barrier) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
// Read-only accessor for a single bit inside an integer field.
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


// Read/write accessors for a single bit inside an integer field.
#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
141 bool Object::IsFixedArrayBase() const {
142 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
143 IsFixedTypedArrayBase() || IsExternalArray();
147 // External objects are not extensible, so the map check is enough.
148 bool Object::IsExternal() const {
149 return Object::IsHeapObject() &&
150 HeapObject::cast(this)->map() ==
151 HeapObject::cast(this)->GetHeap()->external_map();
155 bool Object::IsAccessorInfo() const {
156 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
160 bool Object::IsSmi() const {
161 return HAS_SMI_TAG(this);
165 bool Object::IsHeapObject() const {
166 return Internals::HasHeapObjectTag(this);
170 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
171 TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
172 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
175 bool Object::IsString() const {
176 return Object::IsHeapObject()
177 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
181 bool Object::IsName() const {
182 return IsString() || IsSymbol();
186 bool Object::IsUniqueName() const {
187 return IsInternalizedString() || IsSymbol();
191 bool Object::IsSpecObject() const {
192 return Object::IsHeapObject()
193 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
197 bool Object::IsSpecFunction() const {
198 if (!Object::IsHeapObject()) return false;
199 InstanceType type = HeapObject::cast(this)->map()->instance_type();
200 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
204 bool Object::IsTemplateInfo() const {
205 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
209 bool Object::IsInternalizedString() const {
210 if (!this->IsHeapObject()) return false;
211 uint32_t type = HeapObject::cast(this)->map()->instance_type();
212 STATIC_ASSERT(kNotInternalizedTag != 0);
213 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
214 (kStringTag | kInternalizedTag);
218 bool Object::IsConsString() const {
219 if (!IsString()) return false;
220 return StringShape(String::cast(this)).IsCons();
224 bool Object::IsSlicedString() const {
225 if (!IsString()) return false;
226 return StringShape(String::cast(this)).IsSliced();
230 bool Object::IsSeqString() const {
231 if (!IsString()) return false;
232 return StringShape(String::cast(this)).IsSequential();
236 bool Object::IsSeqOneByteString() const {
237 if (!IsString()) return false;
238 return StringShape(String::cast(this)).IsSequential() &&
239 String::cast(this)->IsOneByteRepresentation();
243 bool Object::IsSeqTwoByteString() const {
244 if (!IsString()) return false;
245 return StringShape(String::cast(this)).IsSequential() &&
246 String::cast(this)->IsTwoByteRepresentation();
250 bool Object::IsExternalString() const {
251 if (!IsString()) return false;
252 return StringShape(String::cast(this)).IsExternal();
256 bool Object::IsExternalOneByteString() const {
257 if (!IsString()) return false;
258 return StringShape(String::cast(this)).IsExternal() &&
259 String::cast(this)->IsOneByteRepresentation();
263 bool Object::IsExternalTwoByteString() const {
264 if (!IsString()) return false;
265 return StringShape(String::cast(this)).IsExternal() &&
266 String::cast(this)->IsTwoByteRepresentation();
270 bool Object::HasValidElements() {
271 // Dictionary is covered under FixedArray.
272 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
273 IsFixedTypedArrayBase();
277 Handle<Object> Object::NewStorageFor(Isolate* isolate,
278 Handle<Object> object,
279 Representation representation) {
280 if (representation.IsSmi() && object->IsUninitialized()) {
281 return handle(Smi::FromInt(0), isolate);
283 if (!representation.IsDouble()) return object;
285 if (object->IsUninitialized()) {
287 } else if (object->IsMutableHeapNumber()) {
288 value = HeapNumber::cast(*object)->value();
290 value = object->Number();
292 return isolate->factory()->NewHeapNumber(value, MUTABLE);
296 Handle<Object> Object::WrapForRead(Isolate* isolate,
297 Handle<Object> object,
298 Representation representation) {
299 DCHECK(!object->IsUninitialized());
300 if (!representation.IsDouble()) {
301 DCHECK(object->FitsRepresentation(representation));
304 return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
308 StringShape::StringShape(const String* str)
309 : type_(str->map()->instance_type()) {
311 DCHECK((type_ & kIsNotStringMask) == kStringTag);
315 StringShape::StringShape(Map* map)
316 : type_(map->instance_type()) {
318 DCHECK((type_ & kIsNotStringMask) == kStringTag);
322 StringShape::StringShape(InstanceType t)
323 : type_(static_cast<uint32_t>(t)) {
325 DCHECK((type_ & kIsNotStringMask) == kStringTag);
329 bool StringShape::IsInternalized() {
331 STATIC_ASSERT(kNotInternalizedTag != 0);
332 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
333 (kStringTag | kInternalizedTag);
337 bool String::IsOneByteRepresentation() const {
338 uint32_t type = map()->instance_type();
339 return (type & kStringEncodingMask) == kOneByteStringTag;
343 bool String::IsTwoByteRepresentation() const {
344 uint32_t type = map()->instance_type();
345 return (type & kStringEncodingMask) == kTwoByteStringTag;
349 bool String::IsOneByteRepresentationUnderneath() {
350 uint32_t type = map()->instance_type();
351 STATIC_ASSERT(kIsIndirectStringTag != 0);
352 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
354 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
355 case kOneByteStringTag:
357 case kTwoByteStringTag:
359 default: // Cons or sliced string. Need to go deeper.
360 return GetUnderlying()->IsOneByteRepresentation();
365 bool String::IsTwoByteRepresentationUnderneath() {
366 uint32_t type = map()->instance_type();
367 STATIC_ASSERT(kIsIndirectStringTag != 0);
368 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
370 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
371 case kOneByteStringTag:
373 case kTwoByteStringTag:
375 default: // Cons or sliced string. Need to go deeper.
376 return GetUnderlying()->IsTwoByteRepresentation();
381 bool String::HasOnlyOneByteChars() {
382 uint32_t type = map()->instance_type();
383 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
384 IsOneByteRepresentation();
388 bool StringShape::IsCons() {
389 return (type_ & kStringRepresentationMask) == kConsStringTag;
393 bool StringShape::IsSliced() {
394 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
398 bool StringShape::IsIndirect() {
399 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
403 bool StringShape::IsExternal() {
404 return (type_ & kStringRepresentationMask) == kExternalStringTag;
408 bool StringShape::IsSequential() {
409 return (type_ & kStringRepresentationMask) == kSeqStringTag;
413 StringRepresentationTag StringShape::representation_tag() {
414 uint32_t tag = (type_ & kStringRepresentationMask);
415 return static_cast<StringRepresentationTag>(tag);
419 uint32_t StringShape::encoding_tag() {
420 return type_ & kStringEncodingMask;
424 uint32_t StringShape::full_representation_tag() {
425 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
429 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
430 Internals::kFullStringRepresentationMask);
432 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
433 Internals::kStringEncodingMask);
436 bool StringShape::IsSequentialOneByte() {
437 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
441 bool StringShape::IsSequentialTwoByte() {
442 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
446 bool StringShape::IsExternalOneByte() {
447 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
451 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
452 Internals::kExternalOneByteRepresentationTag);
454 STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);
457 bool StringShape::IsExternalTwoByte() {
458 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
462 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
463 Internals::kExternalTwoByteRepresentationTag);
465 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
467 uc32 FlatStringReader::Get(int index) {
468 DCHECK(0 <= index && index <= length_);
470 return static_cast<const byte*>(start_)[index];
472 return static_cast<const uc16*>(start_)[index];
477 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
478 return key->AsHandle(isolate);
482 Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
483 return key->AsHandle(isolate);
487 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
489 return key->AsHandle(isolate);
493 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
495 return key->AsHandle(isolate);
498 template <typename Char>
499 class SequentialStringKey : public HashTableKey {
501 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
502 : string_(string), hash_field_(0), seed_(seed) { }
504 virtual uint32_t Hash() OVERRIDE {
505 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
509 uint32_t result = hash_field_ >> String::kHashShift;
510 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
515 virtual uint32_t HashForObject(Object* other) OVERRIDE {
516 return String::cast(other)->Hash();
519 Vector<const Char> string_;
520 uint32_t hash_field_;
525 class OneByteStringKey : public SequentialStringKey<uint8_t> {
527 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
528 : SequentialStringKey<uint8_t>(str, seed) { }
530 virtual bool IsMatch(Object* string) OVERRIDE {
531 return String::cast(string)->IsOneByteEqualTo(string_);
534 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
538 class SeqOneByteSubStringKey : public HashTableKey {
540 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
541 : string_(string), from_(from), length_(length) {
542 DCHECK(string_->IsSeqOneByteString());
545 virtual uint32_t Hash() OVERRIDE {
546 DCHECK(length_ >= 0);
547 DCHECK(from_ + length_ <= string_->length());
548 const uint8_t* chars = string_->GetChars() + from_;
549 hash_field_ = StringHasher::HashSequentialString(
550 chars, length_, string_->GetHeap()->HashSeed());
551 uint32_t result = hash_field_ >> String::kHashShift;
552 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
556 virtual uint32_t HashForObject(Object* other) OVERRIDE {
557 return String::cast(other)->Hash();
560 virtual bool IsMatch(Object* string) OVERRIDE;
561 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
564 Handle<SeqOneByteString> string_;
567 uint32_t hash_field_;
571 class TwoByteStringKey : public SequentialStringKey<uc16> {
573 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
574 : SequentialStringKey<uc16>(str, seed) { }
576 virtual bool IsMatch(Object* string) OVERRIDE {
577 return String::cast(string)->IsTwoByteEqualTo(string_);
580 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
584 // Utf8StringKey carries a vector of chars as key.
585 class Utf8StringKey : public HashTableKey {
587 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
588 : string_(string), hash_field_(0), seed_(seed) { }
590 virtual bool IsMatch(Object* string) OVERRIDE {
591 return String::cast(string)->IsUtf8EqualTo(string_);
594 virtual uint32_t Hash() OVERRIDE {
595 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
596 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
597 uint32_t result = hash_field_ >> String::kHashShift;
598 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
602 virtual uint32_t HashForObject(Object* other) OVERRIDE {
603 return String::cast(other)->Hash();
606 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
607 if (hash_field_ == 0) Hash();
608 return isolate->factory()->NewInternalizedStringFromUtf8(
609 string_, chars_, hash_field_);
612 Vector<const char> string_;
613 uint32_t hash_field_;
614 int chars_; // Caches the number of characters when computing the hash code.
619 bool Object::IsNumber() const {
620 return IsSmi() || IsHeapNumber();
624 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
625 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
628 bool Object::IsFiller() const {
629 if (!Object::IsHeapObject()) return false;
630 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
631 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
635 bool Object::IsExternalArray() const {
636 if (!Object::IsHeapObject())
638 InstanceType instance_type =
639 HeapObject::cast(this)->map()->instance_type();
640 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
641 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
645 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
646 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
647 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
649 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
650 #undef TYPED_ARRAY_TYPE_CHECKER
653 bool Object::IsFixedTypedArrayBase() const {
654 if (!Object::IsHeapObject()) return false;
656 InstanceType instance_type =
657 HeapObject::cast(this)->map()->instance_type();
658 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
659 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
663 bool Object::IsJSReceiver() const {
664 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
665 return IsHeapObject() &&
666 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
670 bool Object::IsJSObject() const {
671 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
672 return IsHeapObject() &&
673 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
677 bool Object::IsJSProxy() const {
678 if (!Object::IsHeapObject()) return false;
679 return HeapObject::cast(this)->map()->IsJSProxyMap();
683 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
684 TYPE_CHECKER(JSSet, JS_SET_TYPE)
685 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
686 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
687 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
688 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
689 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
690 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
691 TYPE_CHECKER(Map, MAP_TYPE)
692 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
693 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
694 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
697 bool Object::IsJSWeakCollection() const {
698 return IsJSWeakMap() || IsJSWeakSet();
702 bool Object::IsDescriptorArray() const {
703 return IsFixedArray();
707 bool Object::IsTransitionArray() const {
708 return IsFixedArray();
712 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
715 bool Object::IsDeoptimizationInputData() const {
716 // Must be a fixed array.
717 if (!IsFixedArray()) return false;
719 // There's no sure way to detect the difference between a fixed array and
720 // a deoptimization data array. Since this is used for asserts we can
721 // check that the length is zero or else the fixed size plus a multiple of
723 int length = FixedArray::cast(this)->length();
724 if (length == 0) return true;
726 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
727 return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
731 bool Object::IsDeoptimizationOutputData() const {
732 if (!IsFixedArray()) return false;
733 // There's actually no way to see the difference between a fixed array and
734 // a deoptimization data array. Since this is used for asserts we can check
735 // that the length is plausible though.
736 if (FixedArray::cast(this)->length() % 2 != 0) return false;
741 bool Object::IsDependentCode() const {
742 if (!IsFixedArray()) return false;
743 // There's actually no way to see the difference between a fixed array and
744 // a dependent codes array.
749 bool Object::IsContext() const {
750 if (!Object::IsHeapObject()) return false;
751 Map* map = HeapObject::cast(this)->map();
752 Heap* heap = map->GetHeap();
753 return (map == heap->function_context_map() ||
754 map == heap->catch_context_map() ||
755 map == heap->with_context_map() ||
756 map == heap->native_context_map() ||
757 map == heap->block_context_map() ||
758 map == heap->module_context_map() ||
759 map == heap->global_context_map());
763 bool Object::IsNativeContext() const {
764 return Object::IsHeapObject() &&
765 HeapObject::cast(this)->map() ==
766 HeapObject::cast(this)->GetHeap()->native_context_map();
770 bool Object::IsScopeInfo() const {
771 return Object::IsHeapObject() &&
772 HeapObject::cast(this)->map() ==
773 HeapObject::cast(this)->GetHeap()->scope_info_map();
777 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
780 template <> inline bool Is<JSFunction>(Object* obj) {
781 return obj->IsJSFunction();
785 TYPE_CHECKER(Code, CODE_TYPE)
786 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
787 TYPE_CHECKER(Cell, CELL_TYPE)
788 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
789 TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
790 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
791 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
792 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
793 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
794 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
795 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
798 bool Object::IsStringWrapper() const {
799 return IsJSValue() && JSValue::cast(this)->value()->IsString();
803 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
806 bool Object::IsBoolean() const {
807 return IsOddball() &&
808 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
812 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
813 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
814 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
815 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
818 bool Object::IsJSArrayBufferView() const {
819 return IsJSDataView() || IsJSTypedArray();
823 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
826 template <> inline bool Is<JSArray>(Object* obj) {
827 return obj->IsJSArray();
831 bool Object::IsHashTable() const {
832 return Object::IsHeapObject() &&
833 HeapObject::cast(this)->map() ==
834 HeapObject::cast(this)->GetHeap()->hash_table_map();
838 bool Object::IsWeakHashTable() const {
839 return IsHashTable();
843 bool Object::IsDictionary() const {
844 return IsHashTable() &&
845 this != HeapObject::cast(this)->GetHeap()->string_table();
849 bool Object::IsNameDictionary() const {
850 return IsDictionary();
854 bool Object::IsSeededNumberDictionary() const {
855 return IsDictionary();
859 bool Object::IsUnseededNumberDictionary() const {
860 return IsDictionary();
864 bool Object::IsStringTable() const {
865 return IsHashTable();
869 bool Object::IsJSFunctionResultCache() const {
870 if (!IsFixedArray()) return false;
871 const FixedArray* self = FixedArray::cast(this);
872 int length = self->length();
873 if (length < JSFunctionResultCache::kEntriesIndex) return false;
874 if ((length - JSFunctionResultCache::kEntriesIndex)
875 % JSFunctionResultCache::kEntrySize != 0) {
879 if (FLAG_verify_heap) {
880 // TODO(svenpanne) We use const_cast here and below to break our dependency
881 // cycle between the predicates and the verifiers. This can be removed when
882 // the verifiers are const-correct, too.
883 reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
884 JSFunctionResultCacheVerify();
891 bool Object::IsNormalizedMapCache() const {
892 return NormalizedMapCache::IsNormalizedMapCache(this);
896 int NormalizedMapCache::GetIndex(Handle<Map> map) {
897 return map->Hash() % NormalizedMapCache::kEntries;
901 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
902 if (!obj->IsFixedArray()) return false;
903 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
907 if (FLAG_verify_heap) {
908 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
909 NormalizedMapCacheVerify();
916 bool Object::IsCompilationCacheTable() const {
917 return IsHashTable();
921 bool Object::IsCodeCacheHashTable() const {
922 return IsHashTable();
926 bool Object::IsPolymorphicCodeCacheHashTable() const {
927 return IsHashTable();
931 bool Object::IsMapCache() const {
932 return IsHashTable();
936 bool Object::IsObjectHashTable() const {
937 return IsHashTable();
941 bool Object::IsOrderedHashTable() const {
942 return IsHeapObject() &&
943 HeapObject::cast(this)->map() ==
944 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
948 bool Object::IsOrderedHashSet() const {
949 return IsOrderedHashTable();
953 bool Object::IsOrderedHashMap() const {
954 return IsOrderedHashTable();
958 bool Object::IsPrimitive() const {
959 return IsOddball() || IsNumber() || IsString();
963 bool Object::IsJSGlobalProxy() const {
964 bool result = IsHeapObject() &&
965 (HeapObject::cast(this)->map()->instance_type() ==
966 JS_GLOBAL_PROXY_TYPE);
968 HeapObject::cast(this)->map()->is_access_check_needed());
973 bool Object::IsGlobalObject() const {
974 if (!IsHeapObject()) return false;
976 InstanceType type = HeapObject::cast(this)->map()->instance_type();
977 return type == JS_GLOBAL_OBJECT_TYPE ||
978 type == JS_BUILTINS_OBJECT_TYPE;
982 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
983 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
986 bool Object::IsUndetectableObject() const {
987 return IsHeapObject()
988 && HeapObject::cast(this)->map()->is_undetectable();
992 bool Object::IsAccessCheckNeeded() const {
993 if (!IsHeapObject()) return false;
994 if (IsJSGlobalProxy()) {
995 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
996 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
997 return proxy->IsDetachedFrom(global);
999 return HeapObject::cast(this)->map()->is_access_check_needed();
1003 bool Object::IsStruct() const {
1004 if (!IsHeapObject()) return false;
1005 switch (HeapObject::cast(this)->map()->instance_type()) {
1006 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1007 STRUCT_LIST(MAKE_STRUCT_CASE)
1008 #undef MAKE_STRUCT_CASE
1009 default: return false;
1014 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1015 bool Object::Is##Name() const { \
1016 return Object::IsHeapObject() \
1017 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1019 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1020 #undef MAKE_STRUCT_PREDICATE
1023 bool Object::IsUndefined() const {
1024 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1028 bool Object::IsNull() const {
1029 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1033 bool Object::IsTheHole() const {
1034 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1038 bool Object::IsException() const {
1039 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1043 bool Object::IsUninitialized() const {
1044 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1048 bool Object::IsTrue() const {
1049 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1053 bool Object::IsFalse() const {
1054 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1058 bool Object::IsArgumentsMarker() const {
1059 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1063 double Object::Number() {
1066 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1067 : reinterpret_cast<HeapNumber*>(this)->value();
1071 bool Object::IsNaN() const {
1072 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1076 bool Object::IsMinusZero() const {
1077 return this->IsHeapNumber() &&
1078 i::IsMinusZero(HeapNumber::cast(this)->value());
1082 MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
1083 if (object->IsSmi()) return Handle<Smi>::cast(object);
1084 if (object->IsHeapNumber()) {
1085 double value = Handle<HeapNumber>::cast(object)->value();
1086 int int_value = FastD2I(value);
1087 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1088 return handle(Smi::FromInt(int_value), isolate);
1091 return Handle<Smi>();
1095 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1096 Handle<Object> object) {
1098 isolate, object, handle(isolate->context()->native_context(), isolate));
1102 bool Object::HasSpecificClassOf(String* name) {
1103 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1107 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1108 Handle<Name> name) {
1109 LookupIterator it(object, name);
1110 return GetProperty(&it);
1114 MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1115 Handle<Object> object,
1117 // GetElement can trigger a getter which can cause allocation.
1118 // This was not always the case. This DCHECK is here to catch
1119 // leftover incorrect uses.
1120 DCHECK(AllowHeapAllocation::IsAllowed());
1121 return Object::GetElementWithReceiver(isolate, object, object, index);
1125 Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
1126 Isolate* isolate, Handle<Object> receiver) {
1127 PrototypeIterator iter(isolate, receiver);
1128 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
1129 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
1130 return PrototypeIterator::GetCurrent(iter);
1134 return PrototypeIterator::GetCurrent(iter);
1138 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1139 Handle<Name> name) {
1141 Isolate* isolate = name->GetIsolate();
1142 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1143 return GetProperty(object, name);
1147 MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1148 Handle<Object> object,
1150 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1151 DCHECK(!str.is_null());
1153 uint32_t index; // Assert that the name is not an array index.
1154 DCHECK(!str->AsArrayIndex(&index));
1156 return GetProperty(object, str);
1160 MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
1161 Handle<Object> receiver,
1163 return GetPropertyWithHandler(
1164 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1168 MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
1169 Handle<JSReceiver> receiver,
1171 Handle<Object> value,
1172 StrictMode strict_mode) {
1173 Isolate* isolate = proxy->GetIsolate();
1174 Handle<String> name = isolate->factory()->Uint32ToString(index);
1175 return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
1179 Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
1181 Isolate* isolate = proxy->GetIsolate();
1182 Handle<String> name = isolate->factory()->Uint32ToString(index);
1183 return HasPropertyWithHandler(proxy, name);
// Address of a field inside a tagged heap object (kHeapObjectTag is
// subtracted because heap pointers carry the tag in their low bits).
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// Notifies incremental marking of the store and records old-to-new-space
// references in the store buffer.
#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
1233 #ifndef V8_TARGET_ARCH_MIPS
1234 #define READ_DOUBLE_FIELD(p, offset) \
1235 (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
1236 #else // V8_TARGET_ARCH_MIPS
1237 // Prevent gcc from using load-double (mips ldc1) on (possibly)
1238 // non-64-bit aligned HeapNumber::value.
1239 static inline double read_double_field(const void* p, int offset) {
1244 c.u[0] = (*reinterpret_cast<const uint32_t*>(
1245 FIELD_ADDR_CONST(p, offset)));
1246 c.u[1] = (*reinterpret_cast<const uint32_t*>(
1247 FIELD_ADDR_CONST(p, offset + 4)));
1250 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1251 #endif // V8_TARGET_ARCH_MIPS
1253 #ifndef V8_TARGET_ARCH_MIPS
1254 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1255 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1256 #else // V8_TARGET_ARCH_MIPS
1257 // Prevent gcc from using store-double (mips sdc1) on (possibly)
1258 // non-64-bit aligned HeapNumber::value.
1259 static inline void write_double_field(void* p, int offset,
1266 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1267 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1269 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1270 write_double_field(p, offset, value)
1271 #endif // V8_TARGET_ARCH_MIPS
1274 #define READ_INT_FIELD(p, offset) \
1275 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1277 #define WRITE_INT_FIELD(p, offset, value) \
1278 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1280 #define READ_INTPTR_FIELD(p, offset) \
1281 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1283 #define WRITE_INTPTR_FIELD(p, offset, value) \
1284 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1286 #define READ_UINT32_FIELD(p, offset) \
1287 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1289 #define WRITE_UINT32_FIELD(p, offset, value) \
1290 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1292 #define READ_INT32_FIELD(p, offset) \
1293 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1295 #define WRITE_INT32_FIELD(p, offset, value) \
1296 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1298 #define READ_INT64_FIELD(p, offset) \
1299 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1301 #define WRITE_INT64_FIELD(p, offset, value) \
1302 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1304 #define READ_SHORT_FIELD(p, offset) \
1305 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1307 #define WRITE_SHORT_FIELD(p, offset, value) \
1308 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1310 #define READ_BYTE_FIELD(p, offset) \
1311 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1313 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1314 static_cast<byte>(base::NoBarrier_Load( \
1315 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1317 #define WRITE_BYTE_FIELD(p, offset, value) \
1318 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1320 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1321 base::NoBarrier_Store( \
1322 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1323 static_cast<base::Atomic8>(value));
// Returns the address of a tagged pointer field as an Object** slot.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));


// Decodes the integer payload of a tagged Smi.
int Smi::value() const {
  return Internals::SmiValue(this);


// Encodes an int as a Smi; the value must fit the Smi range (asserted).
Smi* Smi::FromInt(int value) {
  DCHECK(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));


// Pointer-width variant of FromInt: shifts the value into the Smi payload
// bits and ors in the Smi tag.
Smi* Smi::FromIntptr(intptr_t value) {
  DCHECK(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);


// True when the value fits in the platform's Smi payload range; the
// DCHECK cross-checks Internals::IsValidSmi against kMinValue/kMaxValue.
bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
// A MapWord is the raw first word of a HeapObject: normally a tagged Map
// pointer, but during GC it may hold a forwarding address instead.
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);


// A forwarding address is distinguishable from a map pointer: with the
// heap-object tag stripped it looks like a Smi.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));


// Encodes the target object's untagged address as a forwarding map word.
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
// Debug helpers: validate that a field holds a sane pointer / a Smi.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());


// The owning heap is recovered from the MemoryChunk this object lives in.
Heap* HeapObject::GetHeap() const {
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);

Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();


Map* HeapObject::map() const {
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
  return map_word().ToMap();
// Stores the map and notifies incremental marking of the pointer write.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);


// Acquire-load counterpart of synchronized_set_map.
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();


// Release-store variant, for when another thread may concurrently read
// the map word via synchronized_map().
void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);


// Release-store of the map without informing incremental marking.
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
// Raw map word accessors. The plain variants use relaxed (no-barrier)
// atomics; the synchronized variants pair acquire loads with release
// stores.
MapWord HeapObject::map_word() const {
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));


MapWord HeapObject::synchronized_map_word() const {
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));


// Converts an untagged address into a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);


// Inverse of FromAddress: strips the tag to get the raw address.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;


int HeapObject::Size() {
  return SizeFromMap(map());
// True when the object's payload may hold non-pointer (raw) data that the
// GC must not treat as tagged pointers.
bool HeapObject::MayContainRawValues() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides the
    return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
  // The ConstantPoolArray contains heap pointers, but also raw values.
  if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
  return (type <= LAST_DATA_TYPE);


// Visits all tagged slots in the byte range [start, end).
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));


// Visits the single tagged slot at the given byte offset.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
// HeapNumber stores an IEEE double at kValueOffset.
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);


// Extracts the unbiased binary exponent from the double's exponent word.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;


// Nonzero iff the sign bit of the stored double is set.
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// Address of element 0, usable for raw scans over the backing store.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));


// Linear scan: true iff every element is a Smi or the hole sentinel.
bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
// Returns the elements backing store (FixedArray or FixedDoubleArray).
FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);


// Slow-mode-only consistency check of the elements backing store; a no-op
// unless ENABLE_SLOW_DCHECKS and --enable-slow-asserts are active.
void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
// Resets all AllocationSite fields to their zero/initial state. The
// dependent-code slot is initialized to the empty fixed array without a
// write barrier (it is a read-only root).
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);


// Marks this site dead for pretenuring purposes; must not already be one.
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  set_pretenure_decision(kZombie);
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  // Always track when pretenuring via call-new is on; otherwise only for
  // fast-Smi boilerplates (the initial kind that can still transition).
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  return DONT_TRACK_ALLOCATION_SITE;


// Transition variant: track only transitions that generalize away from a
// fast-Smi kind.
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  return DONT_TRACK_ALLOCATION_SITE;


// Which instance types can have allocation mementos attached.
inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  return type == JS_ARRAY_TYPE;


// Maps a site reason to the dependent-code group to deoptimize.
inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
      return DependentCode::kAllocationSiteTenuringChangedGroup;
      return DependentCode::kAllocationSiteTransitionChangedGroup;
  return DependentCode::kAllocationSiteTransitionChangedGroup;
// Packs the found-count into the pretenure_data Smi via
// MementoFoundCountBits; no write barrier needed for a Smi store.
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);


// Bumps the found counter; returns true exactly when the counter reaches
// kPretenureMinimumCreated. Zombie sites are ignored.
inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;


inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
// State machine step for the pretenuring decision. Only undecided/maybe-
// tenure states may transition; a high survival ratio moves toward tenure
// (committed only when the semi-space was at maximum capacity).
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        set_pretenure_decision(kMaybeTenure);
        set_pretenure_decision(kDontTenure);


// Digests the found/created memento counters gathered since the last GC
// into a (possibly new) pretenuring decision, then clears the counters.
// Returns whether dependent code must be deoptimized.
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);

  if (FLAG_trace_pretenuring_statistics) {
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
// Transitions the object's elements kind so arbitrary heap objects can be
// stored, preserving holeyness (FAST_HOLEY_ELEMENTS vs FAST_ELEMENTS).
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
      TransitionElementsKind(object, FAST_ELEMENTS);


// Scans `count` candidate values and computes the most general elements
// kind needed to hold them all, transitioning the object if required.
// Holes widen to the holey variant; non-Smi numbers may widen to double
// kinds (under ALLOW_CONVERTED_DOUBLE_ELEMENTS); other heap objects widen
// to object kinds.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  DisallowHeapAllocation no_allocation;
  DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
  bool is_holey = IsFastHoleyElementsKind(current_kind);
  // FAST_HOLEY_ELEMENTS is already the most general fast kind.
  if (current_kind == FAST_HOLEY_ELEMENTS) return;
  Heap* heap = object->GetHeap();
  Object* the_hole = heap->the_hole_value();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (current == the_hole) {
      target_kind = GetHoleyElementsKind(target_kind);
    } else if (!current->IsSmi()) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
        if (IsFastSmiElementsKind(target_kind)) {
          target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
          target_kind = FAST_DOUBLE_ELEMENTS;
      } else if (is_holey) {
        target_kind = FAST_HOLEY_ELEMENTS;
        target_kind = FAST_ELEMENTS;
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
// FixedArrayBase variant: dispatches on whether the source backing store
// is a FixedArray (delegates to the Object** overload) or a
// FixedDoubleArray (transitions to a double elements kind, holey if any
// hole is present).
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);


// Migrates the object to new_map and installs the new elements backing
// store; the DCHECKs assert that the store's concrete type matches the
// new map's elements kind.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
// Writes the elements pointer with a mode-dependent write barrier.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);


// The empty fixed array is never in new space, so no barrier is needed.
void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());


// Installs the map-specific initial (empty) elements backing store.
void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
// If this map has exactly one simple transition that adds a plain FIELD
// property (no special attributes) keyed by a string, returns that key;
// otherwise a null handle.
Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));


// Target map of the simple transition; only valid when
// ExpectedTransitionKey returned a non-null handle.
Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
  DCHECK(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));


// Looks up a transition by name and returns its target map, but only when
// the transition adds an attribute-free FIELD property; else null handle.
Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// The oddball kind is stored as a Smi at kKindOffset.
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();


// Smi store: no write barrier required.
void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));


Object* Cell::value() const {
  return READ_FIELD(this, kValueOffset);


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  DCHECK(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

// Raw (untyped) access to the cell's type field.
Object* PropertyCell::type_raw() const {
  return READ_FIELD(this, kTypeOffset);


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
1933 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
// Clears the cell by storing the Smi zero sentinel; only legal while the
// heap is in the mark-compact phase. Smi stores need no write barrier.
void WeakCell::clear() {
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));


// Installs the (strongly written) initial value with a full write barrier.
void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
1948 bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
1951 Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
// Writes the "next" link; the write barrier is applied only in
// UPDATE_WRITE_BARRIER mode.
void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
// Returns the fixed header size (in bytes) for this object's instance
// type; internal fields and in-object properties start after the header.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
      return JSValue::kSize;
      return JSDate::kSize;
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
      return JSSet::kSize;
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
// Number of embedder internal fields: total payload words minus the
// in-object properties, which also live past the header.
int JSObject::GetInternalFieldCount() {
  DCHECK(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();


// Byte offset of internal field `index`, relative to the object start.
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);


Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));


void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);


// Smi overload: skips the write barrier, which Smi stores never need.
void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  return properties()->get(index.outobject_array_index());


// Stores a fast property, routing to the in-object slot (with barrier) or
// the out-of-object properties array as appropriate.
void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
    properties()->set(index.outobject_array_index(), value);


int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);


Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);


// Writes an in-object property with a mode-dependent write barrier.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Fills the object's body: the first pre_allocated_property_fields slots
// get pre_allocated_value, the rest get filler_value. Both values must be
// old-space (or Smi), so no write barriers are needed (asserted).
void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
// Fast-mode objects keep properties in a plain array; dictionary-mode
// objects use a dictionary (must agree with the map's dictionary bit).
bool JSObject::HasFastProperties() {
  DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();


// Heuristic for when to leave fast mode: once all unused fields are spent
// and the number of out-of-object fields exceeds a store-mode-dependent
// limit. Prototype maps never count as "too many".
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, inobject_properties());
  int external = NumberOfFields() - inobject_properties();
  return external > limit;


// Fills every pointer-sized slot of a Struct body with undefined.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
// Attempts to interpret this object as an array index: non-negative Smis
// qualify, as do heap numbers that round-trip exactly through uint32.
bool Object::ToArrayIndex(uint32_t* index) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;


// True when this is a String-wrapping JSValue whose string is long enough
// to have a character at `index`.
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;


// Extra-checks-only validation of values returned from API callbacks.
void Object::VerifyApiCallResultType() {
#if ENABLE_EXTRA_CHECKS
    FATAL("API call returned invalid object");
#endif  // ENABLE_EXTRA_CHECKS
// Raw element read; bounds are only checked in slow-DCHECK builds.
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);


// Handlified read for use where GC may move the array afterwards.
Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());


bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();


// Smi store: no write barrier needed. COW arrays must not be mutated.
void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);


// General store with full write barrier. COW arrays must not be mutated.
void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
// The hole in a double array is a specific NaN bit pattern
// (kHoleNanInt64); it must be compared bitwise, since NaN != NaN.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return bit_cast<uint64_t, double>(value) == kHoleNanInt64;


inline double FixedDoubleArray::hole_nan_as_double() {
  return bit_cast<double, uint64_t>(kHoleNanInt64);


// The platform NaN used to canonicalize stored NaNs; asserted to be
// bitwise distinct from the hole NaN so holes stay unambiguous.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  DCHECK(bit_cast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
  DCHECK((bit_cast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
  return base::OS::nan_value();


// Reads element `index`; the caller must know it is not the hole.
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  DCHECK(!is_the_hole_nan(result));


// Raw 64-bit read of an element, hole pattern included.
int64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);


// Handlified read: holes surface as the_hole_value, numbers are boxed.
Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
  if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));


// Stores a double, canonicalizing NaNs so they cannot collide with the
// hole bit pattern.
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);


void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());


bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));


// Writes the hole into every element in [from, to).
void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
2322 void ConstantPoolArray::NumberOfEntries::increment(Type type) {
2323 DCHECK(type < NUMBER_OF_TYPES);
2324 element_counts_[type]++;
2328 int ConstantPoolArray::NumberOfEntries::equals(
2329 const ConstantPoolArray::NumberOfEntries& other) const {
2330 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2331 if (element_counts_[i] != other.element_counts_[i]) return false;
2337 bool ConstantPoolArray::NumberOfEntries::is_empty() const {
2338 return total_count() == 0;
2342 int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
2343 DCHECK(type < NUMBER_OF_TYPES);
2344 return element_counts_[type];
2348 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2350 DCHECK(type < NUMBER_OF_TYPES);
2351 for (int i = 0; i < type; i++) {
2352 base += element_counts_[i];
2358 int ConstantPoolArray::NumberOfEntries::total_count() const {
2360 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2361 count += element_counts_[i];
2367 int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2368 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2369 if (element_counts_[i] < min || element_counts_[i] > max) {
2377 int ConstantPoolArray::Iterator::next_index() {
2378 DCHECK(!is_finished());
2379 int ret = next_index_++;
2385 bool ConstantPoolArray::Iterator::is_finished() {
2386 return next_index_ > array_->last_index(type_, final_section_);
2390 void ConstantPoolArray::Iterator::update_section() {
2391 if (next_index_ > array_->last_index(type_, current_section_) &&
2392 current_section_ != final_section_) {
2393 DCHECK(final_section_ == EXTENDED_SECTION);
2394 current_section_ = EXTENDED_SECTION;
2395 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
2400 bool ConstantPoolArray::is_extended_layout() {
2401 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2402 return IsExtendedField::decode(small_layout_1);
2406 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2407 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
2411 int ConstantPoolArray::first_extended_section_index() {
2412 DCHECK(is_extended_layout());
2413 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2414 return TotalCountField::decode(small_layout_2);
2418 int ConstantPoolArray::get_extended_section_header_offset() {
2419 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
2423 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2424 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2425 return WeakObjectStateField::decode(small_layout_2);
2429 void ConstantPoolArray::set_weak_object_state(
2430 ConstantPoolArray::WeakObjectState state) {
2431 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2432 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2433 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2437 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2439 if (section == EXTENDED_SECTION) {
2440 DCHECK(is_extended_layout());
2441 index += first_extended_section_index();
2444 for (Type type_iter = FIRST_TYPE; type_iter < type;
2445 type_iter = next_type(type_iter)) {
2446 index += number_of_entries(type_iter, section);
2453 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2454 return first_index(type, section) + number_of_entries(type, section) - 1;
2458 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2459 if (section == SMALL_SECTION) {
2460 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2461 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2464 return Int64CountField::decode(small_layout_1);
2466 return CodePtrCountField::decode(small_layout_1);
2468 return HeapPtrCountField::decode(small_layout_1);
2470 return Int32CountField::decode(small_layout_2);
2476 DCHECK(section == EXTENDED_SECTION && is_extended_layout());
2477 int offset = get_extended_section_header_offset();
2480 offset += kExtendedInt64CountOffset;
2483 offset += kExtendedCodePtrCountOffset;
2486 offset += kExtendedHeapPtrCountOffset;
2489 offset += kExtendedInt32CountOffset;
2494 return READ_INT_FIELD(this, offset);
2499 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2500 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2501 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2502 (is_extended_layout() &&
2503 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2504 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
2508 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2509 LayoutSection section;
2510 if (is_extended_layout() && index >= first_extended_section_index()) {
2511 section = EXTENDED_SECTION;
2513 section = SMALL_SECTION;
2516 Type type = FIRST_TYPE;
2517 while (index > last_index(type, section)) {
2518 type = next_type(type);
2520 DCHECK(type <= LAST_TYPE);
2525 int64_t ConstantPoolArray::get_int64_entry(int index) {
2526 DCHECK(map() == GetHeap()->constant_pool_array_map());
2527 DCHECK(get_type(index) == INT64);
2528 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2532 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2533 STATIC_ASSERT(kDoubleSize == kInt64Size);
2534 DCHECK(map() == GetHeap()->constant_pool_array_map());
2535 DCHECK(get_type(index) == INT64);
2536 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2540 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2541 DCHECK(map() == GetHeap()->constant_pool_array_map());
2542 DCHECK(get_type(index) == CODE_PTR);
2543 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2547 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2548 DCHECK(map() == GetHeap()->constant_pool_array_map());
2549 DCHECK(get_type(index) == HEAP_PTR);
2550 return READ_FIELD(this, OffsetOfElementAt(index));
2554 int32_t ConstantPoolArray::get_int32_entry(int index) {
2555 DCHECK(map() == GetHeap()->constant_pool_array_map());
2556 DCHECK(get_type(index) == INT32);
2557 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2561 void ConstantPoolArray::set(int index, int64_t value) {
2562 DCHECK(map() == GetHeap()->constant_pool_array_map());
2563 DCHECK(get_type(index) == INT64);
2564 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2568 void ConstantPoolArray::set(int index, double value) {
2569 STATIC_ASSERT(kDoubleSize == kInt64Size);
2570 DCHECK(map() == GetHeap()->constant_pool_array_map());
2571 DCHECK(get_type(index) == INT64);
2572 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2576 void ConstantPoolArray::set(int index, Address value) {
2577 DCHECK(map() == GetHeap()->constant_pool_array_map());
2578 DCHECK(get_type(index) == CODE_PTR);
2579 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2583 void ConstantPoolArray::set(int index, Object* value) {
2584 DCHECK(map() == GetHeap()->constant_pool_array_map());
2585 DCHECK(!GetHeap()->InNewSpace(value));
2586 DCHECK(get_type(index) == HEAP_PTR);
2587 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2588 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2592 void ConstantPoolArray::set(int index, int32_t value) {
2593 DCHECK(map() == GetHeap()->constant_pool_array_map());
2594 DCHECK(get_type(index) == INT32);
2595 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2599 void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
2600 DCHECK(map() == GetHeap()->constant_pool_array_map());
2601 DCHECK(offset_is_type(offset, INT32));
2602 WRITE_INT32_FIELD(this, offset, value);
2606 void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
2607 DCHECK(map() == GetHeap()->constant_pool_array_map());
2608 DCHECK(offset_is_type(offset, INT64));
2609 WRITE_INT64_FIELD(this, offset, value);
2613 void ConstantPoolArray::set_at_offset(int offset, double value) {
2614 DCHECK(map() == GetHeap()->constant_pool_array_map());
2615 DCHECK(offset_is_type(offset, INT64));
2616 WRITE_DOUBLE_FIELD(this, offset, value);
2620 void ConstantPoolArray::set_at_offset(int offset, Address value) {
2621 DCHECK(map() == GetHeap()->constant_pool_array_map());
2622 DCHECK(offset_is_type(offset, CODE_PTR));
2623 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
2624 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
2628 void ConstantPoolArray::set_at_offset(int offset, Object* value) {
2629 DCHECK(map() == GetHeap()->constant_pool_array_map());
2630 DCHECK(!GetHeap()->InNewSpace(value));
2631 DCHECK(offset_is_type(offset, HEAP_PTR));
2632 WRITE_FIELD(this, offset, value);
2633 WRITE_BARRIER(GetHeap(), this, offset, value);
2637 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2638 uint32_t small_layout_1 =
2639 Int64CountField::encode(small.count_of(INT64)) |
2640 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2641 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2642 IsExtendedField::encode(false);
2643 uint32_t small_layout_2 =
2644 Int32CountField::encode(small.count_of(INT32)) |
2645 TotalCountField::encode(small.total_count()) |
2646 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2647 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2648 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2649 if (kHeaderSize != kFirstEntryOffset) {
2650 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
2651 WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
2656 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2657 const NumberOfEntries& extended) {
2658 // Initialize small layout fields first.
2661 // Set is_extended_layout field.
2662 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2663 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2664 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2666 // Initialize the extended layout fields.
2667 int extended_header_offset = get_extended_section_header_offset();
2668 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2669 extended.count_of(INT64));
2670 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2671 extended.count_of(CODE_PTR));
2672 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2673 extended.count_of(HEAP_PTR));
2674 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2675 extended.count_of(INT32));
2679 int ConstantPoolArray::size() {
2680 NumberOfEntries small(this, SMALL_SECTION);
2681 if (!is_extended_layout()) {
2682 return SizeFor(small);
2684 NumberOfEntries extended(this, EXTENDED_SECTION);
2685 return SizeForExtended(small, extended);
2690 int ConstantPoolArray::length() {
2691 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2692 int length = TotalCountField::decode(small_layout_2);
2693 if (is_extended_layout()) {
2694 length += number_of_entries(INT64, EXTENDED_SECTION) +
2695 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2696 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2697 number_of_entries(INT32, EXTENDED_SECTION);
2703 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2704 const DisallowHeapAllocation& promise) {
2705 Heap* heap = GetHeap();
2706 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2707 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2708 return UPDATE_WRITE_BARRIER;
2712 void FixedArray::set(int index,
2714 WriteBarrierMode mode) {
2715 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2716 DCHECK(index >= 0 && index < this->length());
2717 int offset = kHeaderSize + index * kPointerSize;
2718 WRITE_FIELD(this, offset, value);
2719 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2723 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2726 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2727 DCHECK(index >= 0 && index < array->length());
2728 int offset = kHeaderSize + index * kPointerSize;
2729 WRITE_FIELD(array, offset, value);
2730 Heap* heap = array->GetHeap();
2731 if (heap->InNewSpace(value)) {
2732 heap->RecordWrite(array->address(), offset);
2737 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2740 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2741 DCHECK(index >= 0 && index < array->length());
2742 DCHECK(!array->GetHeap()->InNewSpace(value));
2743 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2747 void FixedArray::set_undefined(int index) {
2748 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2749 DCHECK(index >= 0 && index < this->length());
2750 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2752 kHeaderSize + index * kPointerSize,
2753 GetHeap()->undefined_value());
2757 void FixedArray::set_null(int index) {
2758 DCHECK(index >= 0 && index < this->length());
2759 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2761 kHeaderSize + index * kPointerSize,
2762 GetHeap()->null_value());
2766 void FixedArray::set_the_hole(int index) {
2767 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2768 DCHECK(index >= 0 && index < this->length());
2769 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2771 kHeaderSize + index * kPointerSize,
2772 GetHeap()->the_hole_value());
2776 void FixedArray::FillWithHoles(int from, int to) {
2777 for (int i = from; i < to; i++) {
2783 Object** FixedArray::data_start() {
2784 return HeapObject::RawField(this, kHeaderSize);
2788 bool DescriptorArray::IsEmpty() {
2789 DCHECK(length() >= kFirstIndex ||
2790 this == GetHeap()->empty_descriptor_array());
2791 return length() < kFirstIndex;
2795 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2797 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2801 // Perform a binary search in a fixed array. Low and high are entry indices. If
2802 // there are three entries in this array it should be called with low=0 and
2804 template <SearchMode search_mode, typename T>
2805 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2806 int* out_insertion_index) {
2807 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2808 uint32_t hash = name->Hash();
2811 DCHECK(low <= high);
2813 while (low != high) {
2814 int mid = (low + high) / 2;
2815 Name* mid_name = array->GetSortedKey(mid);
2816 uint32_t mid_hash = mid_name->Hash();
2818 if (mid_hash >= hash) {
2825 for (; low <= limit; ++low) {
2826 int sort_index = array->GetSortedKeyIndex(low);
2827 Name* entry = array->GetKey(sort_index);
2828 uint32_t current_hash = entry->Hash();
2829 if (current_hash != hash) {
2830 if (out_insertion_index != NULL) {
2831 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2833 return T::kNotFound;
2835 if (entry->Equals(name)) {
2836 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2839 return T::kNotFound;
2843 if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
2844 return T::kNotFound;
2848 // Perform a linear search in this fixed array. len is the number of entry
2849 // indices that are valid.
2850 template <SearchMode search_mode, typename T>
2851 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2852 int* out_insertion_index) {
2853 uint32_t hash = name->Hash();
2854 if (search_mode == ALL_ENTRIES) {
2855 for (int number = 0; number < len; number++) {
2856 int sorted_index = array->GetSortedKeyIndex(number);
2857 Name* entry = array->GetKey(sorted_index);
2858 uint32_t current_hash = entry->Hash();
2859 if (current_hash > hash) {
2860 if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
2861 return T::kNotFound;
2863 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2865 if (out_insertion_index != NULL) *out_insertion_index = len;
2866 return T::kNotFound;
2868 DCHECK(len >= valid_entries);
2869 DCHECK_EQ(NULL, out_insertion_index); // Not supported here.
2870 for (int number = 0; number < valid_entries; number++) {
2871 Name* entry = array->GetKey(number);
2872 uint32_t current_hash = entry->Hash();
2873 if (current_hash == hash && entry->Equals(name)) return number;
2875 return T::kNotFound;
2880 template <SearchMode search_mode, typename T>
2881 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2882 if (search_mode == VALID_ENTRIES) {
2883 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2885 SLOW_DCHECK(array->IsSortedNoDuplicates());
2888 int nof = array->number_of_entries();
2890 if (out_insertion_index != NULL) *out_insertion_index = 0;
2891 return T::kNotFound;
2894 // Fast case: do linear search for small arrays.
2895 const int kMaxElementsForLinearSearch = 8;
2896 if ((search_mode == ALL_ENTRIES &&
2897 nof <= kMaxElementsForLinearSearch) ||
2898 (search_mode == VALID_ENTRIES &&
2899 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2900 return LinearSearch<search_mode>(array, name, nof, valid_entries,
2901 out_insertion_index);
2904 // Slow case: perform binary search.
2905 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
2906 out_insertion_index);
2910 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2911 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2915 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2916 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2917 if (number_of_own_descriptors == 0) return kNotFound;
2919 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2920 int number = cache->Lookup(map, name);
2922 if (number == DescriptorLookupCache::kAbsent) {
2923 number = Search(name, number_of_own_descriptors);
2924 cache->Update(map, name, number);
2931 PropertyDetails Map::GetLastDescriptorDetails() {
2932 return instance_descriptors()->GetDetails(LastAdded());
2936 void Map::LookupDescriptor(JSObject* holder,
2938 LookupResult* result) {
2939 DescriptorArray* descriptors = this->instance_descriptors();
2940 int number = descriptors->SearchWithCache(name, this);
2941 if (number == DescriptorArray::kNotFound) return result->NotFound();
2942 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2946 void Map::LookupTransition(JSObject* holder,
2948 LookupResult* result) {
2949 int transition_index = this->SearchTransition(name);
2950 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
2951 result->TransitionResult(holder, this->GetTransition(transition_index));
2955 FixedArrayBase* Map::GetInitialElements() {
2956 if (has_fast_smi_or_object_elements() ||
2957 has_fast_double_elements()) {
2958 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2959 return GetHeap()->empty_fixed_array();
2960 } else if (has_external_array_elements()) {
2961 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
2962 DCHECK(!GetHeap()->InNewSpace(empty_array));
2964 } else if (has_fixed_typed_array_elements()) {
2965 FixedTypedArrayBase* empty_array =
2966 GetHeap()->EmptyFixedTypedArrayForMap(this);
2967 DCHECK(!GetHeap()->InNewSpace(empty_array));
2976 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2977 DCHECK(descriptor_number < number_of_descriptors());
2978 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2982 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2983 return GetKeySlot(descriptor_number);
2987 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2988 return GetValueSlot(descriptor_number - 1) + 1;
2992 Name* DescriptorArray::GetKey(int descriptor_number) {
2993 DCHECK(descriptor_number < number_of_descriptors());
2994 return Name::cast(get(ToKeyIndex(descriptor_number)));
2998 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2999 return GetDetails(descriptor_number).pointer();
3003 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
3004 return GetKey(GetSortedKeyIndex(descriptor_number));
3008 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
3009 PropertyDetails details = GetDetails(descriptor_index);
3010 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
3014 void DescriptorArray::SetRepresentation(int descriptor_index,
3015 Representation representation) {
3016 DCHECK(!representation.IsNone());
3017 PropertyDetails details = GetDetails(descriptor_index);
3018 set(ToDetailsIndex(descriptor_index),
3019 details.CopyWithRepresentation(representation).AsSmi());
3023 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3024 DCHECK(descriptor_number < number_of_descriptors());
3025 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3029 int DescriptorArray::GetValueOffset(int descriptor_number) {
3030 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3034 Object* DescriptorArray::GetValue(int descriptor_number) {
3035 DCHECK(descriptor_number < number_of_descriptors());
3036 return get(ToValueIndex(descriptor_number));
3040 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3041 set(ToValueIndex(descriptor_index), value);
3045 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3046 DCHECK(descriptor_number < number_of_descriptors());
3047 Object* details = get(ToDetailsIndex(descriptor_number));
3048 return PropertyDetails(Smi::cast(details));
3052 PropertyType DescriptorArray::GetType(int descriptor_number) {
3053 return GetDetails(descriptor_number).type();
3057 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3058 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3059 return GetDetails(descriptor_number).field_index();
3063 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3064 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3065 return HeapType::cast(GetValue(descriptor_number));
3069 Object* DescriptorArray::GetConstant(int descriptor_number) {
3070 return GetValue(descriptor_number);
3074 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3075 DCHECK(GetType(descriptor_number) == CALLBACKS);
3076 return GetValue(descriptor_number);
3080 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3081 DCHECK(GetType(descriptor_number) == CALLBACKS);
3082 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3083 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3087 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3088 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3089 handle(GetValue(descriptor_number), GetIsolate()),
3090 GetDetails(descriptor_number));
3094 void DescriptorArray::Set(int descriptor_number,
3096 const WhitenessWitness&) {
3098 DCHECK(descriptor_number < number_of_descriptors());
3100 NoIncrementalWriteBarrierSet(this,
3101 ToKeyIndex(descriptor_number),
3103 NoIncrementalWriteBarrierSet(this,
3104 ToValueIndex(descriptor_number),
3106 NoIncrementalWriteBarrierSet(this,
3107 ToDetailsIndex(descriptor_number),
3108 desc->GetDetails().AsSmi());
3112 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3114 DCHECK(descriptor_number < number_of_descriptors());
3116 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3117 set(ToValueIndex(descriptor_number), *desc->GetValue());
3118 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3122 void DescriptorArray::Append(Descriptor* desc) {
3123 DisallowHeapAllocation no_gc;
3124 int descriptor_number = number_of_descriptors();
3125 SetNumberOfDescriptors(descriptor_number + 1);
3126 Set(descriptor_number, desc);
3128 uint32_t hash = desc->GetKey()->Hash();
3132 for (insertion = descriptor_number; insertion > 0; --insertion) {
3133 Name* key = GetSortedKey(insertion - 1);
3134 if (key->Hash() <= hash) break;
3135 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3138 SetSortedKey(insertion, descriptor_number);
3142 void DescriptorArray::SwapSortedKeys(int first, int second) {
3143 int first_key = GetSortedKeyIndex(first);
3144 SetSortedKey(first, GetSortedKeyIndex(second));
3145 SetSortedKey(second, first_key);
3149 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3150 : marking_(array->GetHeap()->incremental_marking()) {
3151 marking_->EnterNoMarkingScope();
3152 DCHECK(!marking_->IsMarking() ||
3153 Marking::Color(array) == Marking::WHITE_OBJECT);
3157 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3158 marking_->LeaveNoMarkingScope();
3162 template<typename Derived, typename Shape, typename Key>
3163 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3164 const int kMinCapacity = 32;
3165 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3166 if (capacity < kMinCapacity) {
3167 capacity = kMinCapacity; // Guarantee min capacity.
3173 template<typename Derived, typename Shape, typename Key>
3174 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3175 return FindEntry(GetIsolate(), key);
3179 // Find entry for key otherwise return kNotFound.
3180 template<typename Derived, typename Shape, typename Key>
3181 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3182 uint32_t capacity = Capacity();
3183 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3185 // EnsureCapacity will guarantee the hash table is never full.
3187 Object* element = KeyAt(entry);
3188 // Empty entry. Uses raw unchecked accessors because it is called by the
3189 // string table during bootstrapping.
3190 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3191 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3192 Shape::IsMatch(key, element)) return entry;
3193 entry = NextProbe(entry, count++, capacity);
3199 bool SeededNumberDictionary::requires_slow_elements() {
3200 Object* max_index_object = get(kMaxNumberKeyIndex);
3201 if (!max_index_object->IsSmi()) return false;
3203 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3206 uint32_t SeededNumberDictionary::max_number_key() {
3207 DCHECK(!requires_slow_elements());
3208 Object* max_index_object = get(kMaxNumberKeyIndex);
3209 if (!max_index_object->IsSmi()) return 0;
3210 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3211 return value >> kRequiresSlowElementsTagSize;
3214 void SeededNumberDictionary::set_requires_slow_elements() {
3215 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3219 // ------------------------------------
3223 CAST_ACCESSOR(AccessorInfo)
3224 CAST_ACCESSOR(ByteArray)
3227 CAST_ACCESSOR(CodeCacheHashTable)
3228 CAST_ACCESSOR(CompilationCacheTable)
3229 CAST_ACCESSOR(ConsString)
3230 CAST_ACCESSOR(ConstantPoolArray)
3231 CAST_ACCESSOR(DeoptimizationInputData)
3232 CAST_ACCESSOR(DeoptimizationOutputData)
3233 CAST_ACCESSOR(DependentCode)
3234 CAST_ACCESSOR(DescriptorArray)
3235 CAST_ACCESSOR(ExternalArray)
3236 CAST_ACCESSOR(ExternalOneByteString)
3237 CAST_ACCESSOR(ExternalFloat32Array)
3238 CAST_ACCESSOR(ExternalFloat64Array)
3239 CAST_ACCESSOR(ExternalInt16Array)
3240 CAST_ACCESSOR(ExternalInt32Array)
3241 CAST_ACCESSOR(ExternalInt8Array)
3242 CAST_ACCESSOR(ExternalString)
3243 CAST_ACCESSOR(ExternalTwoByteString)
3244 CAST_ACCESSOR(ExternalUint16Array)
3245 CAST_ACCESSOR(ExternalUint32Array)
3246 CAST_ACCESSOR(ExternalUint8Array)
3247 CAST_ACCESSOR(ExternalUint8ClampedArray)
3248 CAST_ACCESSOR(FixedArray)
3249 CAST_ACCESSOR(FixedArrayBase)
3250 CAST_ACCESSOR(FixedDoubleArray)
3251 CAST_ACCESSOR(FixedTypedArrayBase)
3252 CAST_ACCESSOR(Foreign)
3253 CAST_ACCESSOR(FreeSpace)
3254 CAST_ACCESSOR(GlobalObject)
3255 CAST_ACCESSOR(HeapObject)
3256 CAST_ACCESSOR(JSArray)
3257 CAST_ACCESSOR(JSArrayBuffer)
3258 CAST_ACCESSOR(JSArrayBufferView)
3259 CAST_ACCESSOR(JSBuiltinsObject)
3260 CAST_ACCESSOR(JSDataView)
3261 CAST_ACCESSOR(JSDate)
3262 CAST_ACCESSOR(JSFunction)
3263 CAST_ACCESSOR(JSFunctionProxy)
3264 CAST_ACCESSOR(JSFunctionResultCache)
3265 CAST_ACCESSOR(JSGeneratorObject)
3266 CAST_ACCESSOR(JSGlobalObject)
3267 CAST_ACCESSOR(JSGlobalProxy)
3268 CAST_ACCESSOR(JSMap)
3269 CAST_ACCESSOR(JSMapIterator)
3270 CAST_ACCESSOR(JSMessageObject)
3271 CAST_ACCESSOR(JSModule)
3272 CAST_ACCESSOR(JSObject)
3273 CAST_ACCESSOR(JSProxy)
3274 CAST_ACCESSOR(JSReceiver)
3275 CAST_ACCESSOR(JSRegExp)
3276 CAST_ACCESSOR(JSSet)
3277 CAST_ACCESSOR(JSSetIterator)
3278 CAST_ACCESSOR(JSTypedArray)
3279 CAST_ACCESSOR(JSValue)
3280 CAST_ACCESSOR(JSWeakMap)
3281 CAST_ACCESSOR(JSWeakSet)
3283 CAST_ACCESSOR(MapCache)
3285 CAST_ACCESSOR(NameDictionary)
3286 CAST_ACCESSOR(NormalizedMapCache)
3287 CAST_ACCESSOR(Object)
3288 CAST_ACCESSOR(ObjectHashTable)
3289 CAST_ACCESSOR(Oddball)
3290 CAST_ACCESSOR(OrderedHashMap)
3291 CAST_ACCESSOR(OrderedHashSet)
3292 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3293 CAST_ACCESSOR(PropertyCell)
3294 CAST_ACCESSOR(ScopeInfo)
3295 CAST_ACCESSOR(SeededNumberDictionary)
3296 CAST_ACCESSOR(SeqOneByteString)
3297 CAST_ACCESSOR(SeqString)
3298 CAST_ACCESSOR(SeqTwoByteString)
3299 CAST_ACCESSOR(SharedFunctionInfo)
3300 CAST_ACCESSOR(SlicedString)
3302 CAST_ACCESSOR(String)
3303 CAST_ACCESSOR(StringTable)
3304 CAST_ACCESSOR(Struct)
3305 CAST_ACCESSOR(Symbol)
3306 CAST_ACCESSOR(UnseededNumberDictionary)
3307 CAST_ACCESSOR(WeakCell)
3308 CAST_ACCESSOR(WeakHashTable)
3311 template <class Traits>
3312 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3313 SLOW_DCHECK(object->IsHeapObject() &&
3314 HeapObject::cast(object)->map()->instance_type() ==
3315 Traits::kInstanceType);
3316 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3320 template <class Traits>
3321 const FixedTypedArray<Traits>*
3322 FixedTypedArray<Traits>::cast(const Object* object) {
3323 SLOW_DCHECK(object->IsHeapObject() &&
3324 HeapObject::cast(object)->map()->instance_type() ==
3325 Traits::kInstanceType);
3326 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3330 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3331 STRUCT_LIST(MAKE_STRUCT_CAST)
3332 #undef MAKE_STRUCT_CAST
3335 template <typename Derived, typename Shape, typename Key>
3336 HashTable<Derived, Shape, Key>*
3337 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3338 SLOW_DCHECK(obj->IsHashTable());
3339 return reinterpret_cast<HashTable*>(obj);
3343 template <typename Derived, typename Shape, typename Key>
3344 const HashTable<Derived, Shape, Key>*
3345 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3346 SLOW_DCHECK(obj->IsHashTable());
3347 return reinterpret_cast<const HashTable*>(obj);
3351 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3352 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3354 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3355 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3357 SMI_ACCESSORS(String, length, kLengthOffset)
3358 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3361 uint32_t Name::hash_field() {
3362 return READ_UINT32_FIELD(this, kHashFieldOffset);
3366 void Name::set_hash_field(uint32_t value) {
3367 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3368 #if V8_HOST_ARCH_64_BIT
3369 #if V8_TARGET_LITTLE_ENDIAN
3370 WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
3372 WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
3378 bool Name::Equals(Name* other) {
3379 if (other == this) return true;
3380 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3381 this->IsSymbol() || other->IsSymbol()) {
3384 return String::cast(this)->SlowEquals(String::cast(other));
3388 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3389 if (one.is_identical_to(two)) return true;
3390 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3391 one->IsSymbol() || two->IsSymbol()) {
3394 return String::SlowEquals(Handle<String>::cast(one),
3395 Handle<String>::cast(two));
3399 ACCESSORS(Symbol, name, Object, kNameOffset)
3400 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3401 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3402 BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
3405 bool String::Equals(String* other) {
3406 if (other == this) return true;
3407 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3410 return SlowEquals(other);
3414 bool String::Equals(Handle<String> one, Handle<String> two) {
3415 if (one.is_identical_to(two)) return true;
3416 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3419 return SlowEquals(one, two);
3423 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3424 if (!string->IsConsString()) return string;
3425 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3426 if (cons->IsFlat()) return handle(cons->first());
3427 return SlowFlatten(cons, pretenure);
3431 uint16_t String::Get(int index) {
3432 DCHECK(index >= 0 && index < length());
3433 switch (StringShape(this).full_representation_tag()) {
3434 case kSeqStringTag | kOneByteStringTag:
3435 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3436 case kSeqStringTag | kTwoByteStringTag:
3437 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3438 case kConsStringTag | kOneByteStringTag:
3439 case kConsStringTag | kTwoByteStringTag:
3440 return ConsString::cast(this)->ConsStringGet(index);
3441 case kExternalStringTag | kOneByteStringTag:
3442 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3443 case kExternalStringTag | kTwoByteStringTag:
3444 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3445 case kSlicedStringTag | kOneByteStringTag:
3446 case kSlicedStringTag | kTwoByteStringTag:
3447 return SlicedString::cast(this)->SlicedStringGet(index);
3457 void String::Set(int index, uint16_t value) {
3458 DCHECK(index >= 0 && index < length());
3459 DCHECK(StringShape(this).IsSequential());
3461 return this->IsOneByteRepresentation()
3462 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3463 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3467 bool String::IsFlat() {
3468 if (!StringShape(this).IsCons()) return true;
3469 return ConsString::cast(this)->second()->length() == 0;
3473 String* String::GetUnderlying() {
3474 // Giving direct access to underlying string only makes sense if the
3475 // wrapping string is already flattened.
3476 DCHECK(this->IsFlat());
3477 DCHECK(StringShape(this).IsIndirect());
3478 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3479 const int kUnderlyingOffset = SlicedString::kParentOffset;
3480 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3484 template<class Visitor>
3485 ConsString* String::VisitFlat(Visitor* visitor,
3488 int slice_offset = offset;
3489 const int length = string->length();
3490 DCHECK(offset <= length);
3492 int32_t type = string->map()->instance_type();
3493 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3494 case kSeqStringTag | kOneByteStringTag:
3495 visitor->VisitOneByteString(
3496 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3500 case kSeqStringTag | kTwoByteStringTag:
3501 visitor->VisitTwoByteString(
3502 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3506 case kExternalStringTag | kOneByteStringTag:
3507 visitor->VisitOneByteString(
3508 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3512 case kExternalStringTag | kTwoByteStringTag:
3513 visitor->VisitTwoByteString(
3514 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3518 case kSlicedStringTag | kOneByteStringTag:
3519 case kSlicedStringTag | kTwoByteStringTag: {
3520 SlicedString* slicedString = SlicedString::cast(string);
3521 slice_offset += slicedString->offset();
3522 string = slicedString->parent();
3526 case kConsStringTag | kOneByteStringTag:
3527 case kConsStringTag | kTwoByteStringTag:
3528 return ConsString::cast(string);
3538 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3539 DCHECK(index >= 0 && index < length());
3540 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3544 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3545 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3546 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3547 static_cast<byte>(value));
3551 Address SeqOneByteString::GetCharsAddress() {
3552 return FIELD_ADDR(this, kHeaderSize);
3556 uint8_t* SeqOneByteString::GetChars() {
3557 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3561 Address SeqTwoByteString::GetCharsAddress() {
3562 return FIELD_ADDR(this, kHeaderSize);
3566 uc16* SeqTwoByteString::GetChars() {
3567 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3571 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3572 DCHECK(index >= 0 && index < length());
3573 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3577 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3578 DCHECK(index >= 0 && index < length());
3579 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3583 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3584 return SizeFor(length());
3588 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3589 return SizeFor(length());
3593 String* SlicedString::parent() {
3594 return String::cast(READ_FIELD(this, kParentOffset));
3598 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3599 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3600 WRITE_FIELD(this, kParentOffset, parent);
3601 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3605 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3608 String* ConsString::first() {
3609 return String::cast(READ_FIELD(this, kFirstOffset));
3613 Object* ConsString::unchecked_first() {
3614 return READ_FIELD(this, kFirstOffset);
3618 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3619 WRITE_FIELD(this, kFirstOffset, value);
3620 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3624 String* ConsString::second() {
3625 return String::cast(READ_FIELD(this, kSecondOffset));
3629 Object* ConsString::unchecked_second() {
3630 return READ_FIELD(this, kSecondOffset);
3634 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3635 WRITE_FIELD(this, kSecondOffset, value);
3636 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3640 bool ExternalString::is_short() {
3641 InstanceType type = map()->instance_type();
3642 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3646 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3647 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3651 void ExternalOneByteString::update_data_cache() {
3652 if (is_short()) return;
3653 const char** data_field =
3654 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3655 *data_field = resource()->data();
3659 void ExternalOneByteString::set_resource(
3660 const ExternalOneByteString::Resource* resource) {
3661 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3662 *reinterpret_cast<const Resource**>(
3663 FIELD_ADDR(this, kResourceOffset)) = resource;
3664 if (resource != NULL) update_data_cache();
3668 const uint8_t* ExternalOneByteString::GetChars() {
3669 return reinterpret_cast<const uint8_t*>(resource()->data());
3673 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3674 DCHECK(index >= 0 && index < length());
3675 return GetChars()[index];
3679 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3680 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3684 void ExternalTwoByteString::update_data_cache() {
3685 if (is_short()) return;
3686 const uint16_t** data_field =
3687 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3688 *data_field = resource()->data();
3692 void ExternalTwoByteString::set_resource(
3693 const ExternalTwoByteString::Resource* resource) {
3694 *reinterpret_cast<const Resource**>(
3695 FIELD_ADDR(this, kResourceOffset)) = resource;
3696 if (resource != NULL) update_data_cache();
3700 const uint16_t* ExternalTwoByteString::GetChars() {
3701 return resource()->data();
3705 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3706 DCHECK(index >= 0 && index < length());
3707 return GetChars()[index];
3711 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3713 return GetChars() + start;
3717 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3720 void ConsStringIterator::PushLeft(ConsString* string) {
3721 frames_[depth_++ & kDepthMask] = string;
3725 void ConsStringIterator::PushRight(ConsString* string) {
3727 frames_[(depth_-1) & kDepthMask] = string;
3731 void ConsStringIterator::AdjustMaximumDepth() {
3732 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3736 void ConsStringIterator::Pop() {
3738 DCHECK(depth_ <= maximum_depth_);
3743 uint16_t StringCharacterStream::GetNext() {
3744 DCHECK(buffer8_ != NULL && end_ != NULL);
3745 // Advance cursor if needed.
3746 if (buffer8_ == end_) HasMore();
3747 DCHECK(buffer8_ < end_);
3748 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3752 StringCharacterStream::StringCharacterStream(String* string, int offset)
3753 : is_one_byte_(false) {
3754 Reset(string, offset);
3758 void StringCharacterStream::Reset(String* string, int offset) {
3761 ConsString* cons_string = String::VisitFlat(this, string, offset);
3762 iter_.Reset(cons_string, offset);
3763 if (cons_string != NULL) {
3764 string = iter_.Next(&offset);
3765 if (string != NULL) String::VisitFlat(this, string, offset);
3770 bool StringCharacterStream::HasMore() {
3771 if (buffer8_ != end_) return true;
3773 String* string = iter_.Next(&offset);
3774 DCHECK_EQ(offset, 0);
3775 if (string == NULL) return false;
3776 String::VisitFlat(this, string);
3777 DCHECK(buffer8_ != end_);
3782 void StringCharacterStream::VisitOneByteString(
3783 const uint8_t* chars, int length) {
3784 is_one_byte_ = true;
3786 end_ = chars + length;
3790 void StringCharacterStream::VisitTwoByteString(
3791 const uint16_t* chars, int length) {
3792 is_one_byte_ = false;
3794 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3798 void JSFunctionResultCache::MakeZeroSize() {
3799 set_finger_index(kEntriesIndex);
3800 set_size(kEntriesIndex);
3804 void JSFunctionResultCache::Clear() {
3805 int cache_size = size();
3806 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3807 MemsetPointer(entries_start,
3808 GetHeap()->the_hole_value(),
3809 cache_size - kEntriesIndex);
3814 int JSFunctionResultCache::size() {
3815 return Smi::cast(get(kCacheSizeIndex))->value();
3819 void JSFunctionResultCache::set_size(int size) {
3820 set(kCacheSizeIndex, Smi::FromInt(size));
3824 int JSFunctionResultCache::finger_index() {
3825 return Smi::cast(get(kFingerIndex))->value();
3829 void JSFunctionResultCache::set_finger_index(int finger_index) {
3830 set(kFingerIndex, Smi::FromInt(finger_index));
3834 byte ByteArray::get(int index) {
3835 DCHECK(index >= 0 && index < this->length());
3836 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3840 void ByteArray::set(int index, byte value) {
3841 DCHECK(index >= 0 && index < this->length());
3842 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3846 int ByteArray::get_int(int index) {
3847 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3848 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3852 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3853 DCHECK_TAG_ALIGNED(address);
3854 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3858 Address ByteArray::GetDataStartAddress() {
3859 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3863 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3864 return reinterpret_cast<uint8_t*>(external_pointer());
3868 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3869 DCHECK((index >= 0) && (index < this->length()));
3870 uint8_t* ptr = external_uint8_clamped_pointer();
3875 Handle<Object> ExternalUint8ClampedArray::get(
3876 Handle<ExternalUint8ClampedArray> array,
3878 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3879 array->GetIsolate());
3883 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3884 DCHECK((index >= 0) && (index < this->length()));
3885 uint8_t* ptr = external_uint8_clamped_pointer();
3890 void* ExternalArray::external_pointer() const {
3891 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3892 return reinterpret_cast<void*>(ptr);
3896 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3897 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3898 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3902 int8_t ExternalInt8Array::get_scalar(int index) {
3903 DCHECK((index >= 0) && (index < this->length()));
3904 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3909 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3911 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3912 array->GetIsolate());
3916 void ExternalInt8Array::set(int index, int8_t value) {
3917 DCHECK((index >= 0) && (index < this->length()));
3918 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3923 uint8_t ExternalUint8Array::get_scalar(int index) {
3924 DCHECK((index >= 0) && (index < this->length()));
3925 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3930 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3932 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3933 array->GetIsolate());
3937 void ExternalUint8Array::set(int index, uint8_t value) {
3938 DCHECK((index >= 0) && (index < this->length()));
3939 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3944 int16_t ExternalInt16Array::get_scalar(int index) {
3945 DCHECK((index >= 0) && (index < this->length()));
3946 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3951 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
3953 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3954 array->GetIsolate());
3958 void ExternalInt16Array::set(int index, int16_t value) {
3959 DCHECK((index >= 0) && (index < this->length()));
3960 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3965 uint16_t ExternalUint16Array::get_scalar(int index) {
3966 DCHECK((index >= 0) && (index < this->length()));
3967 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3972 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
3974 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3975 array->GetIsolate());
3979 void ExternalUint16Array::set(int index, uint16_t value) {
3980 DCHECK((index >= 0) && (index < this->length()));
3981 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3986 int32_t ExternalInt32Array::get_scalar(int index) {
3987 DCHECK((index >= 0) && (index < this->length()));
3988 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3993 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
3995 return array->GetIsolate()->factory()->
3996 NewNumberFromInt(array->get_scalar(index));
4000 void ExternalInt32Array::set(int index, int32_t value) {
4001 DCHECK((index >= 0) && (index < this->length()));
4002 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4007 uint32_t ExternalUint32Array::get_scalar(int index) {
4008 DCHECK((index >= 0) && (index < this->length()));
4009 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4014 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
4016 return array->GetIsolate()->factory()->
4017 NewNumberFromUint(array->get_scalar(index));
4021 void ExternalUint32Array::set(int index, uint32_t value) {
4022 DCHECK((index >= 0) && (index < this->length()));
4023 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4028 float ExternalFloat32Array::get_scalar(int index) {
4029 DCHECK((index >= 0) && (index < this->length()));
4030 float* ptr = static_cast<float*>(external_pointer());
4035 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
4037 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4041 void ExternalFloat32Array::set(int index, float value) {
4042 DCHECK((index >= 0) && (index < this->length()));
4043 float* ptr = static_cast<float*>(external_pointer());
4048 double ExternalFloat64Array::get_scalar(int index) {
4049 DCHECK((index >= 0) && (index < this->length()));
4050 double* ptr = static_cast<double*>(external_pointer());
4055 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
4057 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4061 void ExternalFloat64Array::set(int index, double value) {
4062 DCHECK((index >= 0) && (index < this->length()));
4063 double* ptr = static_cast<double*>(external_pointer());
4068 void* FixedTypedArrayBase::DataPtr() {
4069 return FIELD_ADDR(this, kDataOffset);
4073 int FixedTypedArrayBase::DataSize(InstanceType type) {
4076 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4077 case FIXED_##TYPE##_ARRAY_TYPE: \
4078 element_size = size; \
4081 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4082 #undef TYPED_ARRAY_CASE
4087 return length() * element_size;
4091 int FixedTypedArrayBase::DataSize() {
4092 return DataSize(map()->instance_type());
4096 int FixedTypedArrayBase::size() {
4097 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4101 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4102 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4106 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4109 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4112 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4115 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4118 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4121 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4124 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4127 float Float32ArrayTraits::defaultValue() {
4128 return static_cast<float>(base::OS::nan_value());
4132 double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
4135 template <class Traits>
4136 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4137 DCHECK((index >= 0) && (index < this->length()));
4138 ElementType* ptr = reinterpret_cast<ElementType*>(
4139 FIELD_ADDR(this, kDataOffset));
4145 FixedTypedArray<Float64ArrayTraits>::ElementType
4146 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
4147 DCHECK((index >= 0) && (index < this->length()));
4148 return READ_DOUBLE_FIELD(this, ElementOffset(index));
4152 template <class Traits>
4153 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4154 DCHECK((index >= 0) && (index < this->length()));
4155 ElementType* ptr = reinterpret_cast<ElementType*>(
4156 FIELD_ADDR(this, kDataOffset));
4162 void FixedTypedArray<Float64ArrayTraits>::set(
4163 int index, Float64ArrayTraits::ElementType value) {
4164 DCHECK((index >= 0) && (index < this->length()));
4165 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
4169 template <class Traits>
4170 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4171 return static_cast<ElementType>(value);
4176 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4177 if (value < 0) return 0;
4178 if (value > 0xFF) return 0xFF;
4179 return static_cast<uint8_t>(value);
4183 template <class Traits>
4184 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4186 return static_cast<ElementType>(DoubleToInt32(value));
4191 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4192 // Handle NaNs and less than zero values which clamp to zero.
4193 if (!(value > 0)) return 0;
4194 if (value > 0xFF) return 0xFF;
4195 return static_cast<uint8_t>(lrint(value));
4200 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4201 return static_cast<float>(value);
4206 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4211 template <class Traits>
4212 Handle<Object> FixedTypedArray<Traits>::get(
4213 Handle<FixedTypedArray<Traits> > array,
4215 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4219 template <class Traits>
4220 Handle<Object> FixedTypedArray<Traits>::SetValue(
4221 Handle<FixedTypedArray<Traits> > array,
4223 Handle<Object> value) {
4224 ElementType cast_value = Traits::defaultValue();
4225 if (index < static_cast<uint32_t>(array->length())) {
4226 if (value->IsSmi()) {
4227 int int_value = Handle<Smi>::cast(value)->value();
4228 cast_value = from_int(int_value);
4229 } else if (value->IsHeapNumber()) {
4230 double double_value = Handle<HeapNumber>::cast(value)->value();
4231 cast_value = from_double(double_value);
4233 // Clamp undefined to the default value. All other types have been
4234 // converted to a number type further up in the call chain.
4235 DCHECK(value->IsUndefined());
4237 array->set(index, cast_value);
4239 return Traits::ToHandle(array->GetIsolate(), cast_value);
4243 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4244 return handle(Smi::FromInt(scalar), isolate);
4248 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4250 return handle(Smi::FromInt(scalar), isolate);
4254 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4255 return handle(Smi::FromInt(scalar), isolate);
4259 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4260 return handle(Smi::FromInt(scalar), isolate);
4264 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4265 return handle(Smi::FromInt(scalar), isolate);
4269 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4270 return isolate->factory()->NewNumberFromUint(scalar);
4274 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4275 return isolate->factory()->NewNumberFromInt(scalar);
4279 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4280 return isolate->factory()->NewNumber(scalar);
4284 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4285 return isolate->factory()->NewNumber(scalar);
4289 int Map::visitor_id() {
4290 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4294 void Map::set_visitor_id(int id) {
4295 DCHECK(0 <= id && id < 256);
4296 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4300 int Map::instance_size() {
4301 return NOBARRIER_READ_BYTE_FIELD(
4302 this, kInstanceSizeOffset) << kPointerSizeLog2;
4306 int Map::inobject_properties() {
4307 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4311 int Map::pre_allocated_property_fields() {
4312 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
4316 int Map::GetInObjectPropertyOffset(int index) {
4317 // Adjust for the number of properties stored in the object.
4318 index -= inobject_properties();
4320 return instance_size() + (index * kPointerSize);
4324 int HeapObject::SizeFromMap(Map* map) {
4325 int instance_size = map->instance_size();
4326 if (instance_size != kVariableSizeSentinel) return instance_size;
4327 // Only inline the most frequent cases.
4328 InstanceType instance_type = map->instance_type();
4329 if (instance_type == FIXED_ARRAY_TYPE) {
4330 return FixedArray::BodyDescriptor::SizeOf(map, this);
4332 if (instance_type == ONE_BYTE_STRING_TYPE ||
4333 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4334 return SeqOneByteString::SizeFor(
4335 reinterpret_cast<SeqOneByteString*>(this)->length());
4337 if (instance_type == BYTE_ARRAY_TYPE) {
4338 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4340 if (instance_type == FREE_SPACE_TYPE) {
4341 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4343 if (instance_type == STRING_TYPE ||
4344 instance_type == INTERNALIZED_STRING_TYPE) {
4345 return SeqTwoByteString::SizeFor(
4346 reinterpret_cast<SeqTwoByteString*>(this)->length());
4348 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4349 return FixedDoubleArray::SizeFor(
4350 reinterpret_cast<FixedDoubleArray*>(this)->length());
4352 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4353 return reinterpret_cast<ConstantPoolArray*>(this)->size();
4355 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4356 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4357 return reinterpret_cast<FixedTypedArrayBase*>(
4358 this)->TypedArraySize(instance_type);
4360 DCHECK(instance_type == CODE_TYPE);
4361 return reinterpret_cast<Code*>(this)->CodeSize();
4365 void Map::set_instance_size(int value) {
4366 DCHECK_EQ(0, value & (kPointerSize - 1));
4367 value >>= kPointerSizeLog2;
4368 DCHECK(0 <= value && value < 256);
4369 NOBARRIER_WRITE_BYTE_FIELD(
4370 this, kInstanceSizeOffset, static_cast<byte>(value));
4374 void Map::set_inobject_properties(int value) {
4375 DCHECK(0 <= value && value < 256);
4376 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4380 void Map::set_pre_allocated_property_fields(int value) {
4381 DCHECK(0 <= value && value < 256);
4382 WRITE_BYTE_FIELD(this,
4383 kPreAllocatedPropertyFieldsOffset,
4384 static_cast<byte>(value));
4388 InstanceType Map::instance_type() {
4389 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4393 void Map::set_instance_type(InstanceType value) {
4394 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4398 int Map::unused_property_fields() {
4399 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4403 void Map::set_unused_property_fields(int value) {
4404 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4408 byte Map::bit_field() {
4409 return READ_BYTE_FIELD(this, kBitFieldOffset);
4413 void Map::set_bit_field(byte value) {
4414 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4418 byte Map::bit_field2() {
4419 return READ_BYTE_FIELD(this, kBitField2Offset);
4423 void Map::set_bit_field2(byte value) {
4424 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4428 void Map::set_non_instance_prototype(bool value) {
4430 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4432 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4437 bool Map::has_non_instance_prototype() {
4438 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4442 void Map::set_function_with_prototype(bool value) {
4443 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4447 bool Map::function_with_prototype() {
4448 return FunctionWithPrototype::decode(bit_field());
4452 void Map::set_is_access_check_needed(bool access_check_needed) {
4453 if (access_check_needed) {
4454 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4456 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4461 bool Map::is_access_check_needed() {
4462 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4466 void Map::set_is_extensible(bool value) {
4468 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4470 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4474 bool Map::is_extensible() {
4475 return ((1 << kIsExtensible) & bit_field2()) != 0;
4479 void Map::set_is_prototype_map(bool value) {
4480 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4483 bool Map::is_prototype_map() {
4484 return IsPrototypeMapBits::decode(bit_field2());
4488 void Map::set_dictionary_map(bool value) {
4489 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4490 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4491 set_bit_field3(new_bit_field3);
4495 bool Map::is_dictionary_map() {
4496 return DictionaryMap::decode(bit_field3());
4500 Code::Flags Code::flags() {
4501 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4505 void Map::set_owns_descriptors(bool owns_descriptors) {
4506 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4510 bool Map::owns_descriptors() {
4511 return OwnsDescriptors::decode(bit_field3());
4515 void Map::set_has_instance_call_handler() {
4516 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4520 bool Map::has_instance_call_handler() {
4521 return HasInstanceCallHandler::decode(bit_field3());
4525 void Map::deprecate() {
4526 set_bit_field3(Deprecated::update(bit_field3(), true));
4530 bool Map::is_deprecated() {
4531 return Deprecated::decode(bit_field3());
4535 void Map::set_migration_target(bool value) {
4536 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4540 bool Map::is_migration_target() {
4541 return IsMigrationTarget::decode(bit_field3());
4545 void Map::set_done_inobject_slack_tracking(bool value) {
4546 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
4550 bool Map::done_inobject_slack_tracking() {
4551 return DoneInobjectSlackTracking::decode(bit_field3());
4555 void Map::set_construction_count(int value) {
4556 set_bit_field3(ConstructionCount::update(bit_field3(), value));
4560 int Map::construction_count() {
4561 return ConstructionCount::decode(bit_field3());
4565 void Map::freeze() {
4566 set_bit_field3(IsFrozen::update(bit_field3(), true));
4570 bool Map::is_frozen() {
4571 return IsFrozen::decode(bit_field3());
4575 void Map::mark_unstable() {
4576 set_bit_field3(IsUnstable::update(bit_field3(), true));
4580 bool Map::is_stable() {
4581 return !IsUnstable::decode(bit_field3());
4585 bool Map::has_code_cache() {
4586 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4590 bool Map::CanBeDeprecated() {
4591 int descriptor = LastAdded();
4592 for (int i = 0; i <= descriptor; i++) {
4593 PropertyDetails details = instance_descriptors()->GetDetails(i);
4594 if (details.representation().IsNone()) return true;
4595 if (details.representation().IsSmi()) return true;
4596 if (details.representation().IsDouble()) return true;
4597 if (details.representation().IsHeapObject()) return true;
4598 if (details.type() == CONSTANT) return true;
4604 void Map::NotifyLeafMapLayoutChange() {
4607 dependent_code()->DeoptimizeDependentCodeGroup(
4609 DependentCode::kPrototypeCheckGroup);
4614 bool Map::CanOmitMapChecks() {
4615 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
// DependentCode accessors. Per-group entry counts are stored as Smis at
// index |group|; the actual entries (Code objects, or Foreign wrappers
// around CompilationInfo) start at kCodesStartIndex.
// NOTE(review): listing has elided lines between definitions; verify braces
// against upstream.
4619 int DependentCode::number_of_entries(DependencyGroup group) {
4620 if (length() == 0) return 0;
4621 return Smi::cast(get(group))->value();
4625 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4626 set(group, Smi::FromInt(value));
4630 bool DependentCode::is_code_at(int i) {
4631 return get(kCodesStartIndex + i)->IsCode();
4634 Code* DependentCode::code_at(int i) {
4635 return Code::cast(get(kCodesStartIndex + i));
// Non-code entries are Foreign objects whose address payload points at a
// CompilationInfo.
4639 CompilationInfo* DependentCode::compilation_info_at(int i) {
4640 return reinterpret_cast<CompilationInfo*>(
4641 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4645 void DependentCode::set_object_at(int i, Object* object) {
4646 set(kCodesStartIndex + i, object);
4650 Object* DependentCode::object_at(int i) {
4651 return get(kCodesStartIndex + i);
4655 Object** DependentCode::slot_at(int i) {
4656 return RawFieldOfElementAt(kCodesStartIndex + i);
4660 void DependentCode::clear_at(int i) {
4661 set_undefined(kCodesStartIndex + i);
4665 void DependentCode::copy(int from, int to) {
4666 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
// Makes room for one more entry in |group| by shifting the first element of
// each higher-numbered group up into that group's new end slot.
4670 void DependentCode::ExtendGroup(DependencyGroup group) {
4671 GroupStartIndexes starts(this);
4672 for (int g = kGroupCount - 1; g > group; g--) {
4673 if (starts.at(g) < starts.at(g + 1)) {
4674 copy(starts.at(g), starts.at(g + 1));
// Code flags and kind-specific flag accessors. The flags word packs kind,
// IC state, extra IC state, stub type and cache-holder (see ComputeFlags
// below); kind-specific flags live in two separate uint32 fields.
// NOTE(review): listing has elided lines between definitions.
4680 void Code::set_flags(Code::Flags flags) {
4681 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4682 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4686 Code::Kind Code::kind() {
4687 return ExtractKindFromFlags(flags());
4691 bool Code::IsCodeStubOrIC() {
4692 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4693 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4694 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4695 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4696 kind() == TO_BOOLEAN_IC;
4700 InlineCacheState Code::ic_state() {
4701 InlineCacheState result = ExtractICStateFromFlags(flags());
4702 // Only allow uninitialized or debugger states for non-IC code
4703 // objects. This is used in the debugger to determine whether or not
4704 // a call to code object has been replaced with a debug break call.
4705 DCHECK(is_inline_cache_stub() ||
4706 result == UNINITIALIZED ||
4707 result == DEBUG_STUB);
4712 ExtraICState Code::extra_ic_state() {
4713 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4714 return ExtractExtraICStateFromFlags(flags());
4718 Code::StubType Code::type() {
4719 return ExtractTypeFromFlags(flags());
4723 // For initialization.
4724 void Code::set_raw_kind_specific_flags1(int value) {
4725 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4729 void Code::set_raw_kind_specific_flags2(int value) {
4730 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
// Crankshaft bit lives in kind-specific flags 2; "hydrogen stub" means
// crankshafted code that is not an optimized function.
4734 inline bool Code::is_crankshafted() {
4735 return IsCrankshaftedField::decode(
4736 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4740 inline bool Code::is_hydrogen_stub() {
4741 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4745 inline void Code::set_is_crankshafted(bool value) {
4746 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4747 int updated = IsCrankshaftedField::update(previous, value);
4748 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Turbofan bit lives in kind-specific flags 1 and is only meaningful for
// optimized functions and stubs (see DCHECKs).
4752 inline bool Code::is_turbofanned() {
4753 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
4754 return IsTurbofannedField::decode(
4755 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4759 inline void Code::set_is_turbofanned(bool value) {
4760 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
4761 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4762 int updated = IsTurbofannedField::update(previous, value);
4763 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Full-codegen-only accessors (every one asserts kind() == FUNCTION except
// set_profiler_ticks, which silently no-ops for other kinds). The boolean
// properties are packed into the single byte at kFullCodeFlags.
// NOTE(review): listing has elided lines between definitions.
4767 bool Code::optimizable() {
4768 DCHECK_EQ(FUNCTION, kind());
4769 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4773 void Code::set_optimizable(bool value) {
4774 DCHECK_EQ(FUNCTION, kind());
4775 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4779 bool Code::has_deoptimization_support() {
4780 DCHECK_EQ(FUNCTION, kind());
4781 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4782 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4786 void Code::set_has_deoptimization_support(bool value) {
4787 DCHECK_EQ(FUNCTION, kind());
4788 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4789 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4790 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4794 bool Code::has_debug_break_slots() {
4795 DCHECK_EQ(FUNCTION, kind());
4796 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4797 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4801 void Code::set_has_debug_break_slots(bool value) {
4802 DCHECK_EQ(FUNCTION, kind());
4803 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4804 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4805 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4809 bool Code::is_compiled_optimizable() {
4810 DCHECK_EQ(FUNCTION, kind());
4811 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4812 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4816 void Code::set_compiled_optimizable(bool value) {
4817 DCHECK_EQ(FUNCTION, kind());
4818 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4819 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4820 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
// OSR loop-nesting marker, a small int packed into kind-specific flags 2;
// the setter range-checks against kMaxLoopNestingMarker.
4824 int Code::allow_osr_at_loop_nesting_level() {
4825 DCHECK_EQ(FUNCTION, kind());
4826 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4827 return AllowOSRAtLoopNestingLevelField::decode(fields);
4831 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4832 DCHECK_EQ(FUNCTION, kind());
4833 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
4834 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4835 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
4836 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Profiler ticks are a single byte (hence the ticks < 256 check); writes
// are ignored for non-FUNCTION code.
4840 int Code::profiler_ticks() {
4841 DCHECK_EQ(FUNCTION, kind());
4842 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4846 void Code::set_profiler_ticks(int ticks) {
4847 DCHECK(ticks < 256);
4848 if (kind() == FUNCTION) {
4849 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
// Builtin index, stack slots, safepoint/back-edge table offsets. Note that
// builtin_index reuses the full kKindSpecificFlags1 field, so it aliases
// the bit-fields stored there for other code kinds.
// NOTE(review): listing has elided lines between definitions.
4854 int Code::builtin_index() {
4855 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
4859 void Code::set_builtin_index(int index) {
4860 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
// Stack slots / safepoint table are only meaningful for crankshafted code.
4864 unsigned Code::stack_slots() {
4865 DCHECK(is_crankshafted());
4866 return StackSlotsField::decode(
4867 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4871 void Code::set_stack_slots(unsigned slots) {
4872 CHECK(slots <= (1 << kStackSlotsBitCount));
4873 DCHECK(is_crankshafted());
4874 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4875 int updated = StackSlotsField::update(previous, slots);
4876 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4880 unsigned Code::safepoint_table_offset() {
4881 DCHECK(is_crankshafted());
4882 return SafepointTableOffsetField::decode(
4883 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4887 void Code::set_safepoint_table_offset(unsigned offset) {
4888 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4889 DCHECK(is_crankshafted());
4890 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4891 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4892 int updated = SafepointTableOffsetField::update(previous, offset);
4893 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Back-edge table offset (full codegen only) is stored scaled down by
// kPointerSizeLog2; getter shifts it back up, setter requires pointer
// alignment so no bits are lost.
4897 unsigned Code::back_edge_table_offset() {
4898 DCHECK_EQ(FUNCTION, kind());
4899 return BackEdgeTableOffsetField::decode(
4900 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
4904 void Code::set_back_edge_table_offset(unsigned offset) {
4905 DCHECK_EQ(FUNCTION, kind());
4906 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
4907 offset = offset >> kPointerSizeLog2;
4908 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4909 int updated = BackEdgeTableOffsetField::update(previous, offset);
4910 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// OSR patching is signaled by a non-zero loop-nesting marker.
4914 bool Code::back_edges_patched_for_osr() {
4915 DCHECK_EQ(FUNCTION, kind());
4916 return allow_osr_at_loop_nesting_level() > 0;
4925 bool Code::has_function_cache() {
4926 DCHECK(kind() == STUB);
4927 return HasFunctionCacheField::decode(
4928 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4932 void Code::set_has_function_cache(bool flag) {
4933 DCHECK(kind() == STUB);
4934 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4935 int updated = HasFunctionCacheField::update(previous, flag);
4936 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4940 bool Code::marked_for_deoptimization() {
4941 DCHECK(kind() == OPTIMIZED_FUNCTION);
4942 return MarkedForDeoptimizationField::decode(
4943 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4947 void Code::set_marked_for_deoptimization(bool flag) {
4948 DCHECK(kind() == OPTIMIZED_FUNCTION);
4949 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
4950 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4951 int updated = MarkedForDeoptimizationField::update(previous, flag);
4952 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4956 bool Code::is_weak_stub() {
4957 return CanBeWeakStub() && WeakStubField::decode(
4958 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4962 void Code::mark_as_weak_stub() {
4963 DCHECK(CanBeWeakStub());
4964 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4965 int updated = WeakStubField::update(previous, true);
4966 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4970 bool Code::is_invalidated_weak_stub() {
4971 return is_weak_stub() && InvalidatedWeakStubField::decode(
4972 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4976 void Code::mark_as_invalidated_weak_stub() {
4977 DCHECK(is_inline_cache_stub());
4978 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4979 int updated = InvalidatedWeakStubField::update(previous, true);
4980 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4984 bool Code::is_inline_cache_stub() {
4985 Kind kind = this->kind();
4987 #define CASE(name) case name: return true;
4990 default: return false;
4995 bool Code::is_keyed_stub() {
4996 return is_keyed_load_stub() || is_keyed_store_stub();
5000 bool Code::is_debug_stub() {
5001 return ic_state() == DEBUG_STUB;
5005 ConstantPoolArray* Code::constant_pool() {
5006 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
5010 void Code::set_constant_pool(Object* value) {
5011 DCHECK(value->IsConstantPoolArray());
5012 WRITE_FIELD(this, kConstantPoolOffset, value);
5013 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
// Flags word encode/decode helpers. ComputeFlags packs kind, IC state,
// type, extra IC state and cache holder into one Flags value; the
// Extract*/Remove* helpers are the inverse projections.
// NOTE(review): listing has elided lines (e.g. a parameter line of
// ComputeMonomorphicFlags and a statement of GetObjectFromEntryAddress).
5017 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5018 ExtraICState extra_ic_state, StubType type,
5019 CacheHolderFlag holder) {
5020 // Compute the bit mask.
5021 unsigned int bits = KindField::encode(kind)
5022 | ICStateField::encode(ic_state)
5023 | TypeField::encode(type)
5024 | ExtraICStateField::encode(extra_ic_state)
5025 | CacheHolderField::encode(holder);
5026 return static_cast<Flags>(bits);
5030 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5031 ExtraICState extra_ic_state,
5032 CacheHolderFlag holder,
5034 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
// For handlers the wrapped kind is smuggled through the extra-IC-state
// argument of ComputeFlags.
5038 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5039 CacheHolderFlag holder) {
5040 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
5044 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5045 return KindField::decode(flags);
5049 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5050 return ICStateField::decode(flags);
5054 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5055 return ExtraICStateField::decode(flags);
5059 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5060 return TypeField::decode(flags);
5064 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5065 return CacheHolderField::decode(flags);
5069 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5070 int bits = flags & ~TypeField::kMask;
5071 return static_cast<Flags>(bits);
5075 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5076 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5077 return static_cast<Flags>(bits);
// Maps an instruction-start address back to its Code object by subtracting
// the header size; uses reinterpret_cast because the map word may be
// unreliable during mark-sweep (see comment below).
5081 Code* Code::GetCodeFromTargetAddress(Address address) {
5082 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5083 // GetCodeFromTargetAddress might be called when marking objects during mark
5084 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5085 // Code::cast. Code::cast does not work when the object's map is
5087 Code* result = reinterpret_cast<Code*>(code);
5092 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5094 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
// Which embedded objects may be held weakly by optimized code: transitioning
// maps (behind a flag) and JSObjects / cells holding JSObjects (behind a
// flag), all gated on FLAG_collect_maps.
5098 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5099 if (!FLAG_collect_maps) return false;
5100 if (object->IsMap()) {
5101 return Map::cast(object)->CanTransition() &&
5102 FLAG_weak_embedded_maps_in_optimized_code;
5104 if (object->IsJSObject() ||
5105 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
5106 return FLAG_weak_embedded_objects_in_optimized_code;
// Small fixed-capacity (kMaxCount == 4) list of map -> replacement-object
// pairs, used to patch embedded maps in code. Add() appends one pair; the
// access-specifier and increment lines are elided in this listing.
5112 class Code::FindAndReplacePattern {
5114 FindAndReplacePattern() : count_(0) { }
5115 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5116 DCHECK(count_ < kMaxCount);
5117 find_[count_] = map_to_find;
5118 replace_[count_] = obj_to_replace;
5122 static const int kMaxCount = 4;
5124 Handle<Map> find_[kMaxCount];
5125 Handle<Object> replace_[kMaxCount];
// A map embedded in an IC is weak iff it can transition, and the relevant
// flags are on.
5130 bool Code::IsWeakObjectInIC(Object* object) {
5131 return object->IsMap() && Map::cast(object)->CanTransition() &&
5132 FLAG_collect_maps &&
5133 FLAG_weak_embedded_maps_in_ic;
// Map prototype, transition-array setup, descriptor and bit_field3 storage.
// NOTE(review): listing has elided lines between definitions.
5137 Object* Map::prototype() const {
5138 return READ_FIELD(this, kPrototypeOffset);
5142 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5143 DCHECK(value->IsNull() || value->IsJSReceiver());
5144 WRITE_FIELD(this, kPrototypeOffset, value);
5145 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5149 // If the descriptor is using the empty transition array, install a new empty
5150 // transition array that will have place for an element transition.
5151 static void EnsureHasTransitionArray(Handle<Map> map) {
5152 Handle<TransitionArray> transitions;
5153 if (!map->HasTransitionArray()) {
// No array yet: allocate an empty one and preserve the back pointer inside it.
5154 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
5155 transitions->set_back_pointer_storage(map->GetBackPointer());
5156 } else if (!map->transitions()->IsFullTransitionArray()) {
5157 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5161 map->set_transitions(*transitions);
// Installs |descriptors| and claims all of them as own descriptors.
5165 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
5166 int len = descriptors->number_of_descriptors();
5167 set_instance_descriptors(descriptors);
5168 SetNumberOfOwnDescriptors(len);
5172 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// bit_field3 occupies a pointer-sized slot; on 64-bit targets the upper
// 32 bits are zeroed explicitly before the low word is written.
5175 void Map::set_bit_field3(uint32_t bits) {
5176 if (kInt32Size != kPointerSize) {
5177 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5179 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5183 uint32_t Map::bit_field3() {
5184 return READ_UINT32_FIELD(this, kBitField3Offset);
// Appends |desc| to the descriptor array and bumps the own-descriptor count;
// asserts the array held exactly the own descriptors beforehand.
5188 void Map::AppendDescriptor(Descriptor* desc) {
5189 DescriptorArray* descriptors = instance_descriptors();
5190 int number_of_own_descriptors = NumberOfOwnDescriptors();
5191 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5192 descriptors->Append(desc);
5193 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
// kTransitionsOrBackPointerOffset holds either a TransitionArray (whose
// back_pointer_storage slot carries the back pointer) or the back pointer
// itself (a Map, or undefined for root maps).
// NOTE(review): GetBackPointer tests IsDescriptorArray() while the sibling
// accessors (HasTransitionArray, SetBackPointer) test IsTransitionArray()
// on the same field, yet it casts to TransitionArray — presumably
// IsDescriptorArray is a structural (FixedArray-backed) check that also
// matches transition arrays; confirm against upstream before changing.
5197 Object* Map::GetBackPointer() {
5198 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5199 if (object->IsDescriptorArray()) {
5200 return TransitionArray::cast(object)->back_pointer_storage();
5202 DCHECK(object->IsMap() || object->IsUndefined());
5208 bool Map::HasElementsTransition() {
5209 return HasTransitionArray() && transitions()->HasElementsTransition();
5213 bool Map::HasTransitionArray() const {
5214 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5215 return object->IsTransitionArray();
// Looks up the elements transition via the dedicated symbol key.
5219 Map* Map::elements_transition_map() {
5220 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
5221 return transitions()->GetTarget(index);
5225 bool Map::CanHaveMoreTransitions() {
5226 if (!HasTransitionArray()) return true;
5227 return transitions()->number_of_transitions() <
5228 TransitionArray::kMaxNumberOfTransitions;
5232 Map* Map::GetTransition(int transition_index) {
5233 return transitions()->GetTarget(transition_index);
5237 int Map::SearchTransition(Name* name) {
5238 if (HasTransitionArray()) return transitions()->Search(name);
5239 return TransitionArray::kNotFound;
// Prototype transitions: empty fixed array when absent.
5243 FixedArray* Map::GetPrototypeTransitions() {
5244 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5245 if (!transitions()->HasPrototypeTransitions()) {
5246 return GetHeap()->empty_fixed_array();
5248 return transitions()->GetPrototypeTransitions();
// Replaces the prototype-transition cache, zapping the old one first, and
// carries the transition count over.
5252 void Map::SetPrototypeTransitions(
5253 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5254 EnsureHasTransitionArray(map);
5255 int old_number_of_transitions = map->NumberOfProtoTransitions();
5257 if (map->HasPrototypeTransitions()) {
5258 DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
5259 map->ZapPrototypeTransitions();
5262 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5263 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5267 bool Map::HasPrototypeTransitions() {
5268 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
5272 TransitionArray* Map::transitions() const {
5273 DCHECK(HasTransitionArray());
5274 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5275 return TransitionArray::cast(object);
// Writers for the shared transitions/back-pointer slot.
// NOTE(review): listing has elided lines; the zap call inside
// set_transitions and the else branch of SetBackPointer appear to be
// among them — verify against upstream.
5279 void Map::set_transitions(TransitionArray* transition_array,
5280 WriteBarrierMode mode) {
5281 // Transition arrays are not shared. When one is replaced, it should not
5282 // keep referenced objects alive, so we zap it.
5283 // When there is another reference to the array somewhere (e.g. a handle),
5284 // not zapping turns from a waste of memory into a source of crashes.
5285 if (HasTransitionArray()) {
// Debug-only consistency check: every old target that still shares our
// descriptor array must be present in the new array under the same key.
5287 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
5288 Map* target = transitions()->GetTarget(i);
5289 if (target->instance_descriptors() == instance_descriptors()) {
5290 Name* key = transitions()->GetKey(i);
5291 int new_target_index = transition_array->Search(key);
5292 DCHECK(new_target_index != TransitionArray::kNotFound);
5293 DCHECK(transition_array->GetTarget(new_target_index) == target);
5297 DCHECK(transitions() != transition_array);
5301 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5302 CONDITIONAL_WRITE_BARRIER(
5303 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
// Raw initialization: no write barrier needed for the immortal undefined.
5307 void Map::init_back_pointer(Object* undefined) {
5308 DCHECK(undefined->IsUndefined());
5309 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
// Only undefined->Map and Map->undefined flips are allowed; if a transition
// array is installed, the back pointer lives inside it instead.
5313 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5314 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5315 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5316 (value->IsMap() && GetBackPointer()->IsUndefined()));
5317 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5318 if (object->IsTransitionArray()) {
5319 TransitionArray::cast(object)->set_back_pointer_storage(value);
5321 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5322 CONDITIONAL_WRITE_BARRIER(
5323 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
// Macro-generated field accessors. ACCESSORS defines a typed getter plus a
// setter with write barrier; ACCESSORS_TO_SMI stores the value as a Smi.
// --- Map ---
5328 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5329 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5330 ACCESSORS(Map, constructor, Object, kConstructorOffset)
// --- JSFunction ---
5332 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5333 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5334 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
// --- GlobalObject / JSGlobalProxy ---
5336 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5337 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5338 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5339 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5341 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5342 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
// --- Accessor / interceptor / call-handler infos ---
5344 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5345 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5346 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5347 kExpectedReceiverTypeOffset)
5349 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5350 kSerializedDataOffset)
5352 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5355 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5356 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5357 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5359 ACCESSORS(Box, value, Object, kValueOffset)
5361 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5362 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5364 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5365 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5366 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5368 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5369 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5370 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5371 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5372 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5373 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5375 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5376 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
// --- Template infos ---
5378 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5379 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5380 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5382 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5383 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5384 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5385 kPrototypeTemplateOffset)
5386 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5387 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5388 kNamedPropertyHandlerOffset)
5389 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5390 kIndexedPropertyHandlerOffset)
5391 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5392 kInstanceTemplateOffset)
5393 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5394 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5395 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5396 kInstanceCallHandlerOffset)
5397 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5398 kAccessCheckInfoOffset)
5399 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5401 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5402 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5403 kInternalFieldCountOffset)
5405 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5406 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5408 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
// --- AllocationSite / AllocationMemento ---
5410 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5411 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5412 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5413 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5414 kPretenureCreateCountOffset)
5415 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5416 kDependentCodeOffset)
5417 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5418 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
// --- Script ---
5420 ACCESSORS(Script, source, Object, kSourceOffset)
5421 ACCESSORS(Script, name, Object, kNameOffset)
5422 ACCESSORS(Script, id, Smi, kIdOffset)
5423 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5424 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5425 ACCESSORS(Script, context_data, Object, kContextOffset)
5426 ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5427 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5428 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5429 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5430 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5431 kEvalFrominstructionsOffsetOffset)
5432 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5433 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5434 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5435 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
// Script compilation type/state are single boolean bits packed into the
// Script flags Smi (kCompilationTypeBit / kCompilationStateBit).
5437 Script::CompilationType Script::compilation_type() {
5438 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5439 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5441 void Script::set_compilation_type(CompilationType type) {
5442 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5443 type == COMPILATION_TYPE_EVAL));
5445 Script::CompilationState Script::compilation_state() {
5446 return BooleanBit::get(flags(), kCompilationStateBit) ?
5447 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5449 void Script::set_compilation_state(CompilationState state) {
5450 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5451 state == COMPILATION_STATE_COMPILED));
// --- DebugInfo / BreakPointInfo ---
5455 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5456 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5457 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5458 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5460 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5461 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5462 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5463 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
// --- SharedFunctionInfo pointer fields ---
5465 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5466 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5467 kOptimizedCodeMapOffset)
5468 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5469 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5470 kFeedbackVectorOffset)
5471 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5472 kInstanceClassNameOffset)
5473 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5474 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5475 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5476 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
// --- FunctionTemplateInfo flag bits ---
5479 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5480 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5481 kHiddenPrototypeBit)
5482 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5483 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5484 kNeedsAccessCheckBit)
5485 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5486 kReadOnlyPrototypeBit)
5487 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5488 kRemovePrototypeBit)
5489 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
// --- SharedFunctionInfo boolean hints (some bit-name argument lines are
// elided in this listing) ---
5491 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5493 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5496 BOOL_ACCESSORS(SharedFunctionInfo,
5498 allows_lazy_compilation,
5499 kAllowLazyCompilation)
5500 BOOL_ACCESSORS(SharedFunctionInfo,
5502 allows_lazy_compilation_without_context,
5503 kAllowLazyCompilationWithoutContext)
5504 BOOL_ACCESSORS(SharedFunctionInfo,
5508 BOOL_ACCESSORS(SharedFunctionInfo,
5510 has_duplicate_parameters,
5511 kHasDuplicateParameters)
5512 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5513 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
// On 32-bit hosts the int-valued SFI fields fit in Smis, so plain
// SMI_ACCESSORS suffice (contrast the pseudo-SMI scheme below for 64-bit).
5516 #if V8_HOST_ARCH_32_BIT
5517 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5518 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5519 kFormalParameterCountOffset)
5520 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5521 kExpectedNofPropertiesOffset)
5522 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5523 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5524 kStartPositionAndTypeOffset)
5525 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5526 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5527 kFunctionTokenPositionOffset)
5528 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5529 kCompilerHintsOffset)
5530 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5531 kOptCountAndBailoutReasonOffset)
5532 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5533 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5534 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
// 64-bit "pseudo-SMI" scheme: two 32-bit ints share one pointer-sized slot.
// The LO half carries the Smi tag bit (values are stored shifted left by 1
// with the tag cleared, hence the >> 1 on read and the 0xC0000000 range
// check on write); the HI half is a plain int (INT_ACCESSORS). Which half
// is LO depends on endianness. NOTE(review): some name/offset argument
// lines of the accessor invocations below are elided in this listing.
5538 #if V8_TARGET_LITTLE_ENDIAN
5539 #define PSEUDO_SMI_LO_ALIGN 0
5540 #define PSEUDO_SMI_HI_ALIGN kIntSize
5542 #define PSEUDO_SMI_LO_ALIGN kIntSize
5543 #define PSEUDO_SMI_HI_ALIGN 0
5546 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5547 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
5548 int holder::name() const { \
5549 int value = READ_INT_FIELD(this, offset); \
5550 DCHECK(kHeapObjectTag == 1); \
5551 DCHECK((value & kHeapObjectTag) == 0); \
5552 return value >> 1; \
5554 void holder::set_##name(int value) { \
5555 DCHECK(kHeapObjectTag == 1); \
5556 DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5557 WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag); \
5560 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5561 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5562 INT_ACCESSORS(holder, name, offset)
// Paired LO/HI fields: each LO invocation shares a slot with the following
// HI invocation.
5565 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5566 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5567 formal_parameter_count,
5568 kFormalParameterCountOffset)
5570 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5571 expected_nof_properties,
5572 kExpectedNofPropertiesOffset)
5573 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5575 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5576 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5577 start_position_and_type,
5578 kStartPositionAndTypeOffset)
5580 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5581 function_token_position,
5582 kFunctionTokenPositionOffset)
5583 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5585 kCompilerHintsOffset)
5587 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5588 opt_count_and_bailout_reason,
5589 kOptCountAndBailoutReasonOffset)
5590 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5592 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5594 kAstNodeCountOffset)
5595 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5597 kProfilerTicksOffset)
5602 BOOL_GETTER(SharedFunctionInfo,
5604 optimization_disabled,
5605 kOptimizationDisabled)
// Sets the optimization-disabled compiler hint and, when disabling, also
// clears the "optimizable" flag on full-codegen code so the function is
// not counted as optimizable code.
// NOTE(review): the final argument line of BooleanBit::set (expected
// "disable));") and the closing braces are missing from this extract.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
// Decodes the language mode from the strict-mode compiler hint bit.
// NOTE(review): the tail of the return expression (expected
// "? STRICT : SLOPPY;") is missing from this extract.
StrictMode SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction)


// Stores the language mode; transitions are one-way (sloppy -> strict),
// which the DCHECK enforces.
void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
  // We only allow mode transitions from sloppy to strict.
  DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
  set_compiler_hints(hints);
5635 FunctionKind SharedFunctionInfo::kind() {
5636 return FunctionKindBits::decode(compiler_hints());
5640 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5641 DCHECK(IsValidFunctionKind(kind));
5642 int hints = compiler_hints();
5643 hints = FunctionKindBits::update(hints, kind);
5644 set_compiler_hints(hints);
// Boolean compiler-hint bits on SharedFunctionInfo.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
// NOTE(review): the closing argument (expected "kInlineBuiltin)") appears
// truncated in this extract.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
// NOTE(review): the closing argument (expected "kIsConciseMethod)")
// appears truncated in this extract.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,

// Code-cache field accessors.
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
// A script's source is considered valid unless it is an external string
// whose underlying resource has been disposed (NULL).
// NOTE(review): the fall-through "return true;" and the closing braces are
// missing from this extract.
bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalOneByteString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
5683 void SharedFunctionInfo::DontAdaptArguments() {
5684 DCHECK(code()->kind() == Code::BUILTIN);
5685 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
5689 int SharedFunctionInfo::start_position() const {
5690 return start_position_and_type() >> kStartPositionShift;
5694 void SharedFunctionInfo::set_start_position(int start_position) {
5695 set_start_position_and_type((start_position << kStartPositionShift)
5696 | (start_position_and_type() & ~kStartPositionMask));
5700 Code* SharedFunctionInfo::code() const {
5701 return Code::cast(READ_FIELD(this, kCodeOffset));
5705 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5706 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5707 WRITE_FIELD(this, kCodeOffset, value);
5708 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
// Installs new code, first evicting this function from the code-flushing
// candidate list if it was enqueued there.
// NOTE(review): the closing brace of the if, the trailing "set_code(value);"
// call and the function's closing brace are missing from this extract.
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
// The scope info describing this function's variable bindings.
ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));


// Stores the scope info with a conditional write barrier.
// NOTE(review): the middle argument lines of CONDITIONAL_WRITE_BARRIER
// (receiver/offset) and the trailing "mode);" line are missing from this
// extract.
void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            reinterpret_cast<Object*>(value),
5742 bool SharedFunctionInfo::is_compiled() {
5743 return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
5747 bool SharedFunctionInfo::IsApiFunction() {
5748 return function_data()->IsFunctionTemplateInfo();
5752 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5753 DCHECK(IsApiFunction());
5754 return FunctionTemplateInfo::cast(function_data());
5758 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5759 return function_data()->IsSmi();
5763 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5764 DCHECK(HasBuiltinFunctionId());
5765 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5769 int SharedFunctionInfo::ic_age() {
5770 return ICAgeBits::decode(counters());
5774 void SharedFunctionInfo::set_ic_age(int ic_age) {
5775 set_counters(ICAgeBits::update(counters(), ic_age));
5779 int SharedFunctionInfo::deopt_count() {
5780 return DeoptCountBits::decode(counters());
5784 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5785 set_counters(DeoptCountBits::update(counters(), deopt_count));
5789 void SharedFunctionInfo::increment_deopt_count() {
5790 int value = counters();
5791 int deopt_count = DeoptCountBits::decode(value);
5792 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5793 set_counters(DeoptCountBits::update(value, deopt_count));
5797 int SharedFunctionInfo::opt_reenable_tries() {
5798 return OptReenableTriesBits::decode(counters());
5802 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5803 set_counters(OptReenableTriesBits::update(counters(), tries));
5807 int SharedFunctionInfo::opt_count() {
5808 return OptCountBits::decode(opt_count_and_bailout_reason());
5812 void SharedFunctionInfo::set_opt_count(int opt_count) {
5813 set_opt_count_and_bailout_reason(
5814 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
// Decodes the reason optimization was disabled for this function.
// NOTE(review): the trailing "return reason;" line and closing brace are
// missing from this extract.
BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
  BailoutReason reason = static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5825 bool SharedFunctionInfo::has_deoptimization_support() {
5826 Code* code = this->code();
5827 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
// Bumps the reenable-tries counter and reenables optimization once the
// counter reaches a large enough power of two.
// NOTE(review): lines between set_optimization_disabled(false) and the
// set_optimizable(true) call (presumably counter resets) and the closing
// braces are missing from this extract — confirm against upstream.
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    code()->set_optimizable(true);
// A builtin function lives in a context whose global object is the
// builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();


// True when the function's script is a native (bootstrapper) script.
// NOTE(review): the trailing "return native;" line is missing from this
// extract.
bool JSFunction::IsFromNativeScript() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  DCHECK(!IsBuiltin() || native);  // All builtins are also native.
5859 bool JSFunction::IsFromExtensionScript() {
5860 Object* script = shared()->script();
5861 return script->IsScript() &&
5862 Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
5866 bool JSFunction::NeedsArgumentsAdaption() {
5867 return shared()->formal_parameter_count() !=
5868 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5872 bool JSFunction::IsOptimized() {
5873 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5877 bool JSFunction::IsOptimizable() {
5878 return code()->kind() == Code::FUNCTION && code()->optimizable();
5882 bool JSFunction::IsMarkedForOptimization() {
5883 return code() == GetIsolate()->builtins()->builtin(
5884 Builtins::kCompileOptimized);
5888 bool JSFunction::IsMarkedForConcurrentOptimization() {
5889 return code() == GetIsolate()->builtins()->builtin(
5890 Builtins::kCompileOptimizedConcurrent);
5894 bool JSFunction::IsInOptimizationQueue() {
5895 return code() == GetIsolate()->builtins()->builtin(
5896 Builtins::kInOptimizationQueue);
5900 bool JSFunction::IsInobjectSlackTrackingInProgress() {
5901 return has_initial_map() &&
5902 initial_map()->construction_count() != JSFunction::kNoSlackTracking;
// The function's current code, recovered from the stored entry address.
// NOTE(review): the leading "return Code::cast(" line is missing from this
// extract.
Code* JSFunction::code() {
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));


// Installs |value| by storing its entry address and notifying the
// incremental marker of the code-entry write.
// NOTE(review): an argument line of RecordWriteOfCodeEntry and the closing
// lines are missing from this extract.
void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      HeapObject::RawField(this, kCodeEntryOffset),
5923 void JSFunction::set_code_no_write_barrier(Code* value) {
5924 DCHECK(!GetHeap()->InNewSpace(value));
5925 Address entry = value->entry();
5926 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
// Swaps in new code and keeps the native context's list of optimized
// functions in sync with the optimization state change.
// NOTE(review): several lines (closing braces and, presumably, the actual
// code installation) are missing from this extract — confirm upstream.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);

  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
5953 Context* JSFunction::context() {
5954 return Context::cast(READ_FIELD(this, kContextOffset));
5958 JSObject* JSFunction::global_proxy() {
5959 return context()->global_proxy();
5963 void JSFunction::set_context(Object* value) {
5964 DCHECK(value->IsUndefined() || value->IsContext());
5965 WRITE_FIELD(this, kContextOffset, value);
5966 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5969 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5970 kPrototypeOrInitialMapOffset)
5973 Map* JSFunction::initial_map() {
5974 return Map::cast(prototype_or_initial_map());
5978 bool JSFunction::has_initial_map() {
5979 return prototype_or_initial_map()->IsMap();
5983 bool JSFunction::has_instance_prototype() {
5984 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5988 bool JSFunction::has_prototype() {
5989 return map()->has_non_instance_prototype() || has_instance_prototype();
5993 Object* JSFunction::instance_prototype() {
5994 DCHECK(has_instance_prototype());
5995 if (has_initial_map()) return initial_map()->prototype();
5996 // When there is no initial map and the prototype is a JSObject, the
5997 // initial map field is used for the prototype field.
5998 return prototype_or_initial_map();
6002 Object* JSFunction::prototype() {
6003 DCHECK(has_prototype());
6004 // If the function's prototype property has been set to a non-JSObject
6005 // value, that value is stored in the constructor field of the map.
6006 if (map()->has_non_instance_prototype()) return map()->constructor();
6007 return instance_prototype();
6011 bool JSFunction::should_have_prototype() {
6012 return map()->function_with_prototype();
6016 bool JSFunction::is_compiled() {
6017 return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
6021 FixedArray* JSFunction::literals() {
6022 DCHECK(!shared()->bound());
6023 return literals_or_bindings();
6027 void JSFunction::set_literals(FixedArray* literals) {
6028 DCHECK(!shared()->bound());
6029 set_literals_or_bindings(literals);
6033 FixedArray* JSFunction::function_bindings() {
6034 DCHECK(shared()->bound());
6035 return literals_or_bindings();
6039 void JSFunction::set_function_bindings(FixedArray* bindings) {
6040 DCHECK(shared()->bound());
6041 // Bound function literal may be initialized to the empty fixed array
6042 // before the bindings are set.
6043 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6044 bindings->map() == GetHeap()->fixed_cow_array_map());
6045 set_literals_or_bindings(bindings);
6049 int JSFunction::NumberOfLiterals() {
6050 DCHECK(!shared()->bound());
6051 return literals()->length();
// Per-id accessors for the JavaScript builtin functions and their code
// objects, stored in fixed slots on the builtins object.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));


// NOTE(review): the "Object* value) {" parameter line is missing from this
// extract.
void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));


// NOTE(review): the "Code* value) {" parameter line and trailing lines are
// missing from this extract; note the new-space DCHECK implies no write
// barrier is needed here.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  DCHECK(!GetHeap()->InNewSpace(value));
// Proxy field accessors.
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


// Fills every pointer field of a freshly allocated proxy with |value|.
// The value must not live in new space (DCHECKed), since no write barrier
// is emitted here.
void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
ACCESSORS(JSCollection, table, Object, kTableOffset)


// Accessors shared by the OrderedHashTable iterators (Map/Set iterators).
// NOTE(review): the closing-brace continuation line of the getter appears
// to be missing from this extract — confirm against upstream.
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
  template<class Derived, class TableType> \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset)); \
  template<class Derived, class TableType> \
  void OrderedHashTableIterator<Derived, TableType>::set_##name( \
      type* value, WriteBarrierMode mode) { \
    WRITE_FIELD(this, offset, value); \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
// The foreign address is a raw machine pointer stored in an intptr field
// (not a tagged heap reference).
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));


// Generator object field accessors.
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
// Generator state is encoded in the continuation field: positive values
// mean suspended, with kGeneratorExecuting / kGeneratorClosed as
// non-positive sentinels (the DCHECKs pin their ordering and that
// kGeneratorClosed is zero).
bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;


bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;


bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;


ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


ACCESSORS(JSValue, value, Object, kValueOffset)
// Hand-written cast: a HeapNumber* may also refer to a MutableHeapNumber,
// so the default CAST_ACCESSOR (which checks IsHeapNumber only) cannot be
// used here.
HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
// JSDate value and cached date-component fields.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


// Error/message object fields.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Code object header fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
// Nulls out the header fields that hold heap pointers.  The type-feedback
// slot is preserved when it holds a Smi, because stubs/ICs reuse that slot
// for their (Smi) stub key.
// NOTE(review): the closing braces are missing from this extract.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
// The type-feedback slot is only meaningful on full-codegen (FUNCTION)
// code; stubs and ICs reuse the same slot for the stub key.
Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();


// NOTE(review): the trailing "value, mode);" argument line of
// CONDITIONAL_WRITE_BARRIER and the closing brace are missing from this
// extract.
void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6227 uint32_t Code::stub_key() {
6228 DCHECK(IsCodeStubOrIC());
6229 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6230 return static_cast<uint32_t>(smi_key->value());
6234 void Code::set_stub_key(uint32_t key) {
6235 DCHECK(IsCodeStubOrIC());
6236 set_raw_type_feedback_info(Smi::FromInt(key));
6240 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6241 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6244 byte* Code::instruction_start() {
6245 return FIELD_ADDR(this, kHeaderSize);
6249 byte* Code::instruction_end() {
6250 return instruction_start() + instruction_size();
6254 int Code::body_size() {
6255 return RoundUp(instruction_size(), kObjectAlignment);
6259 ByteArray* Code::unchecked_relocation_info() {
6260 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6264 byte* Code::relocation_start() {
6265 return unchecked_relocation_info()->GetDataStartAddress();
6269 int Code::relocation_size() {
6270 return unchecked_relocation_info()->length();
6274 byte* Code::entry() {
6275 return instruction_start();
6279 bool Code::contains(byte* inner_pointer) {
6280 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store is a raw pointer stored as an intptr field, not a
// tagged heap reference.
void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);


// Note: |mode| is unused here since the stored value is not a heap object.
void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6303 bool JSArrayBuffer::is_external() {
6304 return BooleanBit::get(flag(), kIsExternalBit);
6308 void JSArrayBuffer::set_is_external(bool value) {
6309 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6313 bool JSArrayBuffer::should_be_freed() {
6314 return BooleanBit::get(flag(), kShouldBeFreed);
6318 void JSArrayBuffer::set_should_be_freed(bool value) {
6319 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6323 bool JSArrayBuffer::is_neuterable() {
6324 return BooleanBit::get(flag(), kIsNeuterableBit);
6328 void JSArrayBuffer::set_is_neuterable(bool value) {
6329 set_flag(BooleanBit::set(flag(), kIsNeuterableBit, value));
// Weak lists link array buffers and their views for neutering on free.
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)
6346 JSRegExp::Type JSRegExp::TypeTag() {
6347 Object* data = this->data();
6348 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6349 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6350 return static_cast<JSRegExp::Type>(smi->value());
// Number of capture groups, derived from the regexp data.
// NOTE(review): the switch case labels and the non-Irregexp cases are
// missing from this extract — confirm against upstream.
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6367 JSRegExp::Flags JSRegExp::GetFlags() {
6368 DCHECK(this->data()->IsFixedArray());
6369 Object* data = this->data();
6370 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6371 return Flags(smi->value());
// The regexp source pattern string, stored in the data fixed array.
// NOTE(review): the trailing "return pattern;" line is missing from this
// extract.
String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));


// Reads implementation-specific data from the regexp's FixedArray.
Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
// The elements kind is read from the map; the remainder of the body is a
// debug-only consistency check between the kind and the actual elements
// backing store.
// NOTE(review): the surrounding debug guards, the closing braces and the
// final "return kind;" are missing from this extract — confirm upstream.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
// Selects the ElementsAccessor implementation for this object's kind.
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());


// Elements-kind predicates; all derive from GetElementsKind().
bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;


bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;


// Checks the actual backing store rather than the elements kind.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
// Per-type external-array checks, expanded once per typed-array type.
// NOTE(review): the "return false;" line guarded by !IsHeapObject() and
// the closing-brace continuation line appear to be missing from this
// extract — confirm against upstream.
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
  bool JSObject::HasExternal##Type##Elements() { \
    HeapObject* array = elements(); \
    DCHECK(array != NULL); \
    if (!array->IsHeapObject()) \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();


// Same pattern for on-heap fixed typed arrays.
// NOTE(review): the same lines appear to be missing here as in
// EXTERNAL_ELEMENTS_CHECK above.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
  bool JSObject::HasFixed##Type##Elements() { \
    HeapObject* array = elements(); \
    DCHECK(array != NULL); \
    if (!array->IsHeapObject()) \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
// Interceptor presence is recorded on the map.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();


// Properties backing store viewed as a dictionary (slow-mode objects only,
// hence the DCHECK).
NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  return NameDictionary::cast(properties());


// Elements backing store viewed as a dictionary.
SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());


// The hash-not-computed sentinel bits are clear once a hash is present.
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
6543 uint32_t Name::Hash() {
6544 // Fast case: has hash code already been computed?
6545 uint32_t field = hash_field();
6546 if (IsHashFieldComputed(field)) return field >> kHashShift;
6547 // Slow case: compute hash code and set it. Has to be a string.
6548 return String::cast(this)->ComputeAndSetHash();
6551 bool Name::IsOwn() {
6552 return this->IsSymbol() && Symbol::cast(this)->is_own();
// Initializes the incremental hasher with the string length and hash seed.
// NOTE(review): the initializer-list lines for length_ and array_index_
// appear to be missing from this extract — confirm against upstream.
StringHasher::StringHasher(int length, uint32_t seed)
      raw_running_hash_(seed),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);


// Strings longer than kMaxHashCalcLength get a trivial hash instead of a
// character-by-character one.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
// One mixing step of the Jenkins one-at-a-time hash.
// NOTE(review): the initial "running_hash += c;" step appears to be
// missing from this extract — confirm against upstream.
uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;


// Final avalanche step of the Jenkins one-at-a-time hash.
// NOTE(review): the body of the zero-hash fallback (the hash must never be
// zero within kHashBitMask) is missing from this extract.
uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
  return running_hash;


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
// Incrementally checks whether the string parsed so far is still a valid
// array index and accumulates its numeric value.
// NOTE(review): several lines (the early "return false;" exits, the digit
// extraction of |d|, closing braces) are missing from this extract.
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
  if (is_first_char_) {
    is_first_char_ = false;
    // A leading zero is only a valid index for the single string "0".
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
  // Reject values that would overflow the 32-bit array-index range.
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
  array_index_ = array_index_ * 10 + d;
// Feeds |length| characters into the hasher, tracking array-index-ness
// until it is ruled out; afterwards only the hash is updated.
// NOTE(review): the declaration of loop index |i|, break statements and
// closing braces are missing from this extract.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);


// Hashes a flat sequential character buffer in one pass.
// NOTE(review): the remaining parameter lines ("int length, uint32_t seed)
// {") are missing from this extract.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
// Hashes an arbitrary (possibly cons) string by visiting its flat pieces.
// NOTE(review): the declaration of |offset| and some closing braces are
// missing from this extract.
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Trivial hashes need no character data at all.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  // The string was flat.
  if (cons_string == NULL) return hasher.GetHashField();
  // This is a ConsString, iterate across it.
  ConsStringIterator iter(cons_string);
  while (NULL != (string = iter.Next(&offset))) {
    String::VisitFlat(&hasher, string, offset);
  return hasher.GetHashField();


// String::VisitFlat callbacks: both encodings just feed the characters in.
// NOTE(review): the "int length) {" parameter lines are missing from this
// extract.
void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
  AddCharacters(chars, length);


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
  AddCharacters(chars, length);
// A name can be an array index only if it is a string that parses as one.
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);


// Fast path: the computed hash field caches whether the string is an array
// index.  NOTE(review): the fast-path "return false;" branch body and the
// closing brace are missing from this extract.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
  return SlowAsArrayIndex(index);
// Redirects this internalized string to |canonical| by storing the
// canonical string in the hash-field slot.
void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());


// Follows the forwarding pointer installed by the setter above, if any.
// NOTE(review): the trailing "return canonical;" line is missing from this
// extract.
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
6719 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
6720 Handle<Name> name) {
6721 if (object->IsJSProxy()) {
6722 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6723 return JSProxy::HasPropertyWithHandler(proxy, name);
6725 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
6726 if (!result.has_value) return Maybe<bool>();
6727 return maybe(result.value != ABSENT);
6731 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
6732 Handle<Name> name) {
6733 if (object->IsJSProxy()) {
6734 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6735 return JSProxy::HasPropertyWithHandler(proxy, name);
6737 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
6738 if (!result.has_value) return Maybe<bool>();
6739 return maybe(result.value != ABSENT);
// Attribute lookup by name, routing integer-indexed names to the element
// path.  NOTE(review): the "uint32_t index;" declaration and closing
// braces are missing from this extract.
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> key) {
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);


// Attribute lookup for an indexed element, with proxy dispatch; the final
// |true| argument mirrors the |false| used by GetOwnElementAttribute.
Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
6765 bool JSGlobalObject::IsDetached() {
6766 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
6770 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
6771 const PrototypeIterator iter(this->GetIsolate(),
6772 const_cast<JSGlobalProxy*>(this));
6773 return iter.GetCurrent() != global;
6777 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6778 return object->IsJSProxy()
6779 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6780 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6784 Object* JSReceiver::GetIdentityHash() {
6786 ? JSProxy::cast(this)->GetIdentityHash()
6787 : JSObject::cast(this)->GetIdentityHash();
6791 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6792 if (object->IsJSProxy()) {
6793 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6794 return JSProxy::HasElementWithHandler(proxy, index);
6796 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6797 Handle<JSObject>::cast(object), object, index, true);
6798 if (!result.has_value) return Maybe<bool>();
6799 return maybe(result.value != ABSENT);
6803 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
6805 if (object->IsJSProxy()) {
6806 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6807 return JSProxy::HasElementWithHandler(proxy, index);
6809 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6810 Handle<JSObject>::cast(object), object, index, false);
6811 if (!result.has_value) return Maybe<bool>();
6812 return maybe(result.value != ABSENT);
6816 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
6817 Handle<JSReceiver> object, uint32_t index) {
6818 if (object->IsJSProxy()) {
6819 return JSProxy::GetElementAttributeWithHandler(
6820 Handle<JSProxy>::cast(object), object, index);
6822 return JSObject::GetElementAttributeWithReceiver(
6823 Handle<JSObject>::cast(object), object, index, false);
6827 bool AccessorInfo::all_can_read() {
6828 return BooleanBit::get(flag(), kAllCanReadBit);
6832 void AccessorInfo::set_all_can_read(bool value) {
6833 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6837 bool AccessorInfo::all_can_write() {
6838 return BooleanBit::get(flag(), kAllCanWriteBit);
6842 void AccessorInfo::set_all_can_write(bool value) {
6843 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6847 PropertyAttributes AccessorInfo::property_attributes() {
6848 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6852 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6853 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6857 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6858 if (!HasExpectedReceiverType()) return true;
6859 if (!receiver->IsJSObject()) return false;
6860 return FunctionTemplateInfo::cast(expected_receiver_type())
6861 ->IsTemplateFor(JSObject::cast(receiver)->map());
6865 void ExecutableAccessorInfo::clear_setter() {
6866 set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
6870 template<typename Derived, typename Shape, typename Key>
6871 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6873 Handle<Object> value) {
6874 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6878 template<typename Derived, typename Shape, typename Key>
6879 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6881 Handle<Object> value,
6882 PropertyDetails details) {
6883 DCHECK(!key->IsName() ||
6884 details.IsDeleted() ||
6885 details.dictionary_index() > 0);
6886 int index = DerivedHashTable::EntryToIndex(entry);
6887 DisallowHeapAllocation no_gc;
6888 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
6889 FixedArray::set(index, *key, mode);
6890 FixedArray::set(index+1, *value, mode);
6891 FixedArray::set(index+2, details.AsSmi());
6895 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6896 DCHECK(other->IsNumber());
6897 return key == static_cast<uint32_t>(other->Number());
6901 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
6902 return ComputeIntegerHash(key, 0);
6906 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6908 DCHECK(other->IsNumber());
6909 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
6913 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
6914 return ComputeIntegerHash(key, seed);
6918 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
6921 DCHECK(other->IsNumber());
6922 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6926 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
6927 return isolate->factory()->NewNumberFromUint(key);
6931 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
6932 // We know that all entries in a hash table had their hash keys created.
6933 // Use that knowledge to have fast failure.
6934 if (key->Hash() != Name::cast(other)->Hash()) return false;
6935 return key->Equals(Name::cast(other));
6939 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
6944 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
6945 return Name::cast(other)->Hash();
6949 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
6951 DCHECK(key->IsUniqueName());
6956 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
6957 Handle<NameDictionary> dictionary) {
6958 return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
6962 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
6963 return key->SameValue(other);
6967 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
6968 return Smi::cast(key->GetHash())->value();
6972 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
6974 return Smi::cast(other->GetHash())->value();
6978 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
6979 Handle<Object> key) {
6984 Handle<ObjectHashTable> ObjectHashTable::Shrink(
6985 Handle<ObjectHashTable> table, Handle<Object> key) {
6986 return DerivedHashTable::Shrink(table, key);
6990 template <int entrysize>
6991 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
6992 return key->SameValue(other);
6996 template <int entrysize>
6997 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
6998 intptr_t hash = reinterpret_cast<intptr_t>(*key);
6999 return (uint32_t)(hash & 0xFFFFFFFF);
7003 template <int entrysize>
7004 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7006 intptr_t hash = reinterpret_cast<intptr_t>(other);
7007 return (uint32_t)(hash & 0xFFFFFFFF);
7011 template <int entrysize>
7012 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7013 Handle<Object> key) {
7018 void Map::ClearCodeCache(Heap* heap) {
7019 // No write barrier is needed since empty_fixed_array is not in new space.
7020 // Please note this function is used during marking:
7021 // - MarkCompactCollector::MarkUnmarkedObject
7022 // - IncrementalMarking::Step
7023 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7024 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7028 int Map::SlackForArraySize(int old_size, int size_limit) {
7029 const int max_slack = size_limit - old_size;
7030 CHECK(max_slack >= 0);
7031 if (old_size < 4) return Min(max_slack, 1);
7032 return Min(max_slack, old_size / 2);
7036 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
7037 DCHECK(array->HasFastSmiOrObjectElements());
7038 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
7039 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
7040 if (elts->length() < required_size) {
7041 // Doubling in size would be overkill, but leave some slack to avoid
7042 // constantly growing.
7043 Expand(array, required_size + (required_size >> 3));
7044 // It's a performance benefit to keep a frequently used array in new-space.
7045 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
7046 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
7047 // Expand will allocate a new backing store in new space even if the size
7048 // we asked for isn't larger than what we had before.
7049 Expand(array, required_size);
7054 void JSArray::set_length(Smi* length) {
7055 // Don't need a write barrier for a Smi.
7056 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7060 bool JSArray::AllowsSetElementsLength() {
7061 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7062 DCHECK(result == !HasExternalArrayElements());
7067 void JSArray::SetContent(Handle<JSArray> array,
7068 Handle<FixedArrayBase> storage) {
7069 EnsureCanContainElements(array, storage, storage->length(),
7070 ALLOW_COPIED_DOUBLE_ELEMENTS);
7072 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7073 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7074 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7075 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7076 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7077 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7078 array->set_elements(*storage);
7079 array->set_length(Smi::FromInt(storage->length()));
7083 int TypeFeedbackInfo::ic_total_count() {
7084 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7085 return ICTotalCountField::decode(current);
7089 void TypeFeedbackInfo::set_ic_total_count(int count) {
7090 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7091 value = ICTotalCountField::update(value,
7092 ICTotalCountField::decode(count));
7093 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7097 int TypeFeedbackInfo::ic_with_type_info_count() {
7098 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7099 return ICsWithTypeInfoCountField::decode(current);
7103 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7104 if (delta == 0) return;
7105 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7106 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7107 // We can get negative count here when the type-feedback info is
7108 // shared between two code objects. The can only happen when
7109 // the debugger made a shallow copy of code object (see Heap::CopyCode).
7110 // Since we do not optimize when the debugger is active, we can skip
7111 // this counter update.
7112 if (new_count >= 0) {
7113 new_count &= ICsWithTypeInfoCountField::kMask;
7114 value = ICsWithTypeInfoCountField::update(value, new_count);
7115 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7120 int TypeFeedbackInfo::ic_generic_count() {
7121 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7125 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7126 if (delta == 0) return;
7127 int new_count = ic_generic_count() + delta;
7128 if (new_count >= 0) {
7129 new_count &= ~Smi::kMinValue;
7130 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7135 void TypeFeedbackInfo::initialize_storage() {
7136 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7137 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7138 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7142 void TypeFeedbackInfo::change_own_type_change_checksum() {
7143 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7144 int checksum = OwnTypeChangeChecksum::decode(value);
7145 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7146 value = OwnTypeChangeChecksum::update(value, checksum);
7147 // Ensure packed bit field is in Smi range.
7148 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7149 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7150 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7154 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7155 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7156 int mask = (1 << kTypeChangeChecksumBits) - 1;
7157 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7158 // Ensure packed bit field is in Smi range.
7159 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7160 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7161 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7165 int TypeFeedbackInfo::own_type_change_checksum() {
7166 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7167 return OwnTypeChangeChecksum::decode(value);
7171 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7172 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7173 int mask = (1 << kTypeChangeChecksumBits) - 1;
7174 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7178 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7181 Relocatable::Relocatable(Isolate* isolate) {
7183 prev_ = isolate->relocatable_top();
7184 isolate->set_relocatable_top(this);
7188 Relocatable::~Relocatable() {
7189 DCHECK_EQ(isolate_->relocatable_top(), this);
7190 isolate_->set_relocatable_top(prev_);
7194 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7195 return map->instance_size();
7199 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7200 v->VisitExternalReference(
7201 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7205 template<typename StaticVisitor>
7206 void Foreign::ForeignIterateBody() {
7207 StaticVisitor::VisitExternalReference(
7208 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7212 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7213 typedef v8::String::ExternalOneByteStringResource Resource;
7214 v->VisitExternalOneByteString(
7215 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7219 template <typename StaticVisitor>
7220 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7221 typedef v8::String::ExternalOneByteStringResource Resource;
7222 StaticVisitor::VisitExternalOneByteString(
7223 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7227 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7228 typedef v8::String::ExternalStringResource Resource;
7229 v->VisitExternalTwoByteString(
7230 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7234 template<typename StaticVisitor>
7235 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7236 typedef v8::String::ExternalStringResource Resource;
7237 StaticVisitor::VisitExternalTwoByteString(
7238 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7242 template<int start_offset, int end_offset, int size>
7243 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7246 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7247 HeapObject::RawField(obj, end_offset));
7251 template<int start_offset>
7252 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7255 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7256 HeapObject::RawField(obj, object_size));
7260 template<class Derived, class TableType>
7261 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7262 TableType* table(TableType::cast(this->table()));
7263 int index = Smi::cast(this->index())->value();
7264 Object* key = table->KeyAt(index);
7265 DCHECK(!key->IsTheHole());
7270 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7271 array->set(0, CurrentKey());
7275 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7276 array->set(0, CurrentKey());
7277 array->set(1, CurrentValue());
7281 Object* JSMapIterator::CurrentValue() {
7282 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7283 int index = Smi::cast(this->index())->value();
7284 Object* value = table->ValueAt(index);
7285 DCHECK(!value->IsTheHole());
7291 #undef CAST_ACCESSOR
7292 #undef INT_ACCESSORS
7294 #undef ACCESSORS_TO_SMI
7295 #undef SMI_ACCESSORS
7296 #undef SYNCHRONIZED_SMI_ACCESSORS
7297 #undef NOBARRIER_SMI_ACCESSORS
7299 #undef BOOL_ACCESSORS
7301 #undef FIELD_ADDR_CONST
7303 #undef NOBARRIER_READ_FIELD
7305 #undef NOBARRIER_WRITE_FIELD
7306 #undef WRITE_BARRIER
7307 #undef CONDITIONAL_WRITE_BARRIER
7308 #undef READ_DOUBLE_FIELD
7309 #undef WRITE_DOUBLE_FIELD
7310 #undef READ_INT_FIELD
7311 #undef WRITE_INT_FIELD
7312 #undef READ_INTPTR_FIELD
7313 #undef WRITE_INTPTR_FIELD
7314 #undef READ_UINT32_FIELD
7315 #undef WRITE_UINT32_FIELD
7316 #undef READ_SHORT_FIELD
7317 #undef WRITE_SHORT_FIELD
7318 #undef READ_BYTE_FIELD
7319 #undef WRITE_BYTE_FIELD
7320 #undef NOBARRIER_READ_BYTE_FIELD
7321 #undef NOBARRIER_WRITE_BYTE_FIELD
7323 } } // namespace v8::internal
7325 #endif // V8_OBJECTS_INL_H_