1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts.h"
18 #include "src/conversions-inl.h"
19 #include "src/elements.h"
20 #include "src/factory.h"
21 #include "src/field-index-inl.h"
22 #include "src/heap/heap-inl.h"
23 #include "src/heap/heap.h"
24 #include "src/heap/incremental-marking.h"
25 #include "src/heap/objects-visiting.h"
26 #include "src/heap/spaces.h"
27 #include "src/heap/store-buffer.h"
28 #include "src/isolate.h"
29 #include "src/lookup.h"
30 #include "src/objects.h"
31 #include "src/property.h"
32 #include "src/prototype.h"
33 #include "src/transitions-inl.h"
34 #include "src/type-feedback-vector-inl.h"
35 #include "src/v8memory.h"
// Decodes a PropertyDetails that was previously packed into a Smi
// (see AsSmi() below).
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();

// Packs this PropertyDetails into a Smi so it can be stored in descriptor
// arrays and dictionaries.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);

// Returns a copy of these details with the deleted bit set; used to mark
// dictionary entries as removed without shrinking the table.
PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
59 #define TYPE_CHECKER(type, instancetype) \
60 bool Object::Is##type() const { \
61 return Object::IsHeapObject() && \
62 HeapObject::cast(this)->map()->instance_type() == instancetype; \
66 #define CAST_ACCESSOR(type) \
67 type* type::cast(Object* object) { \
68 SLOW_DCHECK(object->Is##type()); \
69 return reinterpret_cast<type*>(object); \
71 const type* type::cast(const Object* object) { \
72 SLOW_DCHECK(object->Is##type()); \
73 return reinterpret_cast<const type*>(object); \
77 #define INT_ACCESSORS(holder, name, offset) \
78 int holder::name() const { return READ_INT_FIELD(this, offset); } \
79 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
82 #define ACCESSORS(holder, name, type, offset) \
83 type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
84 void holder::set_##name(type* value, WriteBarrierMode mode) { \
85 WRITE_FIELD(this, offset, value); \
86 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
90 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
91 #define ACCESSORS_TO_SMI(holder, name, offset) \
92 Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
93 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
94 WRITE_FIELD(this, offset, value); \
98 // Getter that returns a Smi as an int and writes an int as a Smi.
99 #define SMI_ACCESSORS(holder, name, offset) \
100 int holder::name() const { \
101 Object* value = READ_FIELD(this, offset); \
102 return Smi::cast(value)->value(); \
104 void holder::set_##name(int value) { \
105 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
108 #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \
109 int holder::synchronized_##name() const { \
110 Object* value = ACQUIRE_READ_FIELD(this, offset); \
111 return Smi::cast(value)->value(); \
113 void holder::synchronized_set_##name(int value) { \
114 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
117 #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \
118 int holder::nobarrier_##name() const { \
119 Object* value = NOBARRIER_READ_FIELD(this, offset); \
120 return Smi::cast(value)->value(); \
122 void holder::nobarrier_set_##name(int value) { \
123 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
126 #define BOOL_GETTER(holder, field, name, offset) \
127 bool holder::name() const { \
128 return BooleanBit::get(field(), offset); \
132 #define BOOL_ACCESSORS(holder, field, name, offset) \
133 bool holder::name() const { \
134 return BooleanBit::get(field(), offset); \
136 void holder::set_##name(bool value) { \
137 set_##field(BooleanBit::set(field(), offset, value)); \
// True for any concrete backing-store kind that derives from FixedArrayBase.
bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();

// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
         HeapObject::cast(this)->GetHeap()->external_map();
155 bool Object::IsAccessorInfo() const {
156 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
// Smis are recognized purely from their tag bits; no heap access is needed.
bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);

// Anything without the Smi tag is a tagged pointer into the heap.
bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);

// Single-instance-type checks generated by the TYPE_CHECKER macro.
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)

// All string instance types are allocated below FIRST_NONSTRING_TYPE, so a
// single comparison suffices.
bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
181 bool Object::IsName() const {
182 return IsString() || IsSymbol();
186 bool Object::IsUniqueName() const {
187 return IsInternalizedString() || IsSymbol();
// "Spec objects" correspond to the ES-spec notion of Object; their instance
// types all lie at or above FIRST_SPEC_OBJECT_TYPE.
bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
197 bool Object::IsSpecFunction() const {
198 if (!Object::IsHeapObject()) return false;
199 InstanceType type = HeapObject::cast(this)->map()->instance_type();
200 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
204 bool Object::IsTemplateInfo() const {
205 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
209 bool Object::IsInternalizedString() const {
210 if (!this->IsHeapObject()) return false;
211 uint32_t type = HeapObject::cast(this)->map()->instance_type();
212 STATIC_ASSERT(kNotInternalizedTag != 0);
213 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
214 (kStringTag | kInternalizedTag);
// The predicates below classify strings by representation (cons, sliced,
// sequential, external) and encoding (one- or two-byte) using the bits
// cached by StringShape.
bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();

bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();

bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();

bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();

bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();

bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();

bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();

bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();

// True for every object kind that can legally back a JSObject's elements.
bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
// Returns the storage object to use for a field of the given representation:
// a zero Smi for uninitialized Smi fields, the value itself for non-double
// representations, and a fresh MUTABLE HeapNumber boxing the numeric value
// for double fields.
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  if (!representation.IsDouble()) return object;
  if (object->IsUninitialized()) {
  } else if (object->IsMutableHeapNumber()) {
    // Re-box so the new field gets its own mutable number.
    value = HeapNumber::cast(*object)->value();
    value = object->Number();
  return isolate->factory()->NewHeapNumber(value, MUTABLE);

// Wraps a double field's value in a fresh immutable HeapNumber so callers
// cannot observe later in-place mutation; non-double values pass through
// unchanged.
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
// StringShape caches a string's instance-type bits so that repeated
// representation/encoding queries avoid reloading the map.
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);

StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);

StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);

// Same bit test as Object::IsInternalizedString, on the cached type word.
bool StringShape::IsInternalized() {
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
// Encoding of this string object itself (a cons/sliced wrapper may report a
// different encoding than its underlying payload).
bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;

bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;

// Like IsOneByteRepresentation(), but for indirect (cons/sliced) strings the
// check is forwarded to the underlying string.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
    case kTwoByteStringTag:
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();

// Two-byte analogue of the predicate above.
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
    case kTwoByteStringTag:
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();

// A two-byte string may still contain only one-byte data; the data hint bit
// records that, so either the hint or a one-byte representation suffices.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
// Representation/encoding bit tests on the cached instance-type word.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;

bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;

// Indirect = cons or sliced (a wrapper around another string).
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;

bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;

bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;

StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);

uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;

// Combined representation + encoding bits, used by the Is*OneByte/TwoByte
// predicates below.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));

// Keep the internal masks in sync with the ones exported to embedders.
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);
// Exact representation+encoding combinations.
bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);

bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);

bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);

// These combined tags are exported through the public API; keep them in sync.
STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);

bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);

STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
// Reads the character at |index| from the flattened string content.
// NOTE(review): the DCHECK accepts index == length_, i.e. one past the last
// character -- confirm the `<=` (rather than `<`) bound is intentional.
uc32 FlatStringReader::Get(int index) {
  DCHECK(0 <= index && index <= length_);
    // One-byte path: raw 8-bit characters.
    return static_cast<const byte*>(start_)[index];
    // Two-byte path: UTF-16 code units.
    return static_cast<const uc16*>(start_)[index];
// The *Shape::AsHandle helpers delegate to the key object, which knows how
// to materialize itself as a heap object for insertion into the table.
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);

Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);

Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
  return key->AsHandle(isolate);

Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
  return key->AsHandle(isolate);
// Base hash-table key for a sequential run of characters; caches the
// computed hash field so Hash() is only expensive once.
template <typename Char>
class SequentialStringKey : public HashTableKey {
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.

  // Hash of an existing table entry, for bucket comparison during lookup.
  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();

  Vector<const Char> string_;   // The characters being looked up/inserted.
  uint32_t hash_field_;         // Cached hash field; 0 means not yet computed.
// Hash-table key over a one-byte (Latin-1) character vector.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  // Matches against an existing string entry byte-for-byte.
  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
// Hash-table key describing a [from, from + length) substring of an existing
// sequential one-byte string, avoiding an up-front substring allocation.
class SeqOneByteSubStringKey : public HashTableKey {
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());

  virtual uint32_t Hash() OVERRIDE {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    // Hash directly over the parent string's character buffer.
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();

  virtual bool IsMatch(Object* string) OVERRIDE;
  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;

  Handle<SeqOneByteString> string_;  // Parent string holding the characters.
  uint32_t hash_field_;              // Cached hash; 0 means not yet computed.
// Hash-table key over a two-byte (UTF-16) character vector.
class TwoByteStringKey : public SequentialStringKey<uc16> {
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;

// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);

  virtual uint32_t Hash() OVERRIDE {
    // Reuse a previously computed hash (0 means "not computed yet").
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // AsHandle needs the hash and decoded character count; compute if needed.
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
619 bool Object::IsNumber() const {
620 return IsSmi() || IsHeapNumber();
624 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
625 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
628 bool Object::IsFiller() const {
629 if (!Object::IsHeapObject()) return false;
630 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
631 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
// External arrays occupy a contiguous range of instance types, so a pair of
// range comparisons suffices.
bool Object::IsExternalArray() const {
  if (!Object::IsHeapObject())
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
645 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
646 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
647 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
649 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
650 #undef TYPED_ARRAY_TYPE_CHECKER
// Fixed typed arrays, like external arrays, form a contiguous instance-type
// range.
bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);

// JSReceivers (spec Objects, including proxies) occupy the top of the
// instance-type space, so one lower-bound comparison is enough.
bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;

// Same trick for JSObjects (JSReceivers minus proxies).
bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
677 bool Object::IsJSProxy() const {
678 if (!Object::IsHeapObject()) return false;
679 return HeapObject::cast(this)->map()->IsJSProxyMap();
683 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
684 TYPE_CHECKER(JSSet, JS_SET_TYPE)
685 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
686 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
687 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
688 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
689 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
690 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
691 TYPE_CHECKER(Map, MAP_TYPE)
692 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
693 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
694 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
697 bool Object::IsJSWeakCollection() const {
698 return IsJSWeakMap() || IsJSWeakSet();
// Descriptor/transition arrays and type-feedback vectors share the plain
// FixedArray instance type; these predicates cannot distinguish them further.
bool Object::IsDescriptorArray() const {
  return IsFixedArray();

bool Object::IsTransitionArray() const {
  return IsFixedArray();

bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }

// Heuristic check only: deopt input data is stored as a FixedArray, so the
// best we can do (for asserts) is validate the length shape.
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;
  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;
  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;

// Heuristic as above: output data only needs an even length.
bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;

// Dependent-code arrays are indistinguishable from plain fixed arrays.
bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
// Contexts are recognized by their map being one of the known context maps;
// there is no dedicated instance type per context kind.
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());

// Only the native context uses the native_context_map, so a single map
// comparison suffices.
bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();

// ScopeInfo likewise has a dedicated map.
bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
777 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
780 template <> inline bool Is<JSFunction>(Object* obj) {
781 return obj->IsJSFunction();
785 TYPE_CHECKER(Code, CODE_TYPE)
786 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
787 TYPE_CHECKER(Cell, CELL_TYPE)
788 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
789 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
790 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
791 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
792 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
793 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
794 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
797 bool Object::IsStringWrapper() const {
798 return IsJSValue() && JSValue::cast(this)->value()->IsString();
802 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
// True/false are oddballs; their kinds are the only ones with no bits set
// in kNotBooleanMask.
bool Object::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
811 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
812 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
813 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
814 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
817 bool Object::IsJSArrayBufferView() const {
818 return IsJSDataView() || IsJSTypedArray();
822 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
825 template <> inline bool Is<JSArray>(Object* obj) {
826 return obj->IsJSArray();
// All hash tables share the hash_table_map, so map identity is the test.
bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();

// Weak hash tables are not further distinguishable from regular ones here.
bool Object::IsWeakHashTable() const {
  return IsHashTable();

// Any hash table other than the canonical string table is treated as a
// dictionary for these (assert-oriented) predicates.
bool Object::IsDictionary() const {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();

bool Object::IsNameDictionary() const {
  return IsDictionary();

bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();

bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();

bool Object::IsStringTable() const {
  return IsHashTable();
// Heuristic shape check for function result caches (stored as FixedArrays):
// the length must cover the header and hold a whole number of entries.
bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();

bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);

// Maps a map's hash into one of the cache's fixed number of slots.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;

// Shape check: a normalized map cache is a FixedArray of exactly kEntries.
bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
// The specialized cache tables all reuse the generic hash-table map, so the
// predicates below cannot distinguish them beyond IsHashTable().
bool Object::IsCompilationCacheTable() const {
  return IsHashTable();

bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();

bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();

bool Object::IsMapCache() const {
  return IsHashTable();

bool Object::IsObjectHashTable() const {
  return IsHashTable();

// Ordered hash tables have their own dedicated map.
bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();

bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();

bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
957 bool Object::IsPrimitive() const {
958 return IsOddball() || IsNumber() || IsString();
// Global proxies have a dedicated instance type.
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_GLOBAL_PROXY_TYPE);
      HeapObject::cast(this)->map()->is_access_check_needed());

// Global objects: either the ordinary global or the builtins object.
bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
      type == JS_BUILTINS_OBJECT_TYPE;
981 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
982 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
// Undetectability (e.g. document.all) is a map bit.
bool Object::IsUndetectableObject() const {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();

// Access checks apply to maps flagged as such; a global proxy additionally
// needs a check only once it has been detached from its global object.
bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  return HeapObject::cast(this)->map()->is_access_check_needed();

// A Struct is any of the instance types enumerated in STRUCT_LIST.
bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;

// Generates one Is##Name() predicate per struct type.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
  bool Object::Is##Name() const { \
    return Object::IsHeapObject() \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
// Oddball identity checks: each oddball singleton carries a distinct kind,
// so these compare the kind rather than object identity.
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;

bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;

bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;

bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;

bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;

bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;

bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;

bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
// Numeric value of a Number object: Smis carry the value in the tagged
// word, HeapNumbers box a double.
double Object::Number() {
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();

// Smis can never be NaN, so only HeapNumbers are checked.
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());

// -0 exists only as a HeapNumber (Smi zero is +0).
bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
      i::IsMinusZero(HeapNumber::cast(this)->value());

// Converts to a Smi if the value is exactly representable as one; otherwise
// returns an empty handle.
MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    // Round-trip check rejects fractional and out-of-range doubles.
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
  return Handle<Smi>();
1094 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1095 Handle<Object> object) {
1097 isolate, object, handle(isolate->context()->native_context(), isolate));
1101 bool Object::HasSpecificClassOf(String* name) {
1102 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
// Named property lookup via a fresh LookupIterator over the whole chain.
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);

// Indexed property lookup, with the receiver also acting as the holder.
MaybeHandle<Object> Object::GetElement(Isolate* isolate,
                                       Handle<Object> object,
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This DCHECK is here to catch
  // leftover incorrect uses.
  DCHECK(AllowHeapAllocation::IsAllowed());
  return Object::GetElementWithReceiver(isolate, object, object, index);

// Dispatches to GetElement when |name| parses as an array index, otherwise
// to named GetProperty.
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
  return GetProperty(object, name);

// Convenience overload taking a C string; the name must not look like an
// array index (checked in debug mode only).
MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
                                        Handle<Object> object,
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  DCHECK(!str.is_null());
  uint32_t index;  // Assert that the name is not an array index.
  DCHECK(!str->AsArrayIndex(&index));
  return GetProperty(object, str);
// Proxies have no indexed traps in this implementation: element operations
// are routed through the named-property handler using the stringified index.
MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));

MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   Handle<Object> value,
                                                   StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);

Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
1173 #define FIELD_ADDR(p, offset) \
1174 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1176 #define FIELD_ADDR_CONST(p, offset) \
1177 (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
1179 #define READ_FIELD(p, offset) \
1180 (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
1182 #define ACQUIRE_READ_FIELD(p, offset) \
1183 reinterpret_cast<Object*>(base::Acquire_Load( \
1184 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1186 #define NOBARRIER_READ_FIELD(p, offset) \
1187 reinterpret_cast<Object*>(base::NoBarrier_Load( \
1188 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1190 #define WRITE_FIELD(p, offset, value) \
1191 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1193 #define RELEASE_WRITE_FIELD(p, offset, value) \
1194 base::Release_Store( \
1195 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1196 reinterpret_cast<base::AtomicWord>(value));
1198 #define NOBARRIER_WRITE_FIELD(p, offset, value) \
1199 base::NoBarrier_Store( \
1200 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1201 reinterpret_cast<base::AtomicWord>(value));
// GC write barrier: informs the incremental marker of the store and, when the
// stored value lives in new space, records the slot in the store buffer so
// the scavenger can find old-to-new pointers.
#define WRITE_BARRIER(heap, object, offset, value)                 \
  heap->incremental_marking()->RecordWrite(                        \
      object, HeapObject::RawField(object, offset), value);        \
  if (heap->InNewSpace(value)) {                                   \
    heap->RecordWrite(object->address(), offset);                  \
  }

// Same as WRITE_BARRIER, but only when 'mode' is UPDATE_WRITE_BARRIER; used
// by accessors that let callers skip the barrier (SKIP_WRITE_BARRIER).
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
#ifndef V8_TARGET_ARCH_MIPS
// Direct double load; safe on targets without alignment restrictions.
#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using load-double (mips ldc1) on (possibly)
// non-64-bit aligned HeapNumber::value.  The double is assembled from two
// 32-bit loads through a union.
static inline double read_double_field(const void* p, int offset) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = (*reinterpret_cast<const uint32_t*>(
      FIELD_ADDR_CONST(p, offset)));
  c.u[1] = (*reinterpret_cast<const uint32_t*>(
      FIELD_ADDR_CONST(p, offset + 4)));
  return c.d;
}
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS
#ifndef V8_TARGET_ARCH_MIPS
// Direct double store; safe on targets without alignment restrictions.
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using store-double (mips sdc1) on (possibly)
// non-64-bit aligned HeapNumber::value.  The double is split into two
// 32-bit stores through a union.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS
// Typed raw-field accessors for the remaining primitive widths.  All of them
// compute the untagged address with FIELD_ADDR(_CONST) and reinterpret it at
// the requested width; none of them emit a GC write barrier (the stored
// values are not heap pointers).

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

// Atomic byte read/write without ordering guarantees.
#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1311 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1312 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1316 int Smi::value() const {
1317 return Internals::SmiValue(this);
1321 Smi* Smi::FromInt(int value) {
1322 DCHECK(Smi::IsValid(value));
1323 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1327 Smi* Smi::FromIntptr(intptr_t value) {
1328 DCHECK(Smi::IsValid(value));
1329 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1330 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1334 bool Smi::IsValid(intptr_t value) {
1335 bool result = Internals::IsValidSmi(value);
1336 DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
1341 MapWord MapWord::FromMap(const Map* map) {
1342 return MapWord(reinterpret_cast<uintptr_t>(map));
1346 Map* MapWord::ToMap() {
1347 return reinterpret_cast<Map*>(value_);
1351 bool MapWord::IsForwardingAddress() {
1352 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1356 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1357 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1358 return MapWord(reinterpret_cast<uintptr_t>(raw));
1362 HeapObject* MapWord::ToForwardingAddress() {
1363 DCHECK(IsForwardingAddress());
1364 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1369 void HeapObject::VerifyObjectField(int offset) {
1370 VerifyPointer(READ_FIELD(this, offset));
1373 void HeapObject::VerifySmiField(int offset) {
1374 CHECK(READ_FIELD(this, offset)->IsSmi());
1379 Heap* HeapObject::GetHeap() const {
1381 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1382 SLOW_DCHECK(heap != NULL);
1387 Isolate* HeapObject::GetIsolate() const {
1388 return GetHeap()->isolate();
1392 Map* HeapObject::map() const {
1394 // Clear mark potentially added by PathTracer.
1395 uintptr_t raw_value =
1396 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1397 return MapWord::FromRawValue(raw_value).ToMap();
1399 return map_word().ToMap();
1404 void HeapObject::set_map(Map* value) {
1405 set_map_word(MapWord::FromMap(value));
1406 if (value != NULL) {
1407 // TODO(1600) We are passing NULL as a slot because maps can never be on
1408 // evacuation candidate.
1409 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1414 Map* HeapObject::synchronized_map() {
1415 return synchronized_map_word().ToMap();
1419 void HeapObject::synchronized_set_map(Map* value) {
1420 synchronized_set_map_word(MapWord::FromMap(value));
1421 if (value != NULL) {
1422 // TODO(1600) We are passing NULL as a slot because maps can never be on
1423 // evacuation candidate.
1424 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1429 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1430 synchronized_set_map_word(MapWord::FromMap(value));
1434 // Unsafe accessor omitting write barrier.
1435 void HeapObject::set_map_no_write_barrier(Map* value) {
1436 set_map_word(MapWord::FromMap(value));
1440 MapWord HeapObject::map_word() const {
1442 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1446 void HeapObject::set_map_word(MapWord map_word) {
1447 NOBARRIER_WRITE_FIELD(
1448 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1452 MapWord HeapObject::synchronized_map_word() const {
1454 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1458 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1459 RELEASE_WRITE_FIELD(
1460 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1464 HeapObject* HeapObject::FromAddress(Address address) {
1465 DCHECK_TAG_ALIGNED(address);
1466 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1470 Address HeapObject::address() {
1471 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1475 int HeapObject::Size() {
1476 return SizeFromMap(map());
1480 bool HeapObject::MayContainRawValues() {
1481 InstanceType type = map()->instance_type();
1482 if (type <= LAST_NAME_TYPE) {
1483 if (type == SYMBOL_TYPE) {
1486 DCHECK(type < FIRST_NONSTRING_TYPE);
1487 // There are four string representations: sequential strings, external
1488 // strings, cons strings, and sliced strings.
1489 // Only the former two contain raw values and no heap pointers (besides the
1491 return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
1493 // The ConstantPoolArray contains heap pointers, but also raw values.
1494 if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
1495 return (type <= LAST_DATA_TYPE);
1499 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1500 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1501 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1505 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1506 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1510 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1511 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1515 double HeapNumber::value() const {
1516 return READ_DOUBLE_FIELD(this, kValueOffset);
1520 void HeapNumber::set_value(double value) {
1521 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1525 int HeapNumber::get_exponent() {
1526 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1527 kExponentShift) - kExponentBias;
1531 int HeapNumber::get_sign() {
1532 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1536 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1539 Object** FixedArray::GetFirstElementAddress() {
1540 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1544 bool FixedArray::ContainsOnlySmisOrHoles() {
1545 Object* the_hole = GetHeap()->the_hole_value();
1546 Object** current = GetFirstElementAddress();
1547 for (int i = 0; i < length(); ++i) {
1548 Object* candidate = *current++;
1549 if (!candidate->IsSmi() && candidate != the_hole) return false;
1555 FixedArrayBase* JSObject::elements() const {
1556 Object* array = READ_FIELD(this, kElementsOffset);
1557 return static_cast<FixedArrayBase*>(array);
1561 void JSObject::ValidateElements(Handle<JSObject> object) {
1562 #ifdef ENABLE_SLOW_DCHECKS
1563 if (FLAG_enable_slow_asserts) {
1564 ElementsAccessor* accessor = object->GetElementsAccessor();
1565 accessor->Validate(object);
1571 void AllocationSite::Initialize() {
1572 set_transition_info(Smi::FromInt(0));
1573 SetElementsKind(GetInitialFastElementsKind());
1574 set_nested_site(Smi::FromInt(0));
1575 set_pretenure_data(Smi::FromInt(0));
1576 set_pretenure_create_count(Smi::FromInt(0));
1577 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1578 SKIP_WRITE_BARRIER);
1582 void AllocationSite::MarkZombie() {
1583 DCHECK(!IsZombie());
1585 set_pretenure_decision(kZombie);
1589 // Heuristic: We only need to create allocation site info if the boilerplate
1590 // elements kind is the initial elements kind.
1591 AllocationSiteMode AllocationSite::GetMode(
1592 ElementsKind boilerplate_elements_kind) {
1593 if (FLAG_pretenuring_call_new ||
1594 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1595 return TRACK_ALLOCATION_SITE;
1598 return DONT_TRACK_ALLOCATION_SITE;
1602 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1604 if (FLAG_pretenuring_call_new ||
1605 (IsFastSmiElementsKind(from) &&
1606 IsMoreGeneralElementsKindTransition(from, to))) {
1607 return TRACK_ALLOCATION_SITE;
1610 return DONT_TRACK_ALLOCATION_SITE;
1614 inline bool AllocationSite::CanTrack(InstanceType type) {
1615 if (FLAG_allocation_site_pretenuring) {
1616 return type == JS_ARRAY_TYPE ||
1617 type == JS_OBJECT_TYPE ||
1618 type < FIRST_NONSTRING_TYPE;
1620 return type == JS_ARRAY_TYPE;
1624 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1628 return DependentCode::kAllocationSiteTenuringChangedGroup;
1631 return DependentCode::kAllocationSiteTransitionChangedGroup;
1635 return DependentCode::kAllocationSiteTransitionChangedGroup;
1639 inline void AllocationSite::set_memento_found_count(int count) {
1640 int value = pretenure_data()->value();
1641 // Verify that we can count more mementos than we can possibly find in one
1642 // new space collection.
1643 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1644 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1645 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1646 DCHECK(count < MementoFoundCountBits::kMax);
1648 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1649 SKIP_WRITE_BARRIER);
1652 inline bool AllocationSite::IncrementMementoFoundCount() {
1653 if (IsZombie()) return false;
1655 int value = memento_found_count();
1656 set_memento_found_count(value + 1);
1657 return memento_found_count() == kPretenureMinimumCreated;
1661 inline void AllocationSite::IncrementMementoCreateCount() {
1662 DCHECK(FLAG_allocation_site_pretenuring);
1663 int value = memento_create_count();
1664 set_memento_create_count(value + 1);
1668 inline bool AllocationSite::MakePretenureDecision(
1669 PretenureDecision current_decision,
1671 bool maximum_size_scavenge) {
1672 // Here we just allow state transitions from undecided or maybe tenure
1673 // to don't tenure, maybe tenure, or tenure.
1674 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1675 if (ratio >= kPretenureRatio) {
1676 // We just transition into tenure state when the semi-space was at
1677 // maximum capacity.
1678 if (maximum_size_scavenge) {
1679 set_deopt_dependent_code(true);
1680 set_pretenure_decision(kTenure);
1681 // Currently we just need to deopt when we make a state transition to
1685 set_pretenure_decision(kMaybeTenure);
1687 set_pretenure_decision(kDontTenure);
1694 inline bool AllocationSite::DigestPretenuringFeedback(
1695 bool maximum_size_scavenge) {
1697 int create_count = memento_create_count();
1698 int found_count = memento_found_count();
1699 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1701 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1702 static_cast<double>(found_count) / create_count : 0.0;
1703 PretenureDecision current_decision = pretenure_decision();
1705 if (minimum_mementos_created) {
1706 deopt = MakePretenureDecision(
1707 current_decision, ratio, maximum_size_scavenge);
1710 if (FLAG_trace_pretenuring_statistics) {
1712 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1713 static_cast<void*>(this), create_count, found_count, ratio,
1714 PretenureDecisionName(current_decision),
1715 PretenureDecisionName(pretenure_decision()));
1718 // Clear feedback calculation fields until the next gc.
1719 set_memento_found_count(0);
1720 set_memento_create_count(0);
1725 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1726 JSObject::ValidateElements(object);
1727 ElementsKind elements_kind = object->map()->elements_kind();
1728 if (!IsFastObjectElementsKind(elements_kind)) {
1729 if (IsFastHoleyElementsKind(elements_kind)) {
1730 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1732 TransitionElementsKind(object, FAST_ELEMENTS);
1738 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1741 EnsureElementsMode mode) {
1742 ElementsKind current_kind = object->map()->elements_kind();
1743 ElementsKind target_kind = current_kind;
1745 DisallowHeapAllocation no_allocation;
1746 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1747 bool is_holey = IsFastHoleyElementsKind(current_kind);
1748 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1749 Heap* heap = object->GetHeap();
1750 Object* the_hole = heap->the_hole_value();
1751 for (uint32_t i = 0; i < count; ++i) {
1752 Object* current = *objects++;
1753 if (current == the_hole) {
1755 target_kind = GetHoleyElementsKind(target_kind);
1756 } else if (!current->IsSmi()) {
1757 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1758 if (IsFastSmiElementsKind(target_kind)) {
1760 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1762 target_kind = FAST_DOUBLE_ELEMENTS;
1765 } else if (is_holey) {
1766 target_kind = FAST_HOLEY_ELEMENTS;
1769 target_kind = FAST_ELEMENTS;
1774 if (target_kind != current_kind) {
1775 TransitionElementsKind(object, target_kind);
1780 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1781 Handle<FixedArrayBase> elements,
1783 EnsureElementsMode mode) {
1784 Heap* heap = object->GetHeap();
1785 if (elements->map() != heap->fixed_double_array_map()) {
1786 DCHECK(elements->map() == heap->fixed_array_map() ||
1787 elements->map() == heap->fixed_cow_array_map());
1788 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1789 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1792 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1793 EnsureCanContainElements(object, objects, length, mode);
1797 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1798 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1799 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1800 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1801 Handle<FixedDoubleArray> double_array =
1802 Handle<FixedDoubleArray>::cast(elements);
1803 for (uint32_t i = 0; i < length; ++i) {
1804 if (double_array->is_the_hole(i)) {
1805 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1809 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1814 void JSObject::SetMapAndElements(Handle<JSObject> object,
1815 Handle<Map> new_map,
1816 Handle<FixedArrayBase> value) {
1817 JSObject::MigrateToMap(object, new_map);
1818 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1819 (*value == object->GetHeap()->empty_fixed_array())) ==
1820 (value->map() == object->GetHeap()->fixed_array_map() ||
1821 value->map() == object->GetHeap()->fixed_cow_array_map()));
1822 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1823 (object->map()->has_fast_double_elements() ==
1824 value->IsFixedDoubleArray()));
1825 object->set_elements(*value);
1829 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1830 WRITE_FIELD(this, kElementsOffset, value);
1831 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1835 void JSObject::initialize_properties() {
1836 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1837 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1841 void JSObject::initialize_elements() {
1842 FixedArrayBase* elements = map()->GetInitialElements();
1843 WRITE_FIELD(this, kElementsOffset, elements);
1847 Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
1848 DisallowHeapAllocation no_gc;
1849 if (!map->HasTransitionArray()) return Handle<String>::null();
1850 TransitionArray* transitions = map->transitions();
1851 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1852 int transition = TransitionArray::kSimpleTransitionIndex;
1853 PropertyDetails details = transitions->GetTargetDetails(transition);
1854 Name* name = transitions->GetKey(transition);
1855 if (details.type() != FIELD) return Handle<String>::null();
1856 if (details.attributes() != NONE) return Handle<String>::null();
1857 if (!name->IsString()) return Handle<String>::null();
1858 return Handle<String>(String::cast(name));
1862 Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
1863 DCHECK(!ExpectedTransitionKey(map).is_null());
1864 return Handle<Map>(map->transitions()->GetTarget(
1865 TransitionArray::kSimpleTransitionIndex));
1869 Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1870 DisallowHeapAllocation no_allocation;
1871 if (!map->HasTransitionArray()) return Handle<Map>::null();
1872 TransitionArray* transitions = map->transitions();
1873 int transition = transitions->Search(*key);
1874 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1875 PropertyDetails target_details = transitions->GetTargetDetails(transition);
1876 if (target_details.type() != FIELD) return Handle<Map>::null();
1877 if (target_details.attributes() != NONE) return Handle<Map>::null();
1878 return Handle<Map>(transitions->GetTarget(transition));
1882 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1883 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1886 byte Oddball::kind() const {
1887 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1891 void Oddball::set_kind(byte value) {
1892 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1896 Object* Cell::value() const {
1897 return READ_FIELD(this, kValueOffset);
1901 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
1902 // The write barrier is not used for global property cells.
1903 DCHECK(!val->IsPropertyCell() && !val->IsCell());
1904 WRITE_FIELD(this, kValueOffset, val);
1907 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1909 Object* PropertyCell::type_raw() const {
1910 return READ_FIELD(this, kTypeOffset);
1914 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
1915 WRITE_FIELD(this, kTypeOffset, val);
1919 int JSObject::GetHeaderSize() {
1920 InstanceType type = map()->instance_type();
1921 // Check for the most common kind of JavaScript object before
1922 // falling into the generic switch. This speeds up the internal
1923 // field operations considerably on average.
1924 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1926 case JS_GENERATOR_OBJECT_TYPE:
1927 return JSGeneratorObject::kSize;
1928 case JS_MODULE_TYPE:
1929 return JSModule::kSize;
1930 case JS_GLOBAL_PROXY_TYPE:
1931 return JSGlobalProxy::kSize;
1932 case JS_GLOBAL_OBJECT_TYPE:
1933 return JSGlobalObject::kSize;
1934 case JS_BUILTINS_OBJECT_TYPE:
1935 return JSBuiltinsObject::kSize;
1936 case JS_FUNCTION_TYPE:
1937 return JSFunction::kSize;
1939 return JSValue::kSize;
1941 return JSDate::kSize;
1943 return JSArray::kSize;
1944 case JS_ARRAY_BUFFER_TYPE:
1945 return JSArrayBuffer::kSize;
1946 case JS_TYPED_ARRAY_TYPE:
1947 return JSTypedArray::kSize;
1948 case JS_DATA_VIEW_TYPE:
1949 return JSDataView::kSize;
1951 return JSSet::kSize;
1953 return JSMap::kSize;
1954 case JS_SET_ITERATOR_TYPE:
1955 return JSSetIterator::kSize;
1956 case JS_MAP_ITERATOR_TYPE:
1957 return JSMapIterator::kSize;
1958 case JS_WEAK_MAP_TYPE:
1959 return JSWeakMap::kSize;
1960 case JS_WEAK_SET_TYPE:
1961 return JSWeakSet::kSize;
1962 case JS_REGEXP_TYPE:
1963 return JSRegExp::kSize;
1964 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1965 return JSObject::kHeaderSize;
1966 case JS_MESSAGE_OBJECT_TYPE:
1967 return JSMessageObject::kSize;
1969 // TODO(jkummerow): Re-enable this. Blink currently hits this
1970 // from its CustomElementConstructorBuilder.
1977 int JSObject::GetInternalFieldCount() {
1978 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
1979 // Make sure to adjust for the number of in-object properties. These
1980 // properties do contribute to the size, but are not internal fields.
1981 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1982 map()->inobject_properties();
1986 int JSObject::GetInternalFieldOffset(int index) {
1987 DCHECK(index < GetInternalFieldCount() && index >= 0);
1988 return GetHeaderSize() + (kPointerSize * index);
1992 Object* JSObject::GetInternalField(int index) {
1993 DCHECK(index < GetInternalFieldCount() && index >= 0);
1994 // Internal objects do follow immediately after the header, whereas in-object
1995 // properties are at the end of the object. Therefore there is no need
1996 // to adjust the index here.
1997 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2001 void JSObject::SetInternalField(int index, Object* value) {
2002 DCHECK(index < GetInternalFieldCount() && index >= 0);
2003 // Internal objects do follow immediately after the header, whereas in-object
2004 // properties are at the end of the object. Therefore there is no need
2005 // to adjust the index here.
2006 int offset = GetHeaderSize() + (kPointerSize * index);
2007 WRITE_FIELD(this, offset, value);
2008 WRITE_BARRIER(GetHeap(), this, offset, value);
2012 void JSObject::SetInternalField(int index, Smi* value) {
2013 DCHECK(index < GetInternalFieldCount() && index >= 0);
2014 // Internal objects do follow immediately after the header, whereas in-object
2015 // properties are at the end of the object. Therefore there is no need
2016 // to adjust the index here.
2017 int offset = GetHeaderSize() + (kPointerSize * index);
2018 WRITE_FIELD(this, offset, value);
2022 // Access fast-case object properties at index. The use of these routines
2023 // is needed to correctly distinguish between properties stored in-object and
2024 // properties stored in the properties array.
2025 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2026 if (index.is_inobject()) {
2027 return READ_FIELD(this, index.offset());
2029 return properties()->get(index.outobject_array_index());
2034 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2035 if (index.is_inobject()) {
2036 int offset = index.offset();
2037 WRITE_FIELD(this, offset, value);
2038 WRITE_BARRIER(GetHeap(), this, offset, value);
2040 properties()->set(index.outobject_array_index(), value);
2045 int JSObject::GetInObjectPropertyOffset(int index) {
2046 return map()->GetInObjectPropertyOffset(index);
2050 Object* JSObject::InObjectPropertyAt(int index) {
2051 int offset = GetInObjectPropertyOffset(index);
2052 return READ_FIELD(this, offset);
2056 Object* JSObject::InObjectPropertyAtPut(int index,
2058 WriteBarrierMode mode) {
2059 // Adjust for the number of properties stored in the object.
2060 int offset = GetInObjectPropertyOffset(index);
2061 WRITE_FIELD(this, offset, value);
2062 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2068 void JSObject::InitializeBody(Map* map,
2069 Object* pre_allocated_value,
2070 Object* filler_value) {
2071 DCHECK(!filler_value->IsHeapObject() ||
2072 !GetHeap()->InNewSpace(filler_value));
2073 DCHECK(!pre_allocated_value->IsHeapObject() ||
2074 !GetHeap()->InNewSpace(pre_allocated_value));
2075 int size = map->instance_size();
2076 int offset = kHeaderSize;
2077 if (filler_value != pre_allocated_value) {
2078 int pre_allocated = map->pre_allocated_property_fields();
2079 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2080 for (int i = 0; i < pre_allocated; i++) {
2081 WRITE_FIELD(this, offset, pre_allocated_value);
2082 offset += kPointerSize;
2085 while (offset < size) {
2086 WRITE_FIELD(this, offset, filler_value);
2087 offset += kPointerSize;
2092 bool JSObject::HasFastProperties() {
2093 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2094 return !properties()->IsDictionary();
2098 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2099 if (unused_property_fields() != 0) return false;
2100 if (is_prototype_map()) return false;
2101 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2102 int limit = Max(minimum, inobject_properties());
2103 int external = NumberOfFields() - inobject_properties();
2104 return external > limit;
2108 void Struct::InitializeBody(int object_size) {
2109 Object* value = GetHeap()->undefined_value();
2110 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2111 WRITE_FIELD(this, offset, value);
2116 bool Object::ToArrayIndex(uint32_t* index) {
2118 int value = Smi::cast(this)->value();
2119 if (value < 0) return false;
2123 if (IsHeapNumber()) {
2124 double value = HeapNumber::cast(this)->value();
2125 uint32_t uint_value = static_cast<uint32_t>(value);
2126 if (value == static_cast<double>(uint_value)) {
2127 *index = uint_value;
2135 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2136 if (!this->IsJSValue()) return false;
2138 JSValue* js_value = JSValue::cast(this);
2139 if (!js_value->value()->IsString()) return false;
2141 String* str = String::cast(js_value->value());
2142 if (index >= static_cast<uint32_t>(str->length())) return false;
2148 void Object::VerifyApiCallResultType() {
2149 #if ENABLE_EXTRA_CHECKS
2159 FATAL("API call returned invalid object");
2161 #endif // ENABLE_EXTRA_CHECKS
2165 Object* FixedArray::get(int index) {
2166 SLOW_DCHECK(index >= 0 && index < this->length());
2167 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2171 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2172 return handle(array->get(index), array->GetIsolate());
2176 bool FixedArray::is_the_hole(int index) {
2177 return get(index) == GetHeap()->the_hole_value();
2181 void FixedArray::set(int index, Smi* value) {
2182 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2183 DCHECK(index >= 0 && index < this->length());
2184 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2185 int offset = kHeaderSize + index * kPointerSize;
2186 WRITE_FIELD(this, offset, value);
2190 void FixedArray::set(int index, Object* value) {
2191 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2192 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2193 DCHECK(index >= 0 && index < this->length());
2194 int offset = kHeaderSize + index * kPointerSize;
2195 WRITE_FIELD(this, offset, value);
2196 WRITE_BARRIER(GetHeap(), this, offset, value);
2200 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2201 return bit_cast<uint64_t, double>(value) == kHoleNanInt64;
2205 inline double FixedDoubleArray::hole_nan_as_double() {
2206 return bit_cast<double, uint64_t>(kHoleNanInt64);
2210 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2211 DCHECK(bit_cast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
2212 DCHECK((bit_cast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
2213 return base::OS::nan_value();
2217 double FixedDoubleArray::get_scalar(int index) {
2218 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2219 map() != GetHeap()->fixed_array_map());
2220 DCHECK(index >= 0 && index < this->length());
2221 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2222 DCHECK(!is_the_hole_nan(result));
2226 int64_t FixedDoubleArray::get_representation(int index) {
2227 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2228 map() != GetHeap()->fixed_array_map());
2229 DCHECK(index >= 0 && index < this->length());
2230 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2234 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2236 if (array->is_the_hole(index)) {
2237 return array->GetIsolate()->factory()->the_hole_value();
2239 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2244 void FixedDoubleArray::set(int index, double value) {
2245 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2246 map() != GetHeap()->fixed_array_map());
2247 int offset = kHeaderSize + index * kDoubleSize;
2248 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2249 WRITE_DOUBLE_FIELD(this, offset, value);
2253 void FixedDoubleArray::set_the_hole(int index) {
2254 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2255 map() != GetHeap()->fixed_array_map());
2256 int offset = kHeaderSize + index * kDoubleSize;
2257 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2261 bool FixedDoubleArray::is_the_hole(int index) {
2262 int offset = kHeaderSize + index * kDoubleSize;
2263 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2267 double* FixedDoubleArray::data_start() {
2268 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2272 void FixedDoubleArray::FillWithHoles(int from, int to) {
2273 for (int i = from; i < to; i++) {
// Bumps the recorded count for |type|.
void ConstantPoolArray::NumberOfEntries::increment(Type type) {
  DCHECK(type < NUMBER_OF_TYPES);
  element_counts_[type]++;

// Compares per-type counts element-wise against |other|.
int ConstantPoolArray::NumberOfEntries::equals(
    const ConstantPoolArray::NumberOfEntries& other) const {
  for (int i = 0; i < NUMBER_OF_TYPES; i++) {
    if (element_counts_[i] != other.element_counts_[i]) return false;

// True when no entries of any type are recorded.
bool ConstantPoolArray::NumberOfEntries::is_empty() const {
  return total_count() == 0;

// Count recorded for a single entry |type|.
int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
  DCHECK(type < NUMBER_OF_TYPES);
  return element_counts_[type];

// Base index of |type|: the counts of all preceding types summed.
int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
  DCHECK(type < NUMBER_OF_TYPES);
  for (int i = 0; i < type; i++) {
    base += element_counts_[i];

// Sum of the counts over all entry types.
int ConstantPoolArray::NumberOfEntries::total_count() const {
  for (int i = 0; i < NUMBER_OF_TYPES; i++) {
    count += element_counts_[i];

// Checks that every per-type count lies within [min, max].
int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
  for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
    if (element_counts_[i] < min || element_counts_[i] > max) {

// Returns the current index and advances the iterator.
int ConstantPoolArray::Iterator::next_index() {
  DCHECK(!is_finished());
  int ret = next_index_++;

// Exhausted once past the last index of this type in the final section.
bool ConstantPoolArray::Iterator::is_finished() {
  return next_index_ > array_->last_index(type_, final_section_);

// Moves the cursor from the small into the extended section once the
// current type's range in the current section is exhausted.
void ConstantPoolArray::Iterator::update_section() {
  if (next_index_ > array_->last_index(type_, current_section_) &&
      current_section_ != final_section_) {
    DCHECK(final_section_ == EXTENDED_SECTION);
    current_section_ = EXTENDED_SECTION;
    next_index_ = array_->first_index(type_, EXTENDED_SECTION);

// Whether this pool carries an extended section (flag in layout word 1).
bool ConstantPoolArray::is_extended_layout() {
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  return IsExtendedField::decode(small_layout_1);

// Last layout section present in this pool.
ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
  return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;

// Index of the first extended-section entry: equals the small section's
// total entry count stored in layout word 2.
int ConstantPoolArray::first_extended_section_index() {
  DCHECK(is_extended_layout());
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return TotalCountField::decode(small_layout_2);

// Byte offset of the extended section header, 64-bit aligned.
int ConstantPoolArray::get_extended_section_header_offset() {
  return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);

// Reads the weak-object state bits from layout word 2.
ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return WeakObjectStateField::decode(small_layout_2);
2386 void ConstantPoolArray::set_weak_object_state(
2387 ConstantPoolArray::WeakObjectState state) {
2388 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2389 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2390 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
// First entry index of |type| in |section|: the section's base index plus
// the counts of all preceding types in that section.
int ConstantPoolArray::first_index(Type type, LayoutSection section) {
  if (section == EXTENDED_SECTION) {
    DCHECK(is_extended_layout());
    index += first_extended_section_index();

  for (Type type_iter = FIRST_TYPE; type_iter < type;
       type_iter = next_type(type_iter)) {
    index += number_of_entries(type_iter, section);
2410 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2411 return first_index(type, section) + number_of_entries(type, section) - 1;
// Number of |type| entries in |section|. Small-section counts are packed
// into the two layout words; extended-section counts are stored as plain
// ints in the extended section header.
int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
  if (section == SMALL_SECTION) {
    uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
    uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
        return Int64CountField::decode(small_layout_1);
        return CodePtrCountField::decode(small_layout_1);
        return HeapPtrCountField::decode(small_layout_1);
        return Int32CountField::decode(small_layout_2);
  DCHECK(section == EXTENDED_SECTION && is_extended_layout());
  int offset = get_extended_section_header_offset();
      offset += kExtendedInt64CountOffset;
      offset += kExtendedCodePtrCountOffset;
      offset += kExtendedHeapPtrCountOffset;
      offset += kExtendedInt32CountOffset;
  return READ_INT_FIELD(this, offset);
2456 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2457 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2458 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2459 (is_extended_layout() &&
2460 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2461 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
// Determines which entry type |index| belongs to: pick the section, then
// walk the types in layout order until the index fits in a type's range.
ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
  LayoutSection section;
  if (is_extended_layout() && index >= first_extended_section_index()) {
    section = EXTENDED_SECTION;
    section = SMALL_SECTION;

  Type type = FIRST_TYPE;
  while (index > last_index(type, section)) {
    type = next_type(type);
  DCHECK(type <= LAST_TYPE);
// Typed readers: each checks the map and the entry's type before reading
// the raw field at the entry's offset.
int64_t ConstantPoolArray::get_int64_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));

// Reinterprets an INT64 entry's 64 bits as a double.
double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));

Address ConstantPoolArray::get_code_ptr_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == CODE_PTR);
  return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));

Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == HEAP_PTR);
  return READ_FIELD(this, OffsetOfElementAt(index));

int32_t ConstantPoolArray::get_int32_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT32);
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));

// Typed writers by index; only the HEAP_PTR variant needs a write barrier.
void ConstantPoolArray::set(int index, int64_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);

void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);

void ConstantPoolArray::set(int index, Address value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == CODE_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));

void ConstantPoolArray::set(int index, Object* value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(!GetHeap()->InNewSpace(value));
  DCHECK(get_type(index) == HEAP_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);

void ConstantPoolArray::set(int index, int32_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT32);
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);

// Typed writers by raw byte offset; offset_is_type() validates that the
// offset lands in the right type's range.
void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT32));
  WRITE_INT32_FIELD(this, offset, value);

void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT64));
  WRITE_INT64_FIELD(this, offset, value);

void ConstantPoolArray::set_at_offset(int offset, double value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT64));
  WRITE_DOUBLE_FIELD(this, offset, value);

void ConstantPoolArray::set_at_offset(int offset, Address value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, CODE_PTR));
  WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
  WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));

void ConstantPoolArray::set_at_offset(int offset, Object* value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(!GetHeap()->InNewSpace(value));
  DCHECK(offset_is_type(offset, HEAP_PTR));
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
// Packs the small-section counts into the two layout words (extended flag
// cleared, weak-object state reset) and zeroes any header padding.
void ConstantPoolArray::Init(const NumberOfEntries& small) {
  uint32_t small_layout_1 =
      Int64CountField::encode(small.count_of(INT64)) |
      CodePtrCountField::encode(small.count_of(CODE_PTR)) |
      HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
      IsExtendedField::encode(false);
  uint32_t small_layout_2 =
      Int32CountField::encode(small.count_of(INT32)) |
      TotalCountField::encode(small.total_count()) |
      WeakObjectStateField::encode(NO_WEAK_OBJECTS);
  WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
  WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
  if (kHeaderSize != kFirstEntryOffset) {
    DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
    WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
2613 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2614 const NumberOfEntries& extended) {
2615 // Initialize small layout fields first.
2618 // Set is_extended_layout field.
2619 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2620 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2621 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2623 // Initialize the extended layout fields.
2624 int extended_header_offset = get_extended_section_header_offset();
2625 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2626 extended.count_of(INT64));
2627 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2628 extended.count_of(CODE_PTR));
2629 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2630 extended.count_of(HEAP_PTR));
2631 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2632 extended.count_of(INT32));
2636 int ConstantPoolArray::size() {
2637 NumberOfEntries small(this, SMALL_SECTION);
2638 if (!is_extended_layout()) {
2639 return SizeFor(small);
2641 NumberOfEntries extended(this, EXTENDED_SECTION);
2642 return SizeForExtended(small, extended);
2647 int ConstantPoolArray::length() {
2648 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2649 int length = TotalCountField::decode(small_layout_2);
2650 if (is_extended_layout()) {
2651 length += number_of_entries(INT64, EXTENDED_SECTION) +
2652 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2653 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2654 number_of_entries(INT32, EXTENDED_SECTION);
// Chooses a write-barrier mode for the caller. The incremental-marking
// check comes first: while marking, even new-space writes must be
// recorded, so SKIP is only safe for new-space objects outside marking.
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;

// Stores an element with a caller-selected write-barrier mode.
void FixedArray::set(int index,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);

// Static setter that records only the store-buffer (new-space) part of the
// write barrier, skipping the incremental-marking barrier.
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);

// Static setter with no barrier at all; the DCHECK enforces that the
// stored value is not in new space, so no barrier is needed.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);

// Stores the undefined oddball; barrier-free since oddballs are old-space.
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());

// Stores the null oddball; barrier-free for the same reason.
void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());

// Stores the hole oddball; barrier-free for the same reason.
void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());

// Fills the index range [from, to) with the hole oddball.
void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {

// Raw pointer to the first element slot.
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);

// A descriptor array is empty when it is too short to hold even the
// fixed prefix (only the canonical empty array is in that state).
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;

// Stores the descriptor count as a Smi in the fixed prefix.
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
// Binary search by name hash over the sorted-key order, followed by a
// linear scan over the run of equal hashes to resolve collisions.
template<SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
  uint32_t hash = name->Hash();
  DCHECK(low <= high);

  // Phase 1: narrow [low, high] to the first entry whose hash >= target.
  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {

  // Phase 2: walk the equal-hash run looking for an identical name.
  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    if (entry->Hash() != hash) break;
    if (entry->Equals(name)) {
      // In VALID_ENTRIES mode a match beyond valid_entries is a miss.
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
      return T::kNotFound;

  return T::kNotFound;
2796 // Perform a linear search in this fixed array. len is the number of entry
2797 // indices that are valid.
2798 template<SearchMode search_mode, typename T>
2799 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2800 uint32_t hash = name->Hash();
2801 if (search_mode == ALL_ENTRIES) {
2802 for (int number = 0; number < len; number++) {
2803 int sorted_index = array->GetSortedKeyIndex(number);
2804 Name* entry = array->GetKey(sorted_index);
2805 uint32_t current_hash = entry->Hash();
2806 if (current_hash > hash) break;
2807 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2810 DCHECK(len >= valid_entries);
2811 for (int number = 0; number < valid_entries; number++) {
2812 Name* entry = array->GetKey(number);
2813 uint32_t current_hash = entry->Hash();
2814 if (current_hash == hash && entry->Equals(name)) return number;
2817 return T::kNotFound;
2821 template<SearchMode search_mode, typename T>
2822 int Search(T* array, Name* name, int valid_entries) {
2823 if (search_mode == VALID_ENTRIES) {
2824 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2826 SLOW_DCHECK(array->IsSortedNoDuplicates());
2829 int nof = array->number_of_entries();
2830 if (nof == 0) return T::kNotFound;
2832 // Fast case: do linear search for small arrays.
2833 const int kMaxElementsForLinearSearch = 8;
2834 if ((search_mode == ALL_ENTRIES &&
2835 nof <= kMaxElementsForLinearSearch) ||
2836 (search_mode == VALID_ENTRIES &&
2837 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2838 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2841 // Slow case: perform binary search.
2842 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
// Searches only the first |valid_descriptors| descriptors.
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);

// Cached variant: consults the per-isolate descriptor lookup cache before
// falling back to Search(), then populates the cache with the result.
int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);

// Details of the most recently added descriptor.
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());

// Looks up a property descriptor on this map and reports it via |result|.
void Map::LookupDescriptor(JSObject* holder,
                           LookupResult* result) {
  DescriptorArray* descriptors = this->instance_descriptors();
  int number = descriptors->SearchWithCache(name, this);
  if (number == DescriptorArray::kNotFound) return result->NotFound();
  result->DescriptorResult(holder, descriptors->GetDetails(number), number);

// Looks up a map transition by name and reports it via |result|.
void Map::LookupTransition(JSObject* holder,
                           LookupResult* result) {
  int transition_index = this->SearchTransition(name);
  if (transition_index == TransitionArray::kNotFound) return result->NotFound();
  result->TransitionResult(holder, this->GetTransition(transition_index));

// Canonical empty backing store matching this map's elements kind. All
// returned arrays are asserted to live outside new space.
FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
        GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
// Raw slot of the key of descriptor |descriptor_number|.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));

// First slot belonging to a descriptor (its key slot).
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);

// One past the last slot of the previous descriptor.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;

// Key (name) of descriptor |descriptor_number|.
Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));

// The sorted-order pointer is stored inside the details word.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();

// Key at position |descriptor_number| of the hash-sorted order.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));

// Rewrites the sorted-order pointer inside the details word.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());

// Replaces the representation recorded in the details word.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());

// Raw slot of the value of descriptor |descriptor_number|.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));

// Byte offset of the value of descriptor |descriptor_number|.
int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));

Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));

void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);

// Decodes the details Smi of descriptor |descriptor_number|.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));

PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();

// Field index for an in-object/backing-store FIELD descriptor.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).type() == FIELD);
  return GetDetails(descriptor_number).field_index();

// For FIELD descriptors the value slot holds the field's HeapType.
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).type() == FIELD);
  return HeapType::cast(GetValue(descriptor_number));

Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);

Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);

// CALLBACKS descriptors store the accessor struct behind a Foreign.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());

// Copies descriptor |descriptor_number| out into |desc| as handles.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));

// Writes key/value/details using the reduced (no incremental-marking)
// write barrier; the WhitenessWitness certifies this is safe.
void DescriptorArray::Set(int descriptor_number,
                          const WhitenessWitness&) {
  DCHECK(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());

// Writes key/value/details with the full write barrier.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());

// Appends |desc| and re-inserts it into the hash-sorted order by shifting
// larger-hash entries up one slot (insertion sort step).
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));

  SetSortedKey(insertion, descriptor_number);
3078 void DescriptorArray::SwapSortedKeys(int first, int second) {
3079 int first_key = GetSortedKeyIndex(first);
3080 SetSortedKey(first, GetSortedKeyIndex(second));
3081 SetSortedKey(second, first_key);
// RAII witness: suspends incremental-marking scopes while descriptors are
// written with the reduced barrier; requires the array to still be white.
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  DCHECK(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);

DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();

// Capacity for |at_least_space_for| elements: double it, round up to a
// power of two, and enforce the 32-entry minimum.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.

// Convenience overload using this table's isolate.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);


// Find entry for key otherwise return kNotFound. Open addressing: probe
// until the key matches or an undefined (never-used) slot is hit.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
  // EnsureCapacity will guarantee the hash table is never full.
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    // The hole marks a deleted entry; probing must continue past it.
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);

// The max-number-key slot doubles as a "slow elements required" flag via
// its low tag bit.
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);

// Largest numeric key seen, stripped of the slow-elements tag bits.
uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;

// Permanently forces slow elements for this dictionary.
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3155 // ------------------------------------
// Checked-cast definitions. Each CAST_ACCESSOR expands to type::cast()
// (const and non-const) with a SLOW_DCHECK on Is##type() — see the macro
// definition at the top of this file.
CAST_ACCESSOR(AccessorInfo)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalFloat32Array)
CAST_ACCESSOR(ExternalFloat64Array)
CAST_ACCESSOR(ExternalInt16Array)
CAST_ACCESSOR(ExternalInt32Array)
CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(ExternalUint16Array)
CAST_ACCESSOR(ExternalUint32Array)
CAST_ACCESSOR(ExternalUint8Array)
CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakHashTable)
// Checked cast for FixedTypedArray<Traits>; in debug builds verifies the
// instance type matches Traits::kInstanceType.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3255 template <class Traits>
3256 const FixedTypedArray<Traits>*
3257 FixedTypedArray<Traits>::cast(const Object* object) {
3258 SLOW_DCHECK(object->IsHeapObject() &&
3259 HeapObject::cast(object)->map()->instance_type() ==
3260 Traits::kInstanceType);
3261 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
// Generate cast accessors for every Struct subtype in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST


// Checked cast for hash tables (non-const overload).
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);

// Checked cast for hash tables (const overload).
template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);


// Smi-encoded length/size accessors, including synchronized and
// no-barrier variants used by concurrent readers.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
// Raw hash field (hash plus flag bits) of this name.
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);

// Stores the hash field; on 64-bit hosts also zeroes the adjacent padding
// word so the full pointer-sized slot is deterministic.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);

// Name equality: identical pointers are equal; internalized strings and
// symbols are unique, so distinct pointers fall into the fast-path branch
// (body elided in this excerpt) before the slow string comparison.
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
  return String::cast(this)->SlowEquals(String::cast(other));

// Handle-based variant of the above.
bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));


ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)


// String equality with the same internalized-string fast path.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
  return SlowEquals(other);

// Handle-based variant of the above.
bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
  return SlowEquals(one, two);
3354 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3355 if (!string->IsConsString()) return string;
3356 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3357 if (cons->IsFlat()) return handle(cons->first());
3358 return SlowFlatten(cons, pretenure);
// Reads the character at |index|, dispatching on the string's full
// representation tag (sequential/cons/external/sliced x one/two byte).
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);


// Writes |value| at |index|; only valid for sequential strings.
void String::Set(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  DCHECK(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3398 bool String::IsFlat() {
3399 if (!StringShape(this).IsCons()) return true;
3400 return ConsString::cast(this)->second()->length() == 0;
3404 String* String::GetUnderlying() {
3405 // Giving direct access to underlying string only makes sense if the
3406 // wrapping string is already flattened.
3407 DCHECK(this->IsFlat());
3408 DCHECK(StringShape(this).IsIndirect());
3409 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3410 const int kUnderlyingOffset = SlicedString::kParentOffset;
3411 return String::cast(READ_FIELD(this, kUnderlyingOffset));
// Walk a string's flat content, calling visitor->VisitOneByteString /
// VisitTwoByteString on the character data. Sliced strings are unwrapped
// (accumulating the slice offset) and the walk continues on the parent;
// hitting an unflattened cons string aborts the walk by returning it to the
// caller. Loop framing and the visitor-call tails are elided in this listing.
3415 template<class Visitor>
3416 ConsString* String::VisitFlat(Visitor* visitor,
3419 int slice_offset = offset;
3420 const int length = string->length();
3421 DCHECK(offset <= length);
3423 int32_t type = string->map()->instance_type();
3424 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3425 case kSeqStringTag | kOneByteStringTag:
3426 visitor->VisitOneByteString(
3427 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3431 case kSeqStringTag | kTwoByteStringTag:
3432 visitor->VisitTwoByteString(
3433 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3437 case kExternalStringTag | kOneByteStringTag:
3438 visitor->VisitOneByteString(
3439 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3443 case kExternalStringTag | kTwoByteStringTag:
3444 visitor->VisitTwoByteString(
3445 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3449 case kSlicedStringTag | kOneByteStringTag:
3450 case kSlicedStringTag | kTwoByteStringTag: {
3451 SlicedString* slicedString = SlicedString::cast(string);
3452 slice_offset += slicedString->offset();
3453 string = slicedString->parent();
// Unflattened cons: caller must iterate it (e.g. via ConsStringIteratorOp).
3457 case kConsStringTag | kOneByteStringTag:
3458 case kConsStringTag | kTwoByteStringTag:
3459 return ConsString::cast(string);
// Sequential string element access: raw byte/short reads and writes relative
// to the object header; addresses of the character payload for bulk access.
3469 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3470 DCHECK(index >= 0 && index < length());
3471 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
// One-byte store; DCHECK guards against writing a value outside Latin-1.
3475 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3476 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3477 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3478 static_cast<byte>(value));
3482 Address SeqOneByteString::GetCharsAddress() {
3483 return FIELD_ADDR(this, kHeaderSize);
3487 uint8_t* SeqOneByteString::GetChars() {
3488 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3492 Address SeqTwoByteString::GetCharsAddress() {
3493 return FIELD_ADDR(this, kHeaderSize);
3497 uc16* SeqTwoByteString::GetChars() {
3498 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3502 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3503 DCHECK(index >= 0 && index < length());
3504 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3508 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3509 DCHECK(index >= 0 && index < length());
3510 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
// Object sizes derive from length only; the InstanceType parameter is unused.
3514 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3515 return SizeFor(length());
3519 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3520 return SizeFor(length());
// SlicedString fields: the parent (backing) string and the slice offset.
3524 String* SlicedString::parent() {
3525 return String::cast(READ_FIELD(this, kParentOffset));
// Parents must themselves be flat backing stores (seq or external), never
// another indirect string.
3529 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3530 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3531 WRITE_FIELD(this, kParentOffset, parent);
3532 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3536 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
// ConsString fields: typed and unchecked readers for both halves, plus
// write-barriered setters. The unchecked_* variants skip the String cast
// (usable while the heap is in an intermediate state).
3539 String* ConsString::first() {
3540 return String::cast(READ_FIELD(this, kFirstOffset));
3544 Object* ConsString::unchecked_first() {
3545 return READ_FIELD(this, kFirstOffset);
3549 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3550 WRITE_FIELD(this, kFirstOffset, value);
3551 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3555 String* ConsString::second() {
3556 return String::cast(READ_FIELD(this, kSecondOffset));
3560 Object* ConsString::unchecked_second() {
3561 return READ_FIELD(this, kSecondOffset);
3565 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3566 WRITE_FIELD(this, kSecondOffset, value);
3567 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
// "Short" external strings omit the cached data pointer field; the property
// is encoded in the instance type.
3571 bool ExternalString::is_short() {
3572 InstanceType type = map()->instance_type();
3573 return (type & kShortExternalStringMask) == kShortExternalStringTag;
// One-byte external strings: resource pointer, cached data pointer, and
// character access through the embedder-provided resource.
3577 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3578 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
// Refresh the in-object copy of resource()->data(); short strings have no
// such cache field, so this is a no-op for them.
3582 void ExternalOneByteString::update_data_cache() {
3583 if (is_short()) return;
3584 const char** data_field =
3585 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3586 *data_field = resource()->data();
3590 void ExternalOneByteString::set_resource(
3591 const ExternalOneByteString::Resource* resource) {
3592 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3593 *reinterpret_cast<const Resource**>(
3594 FIELD_ADDR(this, kResourceOffset)) = resource;
3595 if (resource != NULL) update_data_cache();
3599 const uint8_t* ExternalOneByteString::GetChars() {
3600 return reinterpret_cast<const uint8_t*>(resource()->data());
3604 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3605 DCHECK(index >= 0 && index < length());
3606 return GetChars()[index];
// Two-byte external strings: same structure as the one-byte case above.
3610 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3611 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3615 void ExternalTwoByteString::update_data_cache() {
3616 if (is_short()) return;
3617 const uint16_t** data_field =
3618 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3619 *data_field = resource()->data();
3623 void ExternalTwoByteString::set_resource(
3624 const ExternalTwoByteString::Resource* resource) {
3625 *reinterpret_cast<const Resource**>(
3626 FIELD_ADDR(this, kResourceOffset)) = resource;
3627 if (resource != NULL) update_data_cache();
3631 const uint16_t* ExternalTwoByteString::GetChars() {
3632 return resource()->data();
3636 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3637 DCHECK(index >= 0 && index < length());
3638 return GetChars()[index];
// Raw pointer into the external data starting at `start`.
3642 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3644 return GetChars() + start;
// Cons-string traversal stack helpers. frames_ is a fixed-size ring buffer
// indexed modulo kDepthMask+1, so depths simply wrap via `& kDepthMask`.
3648 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3649 return depth & kDepthMask;
// Push while descending into a left child (grows the depth)...
3653 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3654 frames_[depth_++ & kDepthMask] = string;
// ...replace the current top when switching to a right child.
3658 void ConsStringIteratorOp::PushRight(ConsString* string) {
3660 frames_[(depth_-1) & kDepthMask] = string;
// Track the deepest point reached so the blew-the-stack case is detectable.
3664 void ConsStringIteratorOp::AdjustMaximumDepth() {
3665 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
// Pop (decrement elided in this listing); depth may never exceed the maximum.
3669 void ConsStringIteratorOp::Pop() {
3671 DCHECK(depth_ <= maximum_depth_);
// Pull-style character stream over an arbitrary (possibly cons) string.
// buffer8_/buffer16_ alias the same cursor; is_one_byte_ selects the width.
3676 uint16_t StringCharacterStream::GetNext() {
3677 DCHECK(buffer8_ != NULL && end_ != NULL);
3678 // Advance cursor if needed.
3679 if (buffer8_ == end_) HasMore();
3680 DCHECK(buffer8_ < end_);
3681 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
// Constructor delegates everything to Reset (member-init tail elided in this
// listing).
3685 StringCharacterStream::StringCharacterStream(String* string,
3686 ConsStringIteratorOp* op,
3688 : is_one_byte_(false),
3690 Reset(string, offset);
// Point the stream at `string`+`offset`. VisitFlat fills the buffer via the
// Visit*ByteString callbacks below; if it stops at a cons string, hand that
// to the iterator op and visit its first leaf segment.
3694 void StringCharacterStream::Reset(String* string, int offset) {
3697 ConsString* cons_string = String::VisitFlat(this, string, offset);
3698 op_->Reset(cons_string, offset);
3699 if (cons_string != NULL) {
3700 string = op_->Next(&offset);
3701 if (string != NULL) String::VisitFlat(this, string, offset);
// True while characters remain; refills the buffer from the iterator op when
// the current segment is exhausted.
3706 bool StringCharacterStream::HasMore() {
3707 if (buffer8_ != end_) return true;
3709 String* string = op_->Next(&offset);
3710 DCHECK_EQ(offset, 0);
3711 if (string == NULL) return false;
3712 String::VisitFlat(this, string);
3713 DCHECK(buffer8_ != end_);
// VisitFlat callbacks: capture the segment bounds and its encoding. (The
// buffer-assignment lines are elided in this listing; end_ for two-byte data
// is stored as a byte pointer past the last uint16_t.)
3718 void StringCharacterStream::VisitOneByteString(
3719 const uint8_t* chars, int length) {
3720 is_one_byte_ = true;
3722 end_ = chars + length;
3726 void StringCharacterStream::VisitTwoByteString(
3727 const uint16_t* chars, int length) {
3728 is_one_byte_ = false;
3730 end_ = reinterpret_cast<const uint8_t*>(chars + length);
// Result cache bookkeeping stored in the first fixed-array slots: size and
// finger (clock hand) indices, both kept as Smis.
3734 void JSFunctionResultCache::MakeZeroSize() {
3735 set_finger_index(kEntriesIndex);
3736 set_size(kEntriesIndex);
// Wipe all entry slots back to the-hole (size reset elided in this listing).
3740 void JSFunctionResultCache::Clear() {
3741 int cache_size = size();
3742 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3743 MemsetPointer(entries_start,
3744 GetHeap()->the_hole_value(),
3745 cache_size - kEntriesIndex);
3750 int JSFunctionResultCache::size() {
3751 return Smi::cast(get(kCacheSizeIndex))->value();
3755 void JSFunctionResultCache::set_size(int size) {
3756 set(kCacheSizeIndex, Smi::FromInt(size));
3760 int JSFunctionResultCache::finger_index() {
3761 return Smi::cast(get(kFingerIndex))->value();
3765 void JSFunctionResultCache::set_finger_index(int finger_index) {
3766 set(kFingerIndex, Smi::FromInt(finger_index));
// Raw byte storage: element and int access plus conversion between the
// tagged object pointer and the address of its data payload.
3770 byte ByteArray::get(int index) {
3771 DCHECK(index >= 0 && index < this->length());
3772 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3776 void ByteArray::set(int index, byte value) {
3777 DCHECK(index >= 0 && index < this->length());
3778 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
// `index` counts ints here; the bound is still expressed in bytes.
3782 int ByteArray::get_int(int index) {
3783 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3784 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
// Recover the tagged ByteArray pointer from the address of its first data
// byte (inverse of GetDataStartAddress).
3788 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3789 DCHECK_TAG_ALIGNED(address);
3790 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3794 Address ByteArray::GetDataStartAddress() {
3795 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
// Uint8Clamped external array: elements live in off-heap memory reached via
// the external pointer; scalar get/set bodies' final read/write lines are
// elided in this listing.
3799 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3800 return reinterpret_cast<uint8_t*>(external_pointer());
3804 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3805 DCHECK((index >= 0) && (index < this->length()));
3806 uint8_t* ptr = external_uint8_clamped_pointer();
// Boxed getter: a uint8_t always fits in a Smi.
3811 Handle<Object> ExternalUint8ClampedArray::get(
3812 Handle<ExternalUint8ClampedArray> array,
3814 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3815 array->GetIsolate());
3819 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3820 DCHECK((index >= 0) && (index < this->length()));
3821 uint8_t* ptr = external_uint8_clamped_pointer();
// Base-class storage of the off-heap data pointer, kept as an intptr field.
3826 void* ExternalArray::external_pointer() const {
3827 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3828 return reinterpret_cast<void*>(ptr);
3832 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3833 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3834 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
// External typed arrays, one trio per element type: get_scalar/set operate on
// the off-heap buffer (final deref lines elided in this listing); the boxed
// get() wraps the scalar as a Smi where it always fits (8/16-bit types) or
// goes through the factory for 32-bit and floating types.
3838 int8_t ExternalInt8Array::get_scalar(int index) {
3839 DCHECK((index >= 0) && (index < this->length()));
3840 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3845 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3847 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3848 array->GetIsolate());
3852 void ExternalInt8Array::set(int index, int8_t value) {
3853 DCHECK((index >= 0) && (index < this->length()));
3854 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3859 uint8_t ExternalUint8Array::get_scalar(int index) {
3860 DCHECK((index >= 0) && (index < this->length()));
3861 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3866 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3868 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3869 array->GetIsolate());
3873 void ExternalUint8Array::set(int index, uint8_t value) {
3874 DCHECK((index >= 0) && (index < this->length()));
3875 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3880 int16_t ExternalInt16Array::get_scalar(int index) {
3881 DCHECK((index >= 0) && (index < this->length()));
3882 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3887 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
3889 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3890 array->GetIsolate());
3894 void ExternalInt16Array::set(int index, int16_t value) {
3895 DCHECK((index >= 0) && (index < this->length()));
3896 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3901 uint16_t ExternalUint16Array::get_scalar(int index) {
3902 DCHECK((index >= 0) && (index < this->length()));
3903 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3908 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
3910 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3911 array->GetIsolate());
3915 void ExternalUint16Array::set(int index, uint16_t value) {
3916 DCHECK((index >= 0) && (index < this->length()));
3917 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
// 32-bit values may not fit in a Smi, so boxing goes via the heap-number
// factory.
3922 int32_t ExternalInt32Array::get_scalar(int index) {
3923 DCHECK((index >= 0) && (index < this->length()));
3924 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3929 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
3931 return array->GetIsolate()->factory()->
3932 NewNumberFromInt(array->get_scalar(index));
3936 void ExternalInt32Array::set(int index, int32_t value) {
3937 DCHECK((index >= 0) && (index < this->length()));
3938 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3943 uint32_t ExternalUint32Array::get_scalar(int index) {
3944 DCHECK((index >= 0) && (index < this->length()));
3945 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3950 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
3952 return array->GetIsolate()->factory()->
3953 NewNumberFromUint(array->get_scalar(index));
3957 void ExternalUint32Array::set(int index, uint32_t value) {
3958 DCHECK((index >= 0) && (index < this->length()));
3959 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3964 float ExternalFloat32Array::get_scalar(int index) {
3965 DCHECK((index >= 0) && (index < this->length()));
3966 float* ptr = static_cast<float*>(external_pointer());
3971 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
3973 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3977 void ExternalFloat32Array::set(int index, float value) {
3978 DCHECK((index >= 0) && (index < this->length()));
3979 float* ptr = static_cast<float*>(external_pointer());
3984 double ExternalFloat64Array::get_scalar(int index) {
3985 DCHECK((index >= 0) && (index < this->length()));
3986 double* ptr = static_cast<double*>(external_pointer());
3991 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
3993 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3997 void ExternalFloat64Array::set(int index, double value) {
3998 DCHECK((index >= 0) && (index < this->length()));
3999 double* ptr = static_cast<double*>(external_pointer());
// Fixed (on-heap) typed array backing: data lives inline after the header.
4004 void* FixedTypedArrayBase::DataPtr() {
4005 return FIELD_ADDR(this, kDataOffset);
// Payload size = element count * per-type element size, derived from the
// instance type via the TYPED_ARRAYS X-macro (switch framing elided in this
// listing).
4009 int FixedTypedArrayBase::DataSize(InstanceType type) {
4012 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4013 case FIXED_##TYPE##_ARRAY_TYPE: \
4014 element_size = size; \
4017 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4018 #undef TYPED_ARRAY_CASE
4023 return length() * element_size;
4027 int FixedTypedArrayBase::DataSize() {
4028 return DataSize(map()->instance_type());
// Total object size: header + payload, rounded up to pointer alignment.
4032 int FixedTypedArrayBase::size() {
4033 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4037 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4038 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
// Per-type default element values used when storing non-numbers: zero for all
// integral types, NaN for the floating-point types.
4042 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4045 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4048 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4051 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4054 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4057 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4060 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4063 float Float32ArrayTraits::defaultValue() {
4064 return static_cast<float>(base::OS::nan_value());
4068 double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
// Generic element access for on-heap typed arrays; Float64 specializations
// use the double field macros (which handle alignment/word order).
4071 template <class Traits>
4072 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4073 DCHECK((index >= 0) && (index < this->length()));
4074 ElementType* ptr = reinterpret_cast<ElementType*>(
4075 FIELD_ADDR(this, kDataOffset));
4081 FixedTypedArray<Float64ArrayTraits>::ElementType
4082 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
4083 DCHECK((index >= 0) && (index < this->length()));
4084 return READ_DOUBLE_FIELD(this, ElementOffset(index));
4088 template <class Traits>
4089 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4090 DCHECK((index >= 0) && (index < this->length()));
4091 ElementType* ptr = reinterpret_cast<ElementType*>(
4092 FIELD_ADDR(this, kDataOffset));
4098 void FixedTypedArray<Float64ArrayTraits>::set(
4099 int index, Float64ArrayTraits::ElementType value) {
4100 DCHECK((index >= 0) && (index < this->length()));
4101 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
// int -> element conversion: plain static_cast by default...
4105 template <class Traits>
4106 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4107 return static_cast<ElementType>(value);
// ...but Uint8Clamped saturates to [0, 255] instead of wrapping.
4112 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4113 if (value < 0) return 0;
4114 if (value > 0xFF) return 0xFF;
4115 return static_cast<uint8_t>(value);
// double -> element: integral types go through DoubleToInt32 (ECMA ToInt32
// truncation), Uint8Clamped clamps then rounds to nearest via lrint, floats
// just narrow.
4119 template <class Traits>
4120 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4122 return static_cast<ElementType>(DoubleToInt32(value));
4127 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4128 if (value < 0) return 0;
4129 if (value > 0xFF) return 0xFF;
4130 return static_cast<uint8_t>(lrint(value));
4135 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4136 return static_cast<float>(value);
// Float64 pass-through (return elided in this listing).
4141 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
// Boxed getter delegates boxing policy to the traits class.
4146 template <class Traits>
4147 Handle<Object> FixedTypedArray<Traits>::get(
4148 Handle<FixedTypedArray<Traits> > array,
4150 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
// Store a JS value into the array: Smis and HeapNumbers are converted via
// from_int/from_double; anything else must already be undefined (clamped to
// the default value). Out-of-bounds indices are silently ignored, matching
// typed-array store semantics. Returns the stored value re-boxed.
4154 template <class Traits>
4155 Handle<Object> FixedTypedArray<Traits>::SetValue(
4156 Handle<FixedTypedArray<Traits> > array,
4158 Handle<Object> value) {
4159 ElementType cast_value = Traits::defaultValue();
4160 if (index < static_cast<uint32_t>(array->length())) {
4161 if (value->IsSmi()) {
4162 int int_value = Handle<Smi>::cast(value)->value();
4163 cast_value = from_int(int_value);
4164 } else if (value->IsHeapNumber()) {
4165 double double_value = Handle<HeapNumber>::cast(value)->value();
4166 cast_value = from_double(double_value);
4168 // Clamp undefined to the default value. All other types have been
4169 // converted to a number type further up in the call chain.
4170 DCHECK(value->IsUndefined());
4172 array->set(index, cast_value);
4174 return Traits::ToHandle(array->GetIsolate(), cast_value);
// Scalar-to-JS-value boxing per element type: 8/16-bit values always fit in a
// Smi; 32-bit and floating values go through the factory, which may allocate
// a HeapNumber.
4178 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4179 return handle(Smi::FromInt(scalar), isolate);
4183 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4185 return handle(Smi::FromInt(scalar), isolate);
4189 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4190 return handle(Smi::FromInt(scalar), isolate);
4194 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4195 return handle(Smi::FromInt(scalar), isolate);
4199 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4200 return handle(Smi::FromInt(scalar), isolate);
4204 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4205 return isolate->factory()->NewNumberFromUint(scalar);
4209 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4210 return isolate->factory()->NewNumberFromInt(scalar);
4214 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4215 return isolate->factory()->NewNumber(scalar);
4219 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4220 return isolate->factory()->NewNumber(scalar);
// Map byte-field getters. instance_size is stored compressed (in words), so
// the getter shifts it back to bytes.
4224 int Map::visitor_id() {
4225 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4229 void Map::set_visitor_id(int id) {
4230 DCHECK(0 <= id && id < 256);
4231 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4235 int Map::instance_size() {
4236 return NOBARRIER_READ_BYTE_FIELD(
4237 this, kInstanceSizeOffset) << kPointerSizeLog2;
4241 int Map::inobject_properties() {
4242 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4246 int Map::pre_allocated_property_fields() {
4247 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
// In-object properties sit at the end of the instance; index is counted from
// the end, hence the subtraction before computing the byte offset.
4251 int Map::GetInObjectPropertyOffset(int index) {
4252 // Adjust for the number of properties stored in the object.
4253 index -= inobject_properties();
4255 return instance_size() + (index * kPointerSize);
// Compute an object's size. Fixed-size types read it straight from the map;
// kVariableSizeSentinel means the size depends on a per-object length, and
// the most frequent variable-size types are handled inline here.
4259 int HeapObject::SizeFromMap(Map* map) {
4260 int instance_size = map->instance_size();
4261 if (instance_size != kVariableSizeSentinel) return instance_size;
4262 // Only inline the most frequent cases.
4263 InstanceType instance_type = map->instance_type();
4264 if (instance_type == FIXED_ARRAY_TYPE) {
4265 return FixedArray::BodyDescriptor::SizeOf(map, this);
4267 if (instance_type == ONE_BYTE_STRING_TYPE ||
4268 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4269 return SeqOneByteString::SizeFor(
4270 reinterpret_cast<SeqOneByteString*>(this)->length());
4272 if (instance_type == BYTE_ARRAY_TYPE) {
4273 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
// FreeSpace size is read without a barrier: this runs during GC.
4275 if (instance_type == FREE_SPACE_TYPE) {
4276 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4278 if (instance_type == STRING_TYPE ||
4279 instance_type == INTERNALIZED_STRING_TYPE) {
4280 return SeqTwoByteString::SizeFor(
4281 reinterpret_cast<SeqTwoByteString*>(this)->length());
4283 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4284 return FixedDoubleArray::SizeFor(
4285 reinterpret_cast<FixedDoubleArray*>(this)->length());
4287 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4288 return reinterpret_cast<ConstantPoolArray*>(this)->size();
4290 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4291 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4292 return reinterpret_cast<FixedTypedArrayBase*>(
4293 this)->TypedArraySize(instance_type);
// Anything else variable-sized must be Code.
4295 DCHECK(instance_type == CODE_TYPE);
4296 return reinterpret_cast<Code*>(this)->CodeSize();
// Map byte-field setters. instance_size is stored in words (must be
// pointer-aligned and fit a byte after the shift).
4300 void Map::set_instance_size(int value) {
4301 DCHECK_EQ(0, value & (kPointerSize - 1));
4302 value >>= kPointerSizeLog2;
4303 DCHECK(0 <= value && value < 256);
4304 NOBARRIER_WRITE_BYTE_FIELD(
4305 this, kInstanceSizeOffset, static_cast<byte>(value));
4309 void Map::set_inobject_properties(int value) {
4310 DCHECK(0 <= value && value < 256);
4311 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4315 void Map::set_pre_allocated_property_fields(int value) {
4316 DCHECK(0 <= value && value < 256);
4317 WRITE_BYTE_FIELD(this,
4318 kPreAllocatedPropertyFieldsOffset,
4319 static_cast<byte>(value));
4323 InstanceType Map::instance_type() {
4324 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4328 void Map::set_instance_type(InstanceType value) {
4329 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
// unused_property_fields saturates at 255 since it is stored in one byte.
4333 int Map::unused_property_fields() {
4334 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4338 void Map::set_unused_property_fields(int value) {
4339 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4343 byte Map::bit_field() {
4344 return READ_BYTE_FIELD(this, kBitFieldOffset);
4348 void Map::set_bit_field(byte value) {
4349 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4353 byte Map::bit_field2() {
4354 return READ_BYTE_FIELD(this, kBitField2Offset);
4358 void Map::set_bit_field2(byte value) {
4359 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
// Single-bit flags packed into bit_field / bit_field2. Setters either
// hand-mask the bit or go through the BitField helper classes; the if/else
// framing of the manual versions is elided in this listing.
4363 void Map::set_non_instance_prototype(bool value) {
4365 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4367 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4372 bool Map::has_non_instance_prototype() {
4373 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4377 void Map::set_function_with_prototype(bool value) {
4378 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4382 bool Map::function_with_prototype() {
4383 return FunctionWithPrototype::decode(bit_field());
4387 void Map::set_is_access_check_needed(bool access_check_needed) {
4388 if (access_check_needed) {
4389 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4391 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4396 bool Map::is_access_check_needed() {
4397 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4401 void Map::set_is_extensible(bool value) {
4403 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4405 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4409 bool Map::is_extensible() {
4410 return ((1 << kIsExtensible) & bit_field2()) != 0;
4414 void Map::set_is_prototype_map(bool value) {
4415 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4418 bool Map::is_prototype_map() {
4419 return IsPrototypeMapBits::decode(bit_field2());
// Dictionary-mode maps are also marked unstable in the same update.
4423 void Map::set_dictionary_map(bool value) {
4424 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4425 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4426 set_bit_field3(new_bit_field3);
4430 bool Map::is_dictionary_map() {
4431 return DictionaryMap::decode(bit_field3());
// Code::flags lives here between the Map accessors in the original file.
4435 Code::Flags Code::flags() {
4436 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
// Flags packed into the 32-bit bit_field3 word, all via BitField helpers.
4440 void Map::set_owns_descriptors(bool owns_descriptors) {
4441 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4445 bool Map::owns_descriptors() {
4446 return OwnsDescriptors::decode(bit_field3());
4450 void Map::set_has_instance_call_handler() {
4451 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4455 bool Map::has_instance_call_handler() {
4456 return HasInstanceCallHandler::decode(bit_field3());
// One-way transitions: deprecate/freeze/mark_unstable only ever set the bit.
4460 void Map::deprecate() {
4461 set_bit_field3(Deprecated::update(bit_field3(), true));
4465 bool Map::is_deprecated() {
4466 return Deprecated::decode(bit_field3());
4470 void Map::set_migration_target(bool value) {
4471 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4475 bool Map::is_migration_target() {
4476 return IsMigrationTarget::decode(bit_field3());
4480 void Map::set_done_inobject_slack_tracking(bool value) {
4481 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
4485 bool Map::done_inobject_slack_tracking() {
4486 return DoneInobjectSlackTracking::decode(bit_field3());
4490 void Map::set_construction_count(int value) {
4491 set_bit_field3(ConstructionCount::update(bit_field3(), value));
4495 int Map::construction_count() {
4496 return ConstructionCount::decode(bit_field3());
4500 void Map::freeze() {
4501 set_bit_field3(IsFrozen::update(bit_field3(), true));
4505 bool Map::is_frozen() {
4506 return IsFrozen::decode(bit_field3());
4510 void Map::mark_unstable() {
4511 set_bit_field3(IsUnstable::update(bit_field3(), true));
4515 bool Map::is_stable() {
4516 return !IsUnstable::decode(bit_field3());
4520 bool Map::has_code_cache() {
4521 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
// A map can be deprecated if any descriptor uses a representation or
// constant that a field-type change could invalidate (final return elided in
// this listing).
4525 bool Map::CanBeDeprecated() {
4526 int descriptor = LastAdded();
4527 for (int i = 0; i <= descriptor; i++) {
4528 PropertyDetails details = instance_descriptors()->GetDetails(i);
4529 if (details.representation().IsNone()) return true;
4530 if (details.representation().IsSmi()) return true;
4531 if (details.representation().IsDouble()) return true;
4532 if (details.representation().IsHeapObject()) return true;
4533 if (details.type() == CONSTANT) return true;
// Invalidate optimized code that embedded assumptions about this leaf map
// (guard condition elided in this listing).
4539 void Map::NotifyLeafMapLayoutChange() {
4542 dependent_code()->DeoptimizeDependentCodeGroup(
4544 DependentCode::kPrototypeCheckGroup);
4549 bool Map::CanOmitMapChecks() {
4550 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
// DependentCode layout: per-group entry counts in the leading slots, then the
// code/compilation-info entries starting at kCodesStartIndex.
4554 int DependentCode::number_of_entries(DependencyGroup group) {
4555 if (length() == 0) return 0;
4556 return Smi::cast(get(group))->value();
4560 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4561 set(group, Smi::FromInt(value));
// Entries can be Code objects or Foreign-wrapped CompilationInfo pointers.
4565 bool DependentCode::is_code_at(int i) {
4566 return get(kCodesStartIndex + i)->IsCode();
4569 Code* DependentCode::code_at(int i) {
4570 return Code::cast(get(kCodesStartIndex + i));
4574 CompilationInfo* DependentCode::compilation_info_at(int i) {
4575 return reinterpret_cast<CompilationInfo*>(
4576 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4580 void DependentCode::set_object_at(int i, Object* object) {
4581 set(kCodesStartIndex + i, object);
4585 Object* DependentCode::object_at(int i) {
4586 return get(kCodesStartIndex + i);
4590 Object** DependentCode::slot_at(int i) {
4591 return RawFieldOfElementAt(kCodesStartIndex + i);
4595 void DependentCode::clear_at(int i) {
4596 set_undefined(kCodesStartIndex + i);
4600 void DependentCode::copy(int from, int to) {
4601 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
// Make room in `group` by shifting each later group's first entry to its new
// end (count-bump tail elided in this listing).
4605 void DependentCode::ExtendGroup(DependencyGroup group) {
4606 GroupStartIndexes starts(this);
4607 for (int g = kGroupCount - 1; g > group; g--) {
4608 if (starts.at(g) < starts.at(g + 1)) {
4609 copy(starts.at(g), starts.at(g + 1));
// Code flags word: kind, IC state, extra IC state and stub type are all
// bit-packed into one int; Extract* helpers decode the respective fields.
4615 void Code::set_flags(Code::Flags flags) {
4616 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4617 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4621 Code::Kind Code::kind() {
4622 return ExtractKindFromFlags(flags());
4626 bool Code::IsCodeStubOrIC() {
4627 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4628 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4629 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4630 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4631 kind() == TO_BOOLEAN_IC;
// (return of `result` elided in this listing.)
4635 InlineCacheState Code::ic_state() {
4636 InlineCacheState result = ExtractICStateFromFlags(flags());
4637 // Only allow uninitialized or debugger states for non-IC code
4638 // objects. This is used in the debugger to determine whether or not
4639 // a call to code object has been replaced with a debug break call.
4640 DCHECK(is_inline_cache_stub() ||
4641 result == UNINITIALIZED ||
4642 result == DEBUG_STUB)
4647 ExtraICState Code::extra_ic_state() {
4648 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4649 return ExtractExtraICStateFromFlags(flags());
4653 Code::StubType Code::type() {
4654 return ExtractTypeFromFlags(flags());
4658 // For initialization.
4659 void Code::set_raw_kind_specific_flags1(int value) {
4660 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4664 void Code::set_raw_kind_specific_flags2(int value) {
4665 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
// Crankshaft/TurboFan provenance bits, stored in the kind-specific flag
// words via BitField helpers.
4669 inline bool Code::is_crankshafted() {
4670 return IsCrankshaftedField::decode(
4671 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
// Crankshafted but not an optimized function => a Hydrogen-generated stub.
4675 inline bool Code::is_hydrogen_stub() {
4676 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4680 inline void Code::set_is_crankshafted(bool value) {
4681 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4682 int updated = IsCrankshaftedField::update(previous, value);
4683 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4687 inline bool Code::is_turbofanned() {
4688 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
4689 return IsTurbofannedField::decode(
4690 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4694 inline void Code::set_is_turbofanned(bool value) {
4695 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
4696 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4697 int updated = IsTurbofannedField::update(previous, value);
4698 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Full-codegen-only flags (kind() == FUNCTION): optimizability byte and the
// deoptimization-support bit in the full-code flags byte.
4702 bool Code::optimizable() {
4703 DCHECK_EQ(FUNCTION, kind());
4704 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4708 void Code::set_optimizable(bool value) {
4709 DCHECK_EQ(FUNCTION, kind());
4710 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4714 bool Code::has_deoptimization_support() {
4715 DCHECK_EQ(FUNCTION, kind());
4716 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4717 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4721 void Code::set_has_deoptimization_support(bool value) {
4722 DCHECK_EQ(FUNCTION, kind());
4723 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4724 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4725 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4729 bool Code::has_debug_break_slots() {
4730 DCHECK_EQ(FUNCTION, kind());
4731 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4732 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4736 void Code::set_has_debug_break_slots(bool value) {
4737 DCHECK_EQ(FUNCTION, kind());
4738 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4739 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4740 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4744 bool Code::is_compiled_optimizable() {
4745 DCHECK_EQ(FUNCTION, kind());
4746 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4747 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4751 void Code::set_compiled_optimizable(bool value) {
4752 DCHECK_EQ(FUNCTION, kind());
4753 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4754 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4755 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4759 int Code::allow_osr_at_loop_nesting_level() {
4760 DCHECK_EQ(FUNCTION, kind());
4761 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4762 return AllowOSRAtLoopNestingLevelField::decode(fields);
4766 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4767 DCHECK_EQ(FUNCTION, kind());
4768 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
4769 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4770 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
4771 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4775 int Code::profiler_ticks() {
4776 DCHECK_EQ(FUNCTION, kind());
4777 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4781 void Code::set_profiler_ticks(int ticks) {
4782 DCHECK(ticks < 256);
4783 if (kind() == FUNCTION) {
4784 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4789 int Code::builtin_index() {
4790 DCHECK_EQ(BUILTIN, kind());
4791 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
4795 void Code::set_builtin_index(int index) {
4796 DCHECK_EQ(BUILTIN, kind());
4797 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
4801 unsigned Code::stack_slots() {
4802 DCHECK(is_crankshafted());
4803 return StackSlotsField::decode(
4804 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4808 void Code::set_stack_slots(unsigned slots) {
4809 CHECK(slots <= (1 << kStackSlotsBitCount));
4810 DCHECK(is_crankshafted());
4811 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4812 int updated = StackSlotsField::update(previous, slots);
4813 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4817 unsigned Code::safepoint_table_offset() {
4818 DCHECK(is_crankshafted());
4819 return SafepointTableOffsetField::decode(
4820 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4824 void Code::set_safepoint_table_offset(unsigned offset) {
4825 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4826 DCHECK(is_crankshafted());
4827 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4828 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4829 int updated = SafepointTableOffsetField::update(previous, offset);
4830 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4834 unsigned Code::back_edge_table_offset() {
4835 DCHECK_EQ(FUNCTION, kind());
4836 return BackEdgeTableOffsetField::decode(
4837 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
4841 void Code::set_back_edge_table_offset(unsigned offset) {
4842 DCHECK_EQ(FUNCTION, kind());
4843 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
4844 offset = offset >> kPointerSizeLog2;
4845 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4846 int updated = BackEdgeTableOffsetField::update(previous, offset);
4847 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4851 bool Code::back_edges_patched_for_osr() {
4852 DCHECK_EQ(FUNCTION, kind());
4853 return allow_osr_at_loop_nesting_level() > 0;
4857 byte Code::to_boolean_state() {
4858 return extra_ic_state();
4862 bool Code::has_function_cache() {
4863 DCHECK(kind() == STUB);
4864 return HasFunctionCacheField::decode(
4865 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4869 void Code::set_has_function_cache(bool flag) {
4870 DCHECK(kind() == STUB);
4871 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4872 int updated = HasFunctionCacheField::update(previous, flag);
4873 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4877 bool Code::marked_for_deoptimization() {
4878 DCHECK(kind() == OPTIMIZED_FUNCTION);
4879 return MarkedForDeoptimizationField::decode(
4880 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4884 void Code::set_marked_for_deoptimization(bool flag) {
4885 DCHECK(kind() == OPTIMIZED_FUNCTION);
4886 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
4887 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4888 int updated = MarkedForDeoptimizationField::update(previous, flag);
4889 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4893 bool Code::is_weak_stub() {
4894 return CanBeWeakStub() && WeakStubField::decode(
4895 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4899 void Code::mark_as_weak_stub() {
4900 DCHECK(CanBeWeakStub());
4901 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4902 int updated = WeakStubField::update(previous, true);
4903 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4907 bool Code::is_invalidated_weak_stub() {
4908 return is_weak_stub() && InvalidatedWeakStubField::decode(
4909 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4913 void Code::mark_as_invalidated_weak_stub() {
4914 DCHECK(is_inline_cache_stub());
4915 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4916 int updated = InvalidatedWeakStubField::update(previous, true);
4917 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4921 bool Code::is_inline_cache_stub() {
4922 Kind kind = this->kind();
4924 #define CASE(name) case name: return true;
4927 default: return false;
4932 bool Code::is_keyed_stub() {
4933 return is_keyed_load_stub() || is_keyed_store_stub();
4937 bool Code::is_debug_stub() {
4938 return ic_state() == DEBUG_STUB;
4942 ConstantPoolArray* Code::constant_pool() {
4943 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
4947 void Code::set_constant_pool(Object* value) {
4948 DCHECK(value->IsConstantPoolArray());
4949 WRITE_FIELD(this, kConstantPoolOffset, value);
4950 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
4954 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
4955 ExtraICState extra_ic_state, StubType type,
4956 CacheHolderFlag holder) {
4957 // Compute the bit mask.
4958 unsigned int bits = KindField::encode(kind)
4959 | ICStateField::encode(ic_state)
4960 | TypeField::encode(type)
4961 | ExtraICStateField::encode(extra_ic_state)
4962 | CacheHolderField::encode(holder);
4963 return static_cast<Flags>(bits);
4967 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
4968 ExtraICState extra_ic_state,
4969 CacheHolderFlag holder,
4971 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
4975 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
4976 CacheHolderFlag holder) {
4977 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
4981 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4982 return KindField::decode(flags);
4986 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4987 return ICStateField::decode(flags);
4991 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4992 return ExtraICStateField::decode(flags);
4996 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4997 return TypeField::decode(flags);
5001 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5002 return CacheHolderField::decode(flags);
5006 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5007 int bits = flags & ~TypeField::kMask;
5008 return static_cast<Flags>(bits);
5012 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5013 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5014 return static_cast<Flags>(bits);
5018 Code* Code::GetCodeFromTargetAddress(Address address) {
5019 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5020 // GetCodeFromTargetAddress might be called when marking objects during mark
5021 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5022 // Code::cast. Code::cast does not work when the object's map is
5024 Code* result = reinterpret_cast<Code*>(code);
5029 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5031 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5035 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5036 if (!FLAG_collect_maps) return false;
5037 if (object->IsMap()) {
5038 return Map::cast(object)->CanTransition() &&
5039 FLAG_weak_embedded_maps_in_optimized_code;
5041 if (object->IsJSObject() ||
5042 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
5043 return FLAG_weak_embedded_objects_in_optimized_code;
5049 class Code::FindAndReplacePattern {
5051 FindAndReplacePattern() : count_(0) { }
5052 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5053 DCHECK(count_ < kMaxCount);
5054 find_[count_] = map_to_find;
5055 replace_[count_] = obj_to_replace;
5059 static const int kMaxCount = 4;
5061 Handle<Map> find_[kMaxCount];
5062 Handle<Object> replace_[kMaxCount];
5067 bool Code::IsWeakObjectInIC(Object* object) {
5068 return object->IsMap() && Map::cast(object)->CanTransition() &&
5069 FLAG_collect_maps &&
5070 FLAG_weak_embedded_maps_in_ic;
5074 Object* Map::prototype() const {
5075 return READ_FIELD(this, kPrototypeOffset);
5079 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5080 DCHECK(value->IsNull() || value->IsJSReceiver());
5081 WRITE_FIELD(this, kPrototypeOffset, value);
5082 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5086 // If the descriptor is using the empty transition array, install a new empty
5087 // transition array that will have place for an element transition.
5088 static void EnsureHasTransitionArray(Handle<Map> map) {
5089 Handle<TransitionArray> transitions;
5090 if (!map->HasTransitionArray()) {
5091 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
5092 transitions->set_back_pointer_storage(map->GetBackPointer());
5093 } else if (!map->transitions()->IsFullTransitionArray()) {
5094 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5098 map->set_transitions(*transitions);
5102 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
5103 int len = descriptors->number_of_descriptors();
5104 set_instance_descriptors(descriptors);
5105 SetNumberOfOwnDescriptors(len);
5109 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5112 void Map::set_bit_field3(uint32_t bits) {
5113 if (kInt32Size != kPointerSize) {
5114 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5116 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5120 uint32_t Map::bit_field3() {
5121 return READ_UINT32_FIELD(this, kBitField3Offset);
5125 void Map::AppendDescriptor(Descriptor* desc) {
5126 DescriptorArray* descriptors = instance_descriptors();
5127 int number_of_own_descriptors = NumberOfOwnDescriptors();
5128 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5129 descriptors->Append(desc);
5130 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5134 Object* Map::GetBackPointer() {
5135 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5136 if (object->IsDescriptorArray()) {
5137 return TransitionArray::cast(object)->back_pointer_storage();
5139 DCHECK(object->IsMap() || object->IsUndefined());
5145 bool Map::HasElementsTransition() {
5146 return HasTransitionArray() && transitions()->HasElementsTransition();
5150 bool Map::HasTransitionArray() const {
5151 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5152 return object->IsTransitionArray();
5156 Map* Map::elements_transition_map() {
5157 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
5158 return transitions()->GetTarget(index);
5162 bool Map::CanHaveMoreTransitions() {
5163 if (!HasTransitionArray()) return true;
5164 return FixedArray::SizeFor(transitions()->length() +
5165 TransitionArray::kTransitionSize)
5166 <= Page::kMaxRegularHeapObjectSize;
5170 Map* Map::GetTransition(int transition_index) {
5171 return transitions()->GetTarget(transition_index);
5175 int Map::SearchTransition(Name* name) {
5176 if (HasTransitionArray()) return transitions()->Search(name);
5177 return TransitionArray::kNotFound;
5181 FixedArray* Map::GetPrototypeTransitions() {
5182 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5183 if (!transitions()->HasPrototypeTransitions()) {
5184 return GetHeap()->empty_fixed_array();
5186 return transitions()->GetPrototypeTransitions();
5190 void Map::SetPrototypeTransitions(
5191 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5192 EnsureHasTransitionArray(map);
5193 int old_number_of_transitions = map->NumberOfProtoTransitions();
5195 if (map->HasPrototypeTransitions()) {
5196 DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
5197 map->ZapPrototypeTransitions();
5200 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5201 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5205 bool Map::HasPrototypeTransitions() {
5206 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
5210 TransitionArray* Map::transitions() const {
5211 DCHECK(HasTransitionArray());
5212 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5213 return TransitionArray::cast(object);
5217 void Map::set_transitions(TransitionArray* transition_array,
5218 WriteBarrierMode mode) {
5219 // Transition arrays are not shared. When one is replaced, it should not
5220 // keep referenced objects alive, so we zap it.
5221 // When there is another reference to the array somewhere (e.g. a handle),
5222 // not zapping turns from a waste of memory into a source of crashes.
5223 if (HasTransitionArray()) {
5225 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
5226 Map* target = transitions()->GetTarget(i);
5227 if (target->instance_descriptors() == instance_descriptors()) {
5228 Name* key = transitions()->GetKey(i);
5229 int new_target_index = transition_array->Search(key);
5230 DCHECK(new_target_index != TransitionArray::kNotFound);
5231 DCHECK(transition_array->GetTarget(new_target_index) == target);
5235 DCHECK(transitions() != transition_array);
5239 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5240 CONDITIONAL_WRITE_BARRIER(
5241 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
5245 void Map::init_back_pointer(Object* undefined) {
5246 DCHECK(undefined->IsUndefined());
5247 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
5251 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5252 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5253 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5254 (value->IsMap() && GetBackPointer()->IsUndefined()));
5255 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5256 if (object->IsTransitionArray()) {
5257 TransitionArray::cast(object)->set_back_pointer_storage(value);
5259 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5260 CONDITIONAL_WRITE_BARRIER(
5261 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
5266 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5267 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5268 ACCESSORS(Map, constructor, Object, kConstructorOffset)
5270 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5271 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5272 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5274 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5275 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5276 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5277 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5279 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5280 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5282 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5283 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5284 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5285 kExpectedReceiverTypeOffset)
5287 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5288 kSerializedDataOffset)
5290 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5293 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5294 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5295 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5297 ACCESSORS(Box, value, Object, kValueOffset)
5299 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5300 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5302 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5303 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5304 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5306 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5307 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5308 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5309 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5310 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5311 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5313 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5314 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5316 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5317 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5318 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5320 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5321 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5322 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5323 kPrototypeTemplateOffset)
5324 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5325 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5326 kNamedPropertyHandlerOffset)
5327 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5328 kIndexedPropertyHandlerOffset)
5329 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5330 kInstanceTemplateOffset)
5331 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5332 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5333 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5334 kInstanceCallHandlerOffset)
5335 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5336 kAccessCheckInfoOffset)
5337 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5339 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5340 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5341 kInternalFieldCountOffset)
5343 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5344 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5346 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5348 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5349 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5350 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5351 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5352 kPretenureCreateCountOffset)
5353 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5354 kDependentCodeOffset)
5355 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5356 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5358 ACCESSORS(Script, source, Object, kSourceOffset)
5359 ACCESSORS(Script, name, Object, kNameOffset)
5360 ACCESSORS(Script, id, Smi, kIdOffset)
5361 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5362 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5363 ACCESSORS(Script, context_data, Object, kContextOffset)
5364 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
5365 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5366 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5367 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5368 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5369 kEvalFrominstructionsOffsetOffset)
5370 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5371 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5372 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5373 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5375 Script::CompilationType Script::compilation_type() {
5376 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5377 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5379 void Script::set_compilation_type(CompilationType type) {
5380 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5381 type == COMPILATION_TYPE_EVAL));
5383 Script::CompilationState Script::compilation_state() {
5384 return BooleanBit::get(flags(), kCompilationStateBit) ?
5385 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5387 void Script::set_compilation_state(CompilationState state) {
5388 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5389 state == COMPILATION_STATE_COMPILED));
5393 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5394 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5395 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5396 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5398 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5399 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5400 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5401 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5403 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5404 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5405 kOptimizedCodeMapOffset)
5406 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5407 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5408 kFeedbackVectorOffset)
5409 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5410 kInstanceClassNameOffset)
5411 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5412 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5413 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5414 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5417 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5418 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5419 kHiddenPrototypeBit)
5420 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5421 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5422 kNeedsAccessCheckBit)
5423 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5424 kReadOnlyPrototypeBit)
5425 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5426 kRemovePrototypeBit)
5427 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5429 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5431 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5434 BOOL_ACCESSORS(SharedFunctionInfo,
5436 allows_lazy_compilation,
5437 kAllowLazyCompilation)
5438 BOOL_ACCESSORS(SharedFunctionInfo,
5440 allows_lazy_compilation_without_context,
5441 kAllowLazyCompilationWithoutContext)
5442 BOOL_ACCESSORS(SharedFunctionInfo,
5446 BOOL_ACCESSORS(SharedFunctionInfo,
5448 has_duplicate_parameters,
5449 kHasDuplicateParameters)
5450 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5453 #if V8_HOST_ARCH_32_BIT
5454 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5455 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5456 kFormalParameterCountOffset)
5457 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5458 kExpectedNofPropertiesOffset)
5459 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5460 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5461 kStartPositionAndTypeOffset)
5462 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5463 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5464 kFunctionTokenPositionOffset)
5465 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5466 kCompilerHintsOffset)
5467 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5468 kOptCountAndBailoutReasonOffset)
5469 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5470 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5471 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5475 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5476 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5477 int holder::name() const { \
5478 int value = READ_INT_FIELD(this, offset); \
5479 DCHECK(kHeapObjectTag == 1); \
5480 DCHECK((value & kHeapObjectTag) == 0); \
5481 return value >> 1; \
5483 void holder::set_##name(int value) { \
5484 DCHECK(kHeapObjectTag == 1); \
5485 DCHECK((value & 0xC0000000) == 0xC0000000 || \
5486 (value & 0xC0000000) == 0x0); \
5487 WRITE_INT_FIELD(this, \
5489 (value << 1) & ~kHeapObjectTag); \
5492 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5493 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5494 INT_ACCESSORS(holder, name, offset)
5497 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5498 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5499 formal_parameter_count,
5500 kFormalParameterCountOffset)
5502 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5503 expected_nof_properties,
5504 kExpectedNofPropertiesOffset)
5505 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5507 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5508 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5509 start_position_and_type,
5510 kStartPositionAndTypeOffset)
5512 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5513 function_token_position,
5514 kFunctionTokenPositionOffset)
5515 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5517 kCompilerHintsOffset)
5519 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5520 opt_count_and_bailout_reason,
5521 kOptCountAndBailoutReasonOffset)
5522 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5524 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5526 kAstNodeCountOffset)
5527 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5529 kProfilerTicksOffset)
5534 BOOL_GETTER(SharedFunctionInfo,
5536 optimization_disabled,
5537 kOptimizationDisabled)
5540 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5541 set_compiler_hints(BooleanBit::set(compiler_hints(),
5542 kOptimizationDisabled,
5544 // If disabling optimizations we reflect that in the code object so
5545 // it will not be counted as optimizable code.
5546 if ((code()->kind() == Code::FUNCTION) && disable) {
5547 code()->set_optimizable(false);
5552 StrictMode SharedFunctionInfo::strict_mode() {
5553 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5558 void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5559 // We only allow mode transitions from sloppy to strict.
5560 DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5561 int hints = compiler_hints();
5562 hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5563 set_compiler_hints(hints);
5567 FunctionKind SharedFunctionInfo::kind() {
5568 return FunctionKindBits::decode(compiler_hints());
5572 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5573 DCHECK(IsValidFunctionKind(kind));
5574 int hints = compiler_hints();
5575 hints = FunctionKindBits::update(hints, kind);
5576 set_compiler_hints(hints);
5580 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5581 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5583 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5584 name_should_print_as_anonymous,
5585 kNameShouldPrintAsAnonymous)
5586 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5587 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5588 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5589 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5590 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5591 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5592 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5593 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
5596 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5597 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5599 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5601 bool Script::HasValidSource() {
5602 Object* src = this->source();
5603 if (!src->IsString()) return true;
5604 String* src_str = String::cast(src);
5605 if (!StringShape(src_str).IsExternal()) return true;
5606 if (src_str->IsOneByteRepresentation()) {
5607 return ExternalOneByteString::cast(src)->resource() != NULL;
5608 } else if (src_str->IsTwoByteRepresentation()) {
5609 return ExternalTwoByteString::cast(src)->resource() != NULL;
5615 void SharedFunctionInfo::DontAdaptArguments() {
5616 DCHECK(code()->kind() == Code::BUILTIN);
5617 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
5621 int SharedFunctionInfo::start_position() const {
5622 return start_position_and_type() >> kStartPositionShift;
5626 void SharedFunctionInfo::set_start_position(int start_position) {
5627 set_start_position_and_type((start_position << kStartPositionShift)
5628 | (start_position_and_type() & ~kStartPositionMask));
5632 Code* SharedFunctionInfo::code() const {
5633 return Code::cast(READ_FIELD(this, kCodeOffset));
5637 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5638 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5639 WRITE_FIELD(this, kCodeOffset, value);
5640 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5644 void SharedFunctionInfo::ReplaceCode(Code* value) {
5645 // If the GC metadata field is already used then the function was
5646 // enqueued as a code flushing candidate and we remove it now.
5647 if (code()->gc_metadata() != NULL) {
5648 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5649 flusher->EvictCandidate(this);
5652 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5658 ScopeInfo* SharedFunctionInfo::scope_info() const {
5659 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5663 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5664 WriteBarrierMode mode) {
5665 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5666 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5669 reinterpret_cast<Object*>(value),
5674 bool SharedFunctionInfo::is_compiled() {
5675 return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
5679 bool SharedFunctionInfo::IsApiFunction() {
5680 return function_data()->IsFunctionTemplateInfo();
5684 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5685 DCHECK(IsApiFunction());
5686 return FunctionTemplateInfo::cast(function_data());
5690 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5691 return function_data()->IsSmi();
5695 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5696 DCHECK(HasBuiltinFunctionId());
5697 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5701 int SharedFunctionInfo::ic_age() {
5702 return ICAgeBits::decode(counters());
5706 void SharedFunctionInfo::set_ic_age(int ic_age) {
5707 set_counters(ICAgeBits::update(counters(), ic_age));
5711 int SharedFunctionInfo::deopt_count() {
5712 return DeoptCountBits::decode(counters());
5716 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5717 set_counters(DeoptCountBits::update(counters(), deopt_count));
5721 void SharedFunctionInfo::increment_deopt_count() {
5722 int value = counters();
5723 int deopt_count = DeoptCountBits::decode(value);
5724 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5725 set_counters(DeoptCountBits::update(value, deopt_count));
5729 int SharedFunctionInfo::opt_reenable_tries() {
5730 return OptReenableTriesBits::decode(counters());
5734 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5735 set_counters(OptReenableTriesBits::update(counters(), tries));
5739 int SharedFunctionInfo::opt_count() {
5740 return OptCountBits::decode(opt_count_and_bailout_reason());
5744 void SharedFunctionInfo::set_opt_count(int opt_count) {
5745 set_opt_count_and_bailout_reason(
5746 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
5750 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
5751 BailoutReason reason = static_cast<BailoutReason>(
5752 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5757 bool SharedFunctionInfo::has_deoptimization_support() {
5758 Code* code = this->code();
5759 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
5763 void SharedFunctionInfo::TryReenableOptimization() {
5764 int tries = opt_reenable_tries();
5765 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
5766 // We reenable optimization whenever the number of tries is a large
5767 // enough power of 2.
5768 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5769 set_optimization_disabled(false);
5772 code()->set_optimizable(true);
5777 bool JSFunction::IsBuiltin() {
5778 return context()->global_object()->IsJSBuiltinsObject();
5782 bool JSFunction::IsFromNativeScript() {
5783 Object* script = shared()->script();
5784 bool native = script->IsScript() &&
5785 Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
5786 DCHECK(!IsBuiltin() || native); // All builtins are also native.
5791 bool JSFunction::IsFromExtensionScript() {
5792 Object* script = shared()->script();
5793 return script->IsScript() &&
5794 Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
5798 bool JSFunction::NeedsArgumentsAdaption() {
5799 return shared()->formal_parameter_count() !=
5800 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5804 bool JSFunction::IsOptimized() {
5805 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5809 bool JSFunction::IsOptimizable() {
5810 return code()->kind() == Code::FUNCTION && code()->optimizable();
5814 bool JSFunction::IsMarkedForOptimization() {
5815 return code() == GetIsolate()->builtins()->builtin(
5816 Builtins::kCompileOptimized);
5820 bool JSFunction::IsMarkedForConcurrentOptimization() {
5821 return code() == GetIsolate()->builtins()->builtin(
5822 Builtins::kCompileOptimizedConcurrent);
5826 bool JSFunction::IsInOptimizationQueue() {
5827 return code() == GetIsolate()->builtins()->builtin(
5828 Builtins::kInOptimizationQueue);
5832 bool JSFunction::IsInobjectSlackTrackingInProgress() {
5833 return has_initial_map() &&
5834 initial_map()->construction_count() != JSFunction::kNoSlackTracking;
5838 Code* JSFunction::code() {
5840 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
5844 void JSFunction::set_code(Code* value) {
5845 DCHECK(!GetHeap()->InNewSpace(value));
5846 Address entry = value->entry();
5847 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5848 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5850 HeapObject::RawField(this, kCodeEntryOffset),
5855 void JSFunction::set_code_no_write_barrier(Code* value) {
5856 DCHECK(!GetHeap()->InNewSpace(value));
5857 Address entry = value->entry();
5858 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5862 void JSFunction::ReplaceCode(Code* code) {
5863 bool was_optimized = IsOptimized();
5864 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5866 if (was_optimized && is_optimized) {
5867 shared()->EvictFromOptimizedCodeMap(this->code(),
5868 "Replacing with another optimized code");
5873 // Add/remove the function from the list of optimized functions for this
5874 // context based on the state change.
5875 if (!was_optimized && is_optimized) {
5876 context()->native_context()->AddOptimizedFunction(this);
5878 if (was_optimized && !is_optimized) {
5879 // TODO(titzer): linear in the number of optimized functions; fix!
5880 context()->native_context()->RemoveOptimizedFunction(this);
5885 Context* JSFunction::context() {
5886 return Context::cast(READ_FIELD(this, kContextOffset));
5890 JSObject* JSFunction::global_proxy() {
5891 return context()->global_proxy();
5895 void JSFunction::set_context(Object* value) {
5896 DCHECK(value->IsUndefined() || value->IsContext());
5897 WRITE_FIELD(this, kContextOffset, value);
5898 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5901 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5902 kPrototypeOrInitialMapOffset)
5905 Map* JSFunction::initial_map() {
5906 return Map::cast(prototype_or_initial_map());
5910 bool JSFunction::has_initial_map() {
5911 return prototype_or_initial_map()->IsMap();
5915 bool JSFunction::has_instance_prototype() {
5916 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5920 bool JSFunction::has_prototype() {
5921 return map()->has_non_instance_prototype() || has_instance_prototype();
5925 Object* JSFunction::instance_prototype() {
5926 DCHECK(has_instance_prototype());
5927 if (has_initial_map()) return initial_map()->prototype();
5928 // When there is no initial map and the prototype is a JSObject, the
5929 // initial map field is used for the prototype field.
5930 return prototype_or_initial_map();
5934 Object* JSFunction::prototype() {
5935 DCHECK(has_prototype());
5936 // If the function's prototype property has been set to a non-JSObject
5937 // value, that value is stored in the constructor field of the map.
5938 if (map()->has_non_instance_prototype()) return map()->constructor();
5939 return instance_prototype();
5943 bool JSFunction::should_have_prototype() {
5944 return map()->function_with_prototype();
5948 bool JSFunction::is_compiled() {
5949 return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
5953 FixedArray* JSFunction::literals() {
5954 DCHECK(!shared()->bound());
5955 return literals_or_bindings();
5959 void JSFunction::set_literals(FixedArray* literals) {
5960 DCHECK(!shared()->bound());
5961 set_literals_or_bindings(literals);
5965 FixedArray* JSFunction::function_bindings() {
5966 DCHECK(shared()->bound());
5967 return literals_or_bindings();
5971 void JSFunction::set_function_bindings(FixedArray* bindings) {
5972 DCHECK(shared()->bound());
5973 // Bound function literal may be initialized to the empty fixed array
5974 // before the bindings are set.
5975 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
5976 bindings->map() == GetHeap()->fixed_cow_array_map());
5977 set_literals_or_bindings(bindings);
5981 int JSFunction::NumberOfLiterals() {
5982 DCHECK(!shared()->bound());
5983 return literals()->length();
5987 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
5988 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
5989 return READ_FIELD(this, OffsetOfFunctionWithId(id));
5993 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
5995 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
5996 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
5997 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
6001 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
6002 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6003 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
6007 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
6009 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6010 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
6011 DCHECK(!GetHeap()->InNewSpace(value));
6015 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6016 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6017 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6018 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6021 void JSProxy::InitializeBody(int object_size, Object* value) {
6022 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6023 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6024 WRITE_FIELD(this, offset, value);
6029 ACCESSORS(JSCollection, table, Object, kTableOffset)
6032 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6033 template<class Derived, class TableType> \
6034 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6035 return type::cast(READ_FIELD(this, offset)); \
6037 template<class Derived, class TableType> \
6038 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6039 type* value, WriteBarrierMode mode) { \
6040 WRITE_FIELD(this, offset, value); \
6041 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6044 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6045 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6046 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6048 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6051 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6052 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6055 Address Foreign::foreign_address() {
6056 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6060 void Foreign::set_foreign_address(Address value) {
6061 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6065 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6066 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6067 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6068 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6069 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6070 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
6072 bool JSGeneratorObject::is_suspended() {
6073 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6074 DCHECK_EQ(kGeneratorClosed, 0);
6075 return continuation() > 0;
6078 bool JSGeneratorObject::is_closed() {
6079 return continuation() == kGeneratorClosed;
6082 bool JSGeneratorObject::is_executing() {
6083 return continuation() == kGeneratorExecuting;
6086 ACCESSORS(JSModule, context, Object, kContextOffset)
6087 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6090 ACCESSORS(JSValue, value, Object, kValueOffset)
6093 HeapNumber* HeapNumber::cast(Object* object) {
6094 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6095 return reinterpret_cast<HeapNumber*>(object);
6099 const HeapNumber* HeapNumber::cast(const Object* object) {
6100 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6101 return reinterpret_cast<const HeapNumber*>(object);
6105 ACCESSORS(JSDate, value, Object, kValueOffset)
6106 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6107 ACCESSORS(JSDate, year, Object, kYearOffset)
6108 ACCESSORS(JSDate, month, Object, kMonthOffset)
6109 ACCESSORS(JSDate, day, Object, kDayOffset)
6110 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6111 ACCESSORS(JSDate, hour, Object, kHourOffset)
6112 ACCESSORS(JSDate, min, Object, kMinOffset)
6113 ACCESSORS(JSDate, sec, Object, kSecOffset)
6116 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6117 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6118 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6119 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6120 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6121 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6124 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6125 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6126 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6127 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6128 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6129 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6130 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6133 void Code::WipeOutHeader() {
6134 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6135 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6136 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6137 WRITE_FIELD(this, kConstantPoolOffset, NULL);
6138 // Do not wipe out major/minor keys on a code stub or IC
6139 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6140 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6145 Object* Code::type_feedback_info() {
6146 DCHECK(kind() == FUNCTION);
6147 return raw_type_feedback_info();
6151 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6152 DCHECK(kind() == FUNCTION);
6153 set_raw_type_feedback_info(value, mode);
6154 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6159 uint32_t Code::stub_key() {
6160 DCHECK(IsCodeStubOrIC());
6161 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6162 return static_cast<uint32_t>(smi_key->value());
6166 void Code::set_stub_key(uint32_t key) {
6167 DCHECK(IsCodeStubOrIC());
6168 set_raw_type_feedback_info(Smi::FromInt(key));
6172 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6173 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6176 byte* Code::instruction_start() {
6177 return FIELD_ADDR(this, kHeaderSize);
6181 byte* Code::instruction_end() {
6182 return instruction_start() + instruction_size();
6186 int Code::body_size() {
6187 return RoundUp(instruction_size(), kObjectAlignment);
6191 ByteArray* Code::unchecked_relocation_info() {
6192 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6196 byte* Code::relocation_start() {
6197 return unchecked_relocation_info()->GetDataStartAddress();
6201 int Code::relocation_size() {
6202 return unchecked_relocation_info()->length();
6206 byte* Code::entry() {
6207 return instruction_start();
6211 bool Code::contains(byte* inner_pointer) {
6212 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6216 ACCESSORS(JSArray, length, Object, kLengthOffset)
6219 void* JSArrayBuffer::backing_store() const {
6220 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6221 return reinterpret_cast<void*>(ptr);
6225 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6226 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6227 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6231 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6232 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6235 bool JSArrayBuffer::is_external() {
6236 return BooleanBit::get(flag(), kIsExternalBit);
6240 void JSArrayBuffer::set_is_external(bool value) {
6241 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6245 bool JSArrayBuffer::should_be_freed() {
6246 return BooleanBit::get(flag(), kShouldBeFreed);
6250 void JSArrayBuffer::set_should_be_freed(bool value) {
6251 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6255 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6256 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6259 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6260 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6261 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6262 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6263 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
6265 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6268 JSRegExp::Type JSRegExp::TypeTag() {
6269 Object* data = this->data();
6270 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6271 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6272 return static_cast<JSRegExp::Type>(smi->value());
6276 int JSRegExp::CaptureCount() {
6277 switch (TypeTag()) {
6281 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6289 JSRegExp::Flags JSRegExp::GetFlags() {
6290 DCHECK(this->data()->IsFixedArray());
6291 Object* data = this->data();
6292 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6293 return Flags(smi->value());
6297 String* JSRegExp::Pattern() {
6298 DCHECK(this->data()->IsFixedArray());
6299 Object* data = this->data();
6300 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
6305 Object* JSRegExp::DataAt(int index) {
6306 DCHECK(TypeTag() != NOT_COMPILED);
6307 return FixedArray::cast(data())->get(index);
6311 void JSRegExp::SetDataAt(int index, Object* value) {
6312 DCHECK(TypeTag() != NOT_COMPILED);
6313 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6314 FixedArray::cast(data())->set(index, value);
6318 ElementsKind JSObject::GetElementsKind() {
6319 ElementsKind kind = map()->elements_kind();
6321 FixedArrayBase* fixed_array =
6322 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6324 // If a GC was caused while constructing this object, the elements
6325 // pointer may point to a one pointer filler map.
6326 if (ElementsAreSafeToExamine()) {
6327 Map* map = fixed_array->map();
6328 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6329 (map == GetHeap()->fixed_array_map() ||
6330 map == GetHeap()->fixed_cow_array_map())) ||
6331 (IsFastDoubleElementsKind(kind) &&
6332 (fixed_array->IsFixedDoubleArray() ||
6333 fixed_array == GetHeap()->empty_fixed_array())) ||
6334 (kind == DICTIONARY_ELEMENTS &&
6335 fixed_array->IsFixedArray() &&
6336 fixed_array->IsDictionary()) ||
6337 (kind > DICTIONARY_ELEMENTS));
6338 DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6339 (elements()->IsFixedArray() && elements()->length() >= 2));
6346 ElementsAccessor* JSObject::GetElementsAccessor() {
6347 return ElementsAccessor::ForKind(GetElementsKind());
6351 bool JSObject::HasFastObjectElements() {
6352 return IsFastObjectElementsKind(GetElementsKind());
6356 bool JSObject::HasFastSmiElements() {
6357 return IsFastSmiElementsKind(GetElementsKind());
6361 bool JSObject::HasFastSmiOrObjectElements() {
6362 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6366 bool JSObject::HasFastDoubleElements() {
6367 return IsFastDoubleElementsKind(GetElementsKind());
6371 bool JSObject::HasFastHoleyElements() {
6372 return IsFastHoleyElementsKind(GetElementsKind());
6376 bool JSObject::HasFastElements() {
6377 return IsFastElementsKind(GetElementsKind());
6381 bool JSObject::HasDictionaryElements() {
6382 return GetElementsKind() == DICTIONARY_ELEMENTS;
6386 bool JSObject::HasSloppyArgumentsElements() {
6387 return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
6391 bool JSObject::HasExternalArrayElements() {
6392 HeapObject* array = elements();
6393 DCHECK(array != NULL);
6394 return array->IsExternalArray();
6398 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6399 bool JSObject::HasExternal##Type##Elements() { \
6400 HeapObject* array = elements(); \
6401 DCHECK(array != NULL); \
6402 if (!array->IsHeapObject()) \
6404 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6407 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6409 #undef EXTERNAL_ELEMENTS_CHECK
6412 bool JSObject::HasFixedTypedArrayElements() {
6413 HeapObject* array = elements();
6414 DCHECK(array != NULL);
6415 return array->IsFixedTypedArrayBase();
6419 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6420 bool JSObject::HasFixed##Type##Elements() { \
6421 HeapObject* array = elements(); \
6422 DCHECK(array != NULL); \
6423 if (!array->IsHeapObject()) \
6425 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6428 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6430 #undef FIXED_TYPED_ELEMENTS_CHECK
6433 bool JSObject::HasNamedInterceptor() {
6434 return map()->has_named_interceptor();
6438 bool JSObject::HasIndexedInterceptor() {
6439 return map()->has_indexed_interceptor();
6443 NameDictionary* JSObject::property_dictionary() {
6444 DCHECK(!HasFastProperties());
6445 return NameDictionary::cast(properties());
6449 SeededNumberDictionary* JSObject::element_dictionary() {
6450 DCHECK(HasDictionaryElements());
6451 return SeededNumberDictionary::cast(elements());
6455 bool Name::IsHashFieldComputed(uint32_t field) {
6456 return (field & kHashNotComputedMask) == 0;
6460 bool Name::HasHashCode() {
6461 return IsHashFieldComputed(hash_field());
6465 uint32_t Name::Hash() {
6466 // Fast case: has hash code already been computed?
6467 uint32_t field = hash_field();
6468 if (IsHashFieldComputed(field)) return field >> kHashShift;
6469 // Slow case: compute hash code and set it. Has to be a string.
6470 return String::cast(this)->ComputeAndSetHash();
6473 bool Name::IsOwn() {
6474 return this->IsSymbol() && Symbol::cast(this)->is_own();
6478 StringHasher::StringHasher(int length, uint32_t seed)
6480 raw_running_hash_(seed),
6482 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6483 is_first_char_(true) {
6484 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6488 bool StringHasher::has_trivial_hash() {
6489 return length_ > String::kMaxHashCalcLength;
6493 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6495 running_hash += (running_hash << 10);
6496 running_hash ^= (running_hash >> 6);
6497 return running_hash;
6501 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6502 running_hash += (running_hash << 3);
6503 running_hash ^= (running_hash >> 11);
6504 running_hash += (running_hash << 15);
6505 if ((running_hash & String::kHashBitMask) == 0) {
6508 return running_hash;
6512 void StringHasher::AddCharacter(uint16_t c) {
6513 // Use the Jenkins one-at-a-time hash function to update the hash
6514 // for the given character.
6515 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6519 bool StringHasher::UpdateIndex(uint16_t c) {
6520 DCHECK(is_array_index_);
6521 if (c < '0' || c > '9') {
6522 is_array_index_ = false;
6526 if (is_first_char_) {
6527 is_first_char_ = false;
6528 if (c == '0' && length_ > 1) {
6529 is_array_index_ = false;
6533 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6534 is_array_index_ = false;
6537 array_index_ = array_index_ * 10 + d;
6542 template<typename Char>
6543 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6544 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6546 if (is_array_index_) {
6547 for (; i < length; i++) {
6548 AddCharacter(chars[i]);
6549 if (!UpdateIndex(chars[i])) {
6555 for (; i < length; i++) {
6556 DCHECK(!is_array_index_);
6557 AddCharacter(chars[i]);
6562 template <typename schar>
6563 uint32_t StringHasher::HashSequentialString(const schar* chars,
6566 StringHasher hasher(length, seed);
6567 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6568 return hasher.GetHashField();
6572 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6573 IteratingStringHasher hasher(string->length(), seed);
6575 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6576 ConsString* cons_string = String::VisitFlat(&hasher, string);
6577 // The string was flat.
6578 if (cons_string == NULL) return hasher.GetHashField();
6579 // This is a ConsString, iterate across it.
6580 ConsStringIteratorOp op(cons_string);
6582 while (NULL != (string = op.Next(&offset))) {
6583 String::VisitFlat(&hasher, string, offset);
6585 return hasher.GetHashField();
6589 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6591 AddCharacters(chars, length);
6595 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6597 AddCharacters(chars, length);
6601 bool Name::AsArrayIndex(uint32_t* index) {
6602 return IsString() && String::cast(this)->AsArrayIndex(index);
6606 bool String::AsArrayIndex(uint32_t* index) {
6607 uint32_t field = hash_field();
6608 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6611 return SlowAsArrayIndex(index);
6615 void String::SetForwardedInternalizedString(String* canonical) {
6616 DCHECK(IsInternalizedString());
6617 DCHECK(HasHashCode());
6618 if (canonical == this) return; // No need to forward.
6619 DCHECK(SlowEquals(canonical));
6620 DCHECK(canonical->IsInternalizedString());
6621 DCHECK(canonical->HasHashCode());
6622 WRITE_FIELD(this, kHashFieldOffset, canonical);
6623 // Setting the hash field to a tagged value sets the LSB, causing the hash
6624 // code to be interpreted as uninitialized. We use this fact to recognize
6625 // that we have a forwarded string.
6626 DCHECK(!HasHashCode());
6630 String* String::GetForwardedInternalizedString() {
6631 DCHECK(IsInternalizedString());
6632 if (HasHashCode()) return this;
6633 String* canonical = String::cast(READ_FIELD(this, kHashFieldOffset));
6634 DCHECK(canonical->IsInternalizedString());
6635 DCHECK(SlowEquals(canonical));
6636 DCHECK(canonical->HasHashCode());
6641 Object* JSReceiver::GetConstructor() {
6642 return map()->constructor();
6646 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
6647 Handle<Name> name) {
6648 if (object->IsJSProxy()) {
6649 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6650 return JSProxy::HasPropertyWithHandler(proxy, name);
6652 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
6653 if (!result.has_value) return Maybe<bool>();
6654 return maybe(result.value != ABSENT);
6658 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
6659 Handle<Name> name) {
6660 if (object->IsJSProxy()) {
6661 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6662 return JSProxy::HasPropertyWithHandler(proxy, name);
6664 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
6665 if (!result.has_value) return Maybe<bool>();
6666 return maybe(result.value != ABSENT);
6670 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
6671 Handle<JSReceiver> object, Handle<Name> key) {
6673 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6674 return GetElementAttribute(object, index);
6676 LookupIterator it(object, key);
6677 return GetPropertyAttributes(&it);
6681 Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
6682 Handle<JSReceiver> object, uint32_t index) {
6683 if (object->IsJSProxy()) {
6684 return JSProxy::GetElementAttributeWithHandler(
6685 Handle<JSProxy>::cast(object), object, index);
6687 return JSObject::GetElementAttributeWithReceiver(
6688 Handle<JSObject>::cast(object), object, index, true);
6692 bool JSGlobalObject::IsDetached() {
6693 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
6697 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
6698 const PrototypeIterator iter(this->GetIsolate(),
6699 const_cast<JSGlobalProxy*>(this));
6700 return iter.GetCurrent() != global;
6704 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6705 return object->IsJSProxy()
6706 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6707 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6711 Object* JSReceiver::GetIdentityHash() {
6713 ? JSProxy::cast(this)->GetIdentityHash()
6714 : JSObject::cast(this)->GetIdentityHash();
6718 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6719 if (object->IsJSProxy()) {
6720 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6721 return JSProxy::HasElementWithHandler(proxy, index);
6723 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6724 Handle<JSObject>::cast(object), object, index, true);
6725 if (!result.has_value) return Maybe<bool>();
6726 return maybe(result.value != ABSENT);
6730 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
6732 if (object->IsJSProxy()) {
6733 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6734 return JSProxy::HasElementWithHandler(proxy, index);
6736 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6737 Handle<JSObject>::cast(object), object, index, false);
6738 if (!result.has_value) return Maybe<bool>();
6739 return maybe(result.value != ABSENT);
6743 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
6744 Handle<JSReceiver> object, uint32_t index) {
6745 if (object->IsJSProxy()) {
6746 return JSProxy::GetElementAttributeWithHandler(
6747 Handle<JSProxy>::cast(object), object, index);
6749 return JSObject::GetElementAttributeWithReceiver(
6750 Handle<JSObject>::cast(object), object, index, false);
6754 bool AccessorInfo::all_can_read() {
6755 return BooleanBit::get(flag(), kAllCanReadBit);
6759 void AccessorInfo::set_all_can_read(bool value) {
6760 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6764 bool AccessorInfo::all_can_write() {
6765 return BooleanBit::get(flag(), kAllCanWriteBit);
6769 void AccessorInfo::set_all_can_write(bool value) {
6770 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6774 PropertyAttributes AccessorInfo::property_attributes() {
6775 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6779 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6780 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6784 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6785 if (!HasExpectedReceiverType()) return true;
6786 if (!receiver->IsJSObject()) return false;
6787 return FunctionTemplateInfo::cast(expected_receiver_type())
6788 ->IsTemplateFor(JSObject::cast(receiver)->map());
6792 void ExecutableAccessorInfo::clear_setter() {
6793 set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
6797 template<typename Derived, typename Shape, typename Key>
6798 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6800 Handle<Object> value) {
6801 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6805 template<typename Derived, typename Shape, typename Key>
6806 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6808 Handle<Object> value,
6809 PropertyDetails details) {
6810 DCHECK(!key->IsName() ||
6811 details.IsDeleted() ||
6812 details.dictionary_index() > 0);
6813 int index = DerivedHashTable::EntryToIndex(entry);
6814 DisallowHeapAllocation no_gc;
6815 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
6816 FixedArray::set(index, *key, mode);
6817 FixedArray::set(index+1, *value, mode);
6818 FixedArray::set(index+2, details.AsSmi());
6822 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6823 DCHECK(other->IsNumber());
6824 return key == static_cast<uint32_t>(other->Number());
// Unseeded variant: hash the integer key with a fixed zero seed.
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
6833 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6835 DCHECK(other->IsNumber());
6836 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
// Seeded variant: hash the integer key with the isolate's hash seed.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
// Hashes a stored numeric entry with |seed|; |key| itself is unused.
// NOTE(review): the signature's continuation lines (presumably
// `Object* other, uint32_t seed`) are elided in this excerpt.
uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
// Boxes the uint32 key as a heap Number for storage in the table.
Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
6858 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
6859 // We know that all entries in a hash table had their hash keys created.
6860 // Use that knowledge to have fast failure.
6861 if (key->Hash() != Name::cast(other)->Hash()) return false;
6862 return key->Equals(Name::cast(other));
// Returns the name's hash (presumably key->Hash(); the function body is
// elided in this excerpt).
uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
// Hash of a stored entry: names cache their own hash. |key| is unused.
uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
// Converts a key to its stored handle form; keys must already be unique
// names. NOTE(review): the parameter continuation and return statement are
// elided in this excerpt.
Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
  DCHECK(key->IsUniqueName());
// Thin forwarder to the generic dictionary implementation that reassigns
// enumeration indices to all entries of |dictionary|.
void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
// Keys are compared with SameValue semantics.
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
6894 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
6895 return Smi::cast(key->GetHash())->value();
6899 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
6901 return Smi::cast(other->GetHash())->value();
// Identity conversion — any object is already a valid stored key.
// NOTE(review): the return statement (presumably `return key;`) is elided
// in this excerpt.
Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
// Thin forwarder: delegates shrinking of |table| to the hash-table base
// implementation.
Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
// Weak-table keys are compared with SameValue semantics.
template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
6923 template <int entrysize>
6924 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
6925 intptr_t hash = reinterpret_cast<intptr_t>(*key);
6926 return (uint32_t)(hash & 0xFFFFFFFF);
6930 template <int entrysize>
6931 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
6933 intptr_t hash = reinterpret_cast<intptr_t>(other);
6934 return (uint32_t)(hash & 0xFFFFFFFF);
// Identity conversion for weak-table keys. NOTE(review): the return
// statement (presumably `return key;`) is elided in this excerpt.
template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
// Resets this map's code cache to the canonical empty fixed array using a
// raw field write (no barrier), which is only safe because the empty array
// is never in new space (checked below).
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6955 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
6956 DCHECK(array->HasFastSmiOrObjectElements());
6957 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
6958 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6959 if (elts->length() < required_size) {
6960 // Doubling in size would be overkill, but leave some slack to avoid
6961 // constantly growing.
6962 Expand(array, required_size + (required_size >> 3));
6963 // It's a performance benefit to keep a frequently used array in new-space.
6964 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6965 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6966 // Expand will allocate a new backing store in new space even if the size
6967 // we asked for isn't larger than what we had before.
6968 Expand(array, required_size);
// Smi overload of set_length: a Smi is not a heap pointer, so the write
// barrier can be skipped.
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
// True when the backing store is an in-heap (fixed or fixed-double) array;
// external array elements cannot have their length changed. NOTE(review):
// the return statement (presumably `return result;`) is elided in this
// excerpt.
bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasExternalArrayElements());
// Installs |storage| as |array|'s backing store and syncs the length field.
void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  // The store's map must agree with the elements kind: a fixed-double-array
  // backs fast double kinds; anything else backs fast object kinds, and fast
  // smi kinds additionally require smi-only contents.
  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
7002 int TypeFeedbackInfo::ic_total_count() {
7003 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7004 return ICTotalCountField::decode(current);
7008 void TypeFeedbackInfo::set_ic_total_count(int count) {
7009 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7010 value = ICTotalCountField::update(value,
7011 ICTotalCountField::decode(count));
7012 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7016 int TypeFeedbackInfo::ic_with_type_info_count() {
7017 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7018 return ICsWithTypeInfoCountField::decode(current);
7022 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7023 if (delta == 0) return;
7024 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7025 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7026 // We can get negative count here when the type-feedback info is
7027 // shared between two code objects. The can only happen when
7028 // the debugger made a shallow copy of code object (see Heap::CopyCode).
7029 // Since we do not optimize when the debugger is active, we can skip
7030 // this counter update.
7031 if (new_count >= 0) {
7032 new_count &= ICsWithTypeInfoCountField::kMask;
7033 value = ICsWithTypeInfoCountField::update(value, new_count);
7034 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
// Storage slot 3 holds the generic-IC count directly as a Smi (no bit
// field packing).
int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7044 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7045 if (delta == 0) return;
7046 int new_count = ic_generic_count() + delta;
7047 if (new_count >= 0) {
7048 new_count &= ~Smi::kMinValue;
7049 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7054 void TypeFeedbackInfo::initialize_storage() {
7055 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7056 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7057 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7061 void TypeFeedbackInfo::change_own_type_change_checksum() {
7062 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7063 int checksum = OwnTypeChangeChecksum::decode(value);
7064 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7065 value = OwnTypeChangeChecksum::update(value, checksum);
7066 // Ensure packed bit field is in Smi range.
7067 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7068 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7069 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7073 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7074 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7075 int mask = (1 << kTypeChangeChecksumBits) - 1;
7076 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7077 // Ensure packed bit field is in Smi range.
7078 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7079 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7080 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7084 int TypeFeedbackInfo::own_type_change_checksum() {
7085 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7086 return OwnTypeChangeChecksum::decode(value);
7090 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7091 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7092 int mask = (1 << kTypeChangeChecksumBits) - 1;
7093 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
// Generates the Smi getter/setter pair for AliasedArgumentsEntry's
// aliased_context_slot field at offset kAliasedContextSlot.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
// Pushes this Relocatable onto the isolate's LIFO chain of Relocatables so
// it can be found via relocatable_top(). NOTE(review): the constructor's
// member-initializer line (presumably `: isolate_(isolate)`) is elided in
// this excerpt.
Relocatable::Relocatable(Isolate* isolate) {
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
// Pops this Relocatable from the isolate's chain; destruction must occur in
// strict LIFO order (checked below).
Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
// A JSObject's body size is exactly its map's instance size; |object| is
// unused.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
7118 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7119 v->VisitExternalReference(
7120 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7124 template<typename StaticVisitor>
7125 void Foreign::ForeignIterateBody() {
7126 StaticVisitor::VisitExternalReference(
7127 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7131 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7132 typedef v8::String::ExternalOneByteStringResource Resource;
7133 v->VisitExternalOneByteString(
7134 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7138 template <typename StaticVisitor>
7139 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7140 typedef v8::String::ExternalOneByteStringResource Resource;
7141 StaticVisitor::VisitExternalOneByteString(
7142 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7146 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7147 typedef v8::String::ExternalStringResource Resource;
7148 v->VisitExternalTwoByteString(
7149 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7153 template<typename StaticVisitor>
7154 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7155 typedef v8::String::ExternalStringResource Resource;
7156 StaticVisitor::VisitExternalTwoByteString(
7157 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
// Visits every tagged slot in [start_offset, end_offset) of the object.
// NOTE(review): the parameter lines (presumably `HeapObject* obj,
// ObjectVisitor* v`) are elided in this excerpt.
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
// Visits every tagged slot in [start_offset, object_size) of the object.
// NOTE(review): the parameter lines (presumably `HeapObject* obj,
// int object_size, ObjectVisitor* v`) are elided in this excerpt.
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
// Returns the key at the iterator's current index. The slot must not hold
// the hole (DCHECKed). NOTE(review): the return statement (presumably
// `return key;`) is elided in this excerpt.
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
// Writes the current key into slot 0 of |array| (set entries have a single
// value).
void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
// Writes the current entry into |array|: key in slot 0, value in slot 1.
void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
// Returns the value at the iterator's current index. The slot must not hold
// the hole (DCHECKed). NOTE(review): the return statement (presumably
// `return value;`) is elided in this excerpt.
Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
7210 #undef CAST_ACCESSOR
7211 #undef INT_ACCESSORS
7213 #undef ACCESSORS_TO_SMI
7214 #undef SMI_ACCESSORS
7215 #undef SYNCHRONIZED_SMI_ACCESSORS
7216 #undef NOBARRIER_SMI_ACCESSORS
7218 #undef BOOL_ACCESSORS
7220 #undef FIELD_ADDR_CONST
7222 #undef NOBARRIER_READ_FIELD
7224 #undef NOBARRIER_WRITE_FIELD
7225 #undef WRITE_BARRIER
7226 #undef CONDITIONAL_WRITE_BARRIER
7227 #undef READ_DOUBLE_FIELD
7228 #undef WRITE_DOUBLE_FIELD
7229 #undef READ_INT_FIELD
7230 #undef WRITE_INT_FIELD
7231 #undef READ_INTPTR_FIELD
7232 #undef WRITE_INTPTR_FIELD
7233 #undef READ_UINT32_FIELD
7234 #undef WRITE_UINT32_FIELD
7235 #undef READ_SHORT_FIELD
7236 #undef WRITE_SHORT_FIELD
7237 #undef READ_BYTE_FIELD
7238 #undef WRITE_BYTE_FIELD
7239 #undef NOBARRIER_READ_BYTE_FIELD
7240 #undef NOBARRIER_WRITE_BYTE_FIELD
7242 } } // namespace v8::internal
7244 #endif // V8_OBJECTS_INL_H_