1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/contexts.h"
17 #include "src/conversions-inl.h"
18 #include "src/elements.h"
19 #include "src/factory.h"
20 #include "src/field-index-inl.h"
21 #include "src/heap/heap-inl.h"
22 #include "src/heap/heap.h"
23 #include "src/heap/incremental-marking.h"
24 #include "src/heap/objects-visiting.h"
25 #include "src/heap/spaces.h"
26 #include "src/heap/store-buffer.h"
27 #include "src/isolate.h"
28 #include "src/lookup.h"
29 #include "src/objects.h"
30 #include "src/property.h"
31 #include "src/prototype.h"
32 #include "src/transitions-inl.h"
33 #include "src/v8memory.h"
38 PropertyDetails::PropertyDetails(Smi* smi) {
39 value_ = smi->value();
43 Smi* PropertyDetails::AsSmi() const {
44 // Ensure the upper 2 bits have the same value by sign extending it. This is
45 // necessary to be able to use the 31st bit of the property details.
46 int value = value_ << 1;
47 return Smi::FromInt(value >> 1);
51 PropertyDetails PropertyDetails::AsDeleted() const {
52 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
53 return PropertyDetails(smi);
#define TYPE_CHECKER(type, instancetype)                                      \
  bool Object::Is##type() const {                                             \
    return Object::IsHeapObject() &&                                          \
           HeapObject::cast(this)->map()->instance_type() == instancetype;    \
  }
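// Illustrative expansion (a sketch, not generated output): TYPE_CHECKER(Symbol,
// SYMBOL_TYPE), used below, produces roughly
//
//   bool Object::IsSymbol() const {
//     return Object::IsHeapObject() &&
//            HeapObject::cast(this)->map()->instance_type() == SYMBOL_TYPE;
//   }
//
// i.e. a cheap Smi-tag check followed by a single instance-type compare.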
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
75 #define INT_ACCESSORS(holder, name, offset) \
76 int holder::name() const { return READ_INT_FIELD(this, offset); } \
77 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }
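// Illustrative expansion (a sketch): ACCESSORS(JSObject, properties,
// FixedArray, kPropertiesOffset), used further down in this file, produces
// roughly
//
//   FixedArray* JSObject::properties() const {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kPropertiesOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset, value, mode);
//   }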
88 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }
96 // Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }

#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
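// Illustrative use (a sketch; 'Foo', 'flags' and 'kBarBit' are hypothetical
// names): BOOL_ACCESSORS(Foo, flags, bar, kBarBit) packs the boolean into bit
// kBarBit of the integer 'flags' field, so
//
//   foo->set_bar(true);   // flags |= (1 << kBarBit), via BooleanBit::set
//   bool b = foo->bar();  // (flags & (1 << kBarBit)) != 0, via BooleanBit::get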
139 bool Object::IsFixedArrayBase() const {
140 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
141 IsFixedTypedArrayBase() || IsExternalArray();
145 // External objects are not extensible, so the map check is enough.
146 bool Object::IsExternal() const {
147 return Object::IsHeapObject() &&
148 HeapObject::cast(this)->map() ==
149 HeapObject::cast(this)->GetHeap()->external_map();
153 bool Object::IsAccessorInfo() const {
154 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
158 bool Object::IsSmi() const {
159 return HAS_SMI_TAG(this);
163 bool Object::IsHeapObject() const {
164 return Internals::HasHeapObjectTag(this);
168 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
169 TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
170 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
173 bool Object::IsString() const {
174 return Object::IsHeapObject()
175 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
179 bool Object::IsName() const {
180 return IsString() || IsSymbol();
184 bool Object::IsUniqueName() const {
185 return IsInternalizedString() || IsSymbol();
189 bool Object::IsSpecObject() const {
190 return Object::IsHeapObject()
191 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
195 bool Object::IsSpecFunction() const {
196 if (!Object::IsHeapObject()) return false;
197 InstanceType type = HeapObject::cast(this)->map()->instance_type();
198 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
202 bool Object::IsTemplateInfo() const {
203 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
207 bool Object::IsInternalizedString() const {
208 if (!this->IsHeapObject()) return false;
209 uint32_t type = HeapObject::cast(this)->map()->instance_type();
210 STATIC_ASSERT(kNotInternalizedTag != 0);
211 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
212 (kStringTag | kInternalizedTag);
216 bool Object::IsConsString() const {
217 if (!IsString()) return false;
218 return StringShape(String::cast(this)).IsCons();
222 bool Object::IsSlicedString() const {
223 if (!IsString()) return false;
224 return StringShape(String::cast(this)).IsSliced();
228 bool Object::IsSeqString() const {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsSequential();
234 bool Object::IsSeqOneByteString() const {
235 if (!IsString()) return false;
236 return StringShape(String::cast(this)).IsSequential() &&
237 String::cast(this)->IsOneByteRepresentation();
241 bool Object::IsSeqTwoByteString() const {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsSequential() &&
244 String::cast(this)->IsTwoByteRepresentation();
248 bool Object::IsExternalString() const {
249 if (!IsString()) return false;
250 return StringShape(String::cast(this)).IsExternal();
254 bool Object::IsExternalAsciiString() const {
255 if (!IsString()) return false;
256 return StringShape(String::cast(this)).IsExternal() &&
257 String::cast(this)->IsOneByteRepresentation();
261 bool Object::IsExternalTwoByteString() const {
262 if (!IsString()) return false;
263 return StringShape(String::cast(this)).IsExternal() &&
264 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
271 IsFixedTypedArrayBase();
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
306 StringShape::StringShape(const String* str)
307 : type_(str->map()->instance_type()) {
309 DCHECK((type_ & kIsNotStringMask) == kStringTag);
313 StringShape::StringShape(Map* map)
314 : type_(map->instance_type()) {
316 DCHECK((type_ & kIsNotStringMask) == kStringTag);
320 StringShape::StringShape(InstanceType t)
321 : type_(static_cast<uint32_t>(t)) {
323 DCHECK((type_ & kIsNotStringMask) == kStringTag);
327 bool StringShape::IsInternalized() {
329 STATIC_ASSERT(kNotInternalizedTag != 0);
330 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
331 (kStringTag | kInternalizedTag);
335 bool String::IsOneByteRepresentation() const {
336 uint32_t type = map()->instance_type();
337 return (type & kStringEncodingMask) == kOneByteStringTag;
341 bool String::IsTwoByteRepresentation() const {
342 uint32_t type = map()->instance_type();
343 return (type & kStringEncodingMask) == kTwoByteStringTag;
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
379 bool String::HasOnlyOneByteChars() {
380 uint32_t type = map()->instance_type();
381 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
382 IsOneByteRepresentation();
386 bool StringShape::IsCons() {
387 return (type_ & kStringRepresentationMask) == kConsStringTag;
391 bool StringShape::IsSliced() {
392 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
396 bool StringShape::IsIndirect() {
397 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
401 bool StringShape::IsExternal() {
402 return (type_ & kStringRepresentationMask) == kExternalStringTag;
406 bool StringShape::IsSequential() {
407 return (type_ & kStringRepresentationMask) == kSeqStringTag;
411 StringRepresentationTag StringShape::representation_tag() {
412 uint32_t tag = (type_ & kStringRepresentationMask);
413 return static_cast<StringRepresentationTag>(tag);
417 uint32_t StringShape::encoding_tag() {
418 return type_ & kStringEncodingMask;
422 uint32_t StringShape::full_representation_tag() {
423 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
427 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
428 Internals::kFullStringRepresentationMask);
430 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
431 Internals::kStringEncodingMask);
434 bool StringShape::IsSequentialAscii() {
435 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
439 bool StringShape::IsSequentialTwoByte() {
440 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
444 bool StringShape::IsExternalAscii() {
445 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
449 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
450 Internals::kExternalAsciiRepresentationTag);
452 STATIC_ASSERT(v8::String::ASCII_ENCODING == kOneByteStringTag);
455 bool StringShape::IsExternalTwoByte() {
456 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
460 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
461 Internals::kExternalTwoByteRepresentationTag);
463 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
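// Illustrative use (a sketch; 'str' is a hypothetical String*): the combined
// tag lets both representation and encoding be checked in one compare:
//
//   uint32_t tag = StringShape(str).full_representation_tag();
//   bool seq_one_byte = (tag == (kSeqStringTag | kOneByteStringTag));
//   bool ext_two_byte = (tag == (kExternalStringTag | kTwoByteStringTag));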
uc32 FlatStringReader::Get(int index) {
  DCHECK(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
475 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
476 return key->AsHandle(isolate);
480 Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
481 return key->AsHandle(isolate);
Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}
Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() V8_OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};
523 class OneByteStringKey : public SequentialStringKey<uint8_t> {
525 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
526 : SequentialStringKey<uint8_t>(str, seed) { }
528 virtual bool IsMatch(Object* string) V8_OVERRIDE {
529 return String::cast(string)->IsOneByteEqualTo(string_);
532 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
537 class SubStringKey : public HashTableKey {
539 SubStringKey(Handle<String> string, int from, int length)
540 : string_(string), from_(from), length_(length) {
541 if (string_->IsSlicedString()) {
542 string_ = Handle<String>(Unslice(*string_, &from_));
544 DCHECK(string_->IsSeqString() || string->IsExternalString());
547 virtual uint32_t Hash() V8_OVERRIDE {
548 DCHECK(length_ >= 0);
549 DCHECK(from_ + length_ <= string_->length());
550 const Char* chars = GetChars() + from_;
551 hash_field_ = StringHasher::HashSequentialString(
552 chars, length_, string_->GetHeap()->HashSeed());
553 uint32_t result = hash_field_ >> String::kHashShift;
554 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
558 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
559 return String::cast(other)->Hash();
562 virtual bool IsMatch(Object* string) V8_OVERRIDE;
563 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
566 const Char* GetChars();
567 String* Unslice(String* string, int* offset) {
568 while (string->IsSlicedString()) {
569 SlicedString* sliced = SlicedString::cast(string);
570 *offset += sliced->offset();
571 string = sliced->parent();
576 Handle<String> string_;
579 uint32_t hash_field_;
583 class TwoByteStringKey : public SequentialStringKey<uc16> {
585 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
586 : SequentialStringKey<uc16>(str, seed) { }
588 virtual bool IsMatch(Object* string) V8_OVERRIDE {
589 return String::cast(string)->IsTwoByteEqualTo(string_);
592 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
596 // Utf8StringKey carries a vector of chars as key.
597 class Utf8StringKey : public HashTableKey {
599 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
600 : string_(string), hash_field_(0), seed_(seed) { }
602 virtual bool IsMatch(Object* string) V8_OVERRIDE {
603 return String::cast(string)->IsUtf8EqualTo(string_);
606 virtual uint32_t Hash() V8_OVERRIDE {
607 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
608 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
609 uint32_t result = hash_field_ >> String::kHashShift;
610 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
614 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
615 return String::cast(other)->Hash();
618 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
619 if (hash_field_ == 0) Hash();
620 return isolate->factory()->NewInternalizedStringFromUtf8(
621 string_, chars_, hash_field_);
624 Vector<const char> string_;
625 uint32_t hash_field_;
626 int chars_; // Caches the number of characters when computing the hash code.
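// Illustrative note (a sketch; 'utf8_chars' is a hypothetical
// Vector<const char>): these *StringKey classes implement the HashTableKey
// protocol used by the string table. A lookup first calls Hash() on the key,
// probes the table, compares candidates with IsMatch(), and only allocates an
// internalized string via AsHandle() when no entry is found:
//
//   Utf8StringKey key(utf8_chars, isolate->heap()->HashSeed());
//   uint32_t hash = key.Hash();                    // cached in hash_field_
//   // probing compares table entries via key.IsMatch(candidate);
//   // on a miss: Handle<Object> s = key.AsHandle(isolate);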
631 bool Object::IsNumber() const {
632 return IsSmi() || IsHeapNumber();
636 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
637 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
640 bool Object::IsFiller() const {
641 if (!Object::IsHeapObject()) return false;
642 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
643 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
647 bool Object::IsExternalArray() const {
648 if (!Object::IsHeapObject())
650 InstanceType instance_type =
651 HeapObject::cast(this)->map()->instance_type();
652 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
653 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
657 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
658 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
659 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
661 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
662 #undef TYPED_ARRAY_TYPE_CHECKER
665 bool Object::IsFixedTypedArrayBase() const {
666 if (!Object::IsHeapObject()) return false;
668 InstanceType instance_type =
669 HeapObject::cast(this)->map()->instance_type();
670 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
671 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
675 bool Object::IsJSReceiver() const {
676 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
677 return IsHeapObject() &&
678 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
682 bool Object::IsJSObject() const {
683 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
684 return IsHeapObject() &&
685 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
689 bool Object::IsJSProxy() const {
690 if (!Object::IsHeapObject()) return false;
691 return HeapObject::cast(this)->map()->IsJSProxyMap();
695 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
696 TYPE_CHECKER(JSSet, JS_SET_TYPE)
697 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
698 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
699 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
700 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
701 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
702 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
703 TYPE_CHECKER(Map, MAP_TYPE)
704 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
705 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
706 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
709 bool Object::IsJSWeakCollection() const {
710 return IsJSWeakMap() || IsJSWeakSet();
714 bool Object::IsDescriptorArray() const {
715 return IsFixedArray();
719 bool Object::IsTransitionArray() const {
720 return IsFixedArray();
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;
  if (length < DeoptimizationInputData::kFirstDeoptEntryIndex) return false;

  FixedArray* self = FixedArray::cast(const_cast<Object*>(this));
  int deopt_count =
      Smi::cast(self->get(DeoptimizationInputData::kDeoptEntryCountIndex))
          ->value();
  int patch_count =
      Smi::cast(
          self->get(
              DeoptimizationInputData::kReturnAddressPatchEntryCountIndex))
          ->value();

  return length == DeoptimizationInputData::LengthFor(deopt_count, patch_count);
}
bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
768 bool Object::IsContext() const {
769 if (!Object::IsHeapObject()) return false;
770 Map* map = HeapObject::cast(this)->map();
771 Heap* heap = map->GetHeap();
772 return (map == heap->function_context_map() ||
773 map == heap->catch_context_map() ||
774 map == heap->with_context_map() ||
775 map == heap->native_context_map() ||
776 map == heap->block_context_map() ||
777 map == heap->module_context_map() ||
778 map == heap->global_context_map());
782 bool Object::IsNativeContext() const {
783 return Object::IsHeapObject() &&
784 HeapObject::cast(this)->map() ==
785 HeapObject::cast(this)->GetHeap()->native_context_map();
789 bool Object::IsScopeInfo() const {
790 return Object::IsHeapObject() &&
791 HeapObject::cast(this)->map() ==
792 HeapObject::cast(this)->GetHeap()->scope_info_map();
796 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
799 template <> inline bool Is<JSFunction>(Object* obj) {
800 return obj->IsJSFunction();
804 TYPE_CHECKER(Code, CODE_TYPE)
805 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
806 TYPE_CHECKER(Cell, CELL_TYPE)
807 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
808 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
809 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
810 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
811 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
812 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
813 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
816 bool Object::IsStringWrapper() const {
817 return IsJSValue() && JSValue::cast(this)->value()->IsString();
821 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
824 bool Object::IsBoolean() const {
825 return IsOddball() &&
826 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
830 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
831 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
832 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
833 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
836 bool Object::IsJSArrayBufferView() const {
837 return IsJSDataView() || IsJSTypedArray();
841 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
844 template <> inline bool Is<JSArray>(Object* obj) {
845 return obj->IsJSArray();
849 bool Object::IsHashTable() const {
850 return Object::IsHeapObject() &&
851 HeapObject::cast(this)->map() ==
852 HeapObject::cast(this)->GetHeap()->hash_table_map();
856 bool Object::IsWeakHashTable() const {
857 return IsHashTable();
861 bool Object::IsDictionary() const {
862 return IsHashTable() &&
863 this != HeapObject::cast(this)->GetHeap()->string_table();
867 bool Object::IsNameDictionary() const {
868 return IsDictionary();
872 bool Object::IsSeededNumberDictionary() const {
873 return IsDictionary();
877 bool Object::IsUnseededNumberDictionary() const {
878 return IsDictionary();
882 bool Object::IsStringTable() const {
883 return IsHashTable();
887 bool Object::IsJSFunctionResultCache() const {
888 if (!IsFixedArray()) return false;
889 const FixedArray* self = FixedArray::cast(this);
890 int length = self->length();
891 if (length < JSFunctionResultCache::kEntriesIndex) return false;
892 if ((length - JSFunctionResultCache::kEntriesIndex)
893 % JSFunctionResultCache::kEntrySize != 0) {
897 if (FLAG_verify_heap) {
898 // TODO(svenpanne) We use const_cast here and below to break our dependency
899 // cycle between the predicates and the verifiers. This can be removed when
900 // the verifiers are const-correct, too.
901 reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
902 JSFunctionResultCacheVerify();
909 bool Object::IsNormalizedMapCache() const {
910 return NormalizedMapCache::IsNormalizedMapCache(this);
914 int NormalizedMapCache::GetIndex(Handle<Map> map) {
915 return map->Hash() % NormalizedMapCache::kEntries;
919 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
920 if (!obj->IsFixedArray()) return false;
921 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
925 if (FLAG_verify_heap) {
926 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
927 NormalizedMapCacheVerify();
934 bool Object::IsCompilationCacheTable() const {
935 return IsHashTable();
939 bool Object::IsCodeCacheHashTable() const {
940 return IsHashTable();
944 bool Object::IsPolymorphicCodeCacheHashTable() const {
945 return IsHashTable();
949 bool Object::IsMapCache() const {
950 return IsHashTable();
954 bool Object::IsObjectHashTable() const {
955 return IsHashTable();
959 bool Object::IsOrderedHashTable() const {
960 return IsHeapObject() &&
961 HeapObject::cast(this)->map() ==
962 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
966 bool Object::IsOrderedHashSet() const {
967 return IsOrderedHashTable();
971 bool Object::IsOrderedHashMap() const {
972 return IsOrderedHashTable();
976 bool Object::IsPrimitive() const {
977 return IsOddball() || IsNumber() || IsString();
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}
991 bool Object::IsGlobalObject() const {
992 if (!IsHeapObject()) return false;
994 InstanceType type = HeapObject::cast(this)->map()->instance_type();
995 return type == JS_GLOBAL_OBJECT_TYPE ||
996 type == JS_BUILTINS_OBJECT_TYPE;
1000 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
1001 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
1004 bool Object::IsUndetectableObject() const {
1005 return IsHeapObject()
1006 && HeapObject::cast(this)->map()->is_undetectable();
1010 bool Object::IsAccessCheckNeeded() const {
1011 if (!IsHeapObject()) return false;
1012 if (IsJSGlobalProxy()) {
1013 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
1014 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
1015 return proxy->IsDetachedFrom(global);
1017 return HeapObject::cast(this)->map()->is_access_check_needed();
1021 bool Object::IsStruct() const {
1022 if (!IsHeapObject()) return false;
1023 switch (HeapObject::cast(this)->map()->instance_type()) {
1024 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1025 STRUCT_LIST(MAKE_STRUCT_CASE)
1026 #undef MAKE_STRUCT_CASE
1027 default: return false;
1032 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1033 bool Object::Is##Name() const { \
1034 return Object::IsHeapObject() \
1035 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1037 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1038 #undef MAKE_STRUCT_PREDICATE
1041 bool Object::IsUndefined() const {
1042 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1046 bool Object::IsNull() const {
1047 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1051 bool Object::IsTheHole() const {
1052 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1056 bool Object::IsException() const {
1057 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1061 bool Object::IsUninitialized() const {
1062 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1066 bool Object::IsTrue() const {
1067 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1071 bool Object::IsFalse() const {
1072 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1076 bool Object::IsArgumentsMarker() const {
1077 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
double Object::Number() {
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}
1089 bool Object::IsNaN() const {
1090 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1094 bool Object::IsMinusZero() const {
1095 return this->IsHeapNumber() &&
1096 i::IsMinusZero(HeapNumber::cast(this)->value());
1100 MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
1101 if (object->IsSmi()) return Handle<Smi>::cast(object);
1102 if (object->IsHeapNumber()) {
1103 double value = Handle<HeapNumber>::cast(object)->value();
1104 int int_value = FastD2I(value);
1105 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1106 return handle(Smi::FromInt(int_value), isolate);
1109 return Handle<Smi>();
1113 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1114 Handle<Object> object) {
1116 isolate, object, handle(isolate->context()->native_context(), isolate));
1120 bool Object::HasSpecificClassOf(String* name) {
1121 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1125 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1126 Handle<Name> name) {
1127 LookupIterator it(object, name);
1128 return GetProperty(&it);
1132 MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1133 Handle<Object> object,
1135 // GetElement can trigger a getter which can cause allocation.
1136 // This was not always the case. This DCHECK is here to catch
1137 // leftover incorrect uses.
1138 DCHECK(AllowHeapAllocation::IsAllowed());
1139 return Object::GetElementWithReceiver(isolate, object, object, index);
1143 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1144 Handle<Name> name) {
1146 Isolate* isolate = name->GetIsolate();
1147 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1148 return GetProperty(object, name);
1152 MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1153 Handle<Object> object,
1155 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1156 DCHECK(!str.is_null());
1158 uint32_t index; // Assert that the name is not an array index.
1159 DCHECK(!str->AsArrayIndex(&index));
1161 return GetProperty(object, str);
1165 MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
1166 Handle<Object> receiver,
1168 return GetPropertyWithHandler(
1169 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1173 MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
1174 Handle<JSReceiver> receiver,
1176 Handle<Object> value,
1177 StrictMode strict_mode) {
1178 Isolate* isolate = proxy->GetIsolate();
1179 Handle<String> name = isolate->factory()->Uint32ToString(index);
1180 return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
1184 Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
1186 Isolate* isolate = proxy->GetIsolate();
1187 Handle<String> name = isolate->factory()->Uint32ToString(index);
1188 return HasPropertyWithHandler(proxy, name);
1192 #define FIELD_ADDR(p, offset) \
1193 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1195 #define FIELD_ADDR_CONST(p, offset) \
1196 (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
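// Illustrative note: heap object pointers are tagged, so the raw slot address
// is the tagged pointer plus the field offset minus the tag. For example, with
// kHeapObjectTag == 1, an object whose tagged pointer is 0x1001 has its field
// at offset 8 located at
//
//   FIELD_ADDR(p, 8) == 0x1001 + 8 - 1 == 0x1008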
1198 #define READ_FIELD(p, offset) \
1199 (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
1201 #define ACQUIRE_READ_FIELD(p, offset) \
1202 reinterpret_cast<Object*>(base::Acquire_Load( \
1203 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1205 #define NOBARRIER_READ_FIELD(p, offset) \
1206 reinterpret_cast<Object*>(base::NoBarrier_Load( \
1207 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1209 #define WRITE_FIELD(p, offset, value) \
1210 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1212 #define RELEASE_WRITE_FIELD(p, offset, value) \
1213 base::Release_Store( \
1214 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1215 reinterpret_cast<base::AtomicWord>(value));
1217 #define NOBARRIER_WRITE_FIELD(p, offset, value) \
1218 base::NoBarrier_Store( \
1219 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1220 reinterpret_cast<base::AtomicWord>(value));
1222 #define WRITE_BARRIER(heap, object, offset, value) \
1223 heap->incremental_marking()->RecordWrite( \
1224 object, HeapObject::RawField(object, offset), value); \
1225 if (heap->InNewSpace(value)) { \
1226 heap->RecordWrite(object->address(), offset); \
1229 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1230 if (mode == UPDATE_WRITE_BARRIER) { \
1231 heap->incremental_marking()->RecordWrite( \
1232 object, HeapObject::RawField(object, offset), value); \
1233 if (heap->InNewSpace(value)) { \
1234 heap->RecordWrite(object->address(), offset); \
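// Illustrative use (a sketch; 'kSomeOffset' is a hypothetical field offset):
// callers that may legitimately skip the barrier (e.g. when storing Smis) pass
// SKIP_WRITE_BARRIER, otherwise UPDATE_WRITE_BARRIER:
//
//   WRITE_FIELD(obj, kSomeOffset, value);
//   CONDITIONAL_WRITE_BARRIER(obj->GetHeap(), obj, kSomeOffset, value,
//                             UPDATE_WRITE_BARRIER);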
1238 #ifndef V8_TARGET_ARCH_MIPS
1239 #define READ_DOUBLE_FIELD(p, offset) \
1240 (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
1241 #else // V8_TARGET_ARCH_MIPS
1242 // Prevent gcc from using load-double (mips ldc1) on (possibly)
1243 // non-64-bit aligned HeapNumber::value.
1244 static inline double read_double_field(const void* p, int offset) {
1249 c.u[0] = (*reinterpret_cast<const uint32_t*>(
1250 FIELD_ADDR_CONST(p, offset)));
1251 c.u[1] = (*reinterpret_cast<const uint32_t*>(
1252 FIELD_ADDR_CONST(p, offset + 4)));
1255 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1256 #endif // V8_TARGET_ARCH_MIPS
1258 #ifndef V8_TARGET_ARCH_MIPS
1259 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1260 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1261 #else // V8_TARGET_ARCH_MIPS
1262 // Prevent gcc from using store-double (mips sdc1) on (possibly)
1263 // non-64-bit aligned HeapNumber::value.
1264 static inline void write_double_field(void* p, int offset,
1271 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1272 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1274 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1275 write_double_field(p, offset, value)
1276 #endif // V8_TARGET_ARCH_MIPS
1279 #define READ_INT_FIELD(p, offset) \
1280 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1282 #define WRITE_INT_FIELD(p, offset, value) \
1283 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1285 #define READ_INTPTR_FIELD(p, offset) \
1286 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1288 #define WRITE_INTPTR_FIELD(p, offset, value) \
1289 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1291 #define READ_UINT32_FIELD(p, offset) \
1292 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1294 #define WRITE_UINT32_FIELD(p, offset, value) \
1295 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1297 #define READ_INT32_FIELD(p, offset) \
1298 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1300 #define WRITE_INT32_FIELD(p, offset, value) \
1301 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1303 #define READ_INT64_FIELD(p, offset) \
1304 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1306 #define WRITE_INT64_FIELD(p, offset, value) \
1307 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1309 #define READ_SHORT_FIELD(p, offset) \
1310 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1312 #define WRITE_SHORT_FIELD(p, offset, value) \
1313 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1315 #define READ_BYTE_FIELD(p, offset) \
1316 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1318 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1319 static_cast<byte>(base::NoBarrier_Load( \
1320 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1322 #define WRITE_BYTE_FIELD(p, offset, value) \
1323 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1325 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1326 base::NoBarrier_Store( \
1327 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1328 static_cast<base::Atomic8>(value));
1330 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1331 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1335 int Smi::value() const {
1336 return Internals::SmiValue(this);
1340 Smi* Smi::FromInt(int value) {
1341 DCHECK(Smi::IsValid(value));
1342 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1346 Smi* Smi::FromIntptr(intptr_t value) {
1347 DCHECK(Smi::IsValid(value));
1348 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1349 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
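// Worked example (illustration): on a 32-bit target kSmiTagSize == 1 and
// kSmiShiftSize == 0, so Smi::FromInt(42) produces the word (42 << 1) == 84
// with the low bit clear (kSmiTag == 0), and Smi::value() shifts it back to
// 42. On 64-bit targets the payload is additionally shifted by kSmiShiftSize.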
1360 MapWord MapWord::FromMap(const Map* map) {
1361 return MapWord(reinterpret_cast<uintptr_t>(map));
1365 Map* MapWord::ToMap() {
1366 return reinterpret_cast<Map*>(value_);
1370 bool MapWord::IsForwardingAddress() {
1371 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1375 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1376 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1377 return MapWord(reinterpret_cast<uintptr_t>(raw));
1381 HeapObject* MapWord::ToForwardingAddress() {
1382 DCHECK(IsForwardingAddress());
1383 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
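// Illustrative note (a sketch; 'new_location' is a hypothetical HeapObject*):
// during scavenging the map word is overwritten with the object's new
// location. FromForwardingAddress() stores the untagged address, so the word
// looks like a Smi, which is exactly what IsForwardingAddress() tests:
//
//   MapWord fwd = MapWord::FromForwardingAddress(new_location);
//   DCHECK(fwd.IsForwardingAddress());
//   DCHECK_EQ(new_location, fwd.ToForwardingAddress());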
1388 void HeapObject::VerifyObjectField(int offset) {
1389 VerifyPointer(READ_FIELD(this, offset));
1392 void HeapObject::VerifySmiField(int offset) {
1393 CHECK(READ_FIELD(this, offset)->IsSmi());
1398 Heap* HeapObject::GetHeap() const {
1400 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1401 SLOW_DCHECK(heap != NULL);
1406 Isolate* HeapObject::GetIsolate() const {
1407 return GetHeap()->isolate();
1411 Map* HeapObject::map() const {
1413 // Clear mark potentially added by PathTracer.
1414 uintptr_t raw_value =
1415 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1416 return MapWord::FromRawValue(raw_value).ToMap();
1418 return map_word().ToMap();
1423 void HeapObject::set_map(Map* value) {
1424 set_map_word(MapWord::FromMap(value));
1425 if (value != NULL) {
1426 // TODO(1600) We are passing NULL as a slot because maps can never be on
1427 // evacuation candidate.
1428 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1433 Map* HeapObject::synchronized_map() {
1434 return synchronized_map_word().ToMap();
1438 void HeapObject::synchronized_set_map(Map* value) {
1439 synchronized_set_map_word(MapWord::FromMap(value));
1440 if (value != NULL) {
1441 // TODO(1600) We are passing NULL as a slot because maps can never be on
1442 // evacuation candidate.
1443 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1448 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1449 synchronized_set_map_word(MapWord::FromMap(value));
1453 // Unsafe accessor omitting write barrier.
1454 void HeapObject::set_map_no_write_barrier(Map* value) {
1455 set_map_word(MapWord::FromMap(value));
1459 MapWord HeapObject::map_word() const {
1461 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1465 void HeapObject::set_map_word(MapWord map_word) {
1466 NOBARRIER_WRITE_FIELD(
1467 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1471 MapWord HeapObject::synchronized_map_word() const {
1473 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1477 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1478 RELEASE_WRITE_FIELD(
1479 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1483 HeapObject* HeapObject::FromAddress(Address address) {
1484 DCHECK_TAG_ALIGNED(address);
1485 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1489 Address HeapObject::address() {
1490 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1494 int HeapObject::Size() {
1495 return SizeFromMap(map());
bool HeapObject::MayContainNewSpacePointers() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return true;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag);
  }
  // The ConstantPoolArray contains heap pointers, but not new space pointers.
  if (type == CONSTANT_POOL_ARRAY_TYPE) return false;
  return (type > LAST_DATA_TYPE);
}
1517 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1518 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1519 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1523 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1524 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1528 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1529 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1533 double HeapNumber::value() const {
1534 return READ_DOUBLE_FIELD(this, kValueOffset);
1538 void HeapNumber::set_value(double value) {
1539 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1543 int HeapNumber::get_exponent() {
1544 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1545 kExponentShift) - kExponentBias;
1549 int HeapNumber::get_sign() {
1550 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1554 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1557 Object** FixedArray::GetFirstElementAddress() {
1558 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1562 bool FixedArray::ContainsOnlySmisOrHoles() {
1563 Object* the_hole = GetHeap()->the_hole_value();
1564 Object** current = GetFirstElementAddress();
1565 for (int i = 0; i < length(); ++i) {
1566 Object* candidate = *current++;
1567 if (!candidate->IsSmi() && candidate != the_hole) return false;
1573 FixedArrayBase* JSObject::elements() const {
1574 Object* array = READ_FIELD(this, kElementsOffset);
1575 return static_cast<FixedArrayBase*>(array);
1579 void JSObject::ValidateElements(Handle<JSObject> object) {
1580 #ifdef ENABLE_SLOW_DCHECKS
1581 if (FLAG_enable_slow_asserts) {
1582 ElementsAccessor* accessor = object->GetElementsAccessor();
1583 accessor->Validate(object);
1589 void AllocationSite::Initialize() {
1590 set_transition_info(Smi::FromInt(0));
1591 SetElementsKind(GetInitialFastElementsKind());
1592 set_nested_site(Smi::FromInt(0));
1593 set_pretenure_data(Smi::FromInt(0));
1594 set_pretenure_create_count(Smi::FromInt(0));
1595 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1596 SKIP_WRITE_BARRIER);
1600 void AllocationSite::MarkZombie() {
1601 DCHECK(!IsZombie());
1603 set_pretenure_decision(kZombie);
1607 // Heuristic: We only need to create allocation site info if the boilerplate
1608 // elements kind is the initial elements kind.
1609 AllocationSiteMode AllocationSite::GetMode(
1610 ElementsKind boilerplate_elements_kind) {
1611 if (FLAG_pretenuring_call_new ||
1612 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1613 return TRACK_ALLOCATION_SITE;
1616 return DONT_TRACK_ALLOCATION_SITE;
1620 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1622 if (FLAG_pretenuring_call_new ||
1623 (IsFastSmiElementsKind(from) &&
1624 IsMoreGeneralElementsKindTransition(from, to))) {
1625 return TRACK_ALLOCATION_SITE;
1628 return DONT_TRACK_ALLOCATION_SITE;
1632 inline bool AllocationSite::CanTrack(InstanceType type) {
1633 if (FLAG_allocation_site_pretenuring) {
1634 return type == JS_ARRAY_TYPE ||
1635 type == JS_OBJECT_TYPE ||
1636 type < FIRST_NONSTRING_TYPE;
1638 return type == JS_ARRAY_TYPE;
1642 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1646 return DependentCode::kAllocationSiteTenuringChangedGroup;
1649 return DependentCode::kAllocationSiteTransitionChangedGroup;
1653 return DependentCode::kAllocationSiteTransitionChangedGroup;
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}
1670 inline bool AllocationSite::IncrementMementoFoundCount() {
1671 if (IsZombie()) return false;
1673 int value = memento_found_count();
1674 set_memento_found_count(value + 1);
1675 return memento_found_count() == kPretenureMinimumCreated;
1679 inline void AllocationSite::IncrementMementoCreateCount() {
1680 DCHECK(FLAG_allocation_site_pretenuring);
1681 int value = memento_create_count();
1682 set_memento_create_count(value + 1);
1686 inline bool AllocationSite::MakePretenureDecision(
1687 PretenureDecision current_decision,
1689 bool maximum_size_scavenge) {
1690 // Here we just allow state transitions from undecided or maybe tenure
1691 // to don't tenure, maybe tenure, or tenure.
1692 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1693 if (ratio >= kPretenureRatio) {
1694 // We just transition into tenure state when the semi-space was at
1695 // maximum capacity.
1696 if (maximum_size_scavenge) {
1697 set_deopt_dependent_code(true);
1698 set_pretenure_decision(kTenure);
1699 // Currently we just need to deopt when we make a state transition to
1703 set_pretenure_decision(kMaybeTenure);
1705 set_pretenure_decision(kDontTenure);
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);

  return deopt;
}
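// Worked example (illustration, hypothetical counts): with
// memento_create_count() == 100 and memento_found_count() == 80 the ratio is
// 0.8. If 100 >= kPretenureMinimumCreated, MakePretenureDecision() compares
// 0.8 against kPretenureRatio: at or above the threshold the site becomes
// kTenure (with a deopt request) when the semi-space was at maximum capacity
// during this scavenge, else kMaybeTenure; below the threshold it becomes
// kDontTenure.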
1743 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1744 JSObject::ValidateElements(object);
1745 ElementsKind elements_kind = object->map()->elements_kind();
1746 if (!IsFastObjectElementsKind(elements_kind)) {
1747 if (IsFastHoleyElementsKind(elements_kind)) {
1748 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1750 TransitionElementsKind(object, FAST_ELEMENTS);
1756 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1759 EnsureElementsMode mode) {
1760 ElementsKind current_kind = object->map()->elements_kind();
1761 ElementsKind target_kind = current_kind;
1763 DisallowHeapAllocation no_allocation;
1764 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1765 bool is_holey = IsFastHoleyElementsKind(current_kind);
1766 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1767 Heap* heap = object->GetHeap();
1768 Object* the_hole = heap->the_hole_value();
1769 for (uint32_t i = 0; i < count; ++i) {
1770 Object* current = *objects++;
1771 if (current == the_hole) {
1773 target_kind = GetHoleyElementsKind(target_kind);
1774 } else if (!current->IsSmi()) {
1775 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1776 if (IsFastSmiElementsKind(target_kind)) {
1778 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1780 target_kind = FAST_DOUBLE_ELEMENTS;
1783 } else if (is_holey) {
1784 target_kind = FAST_HOLEY_ELEMENTS;
1787 target_kind = FAST_ELEMENTS;
1792 if (target_kind != current_kind) {
1793 TransitionElementsKind(object, target_kind);
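// Worked example (illustration): starting from FAST_SMI_ELEMENTS and copying
// in {1, 2.5} with ALLOW_CONVERTED_DOUBLE_ELEMENTS ends with target_kind ==
// FAST_DOUBLE_ELEMENTS, while {1, some_object} ends with FAST_ELEMENTS;
// encountering the_hole switches to the corresponding holey kind instead.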
1798 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1799 Handle<FixedArrayBase> elements,
1801 EnsureElementsMode mode) {
1802 Heap* heap = object->GetHeap();
1803 if (elements->map() != heap->fixed_double_array_map()) {
1804 DCHECK(elements->map() == heap->fixed_array_map() ||
1805 elements->map() == heap->fixed_cow_array_map());
1806 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1807 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1810 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1811 EnsureCanContainElements(object, objects, length, mode);
1815 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1816 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1817 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1818 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1819 Handle<FixedDoubleArray> double_array =
1820 Handle<FixedDoubleArray>::cast(elements);
1821 for (uint32_t i = 0; i < length; ++i) {
1822 if (double_array->is_the_hole(i)) {
1823 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1827 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1832 void JSObject::SetMapAndElements(Handle<JSObject> object,
1833 Handle<Map> new_map,
1834 Handle<FixedArrayBase> value) {
1835 JSObject::MigrateToMap(object, new_map);
1836 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1837 (*value == object->GetHeap()->empty_fixed_array())) ==
1838 (value->map() == object->GetHeap()->fixed_array_map() ||
1839 value->map() == object->GetHeap()->fixed_cow_array_map()));
1840 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1841 (object->map()->has_fast_double_elements() ==
1842 value->IsFixedDoubleArray()));
1843 object->set_elements(*value);
1847 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1848 WRITE_FIELD(this, kElementsOffset, value);
1849 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1853 void JSObject::initialize_properties() {
1854 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1855 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1859 void JSObject::initialize_elements() {
1860 FixedArrayBase* elements = map()->GetInitialElements();
1861 WRITE_FIELD(this, kElementsOffset, elements);
1865 Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
1866 DisallowHeapAllocation no_gc;
1867 if (!map->HasTransitionArray()) return Handle<String>::null();
1868 TransitionArray* transitions = map->transitions();
1869 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1870 int transition = TransitionArray::kSimpleTransitionIndex;
1871 PropertyDetails details = transitions->GetTargetDetails(transition);
1872 Name* name = transitions->GetKey(transition);
1873 if (details.type() != FIELD) return Handle<String>::null();
1874 if (details.attributes() != NONE) return Handle<String>::null();
1875 if (!name->IsString()) return Handle<String>::null();
1876 return Handle<String>(String::cast(name));
1880 Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
1881 DCHECK(!ExpectedTransitionKey(map).is_null());
1882 return Handle<Map>(map->transitions()->GetTarget(
1883 TransitionArray::kSimpleTransitionIndex));
1887 Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1888 DisallowHeapAllocation no_allocation;
1889 if (!map->HasTransitionArray()) return Handle<Map>::null();
1890 TransitionArray* transitions = map->transitions();
1891 int transition = transitions->Search(*key);
1892 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1893 PropertyDetails target_details = transitions->GetTargetDetails(transition);
1894 if (target_details.type() != FIELD) return Handle<Map>::null();
1895 if (target_details.attributes() != NONE) return Handle<Map>::null();
1896 return Handle<Map>(transitions->GetTarget(transition));
1900 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1901 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1904 byte Oddball::kind() const {
1905 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1909 void Oddball::set_kind(byte value) {
1910 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1914 Object* Cell::value() const {
1915 return READ_FIELD(this, kValueOffset);
1919 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
1920 // The write barrier is not used for global property cells.
1921 DCHECK(!val->IsPropertyCell() && !val->IsCell());
1922 WRITE_FIELD(this, kValueOffset, val);
1925 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1927 Object* PropertyCell::type_raw() const {
1928 return READ_FIELD(this, kTypeOffset);
1932 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
1933 WRITE_FIELD(this, kTypeOffset, val);
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}
1995 int JSObject::GetInternalFieldCount() {
1996 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
1997 // Make sure to adjust for the number of in-object properties. These
1998 // properties do contribute to the size, but are not internal fields.
1999 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2000 map()->inobject_properties();
2004 int JSObject::GetInternalFieldOffset(int index) {
2005 DCHECK(index < GetInternalFieldCount() && index >= 0);
2006 return GetHeaderSize() + (kPointerSize * index);
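// Worked example (illustration, hypothetical sizes): on a 64-bit target, a
// wrapper object with instance size 48, a 16-byte header and 2 in-object
// properties has (48 - 16) / 8 - 2 == 2 internal fields; internal field 1 is
// read from offset GetHeaderSize() + 1 * kPointerSize == 24.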
2010 Object* JSObject::GetInternalField(int index) {
2011 DCHECK(index < GetInternalFieldCount() && index >= 0);
2012 // Internal objects do follow immediately after the header, whereas in-object
2013 // properties are at the end of the object. Therefore there is no need
2014 // to adjust the index here.
2015 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2019 void JSObject::SetInternalField(int index, Object* value) {
2020 DCHECK(index < GetInternalFieldCount() && index >= 0);
2021 // Internal objects do follow immediately after the header, whereas in-object
2022 // properties are at the end of the object. Therefore there is no need
2023 // to adjust the index here.
2024 int offset = GetHeaderSize() + (kPointerSize * index);
2025 WRITE_FIELD(this, offset, value);
2026 WRITE_BARRIER(GetHeap(), this, offset, value);
2030 void JSObject::SetInternalField(int index, Smi* value) {
2031 DCHECK(index < GetInternalFieldCount() && index >= 0);
2032 // Internal objects do follow immediately after the header, whereas in-object
2033 // properties are at the end of the object. Therefore there is no need
2034 // to adjust the index here.
2035 int offset = GetHeaderSize() + (kPointerSize * index);
2036 WRITE_FIELD(this, offset, value);
2040 // Access fast-case object properties at index. The use of these routines
2041 // is needed to correctly distinguish between properties stored in-object and
2042 // properties stored in the properties array.
2043 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2044 if (index.is_inobject()) {
2045 return READ_FIELD(this, index.offset());
2046 } else {
2047 return properties()->get(index.outobject_array_index());
2048 }
2049 }
2052 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2053 if (index.is_inobject()) {
2054 int offset = index.offset();
2055 WRITE_FIELD(this, offset, value);
2056 WRITE_BARRIER(GetHeap(), this, offset, value);
2057 } else {
2058 properties()->set(index.outobject_array_index(), value);
2059 }
2060 }
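// For illustration only: a caller holding a FieldIndex (however it was
// obtained) does not need to know where the property lives:
//   Object* old_value = obj->RawFastPropertyAt(index);
//   obj->FastPropertyAtPut(index, new_value);
// In-object fields go through READ_FIELD/WRITE_FIELD directly; everything
// else is routed to the out-of-object properties() backing store.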
2063 int JSObject::GetInObjectPropertyOffset(int index) {
2064 return map()->GetInObjectPropertyOffset(index);
2068 Object* JSObject::InObjectPropertyAt(int index) {
2069 int offset = GetInObjectPropertyOffset(index);
2070 return READ_FIELD(this, offset);
2074 Object* JSObject::InObjectPropertyAtPut(int index,
2075 Object* value,
2076 WriteBarrierMode mode) {
2077 // Adjust for the number of properties stored in the object.
2078 int offset = GetInObjectPropertyOffset(index);
2079 WRITE_FIELD(this, offset, value);
2080 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2081 return value;
2082 }
2086 void JSObject::InitializeBody(Map* map,
2087 Object* pre_allocated_value,
2088 Object* filler_value) {
2089 DCHECK(!filler_value->IsHeapObject() ||
2090 !GetHeap()->InNewSpace(filler_value));
2091 DCHECK(!pre_allocated_value->IsHeapObject() ||
2092 !GetHeap()->InNewSpace(pre_allocated_value));
2093 int size = map->instance_size();
2094 int offset = kHeaderSize;
2095 if (filler_value != pre_allocated_value) {
2096 int pre_allocated = map->pre_allocated_property_fields();
2097 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2098 for (int i = 0; i < pre_allocated; i++) {
2099 WRITE_FIELD(this, offset, pre_allocated_value);
2100 offset += kPointerSize;
2101 }
2102 }
2103 while (offset < size) {
2104 WRITE_FIELD(this, offset, filler_value);
2105 offset += kPointerSize;
2110 bool JSObject::HasFastProperties() {
2111 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2112 return !properties()->IsDictionary();
2116 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2117 if (unused_property_fields() != 0) return false;
2118 if (is_prototype_map()) return false;
2119 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2120 int limit = Max(minimum, inobject_properties());
2121 int external = NumberOfFields() - inobject_properties();
2122 return external > limit;
2123 }
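// Worked example (illustrative): for a map with 4 in-object properties and no
// unused property fields, a store that may originate from a keyed access uses
// the minimum of 12, so limit = Max(12, 4) = 12 and the map counts as having
// too many fast properties once more than 12 fields live outside the object;
// CERTAINLY_NOT_STORE_FROM_KEYED raises that minimum to 128.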
2126 void Struct::InitializeBody(int object_size) {
2127 Object* value = GetHeap()->undefined_value();
2128 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2129 WRITE_FIELD(this, offset, value);
2134 bool Object::ToArrayIndex(uint32_t* index) {
2135 if (IsSmi()) {
2136 int value = Smi::cast(this)->value();
2137 if (value < 0) return false;
2138 *index = value;
2139 return true;
2140 }
2141 if (IsHeapNumber()) {
2142 double value = HeapNumber::cast(this)->value();
2143 uint32_t uint_value = static_cast<uint32_t>(value);
2144 if (value == static_cast<double>(uint_value)) {
2145 *index = uint_value;
2146 return true;
2147 }
2148 }
2149 return false;
2150 }
2153 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2154 if (!this->IsJSValue()) return false;
2156 JSValue* js_value = JSValue::cast(this);
2157 if (!js_value->value()->IsString()) return false;
2159 String* str = String::cast(js_value->value());
2160 if (index >= static_cast<uint32_t>(str->length())) return false;
2162 return true;
2163 }
2166 void Object::VerifyApiCallResultType() {
2167 #if ENABLE_EXTRA_CHECKS
2177 FATAL("API call returned invalid object");
2179 #endif // ENABLE_EXTRA_CHECKS
2183 Object* FixedArray::get(int index) {
2184 SLOW_DCHECK(index >= 0 && index < this->length());
2185 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2189 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2190 return handle(array->get(index), array->GetIsolate());
2194 bool FixedArray::is_the_hole(int index) {
2195 return get(index) == GetHeap()->the_hole_value();
2199 void FixedArray::set(int index, Smi* value) {
2200 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2201 DCHECK(index >= 0 && index < this->length());
2202 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2203 int offset = kHeaderSize + index * kPointerSize;
2204 WRITE_FIELD(this, offset, value);
2208 void FixedArray::set(int index, Object* value) {
2209 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2210 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2211 DCHECK(index >= 0 && index < this->length());
2212 int offset = kHeaderSize + index * kPointerSize;
2213 WRITE_FIELD(this, offset, value);
2214 WRITE_BARRIER(GetHeap(), this, offset, value);
2218 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2219 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
2223 inline double FixedDoubleArray::hole_nan_as_double() {
2224 return BitCast<double, uint64_t>(kHoleNanInt64);
2228 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2229 DCHECK(BitCast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
2230 DCHECK((BitCast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
2231 return base::OS::nan_value();
2235 double FixedDoubleArray::get_scalar(int index) {
2236 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2237 map() != GetHeap()->fixed_array_map());
2238 DCHECK(index >= 0 && index < this->length());
2239 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2240 DCHECK(!is_the_hole_nan(result));
2241 return result;
2242 }
2244 int64_t FixedDoubleArray::get_representation(int index) {
2245 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2246 map() != GetHeap()->fixed_array_map());
2247 DCHECK(index >= 0 && index < this->length());
2248 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2252 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2253 int index) {
2254 if (array->is_the_hole(index)) {
2255 return array->GetIsolate()->factory()->the_hole_value();
2256 } else {
2257 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2258 }
2259 }
2262 void FixedDoubleArray::set(int index, double value) {
2263 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2264 map() != GetHeap()->fixed_array_map());
2265 int offset = kHeaderSize + index * kDoubleSize;
2266 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2267 WRITE_DOUBLE_FIELD(this, offset, value);
2271 void FixedDoubleArray::set_the_hole(int index) {
2272 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2273 map() != GetHeap()->fixed_array_map());
2274 int offset = kHeaderSize + index * kDoubleSize;
2275 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2279 bool FixedDoubleArray::is_the_hole(int index) {
2280 int offset = kHeaderSize + index * kDoubleSize;
2281 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2285 double* FixedDoubleArray::data_start() {
2286 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2290 void FixedDoubleArray::FillWithHoles(int from, int to) {
2291 for (int i = from; i < to; i++) {
2292 set_the_hole(i);
2293 }
2294 }
2297 void ConstantPoolArray::NumberOfEntries::increment(Type type) {
2298 DCHECK(type < NUMBER_OF_TYPES);
2299 element_counts_[type]++;
2303 int ConstantPoolArray::NumberOfEntries::equals(
2304 const ConstantPoolArray::NumberOfEntries& other) const {
2305 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2306 if (element_counts_[i] != other.element_counts_[i]) return false;
2307 }
2308 return true;
2309 }
2312 bool ConstantPoolArray::NumberOfEntries::is_empty() const {
2313 return total_count() == 0;
2317 int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
2318 DCHECK(type < NUMBER_OF_TYPES);
2319 return element_counts_[type];
2323 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2324 int base = 0;
2325 DCHECK(type < NUMBER_OF_TYPES);
2326 for (int i = 0; i < type; i++) {
2327 base += element_counts_[i];
2328 }
2329 return base;
2330 }
2333 int ConstantPoolArray::NumberOfEntries::total_count() const {
2334 int count = 0;
2335 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2336 count += element_counts_[i];
2337 }
2338 return count;
2339 }
2342 int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2343 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2344 if (element_counts_[i] < min || element_counts_[i] > max) {
2345 return false;
2346 }
2347 }
2348 return true;
2349 }
2352 int ConstantPoolArray::Iterator::next_index() {
2353 DCHECK(!is_finished());
2354 int ret = next_index_++;
2355 update_section();
2356 return ret;
2357 }
2360 bool ConstantPoolArray::Iterator::is_finished() {
2361 return next_index_ > array_->last_index(type_, final_section_);
2365 void ConstantPoolArray::Iterator::update_section() {
2366 if (next_index_ > array_->last_index(type_, current_section_) &&
2367 current_section_ != final_section_) {
2368 DCHECK(final_section_ == EXTENDED_SECTION);
2369 current_section_ = EXTENDED_SECTION;
2370 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
2375 bool ConstantPoolArray::is_extended_layout() {
2376 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2377 return IsExtendedField::decode(small_layout_1);
2381 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2382 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
2386 int ConstantPoolArray::first_extended_section_index() {
2387 DCHECK(is_extended_layout());
2388 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2389 return TotalCountField::decode(small_layout_2);
2393 int ConstantPoolArray::get_extended_section_header_offset() {
2394 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
2398 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2399 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2400 return WeakObjectStateField::decode(small_layout_2);
2404 void ConstantPoolArray::set_weak_object_state(
2405 ConstantPoolArray::WeakObjectState state) {
2406 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2407 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2408 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2412 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2413 int index = 0;
2414 if (section == EXTENDED_SECTION) {
2415 DCHECK(is_extended_layout());
2416 index += first_extended_section_index();
2417 }
2419 for (Type type_iter = FIRST_TYPE; type_iter < type;
2420 type_iter = next_type(type_iter)) {
2421 index += number_of_entries(type_iter, section);
2422 }
2423 return index;
2424 }
2428 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2429 return first_index(type, section) + number_of_entries(type, section) - 1;
2433 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2434 if (section == SMALL_SECTION) {
2435 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2436 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2437 switch (type) {
2438 case INT64:
2439 return Int64CountField::decode(small_layout_1);
2440 case CODE_PTR:
2441 return CodePtrCountField::decode(small_layout_1);
2442 case HEAP_PTR:
2443 return HeapPtrCountField::decode(small_layout_1);
2444 case INT32:
2445 return Int32CountField::decode(small_layout_2);
2446 default:
2447 UNREACHABLE();
2448 return 0;
2449 }
2450 } else {
2451 DCHECK(section == EXTENDED_SECTION && is_extended_layout());
2452 int offset = get_extended_section_header_offset();
2453 switch (type) {
2454 case INT64:
2455 offset += kExtendedInt64CountOffset;
2456 break;
2457 case CODE_PTR:
2458 offset += kExtendedCodePtrCountOffset;
2459 break;
2460 case HEAP_PTR:
2461 offset += kExtendedHeapPtrCountOffset;
2462 break;
2463 case INT32:
2464 offset += kExtendedInt32CountOffset;
2465 break;
2466 default:
2467 UNREACHABLE();
2468 }
2469 return READ_INT_FIELD(this, offset);
2470 }
2471 }
2474 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2475 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2476 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2477 (is_extended_layout() &&
2478 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2479 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
2483 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2484 LayoutSection section;
2485 if (is_extended_layout() && index >= first_extended_section_index()) {
2486 section = EXTENDED_SECTION;
2487 } else {
2488 section = SMALL_SECTION;
2489 }
2491 Type type = FIRST_TYPE;
2492 while (index > last_index(type, section)) {
2493 type = next_type(type);
2494 }
2495 DCHECK(type <= LAST_TYPE);
2496 return type;
2497 }
2500 int64_t ConstantPoolArray::get_int64_entry(int index) {
2501 DCHECK(map() == GetHeap()->constant_pool_array_map());
2502 DCHECK(get_type(index) == INT64);
2503 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2507 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2508 STATIC_ASSERT(kDoubleSize == kInt64Size);
2509 DCHECK(map() == GetHeap()->constant_pool_array_map());
2510 DCHECK(get_type(index) == INT64);
2511 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2515 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2516 DCHECK(map() == GetHeap()->constant_pool_array_map());
2517 DCHECK(get_type(index) == CODE_PTR);
2518 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2522 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2523 DCHECK(map() == GetHeap()->constant_pool_array_map());
2524 DCHECK(get_type(index) == HEAP_PTR);
2525 return READ_FIELD(this, OffsetOfElementAt(index));
2529 int32_t ConstantPoolArray::get_int32_entry(int index) {
2530 DCHECK(map() == GetHeap()->constant_pool_array_map());
2531 DCHECK(get_type(index) == INT32);
2532 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2536 void ConstantPoolArray::set(int index, int64_t value) {
2537 DCHECK(map() == GetHeap()->constant_pool_array_map());
2538 DCHECK(get_type(index) == INT64);
2539 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2543 void ConstantPoolArray::set(int index, double value) {
2544 STATIC_ASSERT(kDoubleSize == kInt64Size);
2545 DCHECK(map() == GetHeap()->constant_pool_array_map());
2546 DCHECK(get_type(index) == INT64);
2547 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2551 void ConstantPoolArray::set(int index, Address value) {
2552 DCHECK(map() == GetHeap()->constant_pool_array_map());
2553 DCHECK(get_type(index) == CODE_PTR);
2554 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2558 void ConstantPoolArray::set(int index, Object* value) {
2559 DCHECK(map() == GetHeap()->constant_pool_array_map());
2560 DCHECK(!GetHeap()->InNewSpace(value));
2561 DCHECK(get_type(index) == HEAP_PTR);
2562 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2563 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2567 void ConstantPoolArray::set(int index, int32_t value) {
2568 DCHECK(map() == GetHeap()->constant_pool_array_map());
2569 DCHECK(get_type(index) == INT32);
2570 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2574 void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
2575 DCHECK(map() == GetHeap()->constant_pool_array_map());
2576 DCHECK(offset_is_type(offset, INT32));
2577 WRITE_INT32_FIELD(this, offset, value);
2581 void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
2582 DCHECK(map() == GetHeap()->constant_pool_array_map());
2583 DCHECK(offset_is_type(offset, INT64));
2584 WRITE_INT64_FIELD(this, offset, value);
2588 void ConstantPoolArray::set_at_offset(int offset, double value) {
2589 DCHECK(map() == GetHeap()->constant_pool_array_map());
2590 DCHECK(offset_is_type(offset, INT64));
2591 WRITE_DOUBLE_FIELD(this, offset, value);
2595 void ConstantPoolArray::set_at_offset(int offset, Address value) {
2596 DCHECK(map() == GetHeap()->constant_pool_array_map());
2597 DCHECK(offset_is_type(offset, CODE_PTR));
2598 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
2599 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
2603 void ConstantPoolArray::set_at_offset(int offset, Object* value) {
2604 DCHECK(map() == GetHeap()->constant_pool_array_map());
2605 DCHECK(!GetHeap()->InNewSpace(value));
2606 DCHECK(offset_is_type(offset, HEAP_PTR));
2607 WRITE_FIELD(this, offset, value);
2608 WRITE_BARRIER(GetHeap(), this, offset, value);
2612 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2613 uint32_t small_layout_1 =
2614 Int64CountField::encode(small.count_of(INT64)) |
2615 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2616 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2617 IsExtendedField::encode(false);
2618 uint32_t small_layout_2 =
2619 Int32CountField::encode(small.count_of(INT32)) |
2620 TotalCountField::encode(small.total_count()) |
2621 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2622 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2623 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2624 if (kHeaderSize != kFirstEntryOffset) {
2625 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
2626 WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
2631 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2632 const NumberOfEntries& extended) {
2633 // Initialize small layout fields first.
2634 Init(small);
2636 // Set is_extended_layout field.
2637 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2638 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2639 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2641 // Initialize the extended layout fields.
2642 int extended_header_offset = get_extended_section_header_offset();
2643 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2644 extended.count_of(INT64));
2645 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2646 extended.count_of(CODE_PTR));
2647 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2648 extended.count_of(HEAP_PTR));
2649 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2650 extended.count_of(INT32));
2654 int ConstantPoolArray::size() {
2655 NumberOfEntries small(this, SMALL_SECTION);
2656 if (!is_extended_layout()) {
2657 return SizeFor(small);
2658 } else {
2659 NumberOfEntries extended(this, EXTENDED_SECTION);
2660 return SizeForExtended(small, extended);
2661 }
2662 }
2665 int ConstantPoolArray::length() {
2666 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2667 int length = TotalCountField::decode(small_layout_2);
2668 if (is_extended_layout()) {
2669 length += number_of_entries(INT64, EXTENDED_SECTION) +
2670 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2671 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2672 number_of_entries(INT32, EXTENDED_SECTION);
2673 }
2674 return length;
2675 }
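// Worked example (illustrative): a pool whose small section holds, say, two
// INT64 entries and one HEAP_PTR entry has TotalCountField == 3, so length()
// is 3; if an extended section later contributes four INT32 entries, length()
// becomes 7, while size() additionally accounts for the extended section
// header rounded up to kInt64Size alignment.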
2678 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2679 const DisallowHeapAllocation& promise) {
2680 Heap* heap = GetHeap();
2681 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2682 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2683 return UPDATE_WRITE_BARRIER;
2687 void FixedArray::set(int index,
2688 Object* value,
2689 WriteBarrierMode mode) {
2690 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2691 DCHECK(index >= 0 && index < this->length());
2692 int offset = kHeaderSize + index * kPointerSize;
2693 WRITE_FIELD(this, offset, value);
2694 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2698 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2699 int index,
2700 Object* value) {
2701 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2702 DCHECK(index >= 0 && index < array->length());
2703 int offset = kHeaderSize + index * kPointerSize;
2704 WRITE_FIELD(array, offset, value);
2705 Heap* heap = array->GetHeap();
2706 if (heap->InNewSpace(value)) {
2707 heap->RecordWrite(array->address(), offset);
2712 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2713 int index,
2714 Object* value) {
2715 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2716 DCHECK(index >= 0 && index < array->length());
2717 DCHECK(!array->GetHeap()->InNewSpace(value));
2718 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2722 void FixedArray::set_undefined(int index) {
2723 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2724 DCHECK(index >= 0 && index < this->length());
2725 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2726 WRITE_FIELD(this,
2727 kHeaderSize + index * kPointerSize,
2728 GetHeap()->undefined_value());
2732 void FixedArray::set_null(int index) {
2733 DCHECK(index >= 0 && index < this->length());
2734 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2735 WRITE_FIELD(this,
2736 kHeaderSize + index * kPointerSize,
2737 GetHeap()->null_value());
2741 void FixedArray::set_the_hole(int index) {
2742 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2743 DCHECK(index >= 0 && index < this->length());
2744 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2745 WRITE_FIELD(this,
2746 kHeaderSize + index * kPointerSize,
2747 GetHeap()->the_hole_value());
2751 void FixedArray::FillWithHoles(int from, int to) {
2752 for (int i = from; i < to; i++) {
2753 set_the_hole(i);
2754 }
2755 }
2758 Object** FixedArray::data_start() {
2759 return HeapObject::RawField(this, kHeaderSize);
2763 bool DescriptorArray::IsEmpty() {
2764 DCHECK(length() >= kFirstIndex ||
2765 this == GetHeap()->empty_descriptor_array());
2766 return length() < kFirstIndex;
2770 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2771 WRITE_FIELD(
2772 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2773 }
2776 // Perform a binary search in a fixed array. Low and high are entry indices. If
2777 // there are three entries in this array it should be called with low=0 and
2778 // high=2.
2779 template<SearchMode search_mode, typename T>
2780 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2781 uint32_t hash = name->Hash();
2782 int limit = high;
2784 DCHECK(low <= high);
2786 while (low != high) {
2787 int mid = (low + high) / 2;
2788 Name* mid_name = array->GetSortedKey(mid);
2789 uint32_t mid_hash = mid_name->Hash();
2791 if (mid_hash >= hash) {
2792 high = mid;
2793 } else {
2794 low = mid + 1;
2795 }
2796 }
2798 for (; low <= limit; ++low) {
2799 int sort_index = array->GetSortedKeyIndex(low);
2800 Name* entry = array->GetKey(sort_index);
2801 if (entry->Hash() != hash) break;
2802 if (entry->Equals(name)) {
2803 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2804 return sort_index;
2805 }
2806 return T::kNotFound;
2810 return T::kNotFound;
2811 }
2814 // Perform a linear search in this fixed array. len is the number of entry
2815 // indices that are valid.
2816 template<SearchMode search_mode, typename T>
2817 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2818 uint32_t hash = name->Hash();
2819 if (search_mode == ALL_ENTRIES) {
2820 for (int number = 0; number < len; number++) {
2821 int sorted_index = array->GetSortedKeyIndex(number);
2822 Name* entry = array->GetKey(sorted_index);
2823 uint32_t current_hash = entry->Hash();
2824 if (current_hash > hash) break;
2825 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2826 }
2827 } else {
2828 DCHECK(len >= valid_entries);
2829 for (int number = 0; number < valid_entries; number++) {
2830 Name* entry = array->GetKey(number);
2831 uint32_t current_hash = entry->Hash();
2832 if (current_hash == hash && entry->Equals(name)) return number;
2833 }
2834 }
2835 return T::kNotFound;
2836 }
2839 template<SearchMode search_mode, typename T>
2840 int Search(T* array, Name* name, int valid_entries) {
2841 if (search_mode == VALID_ENTRIES) {
2842 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2843 } else {
2844 SLOW_DCHECK(array->IsSortedNoDuplicates());
2845 }
2847 int nof = array->number_of_entries();
2848 if (nof == 0) return T::kNotFound;
2850 // Fast case: do linear search for small arrays.
2851 const int kMaxElementsForLinearSearch = 8;
2852 if ((search_mode == ALL_ENTRIES &&
2853 nof <= kMaxElementsForLinearSearch) ||
2854 (search_mode == VALID_ENTRIES &&
2855 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2856 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2857 }
2859 // Slow case: perform binary search.
2860 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2861 }
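// Illustrative numbers for the cutoff above: an ALL_ENTRIES lookup over eight
// or fewer entries stays in LinearSearch, while e.g. twenty entries falls
// through to BinarySearch; for VALID_ENTRIES the linear cutoff is
// 3 * kMaxElementsForLinearSearch = 24 valid entries.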
2864 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2865 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
2869 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2870 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2871 if (number_of_own_descriptors == 0) return kNotFound;
2873 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2874 int number = cache->Lookup(map, name);
2876 if (number == DescriptorLookupCache::kAbsent) {
2877 number = Search(name, number_of_own_descriptors);
2878 cache->Update(map, name, number);
2879 }
2881 return number;
2882 }
2885 PropertyDetails Map::GetLastDescriptorDetails() {
2886 return instance_descriptors()->GetDetails(LastAdded());
2890 void Map::LookupDescriptor(JSObject* holder,
2891 Name* name,
2892 LookupResult* result) {
2893 DescriptorArray* descriptors = this->instance_descriptors();
2894 int number = descriptors->SearchWithCache(name, this);
2895 if (number == DescriptorArray::kNotFound) return result->NotFound();
2896 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2900 void Map::LookupTransition(JSObject* holder,
2901 Name* name,
2902 LookupResult* result) {
2903 int transition_index = this->SearchTransition(name);
2904 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
2905 result->TransitionResult(holder, this->GetTransition(transition_index));
2909 FixedArrayBase* Map::GetInitialElements() {
2910 if (has_fast_smi_or_object_elements() ||
2911 has_fast_double_elements()) {
2912 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2913 return GetHeap()->empty_fixed_array();
2914 } else if (has_external_array_elements()) {
2915 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
2916 DCHECK(!GetHeap()->InNewSpace(empty_array));
2917 return empty_array;
2918 } else if (has_fixed_typed_array_elements()) {
2919 FixedTypedArrayBase* empty_array =
2920 GetHeap()->EmptyFixedTypedArrayForMap(this);
2921 DCHECK(!GetHeap()->InNewSpace(empty_array));
2922 return empty_array;
2923 } else if (has_dictionary_elements()) {
2924 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_slow_element_dictionary()));
2925 return GetHeap()->empty_slow_element_dictionary();
2926 } else {
2927 UNREACHABLE();
2928 }
2929 return NULL;
2930 }
2933 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2934 DCHECK(descriptor_number < number_of_descriptors());
2935 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2939 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2940 return GetKeySlot(descriptor_number);
2944 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2945 return GetValueSlot(descriptor_number - 1) + 1;
2949 Name* DescriptorArray::GetKey(int descriptor_number) {
2950 DCHECK(descriptor_number < number_of_descriptors());
2951 return Name::cast(get(ToKeyIndex(descriptor_number)));
2955 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2956 return GetDetails(descriptor_number).pointer();
2960 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2961 return GetKey(GetSortedKeyIndex(descriptor_number));
2965 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2966 PropertyDetails details = GetDetails(descriptor_index);
2967 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2971 void DescriptorArray::SetRepresentation(int descriptor_index,
2972 Representation representation) {
2973 DCHECK(!representation.IsNone());
2974 PropertyDetails details = GetDetails(descriptor_index);
2975 set(ToDetailsIndex(descriptor_index),
2976 details.CopyWithRepresentation(representation).AsSmi());
2980 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2981 DCHECK(descriptor_number < number_of_descriptors());
2982 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2986 int DescriptorArray::GetValueOffset(int descriptor_number) {
2987 return OffsetOfElementAt(ToValueIndex(descriptor_number));
2991 Object* DescriptorArray::GetValue(int descriptor_number) {
2992 DCHECK(descriptor_number < number_of_descriptors());
2993 return get(ToValueIndex(descriptor_number));
2997 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
2998 set(ToValueIndex(descriptor_index), value);
3002 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3003 DCHECK(descriptor_number < number_of_descriptors());
3004 Object* details = get(ToDetailsIndex(descriptor_number));
3005 return PropertyDetails(Smi::cast(details));
3009 PropertyType DescriptorArray::GetType(int descriptor_number) {
3010 return GetDetails(descriptor_number).type();
3014 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3015 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3016 return GetDetails(descriptor_number).field_index();
3020 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3021 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3022 return HeapType::cast(GetValue(descriptor_number));
3026 Object* DescriptorArray::GetConstant(int descriptor_number) {
3027 return GetValue(descriptor_number);
3031 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3032 DCHECK(GetType(descriptor_number) == CALLBACKS);
3033 return GetValue(descriptor_number);
3037 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3038 DCHECK(GetType(descriptor_number) == CALLBACKS);
3039 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3040 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3044 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3045 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3046 handle(GetValue(descriptor_number), GetIsolate()),
3047 GetDetails(descriptor_number));
3051 void DescriptorArray::Set(int descriptor_number,
3052 Descriptor* desc,
3053 const WhitenessWitness&) {
3055 DCHECK(descriptor_number < number_of_descriptors());
3057 NoIncrementalWriteBarrierSet(this,
3058 ToKeyIndex(descriptor_number),
3059 *desc->GetKey());
3060 NoIncrementalWriteBarrierSet(this,
3061 ToValueIndex(descriptor_number),
3062 *desc->GetValue());
3063 NoIncrementalWriteBarrierSet(this,
3064 ToDetailsIndex(descriptor_number),
3065 desc->GetDetails().AsSmi());
3066 }
3069 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3071 DCHECK(descriptor_number < number_of_descriptors());
3073 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3074 set(ToValueIndex(descriptor_number), *desc->GetValue());
3075 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3079 void DescriptorArray::Append(Descriptor* desc,
3080 const WhitenessWitness& witness) {
3081 DisallowHeapAllocation no_gc;
3082 int descriptor_number = number_of_descriptors();
3083 SetNumberOfDescriptors(descriptor_number + 1);
3084 Set(descriptor_number, desc, witness);
3086 uint32_t hash = desc->GetKey()->Hash();
3088 int insertion;
3090 for (insertion = descriptor_number; insertion > 0; --insertion) {
3091 Name* key = GetSortedKey(insertion - 1);
3092 if (key->Hash() <= hash) break;
3093 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3094 }
3096 SetSortedKey(insertion, descriptor_number);
3097 }
3100 void DescriptorArray::Append(Descriptor* desc) {
3101 DisallowHeapAllocation no_gc;
3102 int descriptor_number = number_of_descriptors();
3103 SetNumberOfDescriptors(descriptor_number + 1);
3104 Set(descriptor_number, desc);
3106 uint32_t hash = desc->GetKey()->Hash();
3108 int insertion;
3110 for (insertion = descriptor_number; insertion > 0; --insertion) {
3111 Name* key = GetSortedKey(insertion - 1);
3112 if (key->Hash() <= hash) break;
3113 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3114 }
3116 SetSortedKey(insertion, descriptor_number);
3117 }
3120 void DescriptorArray::SwapSortedKeys(int first, int second) {
3121 int first_key = GetSortedKeyIndex(first);
3122 SetSortedKey(first, GetSortedKeyIndex(second));
3123 SetSortedKey(second, first_key);
3127 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3128 : marking_(array->GetHeap()->incremental_marking()) {
3129 marking_->EnterNoMarkingScope();
3130 DCHECK(!marking_->IsMarking() ||
3131 Marking::Color(array) == Marking::WHITE_OBJECT);
3135 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3136 marking_->LeaveNoMarkingScope();
3140 template<typename Derived, typename Shape, typename Key>
3141 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3142 const int kMinCapacity = 32;
3143 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
3144 if (capacity < kMinCapacity) {
3145 capacity = kMinCapacity; // Guarantee min capacity.
3146 }
3147 return capacity;
3148 }
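// Worked example (illustrative): asking for space for 50 entries rounds
// 2 * 50 = 100 up to the next power of two, giving a capacity of 128; any
// request that would round below 32 is clamped to kMinCapacity.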
3151 template<typename Derived, typename Shape, typename Key>
3152 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3153 return FindEntry(GetIsolate(), key);
3157 // Find entry for key otherwise return kNotFound.
3158 template<typename Derived, typename Shape, typename Key>
3159 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3160 uint32_t capacity = Capacity();
3161 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3162 uint32_t count = 1;
3163 // EnsureCapacity will guarantee the hash table is never full.
3164 while (true) {
3165 Object* element = KeyAt(entry);
3166 // Empty entry. Uses raw unchecked accessors because it is called by the
3167 // string table during bootstrapping.
3168 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3169 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3170 Shape::IsMatch(key, element)) return entry;
3171 entry = NextProbe(entry, count++, capacity);
3172 }
3173 return kNotFound;
3174 }
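// For illustration only: the lookup above is plain open addressing. Probing
// starts at FirstProbe(Hash(key), capacity) and continues with
// NextProbe(entry, count++, capacity) until it either matches via
// Shape::IsMatch, or reaches an undefined slot (never written, so the key
// cannot be further along the probe sequence); slots holding the hole mark
// deletions and are probed past.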
3177 bool SeededNumberDictionary::requires_slow_elements() {
3178 Object* max_index_object = get(kMaxNumberKeyIndex);
3179 if (!max_index_object->IsSmi()) return false;
3180 return 0 !=
3181 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3182 }
3184 uint32_t SeededNumberDictionary::max_number_key() {
3185 DCHECK(!requires_slow_elements());
3186 Object* max_index_object = get(kMaxNumberKeyIndex);
3187 if (!max_index_object->IsSmi()) return 0;
3188 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3189 return value >> kRequiresSlowElementsTagSize;
3192 void SeededNumberDictionary::set_requires_slow_elements() {
3193 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3197 // ------------------------------------
3198 // Cast operations
3201 CAST_ACCESSOR(AccessorInfo)
3202 CAST_ACCESSOR(ByteArray)
3203 CAST_ACCESSOR(Cell)
3204 CAST_ACCESSOR(Code)
3205 CAST_ACCESSOR(CodeCacheHashTable)
3206 CAST_ACCESSOR(CompilationCacheTable)
3207 CAST_ACCESSOR(ConsString)
3208 CAST_ACCESSOR(ConstantPoolArray)
3209 CAST_ACCESSOR(DeoptimizationInputData)
3210 CAST_ACCESSOR(DeoptimizationOutputData)
3211 CAST_ACCESSOR(DependentCode)
3212 CAST_ACCESSOR(DescriptorArray)
3213 CAST_ACCESSOR(ExternalArray)
3214 CAST_ACCESSOR(ExternalAsciiString)
3215 CAST_ACCESSOR(ExternalFloat32Array)
3216 CAST_ACCESSOR(ExternalFloat64Array)
3217 CAST_ACCESSOR(ExternalInt16Array)
3218 CAST_ACCESSOR(ExternalInt32Array)
3219 CAST_ACCESSOR(ExternalInt8Array)
3220 CAST_ACCESSOR(ExternalString)
3221 CAST_ACCESSOR(ExternalTwoByteString)
3222 CAST_ACCESSOR(ExternalUint16Array)
3223 CAST_ACCESSOR(ExternalUint32Array)
3224 CAST_ACCESSOR(ExternalUint8Array)
3225 CAST_ACCESSOR(ExternalUint8ClampedArray)
3226 CAST_ACCESSOR(FixedArray)
3227 CAST_ACCESSOR(FixedArrayBase)
3228 CAST_ACCESSOR(FixedDoubleArray)
3229 CAST_ACCESSOR(FixedTypedArrayBase)
3230 CAST_ACCESSOR(Foreign)
3231 CAST_ACCESSOR(FreeSpace)
3232 CAST_ACCESSOR(GlobalObject)
3233 CAST_ACCESSOR(HeapObject)
3234 CAST_ACCESSOR(JSArray)
3235 CAST_ACCESSOR(JSArrayBuffer)
3236 CAST_ACCESSOR(JSArrayBufferView)
3237 CAST_ACCESSOR(JSBuiltinsObject)
3238 CAST_ACCESSOR(JSDataView)
3239 CAST_ACCESSOR(JSDate)
3240 CAST_ACCESSOR(JSFunction)
3241 CAST_ACCESSOR(JSFunctionProxy)
3242 CAST_ACCESSOR(JSFunctionResultCache)
3243 CAST_ACCESSOR(JSGeneratorObject)
3244 CAST_ACCESSOR(JSGlobalObject)
3245 CAST_ACCESSOR(JSGlobalProxy)
3246 CAST_ACCESSOR(JSMap)
3247 CAST_ACCESSOR(JSMapIterator)
3248 CAST_ACCESSOR(JSMessageObject)
3249 CAST_ACCESSOR(JSModule)
3250 CAST_ACCESSOR(JSObject)
3251 CAST_ACCESSOR(JSProxy)
3252 CAST_ACCESSOR(JSReceiver)
3253 CAST_ACCESSOR(JSRegExp)
3254 CAST_ACCESSOR(JSSet)
3255 CAST_ACCESSOR(JSSetIterator)
3256 CAST_ACCESSOR(JSTypedArray)
3257 CAST_ACCESSOR(JSValue)
3258 CAST_ACCESSOR(JSWeakMap)
3259 CAST_ACCESSOR(JSWeakSet)
3260 CAST_ACCESSOR(Map)
3261 CAST_ACCESSOR(MapCache)
3262 CAST_ACCESSOR(Name)
3263 CAST_ACCESSOR(NameDictionary)
3264 CAST_ACCESSOR(NormalizedMapCache)
3265 CAST_ACCESSOR(Object)
3266 CAST_ACCESSOR(ObjectHashTable)
3267 CAST_ACCESSOR(Oddball)
3268 CAST_ACCESSOR(OrderedHashMap)
3269 CAST_ACCESSOR(OrderedHashSet)
3270 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3271 CAST_ACCESSOR(PropertyCell)
3272 CAST_ACCESSOR(ScopeInfo)
3273 CAST_ACCESSOR(SeededNumberDictionary)
3274 CAST_ACCESSOR(SeqOneByteString)
3275 CAST_ACCESSOR(SeqString)
3276 CAST_ACCESSOR(SeqTwoByteString)
3277 CAST_ACCESSOR(SharedFunctionInfo)
3278 CAST_ACCESSOR(SlicedString)
3279 CAST_ACCESSOR(Smi)
3280 CAST_ACCESSOR(String)
3281 CAST_ACCESSOR(StringTable)
3282 CAST_ACCESSOR(Struct)
3283 CAST_ACCESSOR(Symbol)
3284 CAST_ACCESSOR(UnseededNumberDictionary)
3285 CAST_ACCESSOR(WeakHashTable)
3288 template <class Traits>
3289 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3290 SLOW_DCHECK(object->IsHeapObject() &&
3291 HeapObject::cast(object)->map()->instance_type() ==
3292 Traits::kInstanceType);
3293 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3297 template <class Traits>
3298 const FixedTypedArray<Traits>*
3299 FixedTypedArray<Traits>::cast(const Object* object) {
3300 SLOW_DCHECK(object->IsHeapObject() &&
3301 HeapObject::cast(object)->map()->instance_type() ==
3302 Traits::kInstanceType);
3303 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3307 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3308 STRUCT_LIST(MAKE_STRUCT_CAST)
3309 #undef MAKE_STRUCT_CAST
3312 template <typename Derived, typename Shape, typename Key>
3313 HashTable<Derived, Shape, Key>*
3314 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3315 SLOW_DCHECK(obj->IsHashTable());
3316 return reinterpret_cast<HashTable*>(obj);
3320 template <typename Derived, typename Shape, typename Key>
3321 const HashTable<Derived, Shape, Key>*
3322 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3323 SLOW_DCHECK(obj->IsHashTable());
3324 return reinterpret_cast<const HashTable*>(obj);
3328 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3329 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3331 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3332 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3334 SMI_ACCESSORS(String, length, kLengthOffset)
3335 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3338 uint32_t Name::hash_field() {
3339 return READ_UINT32_FIELD(this, kHashFieldOffset);
3343 void Name::set_hash_field(uint32_t value) {
3344 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3345 #if V8_HOST_ARCH_64_BIT
3346 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
3347 #endif
3348 }
3351 bool Name::Equals(Name* other) {
3352 if (other == this) return true;
3353 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3354 this->IsSymbol() || other->IsSymbol()) {
3355 return false;
3356 }
3357 return String::cast(this)->SlowEquals(String::cast(other));
3358 }
3361 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3362 if (one.is_identical_to(two)) return true;
3363 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3364 one->IsSymbol() || two->IsSymbol()) {
3365 return false;
3366 }
3367 return String::SlowEquals(Handle<String>::cast(one),
3368 Handle<String>::cast(two));
3369 }
3372 ACCESSORS(Symbol, name, Object, kNameOffset)
3373 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3374 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3377 bool String::Equals(String* other) {
3378 if (other == this) return true;
3379 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3380 return false;
3381 }
3382 return SlowEquals(other);
3383 }
3386 bool String::Equals(Handle<String> one, Handle<String> two) {
3387 if (one.is_identical_to(two)) return true;
3388 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3389 return false;
3390 }
3391 return SlowEquals(one, two);
3392 }
3395 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3396 if (!string->IsConsString()) return string;
3397 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3398 if (cons->IsFlat()) return handle(cons->first());
3399 return SlowFlatten(cons, pretenure);
3403 uint16_t String::Get(int index) {
3404 DCHECK(index >= 0 && index < length());
3405 switch (StringShape(this).full_representation_tag()) {
3406 case kSeqStringTag | kOneByteStringTag:
3407 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3408 case kSeqStringTag | kTwoByteStringTag:
3409 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3410 case kConsStringTag | kOneByteStringTag:
3411 case kConsStringTag | kTwoByteStringTag:
3412 return ConsString::cast(this)->ConsStringGet(index);
3413 case kExternalStringTag | kOneByteStringTag:
3414 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
3415 case kExternalStringTag | kTwoByteStringTag:
3416 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3417 case kSlicedStringTag | kOneByteStringTag:
3418 case kSlicedStringTag | kTwoByteStringTag:
3419 return SlicedString::cast(this)->SlicedStringGet(index);
3420 default:
3421 break;
3422 }
3424 UNREACHABLE();
3425 return 0;
3426 }
3429 void String::Set(int index, uint16_t value) {
3430 DCHECK(index >= 0 && index < length());
3431 DCHECK(StringShape(this).IsSequential());
3433 return this->IsOneByteRepresentation()
3434 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3435 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3439 bool String::IsFlat() {
3440 if (!StringShape(this).IsCons()) return true;
3441 return ConsString::cast(this)->second()->length() == 0;
3445 String* String::GetUnderlying() {
3446 // Giving direct access to underlying string only makes sense if the
3447 // wrapping string is already flattened.
3448 DCHECK(this->IsFlat());
3449 DCHECK(StringShape(this).IsIndirect());
3450 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3451 const int kUnderlyingOffset = SlicedString::kParentOffset;
3452 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3456 template<class Visitor>
3457 ConsString* String::VisitFlat(Visitor* visitor,
3458 String* string,
3459 const int offset) {
3460 int slice_offset = offset;
3461 const int length = string->length();
3462 DCHECK(offset <= length);
3463 while (true) {
3464 int32_t type = string->map()->instance_type();
3465 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3466 case kSeqStringTag | kOneByteStringTag:
3467 visitor->VisitOneByteString(
3468 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3469 length - offset);
3470 return NULL;
3472 case kSeqStringTag | kTwoByteStringTag:
3473 visitor->VisitTwoByteString(
3474 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3475 length - offset);
3476 return NULL;
3478 case kExternalStringTag | kOneByteStringTag:
3479 visitor->VisitOneByteString(
3480 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3481 length - offset);
3482 return NULL;
3484 case kExternalStringTag | kTwoByteStringTag:
3485 visitor->VisitTwoByteString(
3486 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3487 length - offset);
3488 return NULL;
3490 case kSlicedStringTag | kOneByteStringTag:
3491 case kSlicedStringTag | kTwoByteStringTag: {
3492 SlicedString* slicedString = SlicedString::cast(string);
3493 slice_offset += slicedString->offset();
3494 string = slicedString->parent();
3495 continue;
3496 }
3498 case kConsStringTag | kOneByteStringTag:
3499 case kConsStringTag | kTwoByteStringTag:
3500 return ConsString::cast(string);
3502 default:
3503 UNREACHABLE();
3504 return NULL;
3505 }
3506 }
3507 }
3510 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3511 DCHECK(index >= 0 && index < length());
3512 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3516 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3517 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3518 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3519 static_cast<byte>(value));
3523 Address SeqOneByteString::GetCharsAddress() {
3524 return FIELD_ADDR(this, kHeaderSize);
3528 uint8_t* SeqOneByteString::GetChars() {
3529 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3533 Address SeqTwoByteString::GetCharsAddress() {
3534 return FIELD_ADDR(this, kHeaderSize);
3538 uc16* SeqTwoByteString::GetChars() {
3539 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3543 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3544 DCHECK(index >= 0 && index < length());
3545 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3549 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3550 DCHECK(index >= 0 && index < length());
3551 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3555 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3556 return SizeFor(length());
3560 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3561 return SizeFor(length());
3565 String* SlicedString::parent() {
3566 return String::cast(READ_FIELD(this, kParentOffset));
3570 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3571 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3572 WRITE_FIELD(this, kParentOffset, parent);
3573 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3577 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3580 String* ConsString::first() {
3581 return String::cast(READ_FIELD(this, kFirstOffset));
3585 Object* ConsString::unchecked_first() {
3586 return READ_FIELD(this, kFirstOffset);
3590 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3591 WRITE_FIELD(this, kFirstOffset, value);
3592 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3596 String* ConsString::second() {
3597 return String::cast(READ_FIELD(this, kSecondOffset));
3601 Object* ConsString::unchecked_second() {
3602 return READ_FIELD(this, kSecondOffset);
3606 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3607 WRITE_FIELD(this, kSecondOffset, value);
3608 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3612 bool ExternalString::is_short() {
3613 InstanceType type = map()->instance_type();
3614 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3618 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3619 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3623 void ExternalAsciiString::update_data_cache() {
3624 if (is_short()) return;
3625 const char** data_field =
3626 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3627 *data_field = resource()->data();
3631 void ExternalAsciiString::set_resource(
3632 const ExternalAsciiString::Resource* resource) {
3633 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3634 *reinterpret_cast<const Resource**>(
3635 FIELD_ADDR(this, kResourceOffset)) = resource;
3636 if (resource != NULL) update_data_cache();
3640 const uint8_t* ExternalAsciiString::GetChars() {
3641 return reinterpret_cast<const uint8_t*>(resource()->data());
3645 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3646 DCHECK(index >= 0 && index < length());
3647 return GetChars()[index];
3651 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3652 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3656 void ExternalTwoByteString::update_data_cache() {
3657 if (is_short()) return;
3658 const uint16_t** data_field =
3659 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3660 *data_field = resource()->data();
3664 void ExternalTwoByteString::set_resource(
3665 const ExternalTwoByteString::Resource* resource) {
3666 *reinterpret_cast<const Resource**>(
3667 FIELD_ADDR(this, kResourceOffset)) = resource;
3668 if (resource != NULL) update_data_cache();
3672 const uint16_t* ExternalTwoByteString::GetChars() {
3673 return resource()->data();
3677 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3678 DCHECK(index >= 0 && index < length());
3679 return GetChars()[index];
3683 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3684 unsigned start) {
3685 return GetChars() + start;
3686 }
3689 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3690 return depth & kDepthMask;
3694 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3695 frames_[depth_++ & kDepthMask] = string;
3699 void ConsStringIteratorOp::PushRight(ConsString* string) {
3701 frames_[(depth_-1) & kDepthMask] = string;
3705 void ConsStringIteratorOp::AdjustMaximumDepth() {
3706 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3710 void ConsStringIteratorOp::Pop() {
3711 DCHECK(depth_ > 0);
3712 DCHECK(depth_ <= maximum_depth_);
3713 depth_--;
3714 }
3717 uint16_t StringCharacterStream::GetNext() {
3718 DCHECK(buffer8_ != NULL && end_ != NULL);
3719 // Advance cursor if needed.
3720 if (buffer8_ == end_) HasMore();
3721 DCHECK(buffer8_ < end_);
3722 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3726 StringCharacterStream::StringCharacterStream(String* string,
3727 ConsStringIteratorOp* op,
3728 int offset)
3729 : is_one_byte_(false),
3730 op_(op) {
3731 Reset(string, offset);
3735 void StringCharacterStream::Reset(String* string, int offset) {
3736 buffer8_ = NULL;
3737 end_ = NULL;
3738 ConsString* cons_string = String::VisitFlat(this, string, offset);
3739 op_->Reset(cons_string, offset);
3740 if (cons_string != NULL) {
3741 string = op_->Next(&offset);
3742 if (string != NULL) String::VisitFlat(this, string, offset);
3747 bool StringCharacterStream::HasMore() {
3748 if (buffer8_ != end_) return true;
3749 int offset;
3750 String* string = op_->Next(&offset);
3751 DCHECK_EQ(offset, 0);
3752 if (string == NULL) return false;
3753 String::VisitFlat(this, string);
3754 DCHECK(buffer8_ != end_);
3755 return true;
3756 }
3759 void StringCharacterStream::VisitOneByteString(
3760 const uint8_t* chars, int length) {
3761 is_one_byte_ = true;
3762 buffer8_ = chars;
3763 end_ = chars + length;
3764 }
3767 void StringCharacterStream::VisitTwoByteString(
3768 const uint16_t* chars, int length) {
3769 is_one_byte_ = false;
3770 buffer16_ = chars;
3771 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3772 }
3775 void JSFunctionResultCache::MakeZeroSize() {
3776 set_finger_index(kEntriesIndex);
3777 set_size(kEntriesIndex);
3781 void JSFunctionResultCache::Clear() {
3782 int cache_size = size();
3783 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3784 MemsetPointer(entries_start,
3785 GetHeap()->the_hole_value(),
3786 cache_size - kEntriesIndex);
3787 MakeZeroSize();
3788 }
3791 int JSFunctionResultCache::size() {
3792 return Smi::cast(get(kCacheSizeIndex))->value();
3796 void JSFunctionResultCache::set_size(int size) {
3797 set(kCacheSizeIndex, Smi::FromInt(size));
3801 int JSFunctionResultCache::finger_index() {
3802 return Smi::cast(get(kFingerIndex))->value();
3806 void JSFunctionResultCache::set_finger_index(int finger_index) {
3807 set(kFingerIndex, Smi::FromInt(finger_index));
3811 byte ByteArray::get(int index) {
3812 DCHECK(index >= 0 && index < this->length());
3813 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3817 void ByteArray::set(int index, byte value) {
3818 DCHECK(index >= 0 && index < this->length());
3819 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3823 int ByteArray::get_int(int index) {
3824 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3825 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3829 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3830 DCHECK_TAG_ALIGNED(address);
3831 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3835 Address ByteArray::GetDataStartAddress() {
3836 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3840 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3841 return reinterpret_cast<uint8_t*>(external_pointer());
3845 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3846 DCHECK((index >= 0) && (index < this->length()));
3847 uint8_t* ptr = external_uint8_clamped_pointer();
3848 return ptr[index];
3849 }
3852 Handle<Object> ExternalUint8ClampedArray::get(
3853 Handle<ExternalUint8ClampedArray> array,
3854 int index) {
3855 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3856 array->GetIsolate());
3860 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3861 DCHECK((index >= 0) && (index < this->length()));
3862 uint8_t* ptr = external_uint8_clamped_pointer();
3863 ptr[index] = value;
3864 }
3867 void* ExternalArray::external_pointer() const {
3868 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3869 return reinterpret_cast<void*>(ptr);
3873 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3874 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3875 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3879 int8_t ExternalInt8Array::get_scalar(int index) {
3880 DCHECK((index >= 0) && (index < this->length()));
3881 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3882 return ptr[index];
3883 }
3886 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3887 int index) {
3888 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3889 array->GetIsolate());
3893 void ExternalInt8Array::set(int index, int8_t value) {
3894 DCHECK((index >= 0) && (index < this->length()));
3895 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3896 ptr[index] = value;
3897 }
3900 uint8_t ExternalUint8Array::get_scalar(int index) {
3901 DCHECK((index >= 0) && (index < this->length()));
3902 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3903 return ptr[index];
3904 }
3907 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3908 int index) {
3909 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3910 array->GetIsolate());
3914 void ExternalUint8Array::set(int index, uint8_t value) {
3915 DCHECK((index >= 0) && (index < this->length()));
3916 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3917 ptr[index] = value;
3918 }
3921 int16_t ExternalInt16Array::get_scalar(int index) {
3922 DCHECK((index >= 0) && (index < this->length()));
3923 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3924 return ptr[index];
3925 }
3928 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
3929 int index) {
3930 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3931 array->GetIsolate());
3935 void ExternalInt16Array::set(int index, int16_t value) {
3936 DCHECK((index >= 0) && (index < this->length()));
3937 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3938 ptr[index] = value;
3939 }
3942 uint16_t ExternalUint16Array::get_scalar(int index) {
3943 DCHECK((index >= 0) && (index < this->length()));
3944 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3945 return ptr[index];
3946 }
3949 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
3950 int index) {
3951 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3952 array->GetIsolate());
3956 void ExternalUint16Array::set(int index, uint16_t value) {
3957 DCHECK((index >= 0) && (index < this->length()));
3958 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3959 ptr[index] = value;
3960 }
3963 int32_t ExternalInt32Array::get_scalar(int index) {
3964 DCHECK((index >= 0) && (index < this->length()));
3965 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3966 return ptr[index];
3967 }
3970 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
3971 int index) {
3972 return array->GetIsolate()->factory()->
3973 NewNumberFromInt(array->get_scalar(index));
3977 void ExternalInt32Array::set(int index, int32_t value) {
3978 DCHECK((index >= 0) && (index < this->length()));
3979 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3980 ptr[index] = value;
3981 }
3984 uint32_t ExternalUint32Array::get_scalar(int index) {
3985 DCHECK((index >= 0) && (index < this->length()));
3986 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3987 return ptr[index];
3988 }
3991 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
3992 int index) {
3993 return array->GetIsolate()->factory()->
3994 NewNumberFromUint(array->get_scalar(index));
3998 void ExternalUint32Array::set(int index, uint32_t value) {
3999 DCHECK((index >= 0) && (index < this->length()));
4000 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4001 ptr[index] = value;
4002 }
4005 float ExternalFloat32Array::get_scalar(int index) {
4006 DCHECK((index >= 0) && (index < this->length()));
4007 float* ptr = static_cast<float*>(external_pointer());
4008 return ptr[index];
4009 }
4012 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
4013 int index) {
4014 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4018 void ExternalFloat32Array::set(int index, float value) {
4019 DCHECK((index >= 0) && (index < this->length()));
4020 float* ptr = static_cast<float*>(external_pointer());
4021 ptr[index] = value;
4022 }
4025 double ExternalFloat64Array::get_scalar(int index) {
4026 DCHECK((index >= 0) && (index < this->length()));
4027 double* ptr = static_cast<double*>(external_pointer());
4028 return ptr[index];
4029 }
4032 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
4033 int index) {
4034 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4038 void ExternalFloat64Array::set(int index, double value) {
4039 DCHECK((index >= 0) && (index < this->length()));
4040 double* ptr = static_cast<double*>(external_pointer());
4041 ptr[index] = value;
4042 }
4045 void* FixedTypedArrayBase::DataPtr() {
4046 return FIELD_ADDR(this, kDataOffset);
4050 int FixedTypedArrayBase::DataSize(InstanceType type) {
4051 int element_size;
4052 switch (type) {
4053 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4054 case FIXED_##TYPE##_ARRAY_TYPE: \
4055 element_size = size; \
4056 break;
4058 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4059 #undef TYPED_ARRAY_CASE
4060 default:
4061 UNREACHABLE();
4062 return 0;
4063 }
4064 return length() * element_size;
4065 }
4068 int FixedTypedArrayBase::DataSize() {
4069 return DataSize(map()->instance_type());
4073 int FixedTypedArrayBase::size() {
4074 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4078 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4079 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4083 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4086 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4089 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4092 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4095 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4098 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4101 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4104 float Float32ArrayTraits::defaultValue() {
4105 return static_cast<float>(base::OS::nan_value());
4109 double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
4112 template <class Traits>
4113 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4114 DCHECK((index >= 0) && (index < this->length()));
4115 ElementType* ptr = reinterpret_cast<ElementType*>(
4116 FIELD_ADDR(this, kDataOffset));
4117 return ptr[index];
4118 }
4122 FixedTypedArray<Float64ArrayTraits>::ElementType
4123 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
4124 DCHECK((index >= 0) && (index < this->length()));
4125 return READ_DOUBLE_FIELD(this, ElementOffset(index));
4129 template <class Traits>
4130 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4131 DCHECK((index >= 0) && (index < this->length()));
4132 ElementType* ptr = reinterpret_cast<ElementType*>(
4133 FIELD_ADDR(this, kDataOffset));
4134 ptr[index] = value;
4135 }
4139 void FixedTypedArray<Float64ArrayTraits>::set(
4140 int index, Float64ArrayTraits::ElementType value) {
4141 DCHECK((index >= 0) && (index < this->length()));
4142 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
4146 template <class Traits>
4147 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4148 return static_cast<ElementType>(value);
4153 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4154 if (value < 0) return 0;
4155 if (value > 0xFF) return 0xFF;
4156 return static_cast<uint8_t>(value);
4160 template <class Traits>
4161 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4162 double value) {
4163 return static_cast<ElementType>(DoubleToInt32(value));
4168 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4169 if (value < 0) return 0;
4170 if (value > 0xFF) return 0xFF;
4171 return static_cast<uint8_t>(lrint(value));
4176 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4177 return static_cast<float>(value);
4182 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4183 return value;
4184 }
4187 template <class Traits>
4188 Handle<Object> FixedTypedArray<Traits>::get(
4189 Handle<FixedTypedArray<Traits> > array,
4190 int index) {
4191 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4195 template <class Traits>
4196 Handle<Object> FixedTypedArray<Traits>::SetValue(
4197 Handle<FixedTypedArray<Traits> > array,
4198 uint32_t index,
4199 Handle<Object> value) {
4200 ElementType cast_value = Traits::defaultValue();
4201 if (index < static_cast<uint32_t>(array->length())) {
4202 if (value->IsSmi()) {
4203 int int_value = Handle<Smi>::cast(value)->value();
4204 cast_value = from_int(int_value);
4205 } else if (value->IsHeapNumber()) {
4206 double double_value = Handle<HeapNumber>::cast(value)->value();
4207 cast_value = from_double(double_value);
4208 } else {
4209 // Clamp undefined to the default value. All other types have been
4210 // converted to a number type further up in the call chain.
4211 DCHECK(value->IsUndefined());
4212 }
4213 array->set(index, cast_value);
4214 }
4215 return Traits::ToHandle(array->GetIsolate(), cast_value);
4216 }
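// Illustrative example: storing the JS value undefined into a
// FixedTypedArray<Uint8ClampedArrayTraits> leaves cast_value at
// Traits::defaultValue() (0), whereas a HeapNumber holding 300.7 is routed
// through from_double(), which the Uint8Clamped specialization clamps to 255.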
4219 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4220 return handle(Smi::FromInt(scalar), isolate);
4224 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4225 uint8_t scalar) {
4226 return handle(Smi::FromInt(scalar), isolate);
4230 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4231 return handle(Smi::FromInt(scalar), isolate);
4235 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4236 return handle(Smi::FromInt(scalar), isolate);
4240 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4241 return handle(Smi::FromInt(scalar), isolate);
4245 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4246 return isolate->factory()->NewNumberFromUint(scalar);
4250 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4251 return isolate->factory()->NewNumberFromInt(scalar);
4255 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4256 return isolate->factory()->NewNumber(scalar);
4260 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4261 return isolate->factory()->NewNumber(scalar);
4265 int Map::visitor_id() {
4266 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4270 void Map::set_visitor_id(int id) {
4271 DCHECK(0 <= id && id < 256);
4272 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4276 int Map::instance_size() {
4277 return NOBARRIER_READ_BYTE_FIELD(
4278 this, kInstanceSizeOffset) << kPointerSizeLog2;
4282 int Map::inobject_properties() {
4283 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4287 int Map::pre_allocated_property_fields() {
4288 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
4292 int Map::GetInObjectPropertyOffset(int index) {
4293 // Adjust for the number of properties stored in the object.
4294 index -= inobject_properties();
4296 return instance_size() + (index * kPointerSize);
4297 }
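// Illustrative example: with inobject_properties() == 2, property index 0
// maps to offset instance_size() - 2 * kPointerSize and index 1 to
// instance_size() - kPointerSize, i.e. the in-object fields sit at the end of
// the instance.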
4300 int HeapObject::SizeFromMap(Map* map) {
4301 int instance_size = map->instance_size();
4302 if (instance_size != kVariableSizeSentinel) return instance_size;
4303 // Only inline the most frequent cases.
4304 InstanceType instance_type = map->instance_type();
4305 if (instance_type == FIXED_ARRAY_TYPE) {
4306 return FixedArray::BodyDescriptor::SizeOf(map, this);
4308 if (instance_type == ASCII_STRING_TYPE ||
4309 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
4310 return SeqOneByteString::SizeFor(
4311 reinterpret_cast<SeqOneByteString*>(this)->length());
4313 if (instance_type == BYTE_ARRAY_TYPE) {
4314 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4316 if (instance_type == FREE_SPACE_TYPE) {
4317 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4319 if (instance_type == STRING_TYPE ||
4320 instance_type == INTERNALIZED_STRING_TYPE) {
4321 return SeqTwoByteString::SizeFor(
4322 reinterpret_cast<SeqTwoByteString*>(this)->length());
4324 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4325 return FixedDoubleArray::SizeFor(
4326 reinterpret_cast<FixedDoubleArray*>(this)->length());
4328 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4329 return reinterpret_cast<ConstantPoolArray*>(this)->size();
4331 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4332 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4333 return reinterpret_cast<FixedTypedArrayBase*>(
4334 this)->TypedArraySize(instance_type);
4336 DCHECK(instance_type == CODE_TYPE);
4337 return reinterpret_cast<Code*>(this)->CodeSize();
4338 }
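// Illustrative example: a FIXED_ARRAY_TYPE object reports
// kVariableSizeSentinel through its map, so its actual size is derived from
// the instance itself (header plus length() tagged slots) rather than from
// Map::instance_size().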
4341 void Map::set_instance_size(int value) {
4342 DCHECK_EQ(0, value & (kPointerSize - 1));
4343 value >>= kPointerSizeLog2;
4344 DCHECK(0 <= value && value < 256);
4345 NOBARRIER_WRITE_BYTE_FIELD(
4346 this, kInstanceSizeOffset, static_cast<byte>(value));
4350 void Map::set_inobject_properties(int value) {
4351 DCHECK(0 <= value && value < 256);
4352 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4356 void Map::set_pre_allocated_property_fields(int value) {
4357 DCHECK(0 <= value && value < 256);
4358 WRITE_BYTE_FIELD(this,
4359 kPreAllocatedPropertyFieldsOffset,
4360 static_cast<byte>(value));
4364 InstanceType Map::instance_type() {
4365 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4369 void Map::set_instance_type(InstanceType value) {
4370 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4374 int Map::unused_property_fields() {
4375 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4379 void Map::set_unused_property_fields(int value) {
4380 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4384 byte Map::bit_field() {
4385 return READ_BYTE_FIELD(this, kBitFieldOffset);
4389 void Map::set_bit_field(byte value) {
4390 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4394 byte Map::bit_field2() {
4395 return READ_BYTE_FIELD(this, kBitField2Offset);
4399 void Map::set_bit_field2(byte value) {
4400 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4404 void Map::set_non_instance_prototype(bool value) {
4405 if (value) {
4406 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4407 } else {
4408 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4409 }
4410 }
4413 bool Map::has_non_instance_prototype() {
4414 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4418 void Map::set_function_with_prototype(bool value) {
4419 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4423 bool Map::function_with_prototype() {
4424 return FunctionWithPrototype::decode(bit_field());
4428 void Map::set_is_access_check_needed(bool access_check_needed) {
4429 if (access_check_needed) {
4430 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4431 } else {
4432 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4433 }
4434 }
4437 bool Map::is_access_check_needed() {
4438 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4442 void Map::set_is_extensible(bool value) {
4443 if (value) {
4444 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4445 } else {
4446 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4447 }
4448 }
4450 bool Map::is_extensible() {
4451 return ((1 << kIsExtensible) & bit_field2()) != 0;
4455 void Map::set_is_prototype_map(bool value) {
4456 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4459 bool Map::is_prototype_map() {
4460 return IsPrototypeMapBits::decode(bit_field2());
4464 void Map::set_dictionary_map(bool value) {
4465 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4466 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4467 set_bit_field3(new_bit_field3);
4471 bool Map::is_dictionary_map() {
4472 return DictionaryMap::decode(bit_field3());
4476 Code::Flags Code::flags() {
4477 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4481 void Map::set_owns_descriptors(bool owns_descriptors) {
4482 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4486 bool Map::owns_descriptors() {
4487 return OwnsDescriptors::decode(bit_field3());
4491 void Map::set_has_instance_call_handler() {
4492 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4496 bool Map::has_instance_call_handler() {
4497 return HasInstanceCallHandler::decode(bit_field3());
4501 void Map::deprecate() {
4502 set_bit_field3(Deprecated::update(bit_field3(), true));
4506 bool Map::is_deprecated() {
4507 return Deprecated::decode(bit_field3());
4511 void Map::set_migration_target(bool value) {
4512 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4516 bool Map::is_migration_target() {
4517 return IsMigrationTarget::decode(bit_field3());
4521 void Map::set_done_inobject_slack_tracking(bool value) {
4522 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
4526 bool Map::done_inobject_slack_tracking() {
4527 return DoneInobjectSlackTracking::decode(bit_field3());
4531 void Map::set_construction_count(int value) {
4532 set_bit_field3(ConstructionCount::update(bit_field3(), value));
4536 int Map::construction_count() {
4537 return ConstructionCount::decode(bit_field3());
4541 void Map::freeze() {
4542 set_bit_field3(IsFrozen::update(bit_field3(), true));
4546 bool Map::is_frozen() {
4547 return IsFrozen::decode(bit_field3());
4551 void Map::mark_unstable() {
4552 set_bit_field3(IsUnstable::update(bit_field3(), true));
4556 bool Map::is_stable() {
4557 return !IsUnstable::decode(bit_field3());
4561 bool Map::has_code_cache() {
4562 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4566 bool Map::CanBeDeprecated() {
4567 int descriptor = LastAdded();
4568 for (int i = 0; i <= descriptor; i++) {
4569 PropertyDetails details = instance_descriptors()->GetDetails(i);
4570 if (details.representation().IsNone()) return true;
4571 if (details.representation().IsSmi()) return true;
4572 if (details.representation().IsDouble()) return true;
4573 if (details.representation().IsHeapObject()) return true;
4574 if (details.type() == CONSTANT) return true;
4575 }
4576 return false;
4577 }
4580 void Map::NotifyLeafMapLayoutChange() {
4581 if (is_stable()) {
4582 mark_unstable();
4583 dependent_code()->DeoptimizeDependentCodeGroup(
4584 GetIsolate(),
4585 DependentCode::kPrototypeCheckGroup);
4586 }
4587 }
4590 bool Map::CanOmitMapChecks() {
4591 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4595 int DependentCode::number_of_entries(DependencyGroup group) {
4596 if (length() == 0) return 0;
4597 return Smi::cast(get(group))->value();
4601 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4602 set(group, Smi::FromInt(value));
4606 bool DependentCode::is_code_at(int i) {
4607 return get(kCodesStartIndex + i)->IsCode();
4610 Code* DependentCode::code_at(int i) {
4611 return Code::cast(get(kCodesStartIndex + i));
4615 CompilationInfo* DependentCode::compilation_info_at(int i) {
4616 return reinterpret_cast<CompilationInfo*>(
4617 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4621 void DependentCode::set_object_at(int i, Object* object) {
4622 set(kCodesStartIndex + i, object);
4626 Object* DependentCode::object_at(int i) {
4627 return get(kCodesStartIndex + i);
4631 Object** DependentCode::slot_at(int i) {
4632 return RawFieldOfElementAt(kCodesStartIndex + i);
4636 void DependentCode::clear_at(int i) {
4637 set_undefined(kCodesStartIndex + i);
4641 void DependentCode::copy(int from, int to) {
4642 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4646 void DependentCode::ExtendGroup(DependencyGroup group) {
4647 GroupStartIndexes starts(this);
4648 for (int g = kGroupCount - 1; g > group; g--) {
4649 if (starts.at(g) < starts.at(g + 1)) {
4650 copy(starts.at(g), starts.at(g + 1));
4656 void Code::set_flags(Code::Flags flags) {
4657 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4658 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4662 Code::Kind Code::kind() {
4663 return ExtractKindFromFlags(flags());
4667 bool Code::IsCodeStubOrIC() {
4668 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4669 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4670 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4671 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4672 kind() == TO_BOOLEAN_IC;
4676 InlineCacheState Code::ic_state() {
4677 InlineCacheState result = ExtractICStateFromFlags(flags());
4678 // Only allow uninitialized or debugger states for non-IC code
4679 // objects. This is used in the debugger to determine whether or not
4680 // a call to code object has been replaced with a debug break call.
4681 DCHECK(is_inline_cache_stub() ||
4682 result == UNINITIALIZED ||
4683 result == DEBUG_STUB);
4684 return result;
4685 }
4688 ExtraICState Code::extra_ic_state() {
4689 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4690 return ExtractExtraICStateFromFlags(flags());
4694 Code::StubType Code::type() {
4695 return ExtractTypeFromFlags(flags());
4699 // For initialization.
4700 void Code::set_raw_kind_specific_flags1(int value) {
4701 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4705 void Code::set_raw_kind_specific_flags2(int value) {
4706 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4710 inline bool Code::is_crankshafted() {
4711 return IsCrankshaftedField::decode(
4712 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4716 inline bool Code::is_hydrogen_stub() {
4717 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4721 inline void Code::set_is_crankshafted(bool value) {
4722 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4723 int updated = IsCrankshaftedField::update(previous, value);
4724 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4728 inline bool Code::is_turbofanned() {
4729 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
4730 return IsTurbofannedField::decode(
4731 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4735 inline void Code::set_is_turbofanned(bool value) {
4736 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
4737 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4738 int updated = IsTurbofannedField::update(previous, value);
4739 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4743 bool Code::optimizable() {
4744 DCHECK_EQ(FUNCTION, kind());
4745 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4749 void Code::set_optimizable(bool value) {
4750 DCHECK_EQ(FUNCTION, kind());
4751 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4755 bool Code::has_deoptimization_support() {
4756 DCHECK_EQ(FUNCTION, kind());
4757 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4758 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4762 void Code::set_has_deoptimization_support(bool value) {
4763 DCHECK_EQ(FUNCTION, kind());
4764 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4765 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4766 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4770 bool Code::has_debug_break_slots() {
4771 DCHECK_EQ(FUNCTION, kind());
4772 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4773 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4777 void Code::set_has_debug_break_slots(bool value) {
4778 DCHECK_EQ(FUNCTION, kind());
4779 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4780 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4781 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4785 bool Code::is_compiled_optimizable() {
4786 DCHECK_EQ(FUNCTION, kind());
4787 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4788 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4792 void Code::set_compiled_optimizable(bool value) {
4793 DCHECK_EQ(FUNCTION, kind());
4794 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4795 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4796 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4800 int Code::allow_osr_at_loop_nesting_level() {
4801 DCHECK_EQ(FUNCTION, kind());
4802 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4803 return AllowOSRAtLoopNestingLevelField::decode(fields);
4807 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4808 DCHECK_EQ(FUNCTION, kind());
4809 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
4810 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4811 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
4812 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4816 int Code::profiler_ticks() {
4817 DCHECK_EQ(FUNCTION, kind());
4818 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4822 void Code::set_profiler_ticks(int ticks) {
4823 DCHECK_EQ(FUNCTION, kind());
4824 DCHECK(ticks < 256);
4825 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4829 int Code::builtin_index() {
4830 DCHECK_EQ(BUILTIN, kind());
4831 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
4835 void Code::set_builtin_index(int index) {
4836 DCHECK_EQ(BUILTIN, kind());
4837 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
4841 unsigned Code::stack_slots() {
4842 DCHECK(is_crankshafted());
4843 return StackSlotsField::decode(
4844 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4848 void Code::set_stack_slots(unsigned slots) {
4849 CHECK(slots <= (1 << kStackSlotsBitCount));
4850 DCHECK(is_crankshafted());
4851 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4852 int updated = StackSlotsField::update(previous, slots);
4853 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4857 unsigned Code::safepoint_table_offset() {
4858 DCHECK(is_crankshafted());
4859 return SafepointTableOffsetField::decode(
4860 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4864 void Code::set_safepoint_table_offset(unsigned offset) {
4865 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4866 DCHECK(is_crankshafted());
4867 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4868 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4869 int updated = SafepointTableOffsetField::update(previous, offset);
4870 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4874 unsigned Code::back_edge_table_offset() {
4875 DCHECK_EQ(FUNCTION, kind());
4876 return BackEdgeTableOffsetField::decode(
4877 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
4881 void Code::set_back_edge_table_offset(unsigned offset) {
4882 DCHECK_EQ(FUNCTION, kind());
4883 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
4884 offset = offset >> kPointerSizeLog2;
4885 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4886 int updated = BackEdgeTableOffsetField::update(previous, offset);
4887 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4891 bool Code::back_edges_patched_for_osr() {
4892 DCHECK_EQ(FUNCTION, kind());
4893 return allow_osr_at_loop_nesting_level() > 0;
4897 byte Code::to_boolean_state() {
4898 return extra_ic_state();
4902 bool Code::has_function_cache() {
4903 DCHECK(kind() == STUB);
4904 return HasFunctionCacheField::decode(
4905 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4909 void Code::set_has_function_cache(bool flag) {
4910 DCHECK(kind() == STUB);
4911 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4912 int updated = HasFunctionCacheField::update(previous, flag);
4913 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4917 bool Code::marked_for_deoptimization() {
4918 DCHECK(kind() == OPTIMIZED_FUNCTION);
4919 return MarkedForDeoptimizationField::decode(
4920 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4924 void Code::set_marked_for_deoptimization(bool flag) {
4925 DCHECK(kind() == OPTIMIZED_FUNCTION);
4926 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
4927 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4928 int updated = MarkedForDeoptimizationField::update(previous, flag);
4929 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4933 bool Code::is_weak_stub() {
4934 return CanBeWeakStub() && WeakStubField::decode(
4935 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4939 void Code::mark_as_weak_stub() {
4940 DCHECK(CanBeWeakStub());
4941 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4942 int updated = WeakStubField::update(previous, true);
4943 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4947 bool Code::is_invalidated_weak_stub() {
4948 return is_weak_stub() && InvalidatedWeakStubField::decode(
4949 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4953 void Code::mark_as_invalidated_weak_stub() {
4954 DCHECK(is_inline_cache_stub());
4955 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4956 int updated = InvalidatedWeakStubField::update(previous, true);
4957 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4961 bool Code::is_inline_cache_stub() {
4962 Kind kind = this->kind();
4963 switch (kind) {
4964 #define CASE(name) case name: return true;
4965 IC_KIND_LIST(CASE)
4966 #undef CASE
4967 default: return false;
4968 }
4969 }
4972 bool Code::is_keyed_stub() {
4973 return is_keyed_load_stub() || is_keyed_store_stub();
4977 bool Code::is_debug_stub() {
4978 return ic_state() == DEBUG_STUB;
4982 ConstantPoolArray* Code::constant_pool() {
4983 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
4987 void Code::set_constant_pool(Object* value) {
4988 DCHECK(value->IsConstantPoolArray());
4989 WRITE_FIELD(this, kConstantPoolOffset, value);
4990 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
4994 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
4995 ExtraICState extra_ic_state, StubType type,
4996 CacheHolderFlag holder) {
4997 // Compute the bit mask.
4998 unsigned int bits = KindField::encode(kind)
4999 | ICStateField::encode(ic_state)
5000 | TypeField::encode(type)
5001 | ExtraICStateField::encode(extra_ic_state)
5002 | CacheHolderField::encode(holder);
5003 return static_cast<Flags>(bits);
5007 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5008 ExtraICState extra_ic_state,
5009 CacheHolderFlag holder,
5010 StubType type) {
5011 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
5015 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5016 CacheHolderFlag holder) {
5017 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
5021 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5022 return KindField::decode(flags);
5026 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5027 return ICStateField::decode(flags);
5031 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5032 return ExtraICStateField::decode(flags);
5036 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5037 return TypeField::decode(flags);
5041 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5042 return CacheHolderField::decode(flags);
5046 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5047 int bits = flags & ~TypeField::kMask;
5048 return static_cast<Flags>(bits);
5052 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5053 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5054 return static_cast<Flags>(bits);
5058 Code* Code::GetCodeFromTargetAddress(Address address) {
5059 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5060 // GetCodeFromTargetAddress might be called when marking objects during mark
5061 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5062 // Code::cast. Code::cast does not work when the object's map is
5063 // marked.
5064 Code* result = reinterpret_cast<Code*>(code);
5065 return result;
5066 }
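// Illustrative use (hypothetical call site): a visitor walking a call
// instruction can recover the callee with
// Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
// which works even while the target's map word carries marking bits.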
5069 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5070 return HeapObject::
5071 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5072 }
5075 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5076 if (!FLAG_collect_maps) return false;
5077 if (object->IsMap()) {
5078 return Map::cast(object)->CanTransition() &&
5079 FLAG_weak_embedded_maps_in_optimized_code;
5081 if (object->IsJSObject() ||
5082 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
5083 return FLAG_weak_embedded_objects_in_optimized_code;
5084 }
5085 return false;
5086 }
5089 class Code::FindAndReplacePattern {
5090 public:
5091 FindAndReplacePattern() : count_(0) { }
5092 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5093 DCHECK(count_ < kMaxCount);
5094 find_[count_] = map_to_find;
5095 replace_[count_] = obj_to_replace;
5096 ++count_;
5097 }
5098 private:
5099 static const int kMaxCount = 4;
5100 int count_;
5101 Handle<Map> find_[kMaxCount];
5102 Handle<Object> replace_[kMaxCount];
5103 friend class Code;
5104 };
5107 bool Code::IsWeakObjectInIC(Object* object) {
5108 return object->IsMap() && Map::cast(object)->CanTransition() &&
5109 FLAG_collect_maps &&
5110 FLAG_weak_embedded_maps_in_ic;
5114 Object* Map::prototype() const {
5115 return READ_FIELD(this, kPrototypeOffset);
5119 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5120 DCHECK(value->IsNull() || value->IsJSReceiver());
5121 WRITE_FIELD(this, kPrototypeOffset, value);
5122 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5126 // If the descriptor is using the empty transition array, install a new empty
5127 // transition array that will have place for an element transition.
5128 static void EnsureHasTransitionArray(Handle<Map> map) {
5129 Handle<TransitionArray> transitions;
5130 if (!map->HasTransitionArray()) {
5131 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
5132 transitions->set_back_pointer_storage(map->GetBackPointer());
5133 } else if (!map->transitions()->IsFullTransitionArray()) {
5134 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5135 } else {
5136 return;
5137 }
5138 map->set_transitions(*transitions);
5142 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
5143 int len = descriptors->number_of_descriptors();
5144 set_instance_descriptors(descriptors);
5145 SetNumberOfOwnDescriptors(len);
5149 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5152 void Map::set_bit_field3(uint32_t bits) {
5153 if (kInt32Size != kPointerSize) {
5154 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5156 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5160 uint32_t Map::bit_field3() {
5161 return READ_UINT32_FIELD(this, kBitField3Offset);
5165 void Map::AppendDescriptor(Descriptor* desc) {
5166 DescriptorArray* descriptors = instance_descriptors();
5167 int number_of_own_descriptors = NumberOfOwnDescriptors();
5168 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5169 descriptors->Append(desc);
5170 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5174 Object* Map::GetBackPointer() {
5175 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5176 if (object->IsDescriptorArray()) {
5177 return TransitionArray::cast(object)->back_pointer_storage();
5178 } else {
5179 DCHECK(object->IsMap() || object->IsUndefined());
5180 return object;
5181 }
5182 }
5185 bool Map::HasElementsTransition() {
5186 return HasTransitionArray() && transitions()->HasElementsTransition();
5190 bool Map::HasTransitionArray() const {
5191 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5192 return object->IsTransitionArray();
5196 Map* Map::elements_transition_map() {
5197 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
5198 return transitions()->GetTarget(index);
5202 bool Map::CanHaveMoreTransitions() {
5203 if (!HasTransitionArray()) return true;
5204 return FixedArray::SizeFor(transitions()->length() +
5205 TransitionArray::kTransitionSize)
5206 <= Page::kMaxRegularHeapObjectSize;
5210 Map* Map::GetTransition(int transition_index) {
5211 return transitions()->GetTarget(transition_index);
5215 int Map::SearchTransition(Name* name) {
5216 if (HasTransitionArray()) return transitions()->Search(name);
5217 return TransitionArray::kNotFound;
5221 FixedArray* Map::GetPrototypeTransitions() {
5222 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5223 if (!transitions()->HasPrototypeTransitions()) {
5224 return GetHeap()->empty_fixed_array();
5226 return transitions()->GetPrototypeTransitions();
5230 void Map::SetPrototypeTransitions(
5231 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5232 EnsureHasTransitionArray(map);
5233 int old_number_of_transitions = map->NumberOfProtoTransitions();
5235 if (map->HasPrototypeTransitions()) {
5236 DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
5237 map->ZapPrototypeTransitions();
5240 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5241 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5245 bool Map::HasPrototypeTransitions() {
5246 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
5250 TransitionArray* Map::transitions() const {
5251 DCHECK(HasTransitionArray());
5252 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5253 return TransitionArray::cast(object);
5257 void Map::set_transitions(TransitionArray* transition_array,
5258 WriteBarrierMode mode) {
5259 // Transition arrays are not shared. When one is replaced, it should not
5260 // keep referenced objects alive, so we zap it.
5261 // When there is another reference to the array somewhere (e.g. a handle),
5262 // not zapping turns from a waste of memory into a source of crashes.
5263 if (HasTransitionArray()) {
5265 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
5266 Map* target = transitions()->GetTarget(i);
5267 if (target->instance_descriptors() == instance_descriptors()) {
5268 Name* key = transitions()->GetKey(i);
5269 int new_target_index = transition_array->Search(key);
5270 DCHECK(new_target_index != TransitionArray::kNotFound);
5271 DCHECK(transition_array->GetTarget(new_target_index) == target);
5275 DCHECK(transitions() != transition_array);
5279 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5280 CONDITIONAL_WRITE_BARRIER(
5281 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
5285 void Map::init_back_pointer(Object* undefined) {
5286 DCHECK(undefined->IsUndefined());
5287 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
5291 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5292 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5293 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5294 (value->IsMap() && GetBackPointer()->IsUndefined()));
5295 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5296 if (object->IsTransitionArray()) {
5297 TransitionArray::cast(object)->set_back_pointer_storage(value);
5299 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5300 CONDITIONAL_WRITE_BARRIER(
5301 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
5306 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5307 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5308 ACCESSORS(Map, constructor, Object, kConstructorOffset)
5310 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5311 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5312 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5314 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5315 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5316 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5317 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5319 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5320 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5322 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5323 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5324 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5325 kExpectedReceiverTypeOffset)
5327 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5328 kSerializedDataOffset)
5330 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5331 kDescriptorOffset)
5333 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5334 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5335 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5337 ACCESSORS(Box, value, Object, kValueOffset)
5339 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5340 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5342 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5343 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5344 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5346 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5347 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5348 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5349 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5350 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5351 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5353 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5354 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5356 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5357 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5358 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5360 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5361 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5362 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5363 kPrototypeTemplateOffset)
5364 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5365 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5366 kNamedPropertyHandlerOffset)
5367 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5368 kIndexedPropertyHandlerOffset)
5369 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5370 kInstanceTemplateOffset)
5371 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5372 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5373 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5374 kInstanceCallHandlerOffset)
5375 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5376 kAccessCheckInfoOffset)
5377 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5379 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5380 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5381 kInternalFieldCountOffset)
5383 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5384 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5386 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5388 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5389 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5390 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5391 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5392 kPretenureCreateCountOffset)
5393 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5394 kDependentCodeOffset)
5395 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5396 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5398 ACCESSORS(Script, source, Object, kSourceOffset)
5399 ACCESSORS(Script, name, Object, kNameOffset)
5400 ACCESSORS(Script, id, Smi, kIdOffset)
5401 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5402 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5403 ACCESSORS(Script, context_data, Object, kContextOffset)
5404 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
5405 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5406 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5407 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5408 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5409 kEvalFrominstructionsOffsetOffset)
5410 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5411 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5412 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5413 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5415 Script::CompilationType Script::compilation_type() {
5416 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5417 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5419 void Script::set_compilation_type(CompilationType type) {
5420 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5421 type == COMPILATION_TYPE_EVAL));
5423 Script::CompilationState Script::compilation_state() {
5424 return BooleanBit::get(flags(), kCompilationStateBit) ?
5425 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5427 void Script::set_compilation_state(CompilationState state) {
5428 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5429 state == COMPILATION_STATE_COMPILED));
5433 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5434 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5435 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5436 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5438 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5439 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5440 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5441 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5443 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5444 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5445 kOptimizedCodeMapOffset)
5446 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5447 ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
5448 kFeedbackVectorOffset)
5449 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5450 kInstanceClassNameOffset)
5451 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5452 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5453 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5454 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5457 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5458 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5459 kHiddenPrototypeBit)
5460 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5461 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5462 kNeedsAccessCheckBit)
5463 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5464 kReadOnlyPrototypeBit)
5465 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5466 kRemovePrototypeBit)
5467 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5468 kDoNotCacheBit)
5469 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5470 kIsExpressionBit)
5471 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5472 kIsTopLevelBit)
5474 BOOL_ACCESSORS(SharedFunctionInfo,
5475 compiler_hints,
5476 allows_lazy_compilation,
5477 kAllowLazyCompilation)
5478 BOOL_ACCESSORS(SharedFunctionInfo,
5479 compiler_hints,
5480 allows_lazy_compilation_without_context,
5481 kAllowLazyCompilationWithoutContext)
5482 BOOL_ACCESSORS(SharedFunctionInfo,
5483 compiler_hints,
5484 uses_arguments,
5485 kUsesArguments)
5486 BOOL_ACCESSORS(SharedFunctionInfo,
5487 compiler_hints,
5488 has_duplicate_parameters,
5489 kHasDuplicateParameters)
5492 #if V8_HOST_ARCH_32_BIT
5493 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5494 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5495 kFormalParameterCountOffset)
5496 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5497 kExpectedNofPropertiesOffset)
5498 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5499 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5500 kStartPositionAndTypeOffset)
5501 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5502 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5503 kFunctionTokenPositionOffset)
5504 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5505 kCompilerHintsOffset)
5506 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5507 kOptCountAndBailoutReasonOffset)
5508 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5509 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5510 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5514 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5515 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5516 int holder::name() const { \
5517 int value = READ_INT_FIELD(this, offset); \
5518 DCHECK(kHeapObjectTag == 1); \
5519 DCHECK((value & kHeapObjectTag) == 0); \
5520 return value >> 1; \
5521 } \
5522 void holder::set_##name(int value) { \
5523 DCHECK(kHeapObjectTag == 1); \
5524 DCHECK((value & 0xC0000000) == 0xC0000000 || \
5525 (value & 0xC0000000) == 0x0); \
5526 WRITE_INT_FIELD(this, \
5527 offset, \
5528 (value << 1) & ~kHeapObjectTag); \
5529 }
5531 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5532 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5533 INT_ACCESSORS(holder, name, offset)
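// Illustrative note: on 64-bit targets two of these pseudo-smi ints share one
// pointer-sized slot. The LO half is stored shifted left by one with the low
// bit clear, so e.g. a value of 3 is written as the raw word 6; because
// kHeapObjectTag == 1, a scanning GC never mistakes that word for a tagged
// heap pointer, and the getter recovers 3 with the >> 1 above.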
5536 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5537 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5538 formal_parameter_count,
5539 kFormalParameterCountOffset)
5541 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5542 expected_nof_properties,
5543 kExpectedNofPropertiesOffset)
5544 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5546 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5547 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5548 start_position_and_type,
5549 kStartPositionAndTypeOffset)
5551 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5552 function_token_position,
5553 kFunctionTokenPositionOffset)
5554 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5555 compiler_hints,
5556 kCompilerHintsOffset)
5558 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5559 opt_count_and_bailout_reason,
5560 kOptCountAndBailoutReasonOffset)
5561 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5563 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5564 ast_node_count,
5565 kAstNodeCountOffset)
5566 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5567 profiler_ticks,
5568 kProfilerTicksOffset)
5573 BOOL_GETTER(SharedFunctionInfo,
5574 compiler_hints,
5575 optimization_disabled,
5576 kOptimizationDisabled)
5579 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5580 set_compiler_hints(BooleanBit::set(compiler_hints(),
5581 kOptimizationDisabled,
5582 disable));
5583 // If disabling optimizations we reflect that in the code object so
5584 // it will not be counted as optimizable code.
5585 if ((code()->kind() == Code::FUNCTION) && disable) {
5586 code()->set_optimizable(false);
5591 StrictMode SharedFunctionInfo::strict_mode() {
5592 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5593 ? STRICT : SLOPPY;
5594 }
5597 void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5598 // We only allow mode transitions from sloppy to strict.
5599 DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5600 int hints = compiler_hints();
5601 hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5602 set_compiler_hints(hints);
5606 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5607 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5608 kInlineBuiltin)
5609 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5610 name_should_print_as_anonymous,
5611 kNameShouldPrintAsAnonymous)
5612 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5613 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5614 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5615 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5616 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5617 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5618 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5620 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5621 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5623 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5625 bool Script::HasValidSource() {
5626 Object* src = this->source();
5627 if (!src->IsString()) return true;
5628 String* src_str = String::cast(src);
5629 if (!StringShape(src_str).IsExternal()) return true;
5630 if (src_str->IsOneByteRepresentation()) {
5631 return ExternalAsciiString::cast(src)->resource() != NULL;
5632 } else if (src_str->IsTwoByteRepresentation()) {
5633 return ExternalTwoByteString::cast(src)->resource() != NULL;
5639 void SharedFunctionInfo::DontAdaptArguments() {
5640 DCHECK(code()->kind() == Code::BUILTIN);
5641 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
5645 int SharedFunctionInfo::start_position() const {
5646 return start_position_and_type() >> kStartPositionShift;
5650 void SharedFunctionInfo::set_start_position(int start_position) {
5651 set_start_position_and_type((start_position << kStartPositionShift)
5652 | (start_position_and_type() & ~kStartPositionMask));
5656 Code* SharedFunctionInfo::code() const {
5657 return Code::cast(READ_FIELD(this, kCodeOffset));
5661 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5662 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5663 WRITE_FIELD(this, kCodeOffset, value);
5664 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5668 void SharedFunctionInfo::ReplaceCode(Code* value) {
5669 // If the GC metadata field is already used then the function was
5670 // enqueued as a code flushing candidate and we remove it now.
5671 if (code()->gc_metadata() != NULL) {
5672 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5673 flusher->EvictCandidate(this);
5676 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5677 set_code(value);
5678 }
5682 ScopeInfo* SharedFunctionInfo::scope_info() const {
5683 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5687 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5688 WriteBarrierMode mode) {
5689 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5690 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5693 reinterpret_cast<Object*>(value),
5698 bool SharedFunctionInfo::is_compiled() {
5699 return code() !=
5700 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5701 }
5704 bool SharedFunctionInfo::IsApiFunction() {
5705 return function_data()->IsFunctionTemplateInfo();
5709 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5710 DCHECK(IsApiFunction());
5711 return FunctionTemplateInfo::cast(function_data());
5715 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5716 return function_data()->IsSmi();
5720 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5721 DCHECK(HasBuiltinFunctionId());
5722 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5726 int SharedFunctionInfo::ic_age() {
5727 return ICAgeBits::decode(counters());
5731 void SharedFunctionInfo::set_ic_age(int ic_age) {
5732 set_counters(ICAgeBits::update(counters(), ic_age));
5736 int SharedFunctionInfo::deopt_count() {
5737 return DeoptCountBits::decode(counters());
5741 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5742 set_counters(DeoptCountBits::update(counters(), deopt_count));
5746 void SharedFunctionInfo::increment_deopt_count() {
5747 int value = counters();
5748 int deopt_count = DeoptCountBits::decode(value);
5749 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5750 set_counters(DeoptCountBits::update(value, deopt_count));
5754 int SharedFunctionInfo::opt_reenable_tries() {
5755 return OptReenableTriesBits::decode(counters());
5759 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5760 set_counters(OptReenableTriesBits::update(counters(), tries));
5764 int SharedFunctionInfo::opt_count() {
5765 return OptCountBits::decode(opt_count_and_bailout_reason());
5769 void SharedFunctionInfo::set_opt_count(int opt_count) {
5770 set_opt_count_and_bailout_reason(
5771 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
5775 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
5776 BailoutReason reason = static_cast<BailoutReason>(
5777 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5778 return reason;
5779 }
5782 bool SharedFunctionInfo::has_deoptimization_support() {
5783 Code* code = this->code();
5784 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
5788 void SharedFunctionInfo::TryReenableOptimization() {
5789 int tries = opt_reenable_tries();
5790 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
5791 // We reenable optimization whenever the number of tries is a large
5792 // enough power of 2.
5793 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5794 set_optimization_disabled(false);
5795 set_opt_count(0);
5796 set_deopt_count(0);
5797 code()->set_optimizable(true);
5798 }
5799 }
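// Illustrative note: the predicate above is true exactly when tries is a
// power of two >= 16, so re-enabling is attempted only when the try counter
// reaches 16, 32, 64, ... rather than on every call.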
5802 bool JSFunction::IsBuiltin() {
5803 return context()->global_object()->IsJSBuiltinsObject();
5807 bool JSFunction::IsFromNativeScript() {
5808 Object* script = shared()->script();
5809 bool native = script->IsScript() &&
5810 Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
5811 DCHECK(!IsBuiltin() || native); // All builtins are also native.
5812 return native;
5813 }
5816 bool JSFunction::IsFromExtensionScript() {
5817 Object* script = shared()->script();
5818 return script->IsScript() &&
5819 Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
5823 bool JSFunction::NeedsArgumentsAdaption() {
5824 return shared()->formal_parameter_count() !=
5825 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5829 bool JSFunction::IsOptimized() {
5830 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5834 bool JSFunction::IsOptimizable() {
5835 return code()->kind() == Code::FUNCTION && code()->optimizable();
5839 bool JSFunction::IsMarkedForOptimization() {
5840 return code() == GetIsolate()->builtins()->builtin(
5841 Builtins::kCompileOptimized);
5845 bool JSFunction::IsMarkedForConcurrentOptimization() {
5846 return code() == GetIsolate()->builtins()->builtin(
5847 Builtins::kCompileOptimizedConcurrent);
5851 bool JSFunction::IsInOptimizationQueue() {
5852 return code() == GetIsolate()->builtins()->builtin(
5853 Builtins::kInOptimizationQueue);
5857 bool JSFunction::IsInobjectSlackTrackingInProgress() {
5858 return has_initial_map() &&
5859 initial_map()->construction_count() != JSFunction::kNoSlackTracking;
5863 Code* JSFunction::code() {
5864 return Code::cast(
5865 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
5866 }
5869 void JSFunction::set_code(Code* value) {
5870 DCHECK(!GetHeap()->InNewSpace(value));
5871 Address entry = value->entry();
5872 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5873 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5874 this,
5875 HeapObject::RawField(this, kCodeEntryOffset),
5876 value);
5877 }
5880 void JSFunction::set_code_no_write_barrier(Code* value) {
5881 DCHECK(!GetHeap()->InNewSpace(value));
5882 Address entry = value->entry();
5883 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5887 void JSFunction::ReplaceCode(Code* code) {
5888 bool was_optimized = IsOptimized();
5889 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5891 if (was_optimized && is_optimized) {
5892 shared()->EvictFromOptimizedCodeMap(this->code(),
5893 "Replacing with another optimized code");
5898 // Add/remove the function from the list of optimized functions for this
5899 // context based on the state change.
5900 if (!was_optimized && is_optimized) {
5901 context()->native_context()->AddOptimizedFunction(this);
5903 if (was_optimized && !is_optimized) {
5904 // TODO(titzer): linear in the number of optimized functions; fix!
5905 context()->native_context()->RemoveOptimizedFunction(this);
5910 Context* JSFunction::context() {
5911 return Context::cast(READ_FIELD(this, kContextOffset));
5915 JSObject* JSFunction::global_proxy() {
5916 return context()->global_proxy();
5920 void JSFunction::set_context(Object* value) {
5921 DCHECK(value->IsUndefined() || value->IsContext());
5922 WRITE_FIELD(this, kContextOffset, value);
5923 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5926 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5927 kPrototypeOrInitialMapOffset)
5930 Map* JSFunction::initial_map() {
5931 return Map::cast(prototype_or_initial_map());
5935 bool JSFunction::has_initial_map() {
5936 return prototype_or_initial_map()->IsMap();
5940 bool JSFunction::has_instance_prototype() {
5941 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5945 bool JSFunction::has_prototype() {
5946 return map()->has_non_instance_prototype() || has_instance_prototype();
5950 Object* JSFunction::instance_prototype() {
5951 DCHECK(has_instance_prototype());
5952 if (has_initial_map()) return initial_map()->prototype();
5953 // When there is no initial map and the prototype is a JSObject, the
5954 // initial map field is used for the prototype field.
5955 return prototype_or_initial_map();
5959 Object* JSFunction::prototype() {
5960 DCHECK(has_prototype());
5961 // If the function's prototype property has been set to a non-JSObject
5962 // value, that value is stored in the constructor field of the map.
5963 if (map()->has_non_instance_prototype()) return map()->constructor();
5964 return instance_prototype();
5965 }
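// Illustrative example: after "function f() {}; f.prototype = 1;" the
// function's map has has_non_instance_prototype() set and the number 1 lives
// in the map's constructor field, so prototype() returns it from there
// instead of from the instance prototype.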
5968 bool JSFunction::should_have_prototype() {
5969 return map()->function_with_prototype();
5973 bool JSFunction::is_compiled() {
5974 return code() !=
5975 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5976 }
5979 FixedArray* JSFunction::literals() {
5980 DCHECK(!shared()->bound());
5981 return literals_or_bindings();
5985 void JSFunction::set_literals(FixedArray* literals) {
5986 DCHECK(!shared()->bound());
5987 set_literals_or_bindings(literals);
5991 FixedArray* JSFunction::function_bindings() {
5992 DCHECK(shared()->bound());
5993 return literals_or_bindings();
5997 void JSFunction::set_function_bindings(FixedArray* bindings) {
5998 DCHECK(shared()->bound());
5999 // Bound function literal may be initialized to the empty fixed array
6000 // before the bindings are set.
6001 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6002 bindings->map() == GetHeap()->fixed_cow_array_map());
6003 set_literals_or_bindings(bindings);
6007 int JSFunction::NumberOfLiterals() {
6008 DCHECK(!shared()->bound());
6009 return literals()->length();
6013 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
6014 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6015 return READ_FIELD(this, OffsetOfFunctionWithId(id));
6019 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
6020 Object* value) {
6021 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6022 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
6023 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
6027 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
6028 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6029 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
6033 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
6034 Code* value) {
6035 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6036 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
6037 DCHECK(!GetHeap()->InNewSpace(value));
6041 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6042 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6043 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6044 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6047 void JSProxy::InitializeBody(int object_size, Object* value) {
6048 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6049 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6050 WRITE_FIELD(this, offset, value);
6055 ACCESSORS(JSCollection, table, Object, kTableOffset)
6058 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6059 template<class Derived, class TableType> \
6060 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6061 return type::cast(READ_FIELD(this, offset)); \
6063 template<class Derived, class TableType> \
6064 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6065 type* value, WriteBarrierMode mode) { \
6066 WRITE_FIELD(this, offset, value); \
6067 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6070 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6071 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
6072 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)
6074 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6077 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6078 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6081 Address Foreign::foreign_address() {
6082 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6086 void Foreign::set_foreign_address(Address value) {
6087 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6091 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6092 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6093 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6094 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6095 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6096 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
6098 bool JSGeneratorObject::is_suspended() {
6099 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6100 DCHECK_EQ(kGeneratorClosed, 0);
6101 return continuation() > 0;
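// Editor's note: together with is_closed() and is_executing() below, the
// continuation value encodes a small state machine: kGeneratorClosed is 0,
// kGeneratorExecuting is negative (the DCHECKs above pin that ordering), and
// a positive value marks a suspended generator waiting to be resumed.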
6104 bool JSGeneratorObject::is_closed() {
6105 return continuation() == kGeneratorClosed;
6108 bool JSGeneratorObject::is_executing() {
6109 return continuation() == kGeneratorExecuting;
6112 ACCESSORS(JSModule, context, Object, kContextOffset)
6113 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6116 ACCESSORS(JSValue, value, Object, kValueOffset)
6119 HeapNumber* HeapNumber::cast(Object* object) {
6120 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6121 return reinterpret_cast<HeapNumber*>(object);
6125 const HeapNumber* HeapNumber::cast(const Object* object) {
6126 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6127 return reinterpret_cast<const HeapNumber*>(object);
6131 ACCESSORS(JSDate, value, Object, kValueOffset)
6132 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6133 ACCESSORS(JSDate, year, Object, kYearOffset)
6134 ACCESSORS(JSDate, month, Object, kMonthOffset)
6135 ACCESSORS(JSDate, day, Object, kDayOffset)
6136 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6137 ACCESSORS(JSDate, hour, Object, kHourOffset)
6138 ACCESSORS(JSDate, min, Object, kMinOffset)
6139 ACCESSORS(JSDate, sec, Object, kSecOffset)
6142 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6143 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6144 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6145 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6146 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6147 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6150 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6151 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6152 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6153 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6154 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6155 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6156 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6159 void Code::WipeOutHeader() {
6160 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6161 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6162 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6163 WRITE_FIELD(this, kConstantPoolOffset, NULL);
6164 // Do not wipe out major/minor keys on a code stub or IC
6165 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6166 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
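// Editor's note: the kTypeFeedbackInfoOffset slot is overloaded. Code stubs
// and ICs keep their stub key there as a Smi (see stub_key() below), while
// FUNCTION code keeps a TypeFeedbackInfo heap object. The IsSmi() check above
// is what tells the two apart, so stub keys survive the header wipe.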
6171 Object* Code::type_feedback_info() {
6172 DCHECK(kind() == FUNCTION);
6173 return raw_type_feedback_info();
6177 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6178 DCHECK(kind() == FUNCTION);
6179 set_raw_type_feedback_info(value, mode);
6180 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6181 value, mode);
6185 uint32_t Code::stub_key() {
6186 DCHECK(IsCodeStubOrIC());
6187 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6188 return static_cast<uint32_t>(smi_key->value());
6192 void Code::set_stub_key(uint32_t key) {
6193 DCHECK(IsCodeStubOrIC());
6194 set_raw_type_feedback_info(Smi::FromInt(key));
6198 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6199 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6202 byte* Code::instruction_start() {
6203 return FIELD_ADDR(this, kHeaderSize);
6207 byte* Code::instruction_end() {
6208 return instruction_start() + instruction_size();
6212 int Code::body_size() {
6213 return RoundUp(instruction_size(), kObjectAlignment);
6217 ByteArray* Code::unchecked_relocation_info() {
6218 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6222 byte* Code::relocation_start() {
6223 return unchecked_relocation_info()->GetDataStartAddress();
6227 int Code::relocation_size() {
6228 return unchecked_relocation_info()->length();
6232 byte* Code::entry() {
6233 return instruction_start();
6237 bool Code::contains(byte* inner_pointer) {
6238 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6242 ACCESSORS(JSArray, length, Object, kLengthOffset)
6245 void* JSArrayBuffer::backing_store() const {
6246 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6247 return reinterpret_cast<void*>(ptr);
6251 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6252 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6253 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6257 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6258 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6261 bool JSArrayBuffer::is_external() {
6262 return BooleanBit::get(flag(), kIsExternalBit);
6266 void JSArrayBuffer::set_is_external(bool value) {
6267 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6271 bool JSArrayBuffer::should_be_freed() {
6272 return BooleanBit::get(flag(), kShouldBeFreed);
6276 void JSArrayBuffer::set_should_be_freed(bool value) {
6277 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6281 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6282 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6285 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6286 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6287 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6288 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6289 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
6291 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6294 JSRegExp::Type JSRegExp::TypeTag() {
6295 Object* data = this->data();
6296 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6297 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6298 return static_cast<JSRegExp::Type>(smi->value());
6302 int JSRegExp::CaptureCount() {
6303 switch (TypeTag()) {
6304 case ATOM:
6305 return 0;
6306 case IRREGEXP:
6307 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6308 default:
6309 UNREACHABLE();
6310 return -1;
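// Editor's note: the accessors in this block treat data() as a small
// fixed-layout record: kTagIndex holds the Type tag, kSourceIndex the pattern
// string, kFlagsIndex the flags Smi, and, for IRREGEXP regexps,
// kIrregexpCaptureCountIndex the capture count. Entries at or above kDataIndex
// are implementation data and are the only ones SetDataAt() may write.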
6315 JSRegExp::Flags JSRegExp::GetFlags() {
6316 DCHECK(this->data()->IsFixedArray());
6317 Object* data = this->data();
6318 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6319 return Flags(smi->value());
6323 String* JSRegExp::Pattern() {
6324 DCHECK(this->data()->IsFixedArray());
6325 Object* data = this->data();
6326 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6327 return pattern;
6331 Object* JSRegExp::DataAt(int index) {
6332 DCHECK(TypeTag() != NOT_COMPILED);
6333 return FixedArray::cast(data())->get(index);
6337 void JSRegExp::SetDataAt(int index, Object* value) {
6338 DCHECK(TypeTag() != NOT_COMPILED);
6339 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6340 FixedArray::cast(data())->set(index, value);
6344 ElementsKind JSObject::GetElementsKind() {
6345 ElementsKind kind = map()->elements_kind();
6346 #if VERIFY_HEAP && DEBUG
6347 FixedArrayBase* fixed_array =
6348 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6350 // If a GC was caused while constructing this object, the elements
6351 // pointer may point to a one pointer filler map.
6352 if (ElementsAreSafeToExamine()) {
6353 Map* map = fixed_array->map();
6354 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6355 (map == GetHeap()->fixed_array_map() ||
6356 map == GetHeap()->fixed_cow_array_map())) ||
6357 (IsFastDoubleElementsKind(kind) &&
6358 (fixed_array->IsFixedDoubleArray() ||
6359 fixed_array == GetHeap()->empty_fixed_array())) ||
6360 (kind == DICTIONARY_ELEMENTS &&
6361 fixed_array->IsFixedArray() &&
6362 fixed_array->IsDictionary()) ||
6363 (kind > DICTIONARY_ELEMENTS));
6364 DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6365 (elements()->IsFixedArray() && elements()->length() >= 2));
6366 }
6367 #endif
6368 return kind;
6372 ElementsAccessor* JSObject::GetElementsAccessor() {
6373 return ElementsAccessor::ForKind(GetElementsKind());
6377 bool JSObject::HasFastObjectElements() {
6378 return IsFastObjectElementsKind(GetElementsKind());
6382 bool JSObject::HasFastSmiElements() {
6383 return IsFastSmiElementsKind(GetElementsKind());
6387 bool JSObject::HasFastSmiOrObjectElements() {
6388 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6392 bool JSObject::HasFastDoubleElements() {
6393 return IsFastDoubleElementsKind(GetElementsKind());
6397 bool JSObject::HasFastHoleyElements() {
6398 return IsFastHoleyElementsKind(GetElementsKind());
6402 bool JSObject::HasFastElements() {
6403 return IsFastElementsKind(GetElementsKind());
6407 bool JSObject::HasDictionaryElements() {
6408 return GetElementsKind() == DICTIONARY_ELEMENTS;
6412 bool JSObject::HasSloppyArgumentsElements() {
6413 return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
6417 bool JSObject::HasExternalArrayElements() {
6418 HeapObject* array = elements();
6419 DCHECK(array != NULL);
6420 return array->IsExternalArray();
6424 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6425 bool JSObject::HasExternal##Type##Elements() { \
6426 HeapObject* array = elements(); \
6427 DCHECK(array != NULL); \
6428 if (!array->IsHeapObject()) \
6429 return false; \
6430 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6433 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6435 #undef EXTERNAL_ELEMENTS_CHECK
6438 bool JSObject::HasFixedTypedArrayElements() {
6439 HeapObject* array = elements();
6440 DCHECK(array != NULL);
6441 return array->IsFixedTypedArrayBase();
6445 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6446 bool JSObject::HasFixed##Type##Elements() { \
6447 HeapObject* array = elements(); \
6448 DCHECK(array != NULL); \
6449 if (!array->IsHeapObject()) \
6450 return false; \
6451 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6454 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6456 #undef FIXED_TYPED_ELEMENTS_CHECK
6459 bool JSObject::HasNamedInterceptor() {
6460 return map()->has_named_interceptor();
6464 bool JSObject::HasIndexedInterceptor() {
6465 return map()->has_indexed_interceptor();
6469 NameDictionary* JSObject::property_dictionary() {
6470 DCHECK(!HasFastProperties());
6471 return NameDictionary::cast(properties());
6475 SeededNumberDictionary* JSObject::element_dictionary() {
6476 DCHECK(HasDictionaryElements());
6477 return SeededNumberDictionary::cast(elements());
6481 bool Name::IsHashFieldComputed(uint32_t field) {
6482 return (field & kHashNotComputedMask) == 0;
6486 bool Name::HasHashCode() {
6487 return IsHashFieldComputed(hash_field());
6491 uint32_t Name::Hash() {
6492 // Fast case: has hash code already been computed?
6493 uint32_t field = hash_field();
6494 if (IsHashFieldComputed(field)) return field >> kHashShift;
6495 // Slow case: compute hash code and set it. Has to be a string.
6496 return String::cast(this)->ComputeAndSetHash();
6500 StringHasher::StringHasher(int length, uint32_t seed)
6501 : length_(length),
6502 raw_running_hash_(seed),
6503 array_index_(0),
6504 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6505 is_first_char_(true) {
6506 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6510 bool StringHasher::has_trivial_hash() {
6511 return length_ > String::kMaxHashCalcLength;
6515 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6516 running_hash += c;
6517 running_hash += (running_hash << 10);
6518 running_hash ^= (running_hash >> 6);
6519 return running_hash;
6523 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6524 running_hash += (running_hash << 3);
6525 running_hash ^= (running_hash >> 11);
6526 running_hash += (running_hash << 15);
6527 if ((running_hash & String::kHashBitMask) == 0) {
6528 return kZeroHash;
6529 }
6530 return running_hash;
6534 void StringHasher::AddCharacter(uint16_t c) {
6535 // Use the Jenkins one-at-a-time hash function to update the hash
6536 // for the given character.
6537 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
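// Editor's note: the add/finalize split above is the standard Jenkins
// one-at-a-time hash. A minimal standalone sketch of the same steps (plain
// C++ illustration, not part of V8):
//   uint32_t OneAtATime(const char* s, size_t n) {
//     uint32_t h = 0;
//     for (size_t i = 0; i < n; i++) {
//       h += static_cast<uint8_t>(s[i]);
//       h += h << 10;
//       h ^= h >> 6;
//     }
//     h += h << 3;
//     h ^= h >> 11;
//     h += h << 15;
//     return h;
//   }
// StringHasher additionally seeds the running hash and, in GetHashCore(),
// substitutes kZeroHash when the masked result would otherwise be zero.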
6541 bool StringHasher::UpdateIndex(uint16_t c) {
6542 DCHECK(is_array_index_);
6543 if (c < '0' || c > '9') {
6544 is_array_index_ = false;
6545 return false;
6546 }
6547 int d = c - '0';
6548 if (is_first_char_) {
6549 is_first_char_ = false;
6550 if (c == '0' && length_ > 1) {
6551 is_array_index_ = false;
6552 return false;
6553 }
6554 }
6555 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6556 is_array_index_ = false;
6557 return false;
6558 }
6559 array_index_ = array_index_ * 10 + d;
6560 return true;
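// Editor's note, worked example: for "15", UpdateIndex('1') sets array_index_
// to 1 and UpdateIndex('5') advances it to 15, so the string can be flagged as
// an array index. "015" fails at the first character because a leading '0' is
// only allowed for the one-character string "0", and any digit that would push
// the index past the 32-bit range trips the 429496729U (0xFFFFFFFF / 10) guard.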
6564 template<typename Char>
6565 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6566 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6567 int i = 0;
6568 if (is_array_index_) {
6569 for (; i < length; i++) {
6570 AddCharacter(chars[i]);
6571 if (!UpdateIndex(chars[i])) {
6572 i++;
6573 break;
6574 }
6575 }
6576 }
6577 for (; i < length; i++) {
6578 DCHECK(!is_array_index_);
6579 AddCharacter(chars[i]);
6584 template <typename schar>
6585 uint32_t StringHasher::HashSequentialString(const schar* chars,
6586 int length,
6587 uint32_t seed) {
6588 StringHasher hasher(length, seed);
6589 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6590 return hasher.GetHashField();
6594 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6595 IteratingStringHasher hasher(string->length(), seed);
6597 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6598 ConsString* cons_string = String::VisitFlat(&hasher, string);
6599 // The string was flat.
6600 if (cons_string == NULL) return hasher.GetHashField();
6601 // This is a ConsString, iterate across it.
6602 ConsStringIteratorOp op(cons_string);
6603 int offset;
6604 while (NULL != (string = op.Next(&offset))) {
6605 String::VisitFlat(&hasher, string, offset);
6607 return hasher.GetHashField();
6611 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6612 int length) {
6613 AddCharacters(chars, length);
6617 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6618 int length) {
6619 AddCharacters(chars, length);
6623 bool Name::AsArrayIndex(uint32_t* index) {
6624 return IsString() && String::cast(this)->AsArrayIndex(index);
6628 bool String::AsArrayIndex(uint32_t* index) {
6629 uint32_t field = hash_field();
6630 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6631 return false;
6632 }
6633 return SlowAsArrayIndex(index);
6637 void String::SetForwardedInternalizedString(String* canonical) {
6638 DCHECK(IsInternalizedString());
6639 DCHECK(HasHashCode());
6640 if (canonical == this) return; // No need to forward.
6641 DCHECK(SlowEquals(canonical));
6642 DCHECK(canonical->IsInternalizedString());
6643 DCHECK(canonical->HasHashCode());
6644 WRITE_FIELD(this, kHashFieldOffset, canonical);
6645 // Setting the hash field to a tagged value sets the LSB, causing the hash
6646 // code to be interpreted as uninitialized. We use this fact to recognize
6647 // that we have a forwarded string.
6648 DCHECK(!HasHashCode());
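// Editor's note: heap object pointers carry the heap-object tag in their least
// significant bit, and that same bit is the hash-not-computed flag of the hash
// field. So after the WRITE_FIELD above the field reads as "hash not computed";
// e.g. a canonical string whose tagged pointer is 0x12341 leaves bit 0 set,
// which is exactly what the DCHECK(!HasHashCode()) above relies on.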
6652 String* String::GetForwardedInternalizedString() {
6653 DCHECK(IsInternalizedString());
6654 if (HasHashCode()) return this;
6655 String* canonical = String::cast(READ_FIELD(this, kHashFieldOffset));
6656 DCHECK(canonical->IsInternalizedString());
6657 DCHECK(SlowEquals(canonical));
6658 DCHECK(canonical->HasHashCode());
6659 return canonical;
6663 Object* JSReceiver::GetConstructor() {
6664 return map()->constructor();
6668 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
6669 Handle<Name> name) {
6670 if (object->IsJSProxy()) {
6671 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6672 return JSProxy::HasPropertyWithHandler(proxy, name);
6674 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
6675 if (!result.has_value) return Maybe<bool>();
6676 return maybe(result.value != ABSENT);
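// Editor's note, usage sketch for the Maybe<bool>-returning queries in this
// block ("receiver" and "name" stand for handles obtained elsewhere):
//   Maybe<bool> found = JSReceiver::HasProperty(receiver, name);
//   if (!found.has_value) return;     // lookup failed, e.g. exception pending
//   if (found.value) { /* the property exists */ }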
6680 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
6681 Handle<Name> name) {
6682 if (object->IsJSProxy()) {
6683 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6684 return JSProxy::HasPropertyWithHandler(proxy, name);
6686 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
6687 if (!result.has_value) return Maybe<bool>();
6688 return maybe(result.value != ABSENT);
6692 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
6693 Handle<JSReceiver> object, Handle<Name> key) {
6694 uint32_t index;
6695 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6696 return GetElementAttribute(object, index);
6698 LookupIterator it(object, key);
6699 return GetPropertyAttributes(&it);
6703 Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
6704 Handle<JSReceiver> object, uint32_t index) {
6705 if (object->IsJSProxy()) {
6706 return JSProxy::GetElementAttributeWithHandler(
6707 Handle<JSProxy>::cast(object), object, index);
6709 return JSObject::GetElementAttributeWithReceiver(
6710 Handle<JSObject>::cast(object), object, index, true);
6714 bool JSGlobalObject::IsDetached() {
6715 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
6719 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
6720 const PrototypeIterator iter(this->GetIsolate(),
6721 const_cast<JSGlobalProxy*>(this));
6722 return iter.GetCurrent() != global;
6726 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6727 return object->IsJSProxy()
6728 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6729 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6733 Object* JSReceiver::GetIdentityHash() {
6734 return IsJSProxy()
6735 ? JSProxy::cast(this)->GetIdentityHash()
6736 : JSObject::cast(this)->GetIdentityHash();
6740 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6741 if (object->IsJSProxy()) {
6742 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6743 return JSProxy::HasElementWithHandler(proxy, index);
6745 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6746 Handle<JSObject>::cast(object), object, index, true);
6747 if (!result.has_value) return Maybe<bool>();
6748 return maybe(result.value != ABSENT);
6752 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
6753 uint32_t index) {
6754 if (object->IsJSProxy()) {
6755 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6756 return JSProxy::HasElementWithHandler(proxy, index);
6758 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6759 Handle<JSObject>::cast(object), object, index, false);
6760 if (!result.has_value) return Maybe<bool>();
6761 return maybe(result.value != ABSENT);
6765 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
6766 Handle<JSReceiver> object, uint32_t index) {
6767 if (object->IsJSProxy()) {
6768 return JSProxy::GetElementAttributeWithHandler(
6769 Handle<JSProxy>::cast(object), object, index);
6771 return JSObject::GetElementAttributeWithReceiver(
6772 Handle<JSObject>::cast(object), object, index, false);
6776 bool AccessorInfo::all_can_read() {
6777 return BooleanBit::get(flag(), kAllCanReadBit);
6781 void AccessorInfo::set_all_can_read(bool value) {
6782 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6786 bool AccessorInfo::all_can_write() {
6787 return BooleanBit::get(flag(), kAllCanWriteBit);
6791 void AccessorInfo::set_all_can_write(bool value) {
6792 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6796 PropertyAttributes AccessorInfo::property_attributes() {
6797 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6801 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6802 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6806 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6807 if (!HasExpectedReceiverType()) return true;
6808 if (!receiver->IsJSObject()) return false;
6809 return FunctionTemplateInfo::cast(expected_receiver_type())
6810 ->IsTemplateFor(JSObject::cast(receiver)->map());
6814 void ExecutableAccessorInfo::clear_setter() {
6815 set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
6819 template<typename Derived, typename Shape, typename Key>
6820 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6821 Handle<Object> key,
6822 Handle<Object> value) {
6823 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6827 template<typename Derived, typename Shape, typename Key>
6828 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6829 Handle<Object> key,
6830 Handle<Object> value,
6831 PropertyDetails details) {
6832 DCHECK(!key->IsName() ||
6833 details.IsDeleted() ||
6834 details.dictionary_index() > 0);
6835 int index = DerivedHashTable::EntryToIndex(entry);
6836 DisallowHeapAllocation no_gc;
6837 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
6838 FixedArray::set(index, *key, mode);
6839 FixedArray::set(index+1, *value, mode);
6840 FixedArray::set(index+2, details.AsSmi());
6844 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6845 DCHECK(other->IsNumber());
6846 return key == static_cast<uint32_t>(other->Number());
6850 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
6851 return ComputeIntegerHash(key, 0);
6855 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6856 Object* other) {
6857 DCHECK(other->IsNumber());
6858 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
6862 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
6863 return ComputeIntegerHash(key, seed);
6867 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
6868 uint32_t seed,
6869 Object* other) {
6870 DCHECK(other->IsNumber());
6871 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6875 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
6876 return isolate->factory()->NewNumberFromUint(key);
6880 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
6881 // We know that all entries in a hash table had their hash keys created.
6882 // Use that knowledge to have fast failure.
6883 if (key->Hash() != Name::cast(other)->Hash()) return false;
6884 return key->Equals(Name::cast(other));
6888 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
6889 return key->Hash();
6893 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
6894 return Name::cast(other)->Hash();
6898 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
6899 Handle<Name> key) {
6900 DCHECK(key->IsUniqueName());
6901 return key;
6905 void NameDictionary::DoGenerateNewEnumerationIndices(
6906 Handle<NameDictionary> dictionary) {
6907 DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
6911 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
6912 return key->SameValue(other);
6916 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
6917 return Smi::cast(key->GetHash())->value();
6921 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
6922 Object* other) {
6923 return Smi::cast(other->GetHash())->value();
6927 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
6928 Handle<Object> key) {
6929 return key;
6933 Handle<ObjectHashTable> ObjectHashTable::Shrink(
6934 Handle<ObjectHashTable> table, Handle<Object> key) {
6935 return DerivedHashTable::Shrink(table, key);
6939 template <int entrysize>
6940 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
6941 return key->SameValue(other);
6945 template <int entrysize>
6946 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
6947 intptr_t hash = reinterpret_cast<intptr_t>(*key);
6948 return (uint32_t)(hash & 0xFFFFFFFF);
6952 template <int entrysize>
6953 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
6954 Object* other) {
6955 intptr_t hash = reinterpret_cast<intptr_t>(other);
6956 return (uint32_t)(hash & 0xFFFFFFFF);
6960 template <int entrysize>
6961 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
6962 Handle<Object> key) {
6963 return key;
6967 void Map::ClearCodeCache(Heap* heap) {
6968 // No write barrier is needed since empty_fixed_array is not in new space.
6969 // Please note this function is used during marking:
6970 // - MarkCompactCollector::MarkUnmarkedObject
6971 // - IncrementalMarking::Step
6972 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
6973 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6977 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
6978 DCHECK(array->HasFastSmiOrObjectElements());
6979 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
6980 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6981 if (elts->length() < required_size) {
6982 // Doubling in size would be overkill, but leave some slack to avoid
6983 // constantly growing.
6984 Expand(array, required_size + (required_size >> 3));
6985 // It's a performance benefit to keep a frequently used array in new-space.
6986 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6987 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6988 // Expand will allocate a new backing store in new space even if the size
6989 // we asked for isn't larger than what we had before.
6990 Expand(array, required_size);
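// Editor's note, worked example of the growth policy above: a request for
// required_size == 64 against a smaller backing store expands to
// 64 + (64 >> 3) == 72 elements, i.e. about 12.5% slack rather than doubling.
// An array that already fits, but whose elements live outside new space and
// stay under the 128-element threshold, is re-expanded purely so the backing
// store is reallocated back into new space.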
6995 void JSArray::set_length(Smi* length) {
6996 // Don't need a write barrier for a Smi.
6997 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7001 bool JSArray::AllowsSetElementsLength() {
7002 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7003 DCHECK(result == !HasExternalArrayElements());
7004 return result;
7008 void JSArray::SetContent(Handle<JSArray> array,
7009 Handle<FixedArrayBase> storage) {
7010 EnsureCanContainElements(array, storage, storage->length(),
7011 ALLOW_COPIED_DOUBLE_ELEMENTS);
7013 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7014 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7015 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7016 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7017 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7018 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7019 array->set_elements(*storage);
7020 array->set_length(Smi::FromInt(storage->length()));
7024 Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
7025 return isolate->factory()->uninitialized_symbol();
7029 Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
7030 return isolate->factory()->megamorphic_symbol();
7034 Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
7035 ElementsKind elements_kind) {
7036 return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
7040 Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
7041 return heap->uninitialized_symbol();
7045 int TypeFeedbackInfo::ic_total_count() {
7046 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7047 return ICTotalCountField::decode(current);
7051 void TypeFeedbackInfo::set_ic_total_count(int count) {
7052 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7053 value = ICTotalCountField::update(value,
7054 ICTotalCountField::decode(count));
7055 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7059 int TypeFeedbackInfo::ic_with_type_info_count() {
7060 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7061 return ICsWithTypeInfoCountField::decode(current);
7065 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7066 if (delta == 0) return;
7067 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7068 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7069 // We can get negative count here when the type-feedback info is
7070 // shared between two code objects. This can only happen when
7071 // the debugger made a shallow copy of a code object (see Heap::CopyCode).
7072 // Since we do not optimize when the debugger is active, we can skip
7073 // this counter update.
7074 if (new_count >= 0) {
7075 new_count &= ICsWithTypeInfoCountField::kMask;
7076 value = ICsWithTypeInfoCountField::update(value, new_count);
7077 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7082 int TypeFeedbackInfo::ic_generic_count() {
7083 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7087 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7088 if (delta == 0) return;
7089 int new_count = ic_generic_count() + delta;
7090 if (new_count >= 0) {
7091 new_count &= ~Smi::kMinValue;
7092 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7097 void TypeFeedbackInfo::initialize_storage() {
7098 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7099 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7100 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7104 void TypeFeedbackInfo::change_own_type_change_checksum() {
7105 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7106 int checksum = OwnTypeChangeChecksum::decode(value);
7107 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7108 value = OwnTypeChangeChecksum::update(value, checksum);
7109 // Ensure packed bit field is in Smi range.
7110 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7111 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7112 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7116 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7117 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7118 int mask = (1 << kTypeChangeChecksumBits) - 1;
7119 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7120 // Ensure packed bit field is in Smi range.
7121 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7122 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7123 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7127 int TypeFeedbackInfo::own_type_change_checksum() {
7128 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7129 return OwnTypeChangeChecksum::decode(value);
7133 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7134 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7135 int mask = (1 << kTypeChangeChecksumBits) - 1;
7136 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7140 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7143 Relocatable::Relocatable(Isolate* isolate) {
7144 isolate_ = isolate;
7145 prev_ = isolate->relocatable_top();
7146 isolate->set_relocatable_top(this);
7150 Relocatable::~Relocatable() {
7151 DCHECK_EQ(isolate_->relocatable_top(), this);
7152 isolate_->set_relocatable_top(prev_);
7156 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7157 return map->instance_size();
7161 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7162 v->VisitExternalReference(
7163 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7167 template<typename StaticVisitor>
7168 void Foreign::ForeignIterateBody() {
7169 StaticVisitor::VisitExternalReference(
7170 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7174 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
7175 typedef v8::String::ExternalAsciiStringResource Resource;
7176 v->VisitExternalAsciiString(
7177 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7181 template<typename StaticVisitor>
7182 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
7183 typedef v8::String::ExternalAsciiStringResource Resource;
7184 StaticVisitor::VisitExternalAsciiString(
7185 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7189 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7190 typedef v8::String::ExternalStringResource Resource;
7191 v->VisitExternalTwoByteString(
7192 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7196 template<typename StaticVisitor>
7197 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7198 typedef v8::String::ExternalStringResource Resource;
7199 StaticVisitor::VisitExternalTwoByteString(
7200 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7204 template<int start_offset, int end_offset, int size>
7205 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7206 HeapObject* obj,
7207 ObjectVisitor* v) {
7208 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7209 HeapObject::RawField(obj, end_offset));
7213 template<int start_offset>
7214 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7215 int object_size,
7216 ObjectVisitor* v) {
7217 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7218 HeapObject::RawField(obj, object_size));
7222 template<class Derived, class TableType>
7223 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7224 TableType* table(TableType::cast(this->table()));
7225 int index = Smi::cast(this->index())->value();
7226 Object* key = table->KeyAt(index);
7227 DCHECK(!key->IsTheHole());
7228 return key;
7232 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7233 array->set(0, CurrentKey());
7237 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7238 array->set(0, CurrentKey());
7239 array->set(1, CurrentValue());
7243 Object* JSMapIterator::CurrentValue() {
7244 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7245 int index = Smi::cast(this->index())->value();
7246 Object* value = table->ValueAt(index);
7247 DCHECK(!value->IsTheHole());
7248 return value;
7252 #undef TYPE_CHECKER
7253 #undef CAST_ACCESSOR
7254 #undef INT_ACCESSORS
7255 #undef ACCESSORS
7256 #undef ACCESSORS_TO_SMI
7257 #undef SMI_ACCESSORS
7258 #undef SYNCHRONIZED_SMI_ACCESSORS
7259 #undef NOBARRIER_SMI_ACCESSORS
7260 #undef BOOL_GETTER
7261 #undef BOOL_ACCESSORS
7262 #undef FIELD_ADDR
7263 #undef FIELD_ADDR_CONST
7264 #undef READ_FIELD
7265 #undef NOBARRIER_READ_FIELD
7266 #undef WRITE_FIELD
7267 #undef NOBARRIER_WRITE_FIELD
7268 #undef WRITE_BARRIER
7269 #undef CONDITIONAL_WRITE_BARRIER
7270 #undef READ_DOUBLE_FIELD
7271 #undef WRITE_DOUBLE_FIELD
7272 #undef READ_INT_FIELD
7273 #undef WRITE_INT_FIELD
7274 #undef READ_INTPTR_FIELD
7275 #undef WRITE_INTPTR_FIELD
7276 #undef READ_UINT32_FIELD
7277 #undef WRITE_UINT32_FIELD
7278 #undef READ_SHORT_FIELD
7279 #undef WRITE_SHORT_FIELD
7280 #undef READ_BYTE_FIELD
7281 #undef WRITE_BYTE_FIELD
7282 #undef NOBARRIER_READ_BYTE_FIELD
7283 #undef NOBARRIER_WRITE_BYTE_FIELD
7285 } } // namespace v8::internal
7287 #endif // V8_OBJECTS_INL_H_