// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//
#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
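
// Worked example (added comment, not in the original source): value_ uses the
// low 31 bits of an int. If bit 30 is set, e.g. value_ = 0x40000001, then
// value = value_ << 1 = 0x80000002 and the arithmetic shift back yields
// 0xC0000001: bits 30 and 31 now agree, so the detail bits survive the
// 31-bit Smi round trip on 32-bit targets.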

int PropertyDetails::field_width_in_words() const {
  DCHECK(location() == kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
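
// Illustrative arithmetic (added comment): on a 64-bit target
// kDoubleSize == kPointerSize == 8, so every field is one word; on a 32-bit
// target with unboxed double fields, a double representation takes
// kDoubleSize / kPointerSize = 8 / 4 = 2 words.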

#define TYPE_CHECKER(type, instancetype)                                    \
  bool Object::Is##type() const {                                           \
    return Object::IsHeapObject() &&                                        \
           HeapObject::cast(this)->map()->instance_type() == instancetype;  \
  }
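
// Expansion sketch (added comment): TYPE_CHECKER(Symbol, SYMBOL_TYPE)
// expands to
//   bool Object::IsSymbol() const {
//     return Object::IsHeapObject() &&
//            HeapObject::cast(this)->map()->instance_type() == SYMBOL_TYPE;
//   }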

#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }


#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }
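
// Expansion sketch (added comment): ACCESSORS(Cell, value, Object,
// kValueOffset), used further down in this file, defines
// Object* Cell::value() const plus a set_value(Object*, WriteBarrierMode)
// whose raw store is followed by the conditional write barrier above.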

// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }

bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase() ||
         IsExternalArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }

bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Float32x4, FLOAT32X4_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() const {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
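
// Added note: the masked compare above folds two checks into one --
// (type & kIsNotStringMask) == kStringTag (it is a string) and
// (type & kIsNotInternalizedMask) == kInternalizedTag (it is internalized) --
// which is why kNotInternalizedTag must be non-zero for the trick to work.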

bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}

bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    Object* temp = first;
    first = second;
    second = temp;
  }
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}

Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}


Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}

StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}

bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
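
// Illustrative combination (added comment): for a sequential one-byte string
// full_representation_tag() is (kSeqStringTag | kOneByteStringTag), exactly
// the pattern IsSequentialOneByte() below compares against.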

STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);


bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);

uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}


template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}


Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}

template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};


class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};


class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() override {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) override;
  Handle<Object> AsHandle(Isolate* isolate) override;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};


class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};


// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};

bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


bool Object::IsExternalArray() const {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)     \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER


bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}


bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
}


bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)


bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsArrayList() const { return IsFixedArray(); }


bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}


bool Object::IsTransitionArray() const {
  return IsFixedArray();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }

bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsHandlerTable() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}


bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}

bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
          map == heap->catch_context_map() ||
          map == heap->with_context_map() ||
          map == heap->native_context_map() ||
          map == heap->block_context_map() ||
          map == heap->module_context_map() ||
          map == heap->script_context_map());
}


bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}


bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}

TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsWeakHashTable() const {
  return IsHashTable();
}


bool Object::IsWeakValueHashTable() const { return IsHashTable(); }


bool Object::IsDictionary() const {
  return IsHashTable() &&
         this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsGlobalDictionary() const { return IsDictionary(); }


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsStringTable() const {
  return IsHashTable();
}

bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}


bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}

bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsMapCache() const {
  return IsHashTable();
}


bool Object::IsObjectHashTable() const {
  return IsHashTable();
}


bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}


bool Object::IsPrimitive() const {
  return IsOddball() || IsNumber() || IsString();
}


bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}

bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsGlobalObjectMap();
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() const {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() const {                                       \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}


double Object::Number() {
  DCHECK(IsNumber());
  return IsSmi()
             ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
             : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}

MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name,
                                        LanguageMode language_mode) {
  LookupIterator it(object, name);
  return GetProperty(&it, language_mode);
}


MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return GetProperty(&it, language_mode);
}


MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED);
}


Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
    Isolate* isolate, Handle<Object> receiver) {
  PrototypeIterator iter(isolate, receiver);
  while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return PrototypeIterator::GetCurrent(iter);
    }
    iter.Advance();
  }
  return PrototypeIterator::GetCurrent(iter);
}


MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        const char* name,
                                        LanguageMode language_mode) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(object, str, language_mode);
}

#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
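
// Added note on the modes above: SKIP_WRITE_BARRIER elides both barriers
// (only safe when the stored value needs no GC tracking, e.g. a Smi);
// UPDATE_WRITE_BARRIER runs the incremental-marking barrier and the
// new-space barrier; any other non-skip mode runs only the new-space check.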

#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));

Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


int Smi::value() const {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  DCHECK(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  DCHECK(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
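
// Worked example (added comment): on a 32-bit target kSmiTagSize == 1 and
// kSmiShiftSize == 0, so FromIntptr(5) produces (5 << 1) | kSmiTag == 0xA --
// the payload shifted past the tag bit, with kSmiTag == 0 in the low bit.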

bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}


MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
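
// Added note: a forwarding address can share storage with the map word
// because map pointers carry kHeapObjectTag while FromForwardingAddress
// stores the untagged raw address, whose low bits are clear -- it looks
// like a Smi, which is exactly what IsForwardingAddress() tests.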

#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}


Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}

void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}
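
// Worked example (added comment): with kHeapObjectTag == 1, an object at
// aligned address 0x1000 is referred to by the tagged pointer 0x1001;
// FromAddress() adds the tag and address() strips it again.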

int HeapObject::Size() {
  return SizeFromMap(map());
}


HeapObjectContents HeapObject::ContentType() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return HeapObjectContents::kTaggedValues;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides the
    // map-word).
    if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
      return HeapObjectContents::kRawValues;
    else
      return HeapObjectContents::kTaggedValues;
#if 0
  // TODO(jochen): Enable eventually.
  } else if (type == JS_FUNCTION_TYPE) {
    return HeapObjectContents::kMixedValues;
#endif
  } else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
             type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return HeapObjectContents::kMixedValues;
  } else if (type <= LAST_DATA_TYPE) {
    // TODO(jochen): Why do we claim that Code and Map contain only raw values?
    return HeapObjectContents::kRawValues;
  } else {
    if (FLAG_unbox_double_fields) {
      LayoutDescriptorHelper helper(map());
      if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
    }
    return HeapObjectContents::kTaggedValues;
  }
}

void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}
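
// Worked example (added comment): for the double 1.0 the IEEE-754 biased
// exponent field holds 1023, so with kExponentBias == 1023 get_exponent()
// returns 0.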

int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


float Float32x4::get_lane(int lane) const {
  DCHECK(lane < 4 && lane >= 0);
#if defined(V8_TARGET_LITTLE_ENDIAN)
  return READ_FLOAT_FIELD(this, kValueOffset + lane * kFloatSize);
#elif defined(V8_TARGET_BIG_ENDIAN)
  return READ_FLOAT_FIELD(this, kValueOffset + (3 - lane) * kFloatSize);
#else
#error Unknown byte ordering
#endif
}


void Float32x4::set_lane(int lane, float value) {
  DCHECK(lane < 4 && lane >= 0);
#if defined(V8_TARGET_LITTLE_ENDIAN)
  WRITE_FLOAT_FIELD(this, kValueOffset + lane * kFloatSize, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
  WRITE_FLOAT_FIELD(this, kValueOffset + (3 - lane) * kFloatSize, value);
#else
#error Unknown byte ordering
#endif
}


ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}

bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}

void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}


// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }
  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }
  return DONT_TRACK_ALLOCATION_SITE;
}


inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}


inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}


inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}

inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}


inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
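
// Numeric sketch (added comment, assuming kPretenureRatio == 0.85 as defined
// elsewhere in this codebase): creating 100 mementos and finding 90 of them
// in the next scavenge gives ratio 0.9 >= 0.85, so an undecided site moves
// to kTenure if the semi-space was at maximum capacity, else kMaybeTenure.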

void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}


void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}


void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}

void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}


ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)


Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


void WeakCell::clear() {
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}


void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  Heap* heap = GetHeap();
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we process
  // all weak cells.
  if (heap->InNewSpace(val)) {
    heap->RecordWrite(address(), kValueOffset);
  }
}


bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }


Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }


void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}


void WeakCell::clear_next(Heap* heap) {
  set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
}


bool WeakCell::next_cleared() { return next()->IsTheHole(); }

int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}

int JSObject::GetInternalFieldCount() {
  DCHECK(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}
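
// Worked example (added comment): on a 64-bit target, an object with
// instance size 0x40, header size 0x20 and two in-object properties has
// ((0x40 - 0x20) >> 3) - 2 == 4 - 2 == 2 internal fields.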
1988 int JSObject::GetInternalFieldOffset(int index) {
1989 DCHECK(index < GetInternalFieldCount() && index >= 0);
1990 return GetHeaderSize() + (kPointerSize * index);
1994 Object* JSObject::GetInternalField(int index) {
1995 DCHECK(index < GetInternalFieldCount() && index >= 0);
1996 // Internal objects do follow immediately after the header, whereas in-object
1997 // properties are at the end of the object. Therefore there is no need
1998 // to adjust the index here.
1999 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2003 void JSObject::SetInternalField(int index, Object* value) {
2004 DCHECK(index < GetInternalFieldCount() && index >= 0);
2005 // Internal objects do follow immediately after the header, whereas in-object
2006 // properties are at the end of the object. Therefore there is no need
2007 // to adjust the index here.
2008 int offset = GetHeaderSize() + (kPointerSize * index);
2009 WRITE_FIELD(this, offset, value);
2010 WRITE_BARRIER(GetHeap(), this, offset, value);
2014 void JSObject::SetInternalField(int index, Smi* value) {
2015 DCHECK(index < GetInternalFieldCount() && index >= 0);
2016 // Internal objects do follow immediately after the header, whereas in-object
2017 // properties are at the end of the object. Therefore there is no need
2018 // to adjust the index here.
2019 int offset = GetHeaderSize() + (kPointerSize * index);
2020 WRITE_FIELD(this, offset, value);
2024 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2025 if (!FLAG_unbox_double_fields) return false;
2026 return map()->IsUnboxedDoubleField(index);
2030 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2031 if (!FLAG_unbox_double_fields) return false;
2032 if (index.is_hidden_field() || !index.is_inobject()) return false;
2033 return !layout_descriptor()->IsTagged(index.property_index());
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}


double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}


void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}


void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
  WRITE_DOUBLE_FIELD(this, index.offset(), value);
}


void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value->IsMutableHeapNumber());
    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
void JSObject::WriteToField(int descriptor, Object* value) {
  DisallowHeapAllocation no_gc;

  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);

  DCHECK(details.type() == DATA);

  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized()) return;
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAtPut(index, value->Number());
    } else {
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value(value->Number());
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated =
        map->inobject_properties() - map->unused_property_fields();
    DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
bool JSObject::HasFastProperties() {
  DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}


bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, inobject_properties());
  int external = NumberOfFields() - inobject_properties();
  return external > limit;
}
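// Note on Map::TooManyFastProperties above: once the preallocated fields are
// exhausted, the object may accumulate roughly as many out-of-object fields
// as it has in-object slots (with a floor of 12, or 128 for stores that are
// certainly not keyed) before callers treat it as a candidate for slow
// (dictionary) properties.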
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
bool Object::ToArrayLength(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}


bool Object::ToArrayIndex(uint32_t* index) {
  return ToArrayLength(index) && *index != kMaxUInt32;
}
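// Note on Object::ToArrayIndex above: an ECMAScript array index is an integer
// in [0, 2^32 - 2]; kMaxUInt32 (2^32 - 1) is a valid array *length* but not a
// valid index, hence the extra check after ToArrayLength succeeds.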
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}


void Object::VerifyApiCallResultType() {
#if DEBUG
  if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
        IsHeapNumber() || IsFloat32x4() || IsUndefined() || IsTrue() ||
        IsFalse() || IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}


bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}


uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}


Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
                                     int index) {
  if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  } else {
    return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
  }
}
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}
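// Note on FixedDoubleArray::set above: canonicalizing NaNs on write
// guarantees that no stored double ever carries the kHoleNanInt64 bit
// pattern, which is reserved as the "hole" marker written by set_the_hole()
// and tested by is_the_hole() below.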
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}


bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  if (raw->IsSmi()) return raw;
  DCHECK(raw->IsWeakCell());
  return WeakCell::cast(raw)->value();
}


bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}


void WeakFixedArray::Clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
}


int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}


int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}


void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}


int ArrayList::Length() {
  if (FixedArray::cast(this)->length() == 0) return 0;
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}


void ArrayList::SetLength(int length) {
  return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
}


Object* ArrayList::Get(int index) {
  return FixedArray::cast(this)->get(kFirstIndex + index);
}


Object** ArrayList::Slot(int index) {
  return data_start() + kFirstIndex + index;
}


void ArrayList::Set(int index, Object* obj) {
  FixedArray::cast(this)->set(kFirstIndex + index, obj);
}


void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined());
  FixedArray::cast(this)
      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
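// Note on HeapObject::GetWriteBarrierMode above: stores into an object that
// itself lives in new space cannot create old-to-new pointers that the store
// buffer would have to remember, so the barrier may be skipped -- except
// while incremental marking is active, when the marking barrier must still
// observe every store.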
AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
      FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
  if (IsHeapNumber()) return kDoubleUnaligned;
  if (IsFloat32x4()) return kSimd128Unaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}


void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}


void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
                 int* out_insertion_index) {
  DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
  uint32_t hash = name->Hash();
  int limit = high;

  DCHECK(low <= high);

  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    uint32_t current_hash = entry->Hash();
    if (current_hash != hash) {
      if (out_insertion_index != NULL) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
    if (entry->Equals(name)) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
  return T::kNotFound;
}
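// Note on BinarySearch above: the while loop maintains the invariant that the
// first entry whose hash is >= the search hash lies within [low, high]; once
// it converges, the trailing for loop walks forward from that position to
// resolve hash collisions, since distinct names may share a hash value.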
// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int len, int valid_entries,
                 int* out_insertion_index) {
  uint32_t hash = name->Hash();
  if (search_mode == ALL_ENTRIES) {
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->Hash();
      if (current_hash > hash) {
        if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
    if (out_insertion_index != NULL) *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK(len >= valid_entries);
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      Name* entry = array->GetKey(number);
      uint32_t current_hash = entry->Hash();
      if (current_hash == hash && entry->Equals(name)) return number;
    }
    return T::kNotFound;
  }
}
template <SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
  if (search_mode == VALID_ENTRIES) {
    SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_DCHECK(array->IsSortedNoDuplicates());
  }

  int nof = array->number_of_entries();
  if (nof == 0) {
    if (out_insertion_index != NULL) *out_insertion_index = 0;
    return T::kNotFound;
  }

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if ((search_mode == ALL_ENTRIES &&
       nof <= kMaxElementsForLinearSearch) ||
      (search_mode == VALID_ENTRIES &&
       valid_entries <= (kMaxElementsForLinearSearch * 3))) {
    return LinearSearch<search_mode>(array, name, nof, valid_entries,
                                     out_insertion_index);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
                                   out_insertion_index);
}
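// Note on Search above: for small arrays a linear scan tends to beat binary
// search since it avoids the per-probe indirection through the sorted-key
// table; the crossover constants (8, and 8 * 3 in VALID_ENTRIES mode) appear
// to be empirical tuning values rather than anything principled.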
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
}


int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
        GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else {
    UNREACHABLE();
  }
  return NULL;
}
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}


Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}


Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}


int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}


PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return GetDetails(descriptor_number).field_index();
}
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  Object* value = GetValue(descriptor_number);
  if (value->IsWeakCell()) {
    if (WeakCell::cast(value)->cleared()) return HeapType::None();
    value = WeakCell::cast(value)->value();
  }
  return HeapType::cast(value);
}


Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
  NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
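// Note on DescriptorArray::Append above: the new descriptor is stored at the
// end (descriptors keep insertion order), then a single insertion-sort step
// shifts larger hashes right in the sorted-key table so that lookups can
// continue to binary-search descriptors by hash.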
void DescriptorArray::SwapSortedKeys(int first, int second) {
  int first_key = GetSortedKeyIndex(first);
  SetSortedKey(first, GetSortedKeyIndex(second));
  SetSortedKey(second, first_key);
}
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  DCHECK(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);
}


DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}
int HashTableBase::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 4;
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  return Max(capacity, kMinCapacity);
}
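// Note on HashTableBase::ComputeCapacity above: capacities are always powers
// of two, so probing can mask with (capacity - 1) instead of computing a
// modulus, and doubling at_least_space_for keeps the table's load factor at
// or below 50%.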
int HashTableBase::ComputeCapacityForSerialization(int at_least_space_for) {
  const int kMinCapacity = 1;
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for);
  return Max(capacity, kMinCapacity);
}


template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  return FindEntry(isolate, key, HashTable::Hash(key));
}
// Find entry for key otherwise return kNotFound.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
                                              int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
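// Note on FindEntry above: FirstProbe masks the hash with (capacity - 1) and
// NextProbe advances by an increasing step each iteration, i.e. a
// triangular-number probe sequence, which visits every slot of a
// power-of-two table. The-hole entries mark deletions and must be probed
// past, not treated as end of chain; only an undefined slot ends the probe.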
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}


uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}


void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
// ------------------------------------
// Cast operations
CAST_ACCESSOR(AccessorInfo)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalFloat32Array)
CAST_ACCESSOR(ExternalFloat64Array)
CAST_ACCESSOR(ExternalInt16Array)
CAST_ACCESSOR(ExternalInt32Array)
CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(ExternalUint16Array)
CAST_ACCESSOR(ExternalUint32Array)
CAST_ACCESSOR(ExternalUint8Array)
CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Float32x4)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
CAST_ACCESSOR(WeakValueHashTable)
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;


template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}


template <class Traits>
const FixedTypedArray<Traits>*
FixedTypedArray<Traits>::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<const FixedTypedArray<Traits>*>(object);
}


#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST


template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}


FreeSpace** FreeSpace::next_address() {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
}


void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}


FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}


bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
}


ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}


bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}


Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}


Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
  if (name->IsSymbol()) return name;
  return String::Flatten(Handle<String>::cast(name), pretenure);
}
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
void String::Set(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  DCHECK(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}


bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}


String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
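// Note on String::VisitFlat above: the walk is iterative rather than
// recursive. A sliced string adds its offset and loops on its parent, a
// sequential or external segment is handed to the visitor and ends the walk
// (returning NULL), and a cons string is returned to the caller -- e.g.
// StringCharacterStream::Reset -- which then iterates its substrings.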
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}


template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}


bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
    int start) {
  return GetChars() + start;
}
int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }


void ConsStringIterator::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


void ConsStringIterator::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


void ConsStringIterator::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}


void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}


void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}


byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


void BytecodeArray::set_frame_size(int frame_size) {
  // We need at least one stack slot for the return register.
  DCHECK_GE(frame_size, kPointerSize);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}


int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}


Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  return ptr[index];
}


Handle<Object> ExternalUint8ClampedArray::get(
    Handle<ExternalUint8ClampedArray> array,
    int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalUint8ClampedArray::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  ptr[index] = value;
}
void* ExternalArray::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
int8_t ExternalInt8Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
                                      int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalInt8Array::set(int index, int8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUint8Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalUint8Array::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalInt16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalInt16Array::set(int index, int16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUint16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
                                        int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalUint16Array::set(int index, uint16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}


int32_t ExternalInt32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
                                       int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromInt(array->get_scalar(index));
}


void ExternalInt32Array::set(int index, int32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUint32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
                                        int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromUint(array->get_scalar(index));
}


void ExternalUint32Array::set(int index, uint32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloat32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


void ExternalFloat32Array::set(int index, float value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}


double ExternalFloat64Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


void ExternalFloat64Array::set(int index, double value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)


void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case FIXED_##TYPE##_ARRAY_TYPE:                     \
      element_size = size;                              \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return element_size;
}


int FixedTypedArrayBase::DataSize(InstanceType type) {
  return length() * ElementSize(type);
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}


double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  return ptr[index];
}


template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  ptr[index] = value;
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}


template <> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


template <> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}
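// Note on the clamped from_double above: with the default floating-point
// rounding mode, lrint rounds half-way cases to even (e.g. 0.5 -> 0,
// 1.5 -> 2), matching the spec'd ToUint8Clamp conversion; the !(value > 0)
// test also routes NaN to 0.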
template <> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


template <> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::cast(value)->value();
    cast_value = from_int(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from_double(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined());
  }
  set(index, cast_value);
}
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
Handle<Map> Map::CopyInstallDescriptorsForTesting(
    Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> layout_descriptor) {
  return CopyInstallDescriptors(map, new_descriptor, descriptors,
                                layout_descriptor);
}
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to read their
    // length with a synchronized access.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == BYTECODE_ARRAY_TYPE) {
    return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to read their
    // length with a synchronized access.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(this, kInstanceSizeOffset,
                             static_cast<byte>(value));
}

void Map::set_inobject_properties(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}

void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }

InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}

int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}

void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}

byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

byte Map::bit_field2() const {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
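
// The accessors below pack boolean flags into the map's one-byte bit fields,
// either with hand-written shift/mask code or via BitField<> helpers
// (update()/decode()).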
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}

bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}

void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}

bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}

void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}

bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}

void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}
void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() const {
  return IsPrototypeMapBits::decode(bit_field2());
}

void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}

Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}

void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}

bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}

void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}

bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}

void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}

bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}

void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}

bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}

void Map::set_is_strong() {
  set_bit_field3(IsStrong::update(bit_field3(), true));
}

bool Map::is_strong() {
  return IsStrong::decode(bit_field3());
}

void Map::set_counter(int value) {
  set_bit_field3(Counter::update(bit_field3(), value));
}

int Map::counter() { return Counter::decode(bit_field3()); }

void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}

bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}

bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
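
// A map can only be deprecated if one of its property representations could
// still be generalized: None, Smi, Double and HeapObject representations can
// all widen, and a constant data property can become a plain data property.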
bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == DATA_CONSTANT) return true;
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(), DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}

void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}

void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}

Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}

void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}

void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}

void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}

bool Code::IsCodeStubOrIC() {
  return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
         kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == TO_BOOLEAN_IC;
}
InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to a code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}

ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}

Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}

// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}

void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}

inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
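
// The kFullCodeFlags field only exists on full-codegen code objects, so the
// accessors below all assert kind() == FUNCTION.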
bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}

void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}

bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}

void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}

bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}

void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}

void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return ProfilerTicksField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_profiler_ticks(int ticks) {
  if (kind() == FUNCTION) {
    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
    unsigned updated = ProfilerTicksField::update(previous, ticks);
    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
  }
}

int Code::builtin_index() {
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}

void Code::set_builtin_index(int index) {
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}
unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
             READ_UINT32_FIELD(this, kKindSpecificFlags2Offset))
         << kPointerSizeLog2;
}

void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}
uint16_t Code::to_boolean_state() { return extra_ic_state(); }

bool Code::has_function_cache() {
  DCHECK(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_has_function_cache(bool flag) {
  DCHECK(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}

bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}

bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}
Address Code::constant_pool() {
  Address constant_pool = NULL;
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < instruction_size()) {
      constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
    }
  }
  return constant_pool;
}
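
// Code::Flags packs kind, IC state, stub type, extra IC state and the cache
// holder into one 32-bit value using BitField<> encodings; the
// Compute*/Extract*/Remove* helpers below only shuffle bits and never touch
// the heap. A minimal sketch, assuming the default arguments declared on
// ComputeFlags in objects.h:
//   Code::Flags flags = Code::ComputeFlags(Code::STUB);
//   DCHECK(Code::ExtractKindFromFlags(flags) == Code::STUB);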
Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
                               ExtraICState extra_ic_state, StubType type,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}

Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}

Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}

Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}

InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}

ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}

Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}

CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}

Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}

Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::FromAddress(
      Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSObject()) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  if (object->IsFixedArray()) {
    // Contexts of inlined functions are embedded in optimized code.
    Map* map = HeapObject::cast(object)->map();
    Heap* heap = map->GetHeap();
    return FLAG_weak_embedded_objects_in_optimized_code &&
           map == heap->function_context_map();
  }
  return false;
}
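
// A FindAndReplacePattern records up to kMaxCount map -> object
// substitutions to be applied to the objects embedded in a code object
// (used by Code::FindAndReplace()).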
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) {}
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }

 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}

LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
#endif
  }
}

void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(StaticVisitorBase::GetVisitorId(this));
  }
}
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
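
// bit_field3 is a 32-bit value, but the slot it occupies is pointer-sized.
// On 64-bit targets the unused upper half is explicitly zeroed so the whole
// word has a deterministic value.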
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

LayoutDescriptor* Map::GetLayoutDescriptor() {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise the layout descriptor would have to be
  // updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}

Object* Map::GetBackPointer() {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetIsolate()->heap()->undefined_value();
}

Map* Map::ElementsTransitionMap() {
  return TransitionArray::SearchSpecial(
      this, GetHeap()->elements_transition_symbol());
}
ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)

Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}

void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  DCHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
ACCESSORS(PrototypeInfo, constructor_name, Object, kConstructorNameOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
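
// Script flags are stored as bits in a Smi; the accessors below decode the
// compilation type, compilation state and origin options out of that field.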
Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}

void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
                            type == COMPILATION_TYPE_EVAL));
}

Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}

void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
                            state == COMPILATION_STATE_COMPILED));
}

ScriptOriginOptions Script::origin_options() {
  return ScriptOriginOptions((flags()->value() & kOriginOptionsMask) >>
                             kOriginOptionsShift);
}

void Script::set_origin_options(ScriptOriginOptions origin_options) {
  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
  set_flags(Smi::FromInt((flags()->value() & ~kOriginOptionsMask) |
                         (origin_options.Flags() << kOriginOptionsShift)));
}
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, code, Code, kCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
               kNeverCompiled)
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints, kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif
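
// On 64-bit hosts two 32-bit int fields share one pointer-sized slot. The
// half whose position carries the Smi tag (LO) stores its value shifted left
// by one with the tag bit cleared, so the whole slot still looks like a Smi
// to the GC; the other half (HI) is a plain untagged int.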
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)                  \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)

void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
}
LanguageMode SharedFunctionInfo::language_mode() {
  STATIC_ASSERT(LANGUAGE_END == 3);
  return construct_language_mode(
      BooleanBit::get(compiler_hints(), kStrictModeFunction),
      BooleanBit::get(compiler_hints(), kStrongModeFunction));
}

void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LANGUAGE_END == 3);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
  hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
  set_compiler_hints(hints);
}

FunctionKind SharedFunctionInfo::kind() {
  return FunctionKindBits::decode(compiler_hints());
}

void SharedFunctionInfo::set_kind(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  int hints = compiler_hints();
  hints = FunctionKindBits::update(hints, kind);
  set_compiler_hints(hints);
}
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
               kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
               kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
               kIsAccessorFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
               kIsDefaultConstructor)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalOneByteString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}
void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}

int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}

void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift) |
                              (start_position_and_type() &
                               ~kStartPositionMask));
}

Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}

void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

  set_code(value);

  if (is_compiled()) set_never_compiled(false);
}

ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}

void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset,
                            reinterpret_cast<Object*>(value), mode);
}
bool SharedFunctionInfo::is_compiled() {
  Builtins* builtins = GetIsolate()->builtins();
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
  return code() != builtins->builtin(Builtins::kCompileLazy);
}

bool SharedFunctionInfo::is_simple_parameter_list() {
  return scope_info()->IsSimpleParameterList();
}

bool SharedFunctionInfo::HasDebugInfo() {
  bool has_debug_info = debug_info()->IsStruct();
  DCHECK(!has_debug_info || HasDebugCode());
  return has_debug_info;
}

DebugInfo* SharedFunctionInfo::GetDebugInfo() {
  DCHECK(HasDebugInfo());
  return DebugInfo::cast(debug_info());
}

bool SharedFunctionInfo::HasDebugCode() {
  return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
}

bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}

FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}

bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}

BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}

bool SharedFunctionInfo::HasBytecodeArray() {
  return function_data()->IsBytecodeArray();
}

BytecodeArray* SharedFunctionInfo::bytecode_array() {
  DCHECK(HasBytecodeArray());
  return BytecodeArray::cast(function_data());
}
int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}

void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}

int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}

void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}

void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}

int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}

void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}

int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}

void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}

BailoutReason SharedFunctionInfo::disable_optimization_reason() {
  return static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
}

bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
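
// Re-enabling optimization uses exponential backoff: every call below bumps
// the try counter, but optimization_disabled is only cleared once the count
// is a power of two of at least 16.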
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
  }
}

bool SharedFunctionInfo::IsSubjectToDebugging() {
  Object* script_obj = script();
  if (script_obj->IsUndefined()) return false;
  Script* script = Script::cast(script_obj);
  Script::Type type = static_cast<Script::Type>(script->type()->value());
  return type == Script::TYPE_NORMAL;
}
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}

bool JSFunction::IsSubjectToDebugging() {
  return shared()->IsSubjectToDebugging();
}

bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->internal_formal_parameter_count() !=
         SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}

bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}

bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}

bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}

bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}

bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
         initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
}
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}

void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this, HeapObject::RawField(this, kCodeEntryOffset), value);
}

void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
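
// ReplaceCode() keeps the native context's list of optimized functions in
// sync with the kind of code currently installed on this function.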
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}

JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}

void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}

bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}

bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}

bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}

Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}

Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) {
    Object* prototype = map()->GetConstructor();
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype->IsMap());
    return prototype;
  }
  return instance_prototype();
}
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}

bool JSFunction::is_compiled() {
  Builtins* builtins = GetIsolate()->builtins();
  return code() != builtins->builtin(Builtins::kCompileLazy) &&
         code() != builtins->builtin(Builtins::kCompileOptimized) &&
         code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
}

bool JSFunction::is_simple_parameter_list() {
  return shared()->is_simple_parameter_list();
}

FixedArray* JSFunction::literals() {
  DCHECK(!shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_literals(FixedArray* literals) {
  DCHECK(!shared()->bound());
  set_literals_or_bindings(literals);
}

FixedArray* JSFunction::function_bindings() {
  DCHECK(shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_function_bindings(FixedArray* bindings) {
  DCHECK(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  DCHECK(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_array_map());
  set_literals_or_bindings(bindings);
}

int JSFunction::NumberOfLiterals() {
  DCHECK(!shared()->bound());
  return literals()->length();
}
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}

void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}

ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)

void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

ACCESSORS(JSCollection, table, Object, kTableOffset)
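
// OrderedHashTableIterator keeps its table, index and iteration kind as
// tagged fields; the macro below expands to a checked getter and a
// write-barriered setter for each of them.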
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)

Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}

void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)

bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)

ACCESSORS(JSValue, value, Object, kValueOffset)
HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}

const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)

SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
5971 void Code::WipeOutHeader() {
5972 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
5973 WRITE_FIELD(this, kHandlerTableOffset, NULL);
5974 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
5975 // Do not wipe out major/minor keys on a code stub or IC
5976 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
5977 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
5982 Object* Code::type_feedback_info() {
5983 DCHECK(kind() == FUNCTION);
5984 return raw_type_feedback_info();
5988 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
5989 DCHECK(kind() == FUNCTION);
5990 set_raw_type_feedback_info(value, mode);
5991 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5996 uint32_t Code::stub_key() {
5997 DCHECK(IsCodeStubOrIC());
5998 Smi* smi_key = Smi::cast(raw_type_feedback_info());
5999 return static_cast<uint32_t>(smi_key->value());
6003 void Code::set_stub_key(uint32_t key) {
6004 DCHECK(IsCodeStubOrIC());
6005 set_raw_type_feedback_info(Smi::FromInt(key));
6009 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6010 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6013 byte* Code::instruction_start() {
6014 return FIELD_ADDR(this, kHeaderSize);
6018 byte* Code::instruction_end() {
6019 return instruction_start() + instruction_size();
6023 int Code::body_size() {
6024 return RoundUp(instruction_size(), kObjectAlignment);
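
// Illustrative arithmetic (assuming kObjectAlignment == 8, the 64-bit case):
// an instruction_size() of 30 gives body_size() == RoundUp(30, 8) == 32, so
// the data that follows the instructions stays object-aligned.
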
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)


void JSArrayBuffer::set_bit_field(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
#if V8_TARGET_LITTLE_ENDIAN
    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
#else
    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
#endif
  }
  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}


uint32_t JSArrayBuffer::bit_field() const {
  return READ_UINT32_FIELD(this, kBitFieldOffset);
}


bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }


void JSArrayBuffer::set_is_external(bool value) {
  set_bit_field(IsExternal::update(bit_field(), value));
}


bool JSArrayBuffer::is_neuterable() {
  return IsNeuterable::decode(bit_field());
}


void JSArrayBuffer::set_is_neuterable(bool value) {
  set_bit_field(IsNeuterable::update(bit_field(), value));
}


bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }


void JSArrayBuffer::set_was_neutered(bool value) {
  set_bit_field(WasNeutered::update(bit_field(), value));
}


bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }


void JSArrayBuffer::set_is_shared(bool value) {
  set_bit_field(IsShared::update(bit_field(), value));
}
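
// All of these flag accessors follow the same BitField pattern: decode()
// extracts one bit from the packed bit_field() word, and update() returns a
// copy of the word with just that bit replaced. Illustrative use:
//   uint32_t bits = buffer->bit_field();
//   bits = IsShared::update(bits, true);  // change only the "shared" bit
//   buffer->set_bit_field(bits);          // write the whole word back
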
Object* JSArrayBufferView::byte_offset() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteOffsetOffset));
}


void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteOffsetOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
}


Object* JSArrayBufferView::byte_length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteLengthOffset));
}


void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
}


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
#ifdef VERIFY_HEAP
ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
#endif


bool JSArrayBufferView::WasNeutered() const {
  return JSArrayBuffer::cast(buffer())->was_neutered();
}


Object* JSTypedArray::length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kLengthOffset));
}


uint32_t JSTypedArray::length_value() const {
  if (WasNeutered()) return 0;
  uint32_t index = 0;
  CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
  return index;
}


void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
}


#ifdef VERIFY_HEAP
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif
ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK(!IsSloppyArgumentsElements(kind) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasFastArgumentsElements() {
  return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSlowArgumentsElements() {
  return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return IsSloppyArgumentsElements(GetElementsKind());
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
}


#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)            \
  bool JSObject::HasExternal##Type##Elements() {                          \
    HeapObject* array = elements();                                       \
    DCHECK(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}


#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject())                                        \
      return false;                                                    \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(!IsGlobalObject());
  return NameDictionary::cast(properties());
}


GlobalDictionary* JSObject::global_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(IsGlobalObject());
  return GlobalDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}
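
// The hash field keeps housekeeping flags (such as the "not computed" bit
// tested by IsHashFieldComputed()) in its low bits and the hash proper in
// the remaining high bits, which is why a computed hash is recovered with a
// single shift:
//   uint32_t field = name->hash_field();
//   uint32_t hash = field >> Name::kHashShift;  // valid only once computed
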
bool Name::IsPrivate() {
  return this->IsSymbol() && Symbol::cast(this)->is_private();
}


StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}


uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
                                          const uc16* chars, int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    running_hash = AddCharacterCore(running_hash, *chars++);
  }
  return running_hash;
}


uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
                                                 const char* chars,
                                                 int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    uint16_t c = static_cast<uint16_t>(*chars++);
    running_hash = AddCharacterCore(running_hash, c);
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
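
// AddCharacterCore() and GetHashCore() together form the Jenkins
// one-at-a-time hash: each character is mixed in with add/shift/xor steps,
// and GetHashCore() applies the final avalanche. Illustrative two-character
// run:
//   uint32_t h = seed;
//   h = StringHasher::AddCharacterCore(h, 'a');
//   h = StringHasher::AddCharacterCore(h, 'b');
//   h = StringHasher::GetHashCore(h);  // final mixing steps
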
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
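
// The guard above is a branch-free overflow check: 429496729 is
// floor(2^32 / 10), and ((d + 3) >> 3) evaluates to 0 for digits 0..4 and 1
// for digits 5..9, so array_index_ * 10 + d is accepted exactly when the
// result is at most 2^32 - 2, the largest valid array index.
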
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}


uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}
void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}
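
// Why the trick works: heap pointers carry kHeapObjectTag in their least
// significant bit, and that is the same bit IsHashFieldComputed() tests via
// kHashNotComputedMask, so a forwarding pointer written into kHashFieldSlot
// automatically reads back as "hash not computed".
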
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}


MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name,
                                                 LanguageMode language_mode) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetProperty(&it, language_mode);
}


Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), object, name, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Isolate* isolate = object->GetIsolate();
    Handle<Name> name = isolate->factory()->Uint32ToString(index);
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetElementAttributes(object, index);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Isolate* isolate = object->GetIsolate();
    Handle<Name> name = isolate->factory()->Uint32ToString(index);
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetOwnElementAttributes(object, index);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}
Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::is_special_data_property() {
  return BooleanBit::get(flag(), kSpecialDataProperty);
}


void AccessorInfo::set_is_special_data_property(bool value) {
  set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
}


template <typename Key>
template <typename Dictionary>
void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
                                        Handle<Object> key,
                                        Handle<Object> value,
                                        PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 3);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  dict->set(index + 2, details.AsSmi());
}
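
// Illustrative layout: with kEntrySize == 3, entry n occupies three
// consecutive FixedArray slots starting at EntryToIndex(n):
//   dict->get(index)      // the key
//   dict->get(index + 1)  // the value
//   dict->get(index + 2)  // the PropertyDetails, packed into a Smi
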
template <typename Dictionary>
void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
                                     Handle<Object> key, Handle<Object> value,
                                     PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 2);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  DCHECK(value->IsPropertyCell());
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  PropertyCell::cast(*value)->set_property_details(details);
}


bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  DCHECK(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}


bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}
uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}


Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}


template <typename Dictionary>
PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  return cell->property_details();
}


template <typename Dictionary>
void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
                                         PropertyDetails value) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  cell->set_property_details(value);
}


template <typename Dictionary>
bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
  DCHECK(dict->ValueAt(entry)->IsPropertyCell());
  return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
}
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}


Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
                           : *key == other;
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash =
      key->IsWeakCell()
          ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
          : reinterpret_cast<intptr_t>(*key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}
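
// Worked example (illustrative): SlackForArraySize(8, 16) gives max_slack
// == 8 and returns Min(8, 8 / 4) == 2, i.e. a quarter of the old size;
// arrays smaller than four elements get exactly one slot of slack instead.
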
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
  // If the new array won't fit in some non-trivial fraction of the max old
  // generation size, force it into dictionary (slow) mode.
  uint32_t max_fast_array_size =
      static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
  return new_length >= max_fast_array_size;
}
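
// Worked example (illustrative): with a 1 GB max old generation and
// kDoubleSize == 8, max_fast_array_size is (2^30 / 8) / 4 == 2^25, so
// requesting a length of 33554432 elements or more normalizes the array to
// dictionary elements.
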
bool JSArray::AllowsSetLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasExternalArrayElements());
  return result;
}


void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is shared
  // between two code objects. This can only happen when the debugger has made
  // a shallow copy of a code object (see Heap::CopyCode). Since we do not
  // optimize when the debugger is active, we can skip this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}


void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
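
// The two range checks above fold the packed word back into Smi range (the
// Smi payload is only 31 bits on 32-bit targets): a value past Smi::kMaxValue
// has the sign bits ORed in, a value below Smi::kMinValue has them masked
// off, so Smi::FromInt() always receives a representable value.
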
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) {
  v->VisitPointer(
      reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
}


template <typename StaticVisitor>
void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() {
  StaticVisitor::VisitPointer(
      reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
}


void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalOneByteStringResource Resource;
  v->VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template <typename StaticVisitor>
void ExternalOneByteString::ExternalOneByteStringIterateBody() {
  typedef v8::String::ExternalOneByteStringResource Resource;
  StaticVisitor::VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
                                                    int start_offset,
                                                    int end_offset,
                                                    ObjectVisitor* v) {
  DCHECK(FLAG_unbox_double_fields);
  DCHECK(IsAligned(start_offset, kPointerSize) &&
         IsAligned(end_offset, kPointerSize));

  LayoutDescriptorHelper helper(object->map());
  DCHECK(!helper.all_fields_tagged());

  for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
    // Visit all tagged fields.
    if (helper.IsTagged(offset)) {
      v->VisitPointer(HeapObject::RawField(object, offset));
    }
  }
}
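
// Background for the loop above: with FLAG_unbox_double_fields, double
// fields are stored raw (untagged) inside the object, so the GC must not
// interpret them as pointers. The map's LayoutDescriptor records which words
// are tagged, and helper.IsTagged(offset) consults it before each word is
// visited.
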
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, end_offset));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
  }
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, object_size));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
  }
}
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}


void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}


void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}


Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}
class String::SubStringRange::iterator final {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};


String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}


String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_