// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}

Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
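// Worked example (illustrative, not from the original source): if bit 30 of
// value_ is set, e.g. value_ = 0x40000001, then value_ << 1 gives 0x80000002
// and the arithmetic shift back yields 0xC0000001, so bits 30 and 31 agree
// and the 31-bit payload survives the Smi tagging round trip unchanged.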
int PropertyDetails::field_width_in_words() const {
  DCHECK(location() == kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
#define TYPE_CHECKER(type, instancetype)                                    \
  bool Object::Is##type() const {                                           \
    return Object::IsHeapObject() &&                                        \
           HeapObject::cast(this)->map()->instance_type() == instancetype;  \
  }
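// As an illustrative sketch of the expansion (not itself in the source),
// TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) defines:
//
//   bool Object::IsHeapNumber() const {
//     return Object::IsHeapObject() &&
//            HeapObject::cast(this)->map()->instance_type() ==
//                HEAP_NUMBER_TYPE;
//   }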
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
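// Sketch of a typical expansion, assuming a CAST_ACCESSOR(HeapObject) use
// elsewhere in the file: HeapObject* HeapObject::cast(Object* object) checks
// object->IsHeapObject() only in slow-DCHECK builds and then
// reinterpret_casts, i.e. casts are unchecked in release builds.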
#define INT_ACCESSORS(holder, name, offset)                          \
  int holder::name() const { return READ_INT_FIELD(this, offset); }  \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }
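// Illustrative expansion: the ACCESSORS(JSObject, properties, FixedArray,
// kPropertiesOffset) use later in this file defines
//
//   FixedArray* JSObject::properties() const {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//
// plus a setter that stores the value and applies a conditional write
// barrier so the incremental marker and store buffer see the new pointer.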
// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }

// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }

#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase() ||
         IsExternalArray();
}

// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->external_map();
}

bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }

bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);
}

bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);
}

TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Float32x4, FLOAT32X4_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}

bool Object::IsName() const {
  return IsString() || IsSymbol();
}

bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}

bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}

bool Object::IsSpecFunction() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}

bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}

bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}

bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}

bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}

bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}

bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}
bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    Object* temp = first;
    first = second;
    second = temp;
  }
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}

Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}

bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}

bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}

bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}

bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}

bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}

bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}

bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}

bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}

bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}

StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}

uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}

uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}

STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);

bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}

bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}

bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}

STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);

bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}

STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}

template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}

Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}

Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};

class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() override {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) override;
  Handle<Object> AsHandle(Isolate* isolate) override;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};

// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}

TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)

bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}

bool Object::IsExternalArray() const {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}

#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)     \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER

bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}
bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}

bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
}

bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}

TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)

bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}

bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}

bool Object::IsArrayList() const { return IsFixedArray(); }

bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}

bool Object::IsTransitionArray() const {
  return IsFixedArray();
}

bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}

bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool Object::IsHandlerTable() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}

bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
          map == heap->catch_context_map() ||
          map == heap->with_context_map() ||
          map == heap->native_context_map() ||
          map == heap->block_context_map() ||
          map == heap->module_context_map() ||
          map == heap->script_context_map());
}

bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->native_context_map();
}

bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}

bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->scope_info_map();
}

TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)

template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)

bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}

TYPE_CHECKER(Foreign, FOREIGN_TYPE)

bool Object::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}

TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}

TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)

template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->hash_table_map();
}

bool Object::IsWeakHashTable() const {
  return IsHashTable();
}

bool Object::IsWeakValueHashTable() const { return IsHashTable(); }

bool Object::IsDictionary() const {
  return IsHashTable() &&
         this != HeapObject::cast(this)->GetHeap()->string_table();
}

bool Object::IsNameDictionary() const {
  return IsDictionary();
}

bool Object::IsGlobalDictionary() const { return IsDictionary(); }

bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}

bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}

bool Object::IsStringTable() const {
  return IsHashTable();
}

bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}

int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}

bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}

bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}

bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}

bool Object::IsMapCache() const {
  return IsHashTable();
}

bool Object::IsObjectHashTable() const {
  return IsHashTable();
}

bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}

bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}

bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}

bool Object::IsPrimitive() const {
  return IsOddball() || IsNumber() || IsString();
}
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}

bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsGlobalObjectMap();
}

TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)

bool Object::IsUndetectableObject() const {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}

bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}

bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}

#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() const {                                         \
    return Object::IsHeapObject()                                         \
        && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}

bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}

bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}

bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}

bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}

bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}

bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}

bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}

double Object::Number() {
  DCHECK(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}

bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}

bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}

bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}

MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name,
                                        LanguageMode language_mode) {
  LookupIterator it(object, name);
  return GetProperty(&it, language_mode);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return GetProperty(&it, language_mode);
}

MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED);
}

Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
    Isolate* isolate, Handle<Object> receiver) {
  PrototypeIterator iter(isolate, receiver);
  while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return PrototypeIterator::GetCurrent(iter);
    }
    iter.AdvanceIgnoringProxies();
  }
  return PrototypeIterator::GetCurrent(iter);
}

MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        const char* name,
                                        LanguageMode language_mode) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(object, str, language_mode);
}
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
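// Illustrative note on the tag arithmetic above: heap object pointers carry
// kHeapObjectTag (1) in their low bit, so an object whose first byte lives at
// 0x1000 is referenced as 0x1001, and FIELD_ADDR(p, 8) computes
// 0x1001 + 8 - 1 = 0x1008, the untagged address of the field at offset 8.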
#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
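// Illustrative note (not from the original source): the two barrier halves
// serve different collectors. The incremental-marking RecordWrite keeps the
// marker's invariants when an already-marked object is mutated, while the
// new-space RecordWrite feeds the store buffer so the scavenger can find
// old-to-new pointers. SKIP_WRITE_BARRIER is therefore only safe for values
// the GC need not track, e.g. Smis, as in the ACCESSORS_TO_SMI setter above.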
#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));

Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}
int Smi::value() const {
  return Internals::SmiValue(this);
}

Smi* Smi::FromInt(int value) {
  DCHECK(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}

Smi* Smi::FromIntptr(intptr_t value) {
  DCHECK(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}

bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
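// Worked example (illustrative, not from the original source): on 32-bit
// targets kSmiTagSize is 1 and kSmiShiftSize is 0, so Smi::FromInt(5) stores
// the bit pattern 0b1010 (5 << 1, tag bit 0 clear); on 64-bit targets
// kSmiShiftSize is 31 and the payload occupies the upper 32 bits. Either way
// the low bit distinguishes Smis (0) from tagged heap pointers (1).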
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}

Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}

bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}

MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}

HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
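// Illustrative note: during a moving GC the map slot of an evacuated object
// is overwritten with the object's new location. Stripping kHeapObjectTag in
// FromForwardingAddress leaves an aligned raw address whose low bit is 0, so
// HAS_SMI_TAG cleanly distinguishes a forwarding pointer from an ordinary
// (heap-object-tagged) map pointer.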
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif

Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}

Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}

Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}

void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}

void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}

void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}

// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}

MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}

void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}

MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}

void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}

HeapObject* HeapObject::FromAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}

Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}
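// Illustrative sketch: FromAddress() and address() are inverses. For an
// object whose storage begins at raw address a, FromAddress(a) yields the
// tagged pointer a + 1, and calling address() on that pointer returns a.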
int HeapObject::Size() {
  return SizeFromMap(map());
}

HeapObjectContents HeapObject::ContentType() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return HeapObjectContents::kTaggedValues;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides the
    // map-word).
    if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
      return HeapObjectContents::kRawValues;
    else
      return HeapObjectContents::kTaggedValues;
#if 0
  // TODO(jochen): Enable eventually.
  } else if (type == JS_FUNCTION_TYPE) {
    return HeapObjectContents::kMixedValues;
#endif
  } else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
             type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return HeapObjectContents::kMixedValues;
  } else if (type <= LAST_DATA_TYPE) {
    // TODO(jochen): Why do we claim that Code and Map contain only raw values?
    return HeapObjectContents::kRawValues;
  } else {
    if (FLAG_unbox_double_fields) {
      LayoutDescriptorHelper helper(map());
      if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
    }
    return HeapObjectContents::kTaggedValues;
  }
}
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}

void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}

void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}

int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}

int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}

float Float32x4::get_lane(int lane) const {
  DCHECK(lane < 4 && lane >= 0);
#if defined(V8_TARGET_LITTLE_ENDIAN)
  return READ_FLOAT_FIELD(this, kValueOffset + lane * kFloatSize);
#elif defined(V8_TARGET_BIG_ENDIAN)
  return READ_FLOAT_FIELD(this, kValueOffset + (3 - lane) * kFloatSize);
#else
#error Unknown byte ordering
#endif
}

void Float32x4::set_lane(int lane, float value) {
  DCHECK(lane < 4 && lane >= 0);
#if defined(V8_TARGET_LITTLE_ENDIAN)
  WRITE_FLOAT_FIELD(this, kValueOffset + lane * kFloatSize, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
  WRITE_FLOAT_FIELD(this, kValueOffset + (3 - lane) * kFloatSize, value);
#else
#error Unknown byte ordering
#endif
}

ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}

bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}

FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}

void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}

void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
           type == JS_OBJECT_TYPE ||
           type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}

inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}

inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}

inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}

inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
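// Worked example (illustrative): suppose 100 mementos were created since the
// last GC and 90 were found next to live objects during the scavenge. With
// create_count >= kPretenureMinimumCreated, the ratio 90/100 = 0.9 is
// compared against kPretenureRatio; if it qualifies and the semi-space was at
// maximum capacity, the site transitions to kTenure and requests a deopt so
// future allocations from this site go directly to old space.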
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}

void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}

void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}

void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}

void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}

ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}

void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}

ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)

Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }

void WeakCell::clear() {
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}

void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  Heap* heap = GetHeap();
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we process
  // all weak cells.
  if (heap->InNewSpace(val)) {
    heap->RecordWrite(address(), kValueOffset);
  }
}

bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }

Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }

void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}

void WeakCell::clear_next(Heap* heap) {
  set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
}

bool WeakCell::next_cleared() { return next()->IsTheHole(); }
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
int JSObject::GetInternalFieldCount() {
  DCHECK(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}

int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}

Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}

void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}

void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2023 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2024 if (!FLAG_unbox_double_fields) return false;
2025 return map()->IsUnboxedDoubleField(index);
2029 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2030 if (!FLAG_unbox_double_fields) return false;
2031 if (index.is_hidden_field() || !index.is_inobject()) return false;
2032 return !layout_descriptor()->IsTagged(index.property_index());
2036 // Access fast-case object properties at index. The use of these routines
2037 // is needed to correctly distinguish between properties stored in-object and
2038 // properties stored in the properties array.
2039 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2040 DCHECK(!IsUnboxedDoubleField(index));
2041 if (index.is_inobject()) {
2042 return READ_FIELD(this, index.offset());
2044 return properties()->get(index.outobject_array_index());
2049 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2050 DCHECK(IsUnboxedDoubleField(index));
2051 return READ_DOUBLE_FIELD(this, index.offset());
2055 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2056 if (index.is_inobject()) {
2057 int offset = index.offset();
2058 WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}
2066 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
2067 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2071 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2072 if (IsUnboxedDoubleField(index)) {
2073 DCHECK(value->IsMutableHeapNumber());
    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
2081 void JSObject::WriteToField(int descriptor, Object* value) {
2082 DisallowHeapAllocation no_gc;
2084 DescriptorArray* desc = map()->instance_descriptors();
2085 PropertyDetails details = desc->GetDetails(descriptor);
2087 DCHECK(details.type() == DATA);
2089 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
2090 if (details.representation().IsDouble()) {
2091 // Nothing more to be done.
2092 if (value->IsUninitialized()) return;
2093 if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAtPut(index, value->Number());
    } else {
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value(value->Number());
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
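// Illustrative summary (not original commentary): a double-representation
// store either writes the raw bits into the unboxed in-object field or
// mutates the existing MutableHeapNumber box in place, so the field slot
// itself never changes identity; every other representation funnels through
// RawFastPropertyAtPut and its write barrier.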
2106 int JSObject::GetInObjectPropertyOffset(int index) {
2107 return map()->GetInObjectPropertyOffset(index);
2111 Object* JSObject::InObjectPropertyAt(int index) {
2112 int offset = GetInObjectPropertyOffset(index);
2113 return READ_FIELD(this, offset);
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
2120 // Adjust for the number of properties stored in the object.
2121 int offset = GetInObjectPropertyOffset(index);
2122 WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
2129 void JSObject::InitializeBody(Map* map,
2130 Object* pre_allocated_value,
2131 Object* filler_value) {
2132 DCHECK(!filler_value->IsHeapObject() ||
2133 !GetHeap()->InNewSpace(filler_value));
2134 DCHECK(!pre_allocated_value->IsHeapObject() ||
2135 !GetHeap()->InNewSpace(pre_allocated_value));
2136 int size = map->instance_size();
2137 int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated =
        map->inobject_properties() - map->unused_property_fields();
2141 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2142 for (int i = 0; i < pre_allocated; i++) {
2143 WRITE_FIELD(this, offset, pre_allocated_value);
2144 offset += kPointerSize;
    }
  }
  while (offset < size) {
2148 WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2154 bool JSObject::HasFastProperties() {
2155 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2156 return !properties()->IsDictionary();
2160 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2161 if (unused_property_fields() != 0) return false;
2162 if (is_prototype_map()) return false;
2163 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2164 int limit = Max(minimum, inobject_properties());
2165 int external = NumberOfFields() - inobject_properties();
  return external > limit;
}
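// Worked example (hypothetical numbers): a non-prototype map with no unused
// property fields, 4 in-object properties, and a possibly-keyed store gets
// minimum == 12 and limit == Max(12, 4) == 12, so it is flagged once more
// than 12 fields live out-of-object in the properties backing store.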
2170 void Struct::InitializeBody(int object_size) {
2171 Object* value = GetHeap()->undefined_value();
2172 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
bool Object::ToArrayLength(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
2197 bool Object::ToArrayIndex(uint32_t* index) {
2198 return ToArrayLength(index) && *index != kMaxUInt32;
2202 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2203 if (!this->IsJSValue()) return false;
2205 JSValue* js_value = JSValue::cast(this);
2206 if (!js_value->value()->IsString()) return false;
2208 String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;
  return true;
}
void Object::VerifyApiCallResultType() {
#if DEBUG
2217 if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
2218 IsHeapNumber() || IsFloat32x4() || IsUndefined() || IsTrue() ||
2219 IsFalse() || IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
2226 Object* FixedArray::get(int index) const {
2227 SLOW_DCHECK(index >= 0 && index < this->length());
2228 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2232 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2233 return handle(array->get(index), array->GetIsolate());
2237 bool FixedArray::is_the_hole(int index) {
2238 return get(index) == GetHeap()->the_hole_value();
2242 void FixedArray::set(int index, Smi* value) {
2243 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2244 DCHECK(index >= 0 && index < this->length());
2245 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2246 int offset = kHeaderSize + index * kPointerSize;
2247 WRITE_FIELD(this, offset, value);
2251 void FixedArray::set(int index, Object* value) {
2252 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2253 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2254 DCHECK(index >= 0 && index < this->length());
2255 int offset = kHeaderSize + index * kPointerSize;
2256 WRITE_FIELD(this, offset, value);
2257 WRITE_BARRIER(GetHeap(), this, offset, value);
2261 double FixedDoubleArray::get_scalar(int index) {
2262 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2263 map() != GetHeap()->fixed_array_map());
2264 DCHECK(index >= 0 && index < this->length());
2265 DCHECK(!is_the_hole(index));
2266 return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2270 uint64_t FixedDoubleArray::get_representation(int index) {
2271 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2272 map() != GetHeap()->fixed_array_map());
2273 DCHECK(index >= 0 && index < this->length());
2274 int offset = kHeaderSize + index * kDoubleSize;
2275 return READ_UINT64_FIELD(this, offset);
Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
                                     int index) {
2281 if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  }
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}
2289 void FixedDoubleArray::set(int index, double value) {
2290 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2291 map() != GetHeap()->fixed_array_map());
2292 int offset = kHeaderSize + index * kDoubleSize;
2293 if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}
2302 void FixedDoubleArray::set_the_hole(int index) {
2303 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2304 map() != GetHeap()->fixed_array_map());
2305 int offset = kHeaderSize + index * kDoubleSize;
2306 WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
2310 bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}
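// Note (illustrative): holes are one specific NaN bit pattern, kHoleNanInt64.
// Comparing the raw uint64_t representation instead of the double value is
// deliberate: `d == d` is false for every NaN, and an ordinary quiet NaN
// written by set() above must not be mistaken for a hole.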
2315 double* FixedDoubleArray::data_start() {
2316 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2320 void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
2327 Object* WeakFixedArray::Get(int index) const {
2328 Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
2329 if (raw->IsSmi()) return raw;
2330 DCHECK(raw->IsWeakCell());
2331 return WeakCell::cast(raw)->value();
2335 bool WeakFixedArray::IsEmptySlot(int index) const {
2336 DCHECK(index < Length());
2337 return Get(index)->IsSmi();
2341 void WeakFixedArray::Clear(int index) {
2342 FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
2346 int WeakFixedArray::Length() const {
2347 return FixedArray::cast(this)->length() - kFirstIndex;
2351 int WeakFixedArray::last_used_index() const {
2352 return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
2356 void WeakFixedArray::set_last_used_index(int index) {
2357 FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
2361 int ArrayList::Length() {
2362 if (FixedArray::cast(this)->length() == 0) return 0;
2363 return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
2367 void ArrayList::SetLength(int length) {
2368 return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2372 Object* ArrayList::Get(int index) {
2373 return FixedArray::cast(this)->get(kFirstIndex + index);
2377 Object** ArrayList::Slot(int index) {
2378 return data_start() + kFirstIndex + index;
2382 void ArrayList::Set(int index, Object* obj) {
2383 FixedArray::cast(this)->set(kFirstIndex + index, obj);
2387 void ArrayList::Clear(int index, Object* undefined) {
2388 DCHECK(undefined->IsUndefined());
2389 FixedArray::cast(this)
2390 ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
2394 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2395 const DisallowHeapAllocation& promise) {
2396 Heap* heap = GetHeap();
2397 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2398 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2399 return UPDATE_WRITE_BARRIER;
2403 AllocationAlignment HeapObject::RequiredAlignment() {
2404 #ifdef V8_HOST_ARCH_32_BIT
2405 if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
2406 FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
2409 if (IsHeapNumber()) return kDoubleUnaligned;
2410 if (IsFloat32x4()) return kSimd128Unaligned;
2411 #endif // V8_HOST_ARCH_32_BIT
2412 return kWordAligned;
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
2419 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2420 DCHECK(index >= 0 && index < this->length());
2421 int offset = kHeaderSize + index * kPointerSize;
2422 WRITE_FIELD(this, offset, value);
2423 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
2430 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2431 DCHECK(index >= 0 && index < array->length());
2432 int offset = kHeaderSize + index * kPointerSize;
2433 WRITE_FIELD(array, offset, value);
2434 Heap* heap = array->GetHeap();
2435 if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
2444 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2445 DCHECK(index >= 0 && index < array->length());
2446 DCHECK(!array->GetHeap()->InNewSpace(value));
2447 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2451 void FixedArray::set_undefined(int index) {
2452 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2453 DCHECK(index >= 0 && index < this->length());
2454 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
2461 void FixedArray::set_null(int index) {
2462 DCHECK(index >= 0 && index < this->length());
2463 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}
2470 void FixedArray::set_the_hole(int index) {
2471 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2472 DCHECK(index >= 0 && index < this->length());
2473 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
2480 void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
2487 Object** FixedArray::data_start() {
2488 return HeapObject::RawField(this, kHeaderSize);
2492 bool DescriptorArray::IsEmpty() {
2493 DCHECK(length() >= kFirstIndex ||
2494 this == GetHeap()->empty_descriptor_array());
2495 return length() < kFirstIndex;
2499 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
// Perform a binary search in a fixed array. Low and high are entry indices.
// If there are three entries in this array it should be called with low=0 and
// high=2.
2508 template <SearchMode search_mode, typename T>
2509 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2510 int* out_insertion_index) {
2511 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2512 uint32_t hash = name->Hash();
  int limit = high;

  DCHECK(low <= high);
2517 while (low != high) {
2518 int mid = (low + high) / 2;
2519 Name* mid_name = array->GetSortedKey(mid);
2520 uint32_t mid_hash = mid_name->Hash();
    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }
2529 for (; low <= limit; ++low) {
2530 int sort_index = array->GetSortedKeyIndex(low);
2531 Name* entry = array->GetKey(sort_index);
2532 uint32_t current_hash = entry->Hash();
2533 if (current_hash != hash) {
2534 if (out_insertion_index != NULL) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
2539 if (entry->Equals(name)) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }
2547 if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
  return T::kNotFound;
}
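// Example walk-through (hedged sketch): with sorted key hashes {10, 20, 20,
// 30} and a query hash of 20, the while loop converges on the first entry
// whose hash is >= 20, and the for loop then scans the run of equal hashes
// comparing actual names, since distinct names can collide on the hash.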
2552 // Perform a linear search in this fixed array. len is the number of entry
2553 // indices that are valid.
2554 template <SearchMode search_mode, typename T>
2555 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2556 int* out_insertion_index) {
2557 uint32_t hash = name->Hash();
2558 if (search_mode == ALL_ENTRIES) {
2559 for (int number = 0; number < len; number++) {
2560 int sorted_index = array->GetSortedKeyIndex(number);
2561 Name* entry = array->GetKey(sorted_index);
2562 uint32_t current_hash = entry->Hash();
      if (current_hash > hash) {
        if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
    if (out_insertion_index != NULL) *out_insertion_index = len;
2570 return T::kNotFound;
  } else {
    DCHECK(len >= valid_entries);
2573 DCHECK_NULL(out_insertion_index); // Not supported here.
2574 for (int number = 0; number < valid_entries; number++) {
2575 Name* entry = array->GetKey(number);
2576 uint32_t current_hash = entry->Hash();
2577 if (current_hash == hash && entry->Equals(name)) return number;
    return T::kNotFound;
  }
}
2584 template <SearchMode search_mode, typename T>
2585 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2586 if (search_mode == VALID_ENTRIES) {
2587 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_DCHECK(array->IsSortedNoDuplicates());
  }
2592 int nof = array->number_of_entries();
  if (nof == 0) {
    if (out_insertion_index != NULL) *out_insertion_index = 0;
    return T::kNotFound;
  }
2598 // Fast case: do linear search for small arrays.
2599 const int kMaxElementsForLinearSearch = 8;
2600 if ((search_mode == ALL_ENTRIES &&
2601 nof <= kMaxElementsForLinearSearch) ||
2602 (search_mode == VALID_ENTRIES &&
2603 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2604 return LinearSearch<search_mode>(array, name, nof, valid_entries,
                                     out_insertion_index);
  }
2608 // Slow case: perform binary search.
2609 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
                                   out_insertion_index);
}
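// Design note (an assumption, not stated in the original source): linear
// search is preferred for small inputs because it walks memory sequentially
// and avoids the sorted-key indirection; 8 entries (or 3 * 8 valid entries)
// is the crossover before the O(log n) BinarySearch pays off.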
2614 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2615 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2619 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2620 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2621 if (number_of_own_descriptors == 0) return kNotFound;
2623 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2624 int number = cache->Lookup(map, name);
2626 if (number == DescriptorLookupCache::kAbsent) {
2627 number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}
2635 PropertyDetails Map::GetLastDescriptorDetails() {
2636 return instance_descriptors()->GetDetails(LastAdded());
2640 FixedArrayBase* Map::GetInitialElements() {
2641 if (has_fast_smi_or_object_elements() ||
2642 has_fast_double_elements()) {
2643 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2644 return GetHeap()->empty_fixed_array();
2645 } else if (has_external_array_elements()) {
2646 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
2649 } else if (has_fixed_typed_array_elements()) {
2650 FixedTypedArrayBase* empty_array =
2651 GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else {
    UNREACHABLE();
  }
  return NULL;
}
2661 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2662 DCHECK(descriptor_number < number_of_descriptors());
2663 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2667 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2668 return GetKeySlot(descriptor_number);
2672 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2673 return GetValueSlot(descriptor_number - 1) + 1;
2677 Name* DescriptorArray::GetKey(int descriptor_number) {
2678 DCHECK(descriptor_number < number_of_descriptors());
2679 return Name::cast(get(ToKeyIndex(descriptor_number)));
2683 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2684 return GetDetails(descriptor_number).pointer();
2688 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2689 return GetKey(GetSortedKeyIndex(descriptor_number));
2693 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2694 PropertyDetails details = GetDetails(descriptor_index);
2695 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2699 void DescriptorArray::SetRepresentation(int descriptor_index,
2700 Representation representation) {
2701 DCHECK(!representation.IsNone());
2702 PropertyDetails details = GetDetails(descriptor_index);
2703 set(ToDetailsIndex(descriptor_index),
2704 details.CopyWithRepresentation(representation).AsSmi());
2708 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2709 DCHECK(descriptor_number < number_of_descriptors());
2710 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2714 int DescriptorArray::GetValueOffset(int descriptor_number) {
2715 return OffsetOfElementAt(ToValueIndex(descriptor_number));
2719 Object* DescriptorArray::GetValue(int descriptor_number) {
2720 DCHECK(descriptor_number < number_of_descriptors());
2721 return get(ToValueIndex(descriptor_number));
2725 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
2726 set(ToValueIndex(descriptor_index), value);
2730 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2731 DCHECK(descriptor_number < number_of_descriptors());
2732 Object* details = get(ToDetailsIndex(descriptor_number));
2733 return PropertyDetails(Smi::cast(details));
2737 PropertyType DescriptorArray::GetType(int descriptor_number) {
2738 return GetDetails(descriptor_number).type();
2742 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2743 DCHECK(GetDetails(descriptor_number).location() == kField);
2744 return GetDetails(descriptor_number).field_index();
2748 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
2749 DCHECK(GetDetails(descriptor_number).location() == kField);
2750 Object* value = GetValue(descriptor_number);
2751 if (value->IsWeakCell()) {
2752 if (WeakCell::cast(value)->cleared()) return HeapType::None();
2753 value = WeakCell::cast(value)->value();
2755 return HeapType::cast(value);
2759 Object* DescriptorArray::GetConstant(int descriptor_number) {
2760 return GetValue(descriptor_number);
2764 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2765 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
2766 return GetValue(descriptor_number);
2770 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
2771 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
2772 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2773 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2777 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2778 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
2779 handle(GetValue(descriptor_number), GetIsolate()),
2780 GetDetails(descriptor_number));
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
2788 DCHECK(descriptor_number < number_of_descriptors());
2790 NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
2793 NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
2796 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
2797 desc->GetDetails().AsSmi());
2801 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2803 DCHECK(descriptor_number < number_of_descriptors());
2805 set(ToKeyIndex(descriptor_number), *desc->GetKey());
2806 set(ToValueIndex(descriptor_number), *desc->GetValue());
2807 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2811 void DescriptorArray::Append(Descriptor* desc) {
2812 DisallowHeapAllocation no_gc;
2813 int descriptor_number = number_of_descriptors();
2814 SetNumberOfDescriptors(descriptor_number + 1);
2815 Set(descriptor_number, desc);
2817 uint32_t hash = desc->GetKey()->Hash();
  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
2822 Name* key = GetSortedKey(insertion - 1);
2823 if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
2831 void DescriptorArray::SwapSortedKeys(int first, int second) {
2832 int first_key = GetSortedKeyIndex(first);
2833 SetSortedKey(first, GetSortedKeyIndex(second));
2834 SetSortedKey(second, first_key);
2838 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2839 : marking_(array->GetHeap()->incremental_marking()) {
2840 marking_->EnterNoMarkingScope();
2841 DCHECK(!marking_->IsMarking() ||
2842 Marking::Color(array) == Marking::WHITE_OBJECT);
2846 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2847 marking_->LeaveNoMarkingScope();
2851 int HashTableBase::ComputeCapacity(int at_least_space_for) {
2852 const int kMinCapacity = 4;
2853 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  return Max(capacity, kMinCapacity);
}
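// Worked example (illustrative): at_least_space_for == 10 is doubled to 20
// and rounded up to the next power of two, giving capacity 32; the 2x factor
// keeps the initial load factor at or below 50% so probing terminates
// quickly.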
2858 int HashTableBase::ComputeCapacityForSerialization(int at_least_space_for) {
2859 const int kMinCapacity = 1;
2860 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for);
2861 return Max(capacity, kMinCapacity);
2865 template <typename Derived, typename Shape, typename Key>
2866 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
2867 return FindEntry(GetIsolate(), key);
2871 template<typename Derived, typename Shape, typename Key>
2872 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2873 return FindEntry(isolate, key, HashTable::Hash(key));
// Find the entry for key; otherwise return kNotFound.
2878 template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
                                              int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
2886 Object* element = KeyAt(entry);
2887 // Empty entry. Uses raw unchecked accessors because it is called by the
2888 // string table during bootstrapping.
2889 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2890 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2891 Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
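// Probe-sequence sketch (illustrative, assuming the usual power-of-two
// capacity): for capacity 8 and hash h, FirstProbe yields h & 7 and NextProbe
// then advances by 1, 2, 3, ... (triangular steps), which visits every slot
// at most once before an undefined sentinel ends the search.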
2898 bool SeededNumberDictionary::requires_slow_elements() {
2899 Object* max_index_object = get(kMaxNumberKeyIndex);
2900 if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
2906 uint32_t SeededNumberDictionary::max_number_key() {
2907 DCHECK(!requires_slow_elements());
2908 Object* max_index_object = get(kMaxNumberKeyIndex);
2909 if (!max_index_object->IsSmi()) return 0;
2910 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2911 return value >> kRequiresSlowElementsTagSize;
2915 void SeededNumberDictionary::set_requires_slow_elements() {
2916 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
// ------------------------------------
// Cast operations
2924 CAST_ACCESSOR(AccessorInfo)
2925 CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
2930 CAST_ACCESSOR(CompilationCacheTable)
2931 CAST_ACCESSOR(ConsString)
2932 CAST_ACCESSOR(DeoptimizationInputData)
2933 CAST_ACCESSOR(DeoptimizationOutputData)
2934 CAST_ACCESSOR(DependentCode)
2935 CAST_ACCESSOR(DescriptorArray)
2936 CAST_ACCESSOR(ExternalArray)
2937 CAST_ACCESSOR(ExternalOneByteString)
2938 CAST_ACCESSOR(ExternalFloat32Array)
2939 CAST_ACCESSOR(ExternalFloat64Array)
2940 CAST_ACCESSOR(ExternalInt16Array)
2941 CAST_ACCESSOR(ExternalInt32Array)
2942 CAST_ACCESSOR(ExternalInt8Array)
2943 CAST_ACCESSOR(ExternalString)
2944 CAST_ACCESSOR(ExternalTwoByteString)
2945 CAST_ACCESSOR(ExternalUint16Array)
2946 CAST_ACCESSOR(ExternalUint32Array)
2947 CAST_ACCESSOR(ExternalUint8Array)
2948 CAST_ACCESSOR(ExternalUint8ClampedArray)
2949 CAST_ACCESSOR(FixedArray)
2950 CAST_ACCESSOR(FixedArrayBase)
2951 CAST_ACCESSOR(FixedDoubleArray)
2952 CAST_ACCESSOR(FixedTypedArrayBase)
2953 CAST_ACCESSOR(Float32x4)
2954 CAST_ACCESSOR(Foreign)
2955 CAST_ACCESSOR(GlobalDictionary)
2956 CAST_ACCESSOR(GlobalObject)
2957 CAST_ACCESSOR(HandlerTable)
2958 CAST_ACCESSOR(HeapObject)
2959 CAST_ACCESSOR(JSArray)
2960 CAST_ACCESSOR(JSArrayBuffer)
2961 CAST_ACCESSOR(JSArrayBufferView)
2962 CAST_ACCESSOR(JSBuiltinsObject)
2963 CAST_ACCESSOR(JSDataView)
2964 CAST_ACCESSOR(JSDate)
2965 CAST_ACCESSOR(JSFunction)
2966 CAST_ACCESSOR(JSFunctionProxy)
2967 CAST_ACCESSOR(JSFunctionResultCache)
2968 CAST_ACCESSOR(JSGeneratorObject)
2969 CAST_ACCESSOR(JSGlobalObject)
2970 CAST_ACCESSOR(JSGlobalProxy)
2971 CAST_ACCESSOR(JSMap)
2972 CAST_ACCESSOR(JSMapIterator)
2973 CAST_ACCESSOR(JSMessageObject)
2974 CAST_ACCESSOR(JSModule)
2975 CAST_ACCESSOR(JSObject)
2976 CAST_ACCESSOR(JSProxy)
2977 CAST_ACCESSOR(JSReceiver)
2978 CAST_ACCESSOR(JSRegExp)
2979 CAST_ACCESSOR(JSSet)
2980 CAST_ACCESSOR(JSSetIterator)
2981 CAST_ACCESSOR(JSTypedArray)
2982 CAST_ACCESSOR(JSValue)
2983 CAST_ACCESSOR(JSWeakMap)
2984 CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
2989 CAST_ACCESSOR(NormalizedMapCache)
2990 CAST_ACCESSOR(Object)
2991 CAST_ACCESSOR(ObjectHashTable)
2992 CAST_ACCESSOR(Oddball)
2993 CAST_ACCESSOR(OrderedHashMap)
2994 CAST_ACCESSOR(OrderedHashSet)
2995 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2996 CAST_ACCESSOR(PropertyCell)
2997 CAST_ACCESSOR(ScopeInfo)
2998 CAST_ACCESSOR(SeededNumberDictionary)
2999 CAST_ACCESSOR(SeqOneByteString)
3000 CAST_ACCESSOR(SeqString)
3001 CAST_ACCESSOR(SeqTwoByteString)
3002 CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
3006 CAST_ACCESSOR(StringTable)
3007 CAST_ACCESSOR(Struct)
3008 CAST_ACCESSOR(Symbol)
3009 CAST_ACCESSOR(UnseededNumberDictionary)
3010 CAST_ACCESSOR(WeakCell)
3011 CAST_ACCESSOR(WeakFixedArray)
3012 CAST_ACCESSOR(WeakHashTable)
3013 CAST_ACCESSOR(WeakValueHashTable)
3017 template <class Traits>
3018 STATIC_CONST_MEMBER_DEFINITION const InstanceType
3019 FixedTypedArray<Traits>::kInstanceType;
3022 template <class Traits>
3023 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3024 SLOW_DCHECK(object->IsHeapObject() &&
3025 HeapObject::cast(object)->map()->instance_type() ==
3026 Traits::kInstanceType);
3027 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3031 template <class Traits>
3032 const FixedTypedArray<Traits>*
3033 FixedTypedArray<Traits>::cast(const Object* object) {
3034 SLOW_DCHECK(object->IsHeapObject() &&
3035 HeapObject::cast(object)->map()->instance_type() ==
3036 Traits::kInstanceType);
3037 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3041 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3042 STRUCT_LIST(MAKE_STRUCT_CAST)
3043 #undef MAKE_STRUCT_CAST
3046 template <typename Derived, typename Shape, typename Key>
3047 HashTable<Derived, Shape, Key>*
3048 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3049 SLOW_DCHECK(obj->IsHashTable());
3050 return reinterpret_cast<HashTable*>(obj);
3054 template <typename Derived, typename Shape, typename Key>
3055 const HashTable<Derived, Shape, Key>*
3056 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3057 SLOW_DCHECK(obj->IsHashTable());
3058 return reinterpret_cast<const HashTable*>(obj);
3062 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3063 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3065 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3066 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3068 SMI_ACCESSORS(String, length, kLengthOffset)
3069 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3072 FreeSpace* FreeSpace::next() {
3073 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3074 (!GetHeap()->deserialization_complete() && map() == NULL));
3075 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3076 return reinterpret_cast<FreeSpace*>(
3077 Memory::Address_at(address() + kNextOffset));
3081 FreeSpace** FreeSpace::next_address() {
3082 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3083 (!GetHeap()->deserialization_complete() && map() == NULL));
3084 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3085 return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
3089 void FreeSpace::set_next(FreeSpace* next) {
3090 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3091 (!GetHeap()->deserialization_complete() && map() == NULL));
3092 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3093 base::NoBarrier_Store(
3094 reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
3095 reinterpret_cast<base::AtomicWord>(next));
3099 FreeSpace* FreeSpace::cast(HeapObject* o) {
3100 SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
3101 return reinterpret_cast<FreeSpace*>(o);
3105 uint32_t Name::hash_field() {
3106 return READ_UINT32_FIELD(this, kHashFieldOffset);
3110 void Name::set_hash_field(uint32_t value) {
3111 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3112 #if V8_HOST_ARCH_64_BIT
3113 #if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}
3122 bool Name::Equals(Name* other) {
3123 if (other == this) return true;
3124 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}
3132 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3133 if (one.is_identical_to(two)) return true;
3134 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
3139 Handle<String>::cast(two));
3143 ACCESSORS(Symbol, name, Object, kNameOffset)
3144 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3145 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3148 bool String::Equals(String* other) {
3149 if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}
3157 bool String::Equals(Handle<String> one, Handle<String> two) {
3158 if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}
3166 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3167 if (!string->IsConsString()) return string;
3168 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3169 if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
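// Usage sketch (illustrative only): callers needing contiguous characters
// typically flatten first, e.g.
//   Handle<String> flat = String::Flatten(str);
//   String::FlatContent content = flat->GetFlatContent();
// A cons string that is already flat is unwrapped to its first child without
// allocating; only a genuinely nested cons string pays for SlowFlatten.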
3174 Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
3175 if (name->IsSymbol()) return name;
  return String::Flatten(Handle<String>::cast(name), pretenure);
}
3180 uint16_t String::Get(int index) {
3181 DCHECK(index >= 0 && index < length());
3182 switch (StringShape(this).full_representation_tag()) {
3183 case kSeqStringTag | kOneByteStringTag:
3184 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3185 case kSeqStringTag | kTwoByteStringTag:
3186 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3187 case kConsStringTag | kOneByteStringTag:
3188 case kConsStringTag | kTwoByteStringTag:
3189 return ConsString::cast(this)->ConsStringGet(index);
3190 case kExternalStringTag | kOneByteStringTag:
3191 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3192 case kExternalStringTag | kTwoByteStringTag:
3193 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3194 case kSlicedStringTag | kOneByteStringTag:
3195 case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
3206 void String::Set(int index, uint16_t value) {
3207 DCHECK(index >= 0 && index < length());
3208 DCHECK(StringShape(this).IsSequential());
3210 return this->IsOneByteRepresentation()
3211 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3212 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3216 bool String::IsFlat() {
3217 if (!StringShape(this).IsCons()) return true;
3218 return ConsString::cast(this)->second()->length() == 0;
3222 String* String::GetUnderlying() {
3223 // Giving direct access to underlying string only makes sense if the
3224 // wrapping string is already flattened.
3225 DCHECK(this->IsFlat());
3226 DCHECK(StringShape(this).IsIndirect());
3227 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3228 const int kUnderlyingOffset = SlicedString::kParentOffset;
3229 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3233 template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
3238 const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
3242 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3243 case kSeqStringTag | kOneByteStringTag:
3244 visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

3249 case kSeqStringTag | kTwoByteStringTag:
3250 visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

3255 case kExternalStringTag | kOneByteStringTag:
3256 visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

3261 case kExternalStringTag | kTwoByteStringTag:
3262 visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

3267 case kSlicedStringTag | kOneByteStringTag:
3268 case kSlicedStringTag | kTwoByteStringTag: {
3269 SlicedString* slicedString = SlicedString::cast(string);
3270 slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

3275 case kConsStringTag | kOneByteStringTag:
3276 case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
template <>
inline Vector<const uint8_t> String::GetCharVector() {
3289 String::FlatContent flat = GetFlatContent();
3290 DCHECK(flat.IsOneByte());
3291 return flat.ToOneByteVector();
template <>
inline Vector<const uc16> String::GetCharVector() {
3297 String::FlatContent flat = GetFlatContent();
3298 DCHECK(flat.IsTwoByte());
3299 return flat.ToUC16Vector();
3303 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3304 DCHECK(index >= 0 && index < length());
3305 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3309 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3310 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3311 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3312 static_cast<byte>(value));
3316 Address SeqOneByteString::GetCharsAddress() {
3317 return FIELD_ADDR(this, kHeaderSize);
3321 uint8_t* SeqOneByteString::GetChars() {
3322 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3326 Address SeqTwoByteString::GetCharsAddress() {
3327 return FIELD_ADDR(this, kHeaderSize);
3331 uc16* SeqTwoByteString::GetChars() {
3332 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3336 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3337 DCHECK(index >= 0 && index < length());
3338 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3342 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3343 DCHECK(index >= 0 && index < length());
3344 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3348 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3349 return SizeFor(length());
3353 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3354 return SizeFor(length());
3358 String* SlicedString::parent() {
3359 return String::cast(READ_FIELD(this, kParentOffset));
3363 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3364 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3365 WRITE_FIELD(this, kParentOffset, parent);
3366 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3370 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3373 String* ConsString::first() {
3374 return String::cast(READ_FIELD(this, kFirstOffset));
3378 Object* ConsString::unchecked_first() {
3379 return READ_FIELD(this, kFirstOffset);
3383 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3384 WRITE_FIELD(this, kFirstOffset, value);
3385 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3389 String* ConsString::second() {
3390 return String::cast(READ_FIELD(this, kSecondOffset));
3394 Object* ConsString::unchecked_second() {
3395 return READ_FIELD(this, kSecondOffset);
3399 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3400 WRITE_FIELD(this, kSecondOffset, value);
3401 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3405 bool ExternalString::is_short() {
3406 InstanceType type = map()->instance_type();
3407 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3411 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3412 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3416 void ExternalOneByteString::update_data_cache() {
3417 if (is_short()) return;
3418 const char** data_field =
3419 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3420 *data_field = resource()->data();
3424 void ExternalOneByteString::set_resource(
3425 const ExternalOneByteString::Resource* resource) {
3426 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3427 *reinterpret_cast<const Resource**>(
3428 FIELD_ADDR(this, kResourceOffset)) = resource;
3429 if (resource != NULL) update_data_cache();
3433 const uint8_t* ExternalOneByteString::GetChars() {
3434 return reinterpret_cast<const uint8_t*>(resource()->data());
3438 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3439 DCHECK(index >= 0 && index < length());
3440 return GetChars()[index];
3444 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3445 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3449 void ExternalTwoByteString::update_data_cache() {
3450 if (is_short()) return;
3451 const uint16_t** data_field =
3452 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3453 *data_field = resource()->data();
3457 void ExternalTwoByteString::set_resource(
3458 const ExternalTwoByteString::Resource* resource) {
3459 *reinterpret_cast<const Resource**>(
3460 FIELD_ADDR(this, kResourceOffset)) = resource;
3461 if (resource != NULL) update_data_cache();
3465 const uint16_t* ExternalTwoByteString::GetChars() {
3466 return resource()->data();
3470 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3471 DCHECK(index >= 0 && index < length());
3472 return GetChars()[index];
3476 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3478 return GetChars() + start;
3482 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3485 void ConsStringIterator::PushLeft(ConsString* string) {
3486 frames_[depth_++ & kDepthMask] = string;
3490 void ConsStringIterator::PushRight(ConsString* string) {
3492 frames_[(depth_-1) & kDepthMask] = string;
3496 void ConsStringIterator::AdjustMaximumDepth() {
3497 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
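// Note (hedged sketch): frames_ acts as a ring buffer of kDepthMask + 1
// slots, which is why PushLeft, PushRight and OffsetForDepth all mask with
// kDepthMask; depth_ keeps counting past the buffer size while
// maximum_depth_ remembers the true depth reached, so an overflowed stack
// can be detected.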
3508 uint16_t StringCharacterStream::GetNext() {
3509 DCHECK(buffer8_ != NULL && end_ != NULL);
3510 // Advance cursor if needed.
3511 if (buffer8_ == end_) HasMore();
3512 DCHECK(buffer8_ < end_);
3513 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3517 StringCharacterStream::StringCharacterStream(String* string, int offset)
3518 : is_one_byte_(false) {
3519 Reset(string, offset);
void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
3527 iter_.Reset(cons_string, offset);
3528 if (cons_string != NULL) {
3529 string = iter_.Next(&offset);
3530 if (string != NULL) String::VisitFlat(this, string, offset);
3535 bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
3539 DCHECK_EQ(offset, 0);
3540 if (string == NULL) return false;
3541 String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}
3547 void StringCharacterStream::VisitOneByteString(
3548 const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}
3555 void StringCharacterStream::VisitTwoByteString(
3556 const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
3563 void JSFunctionResultCache::MakeZeroSize() {
3564 set_finger_index(kEntriesIndex);
3565 set_size(kEntriesIndex);
3569 void JSFunctionResultCache::Clear() {
3570 int cache_size = size();
3571 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3572 MemsetPointer(entries_start,
3573 GetHeap()->the_hole_value(),
3574 cache_size - kEntriesIndex);
3579 int JSFunctionResultCache::size() {
3580 return Smi::cast(get(kCacheSizeIndex))->value();
3584 void JSFunctionResultCache::set_size(int size) {
3585 set(kCacheSizeIndex, Smi::FromInt(size));
3589 int JSFunctionResultCache::finger_index() {
3590 return Smi::cast(get(kFingerIndex))->value();
3594 void JSFunctionResultCache::set_finger_index(int finger_index) {
3595 set(kFingerIndex, Smi::FromInt(finger_index));
3599 byte ByteArray::get(int index) {
3600 DCHECK(index >= 0 && index < this->length());
3601 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3605 void ByteArray::set(int index, byte value) {
3606 DCHECK(index >= 0 && index < this->length());
3607 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3611 int ByteArray::get_int(int index) {
3612 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3613 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3617 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3618 DCHECK_TAG_ALIGNED(address);
3619 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3623 Address ByteArray::GetDataStartAddress() {
3624 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3628 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3629 return reinterpret_cast<uint8_t*>(external_pointer());
3633 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3634 DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  return ptr[index];
}
3640 Handle<Object> ExternalUint8ClampedArray::get(
    Handle<ExternalUint8ClampedArray> array,
    int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3644 array->GetIsolate());
3648 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3649 DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  ptr[index] = value;
}
3655 void* ExternalArray::external_pointer() const {
3656 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3657 return reinterpret_cast<void*>(ptr);
3661 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3662 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3663 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3667 int8_t ExternalInt8Array::get_scalar(int index) {
3668 DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
                                      int index) {
3676 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3677 array->GetIsolate());
3681 void ExternalInt8Array::set(int index, int8_t value) {
3682 DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}
3688 uint8_t ExternalUint8Array::get_scalar(int index) {
3689 DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
                                       int index) {
3697 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3698 array->GetIsolate());
3702 void ExternalUint8Array::set(int index, uint8_t value) {
3703 DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}
3709 int16_t ExternalInt16Array::get_scalar(int index) {
3710 DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
                                       int index) {
3718 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3719 array->GetIsolate());
3723 void ExternalInt16Array::set(int index, int16_t value) {
3724 DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}
3730 uint16_t ExternalUint16Array::get_scalar(int index) {
3731 DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
                                        int index) {
3739 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3740 array->GetIsolate());
3744 void ExternalUint16Array::set(int index, uint16_t value) {
3745 DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
3751 int32_t ExternalInt32Array::get_scalar(int index) {
3752 DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
                                       int index) {
3760 return array->GetIsolate()->factory()->
3761 NewNumberFromInt(array->get_scalar(index));
3765 void ExternalInt32Array::set(int index, int32_t value) {
3766 DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}
3772 uint32_t ExternalUint32Array::get_scalar(int index) {
3773 DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
                                        int index) {
3781 return array->GetIsolate()->factory()->
3782 NewNumberFromUint(array->get_scalar(index));
3786 void ExternalUint32Array::set(int index, uint32_t value) {
3787 DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
3793 float ExternalFloat32Array::get_scalar(int index) {
3794 DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
                                         int index) {
3802 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3806 void ExternalFloat32Array::set(int index, float value) {
3807 DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
3813 double ExternalFloat64Array::get_scalar(int index) {
3814 DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}
Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
                                         int index) {
3822 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3826 void ExternalFloat64Array::set(int index, double value) {
3827 DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
3833 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
3836 void* FixedTypedArrayBase::DataPtr() {
3837 return FIELD_ADDR(this, kDataOffset);
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
3844 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3845 case FIXED_##TYPE##_ARRAY_TYPE: \
    element_size = size;                                \
    break;
3849 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3850 #undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return element_size;
}
3859 int FixedTypedArrayBase::DataSize(InstanceType type) {
3860 return length() * ElementSize(type);
3864 int FixedTypedArrayBase::DataSize() {
3865 return DataSize(map()->instance_type());
3869 int FixedTypedArrayBase::size() {
3870 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
3874 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
3875 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
3879 int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
3880 return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
3884 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
3887 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
3890 int8_t Int8ArrayTraits::defaultValue() { return 0; }
3893 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
3896 int16_t Int16ArrayTraits::defaultValue() { return 0; }
3899 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
3902 int32_t Int32ArrayTraits::defaultValue() { return 0; }
3905 float Float32ArrayTraits::defaultValue() {
3906 return std::numeric_limits<float>::quiet_NaN();
3910 double Float64ArrayTraits::defaultValue() {
3911 return std::numeric_limits<double>::quiet_NaN();
3915 template <class Traits>
3916 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3917 DCHECK((index >= 0) && (index < this->length()));
3918 ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  return ptr[index];
}
3924 template <class Traits>
3925 void FixedTypedArray<Traits>::set(int index, ElementType value) {
3926 DCHECK((index >= 0) && (index < this->length()));
3927 ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  ptr[index] = value;
}
3933 template <class Traits>
3934 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
3935 return static_cast<ElementType>(value);
template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
3941 if (value < 0) return 0;
3942 if (value > 0xFF) return 0xFF;
3943 return static_cast<uint8_t>(value);
3947 template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}
template <>
inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(
    double value) {
3956 // Handle NaNs and less than zero values which clamp to zero.
3957 if (!(value > 0)) return 0;
3958 if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}
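// Worked values (illustrative): from_double(-3.2) == 0 and from_double(NaN)
// == 0 (both fail the !(value > 0) test), from_double(300.0) == 255, and
// from_double(2.5) == 2 because lrint() rounds ties to even, matching the
// Uint8ClampedArray conversion rules.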
template <>
inline float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
3965 return static_cast<float>(value);
template <>
inline double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
3975 template <class Traits>
3976 Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
3979 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
3983 template <class Traits>
3984 void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
3985 ElementType cast_value = Traits::defaultValue();
3986 if (value->IsSmi()) {
3987 int int_value = Smi::cast(value)->value();
3988 cast_value = from_int(int_value);
3989 } else if (value->IsHeapNumber()) {
3990 double double_value = HeapNumber::cast(value)->value();
    cast_value = from_double(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
3994 // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined());
  }
3997 set(index, cast_value);
4001 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4002 return handle(Smi::FromInt(scalar), isolate);
4006 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4008 return handle(Smi::FromInt(scalar), isolate);
4012 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4013 return handle(Smi::FromInt(scalar), isolate);
4017 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4018 return handle(Smi::FromInt(scalar), isolate);
4022 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4023 return handle(Smi::FromInt(scalar), isolate);
4027 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4028 return isolate->factory()->NewNumberFromUint(scalar);
4032 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4033 return isolate->factory()->NewNumberFromInt(scalar);
4037 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4038 return isolate->factory()->NewNumber(scalar);
4042 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4043 return isolate->factory()->NewNumber(scalar);
4047 int Map::visitor_id() {
4048 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4052 void Map::set_visitor_id(int id) {
4053 DCHECK(0 <= id && id < 256);
4054 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4058 int Map::instance_size() {
4059 return NOBARRIER_READ_BYTE_FIELD(
4060 this, kInstanceSizeOffset) << kPointerSizeLog2;
4064 int Map::inobject_properties() {
4065 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4069 int Map::GetInObjectPropertyOffset(int index) {
4070 // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
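// Worked example (hypothetical numbers): for instance_size() == 64, 4
// in-object properties and kPointerSize == 8, property index 1 maps to
// 64 + (1 - 4) * 8 == 40, i.e. in-object properties occupy the last words
// of the instance.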
4077 Handle<Map> Map::CopyInstallDescriptorsForTesting(
4078 Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
4079 Handle<LayoutDescriptor> layout_descriptor) {
  return CopyInstallDescriptors(map, new_descriptor, descriptors,
                                layout_descriptor);
}
4085 int HeapObject::SizeFromMap(Map* map) {
4086 int instance_size = map->instance_size();
4087 if (instance_size != kVariableSizeSentinel) return instance_size;
4088 // Only inline the most frequent cases.
4089 InstanceType instance_type = map->instance_type();
4090 if (instance_type == FIXED_ARRAY_TYPE) {
4091 return FixedArray::BodyDescriptor::SizeOf(map, this);
4093 if (instance_type == ONE_BYTE_STRING_TYPE ||
4094 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access their
    // length synchronized.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
4100 if (instance_type == BYTE_ARRAY_TYPE) {
4101 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4103 if (instance_type == FREE_SPACE_TYPE) {
4104 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4106 if (instance_type == STRING_TYPE ||
4107 instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access their
    // length synchronized.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
4113 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4114 return FixedDoubleArray::SizeFor(
4115 reinterpret_cast<FixedDoubleArray*>(this)->length());
4117 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4118 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4119 return reinterpret_cast<FixedTypedArrayBase*>(
4120 this)->TypedArraySize(instance_type);
4122 DCHECK(instance_type == CODE_TYPE);
4123 return reinterpret_cast<Code*>(this)->CodeSize();
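
// SizeFromMap() is what makes linear heap walks possible: any object's size
// is derivable from its map (plus a length field for the variable-sized
// cases above). A simplified sketch of the iteration pattern, assuming
// [start, end) spans a region of contiguous live objects:
//
//   Address current = start;
//   while (current < end) {
//     HeapObject* object = HeapObject::FromAddress(current);
//     current += object->SizeFromMap(object->map());  // step to next object
//   }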
4127 void Map::set_instance_size(int value) {
4128 DCHECK_EQ(0, value & (kPointerSize - 1));
4129 value >>= kPointerSizeLog2;
4130 DCHECK(0 <= value && value < 256);
4131 NOBARRIER_WRITE_BYTE_FIELD(
4132 this, kInstanceSizeOffset, static_cast<byte>(value));
4136 void Map::set_inobject_properties(int value) {
4137 DCHECK(0 <= value && value < 256);
4138 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4142 void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4145 InstanceType Map::instance_type() {
4146 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4150 void Map::set_instance_type(InstanceType value) {
4151 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4155 int Map::unused_property_fields() {
4156 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4160 void Map::set_unused_property_fields(int value) {
4161 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4165 byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
4168 void Map::set_bit_field(byte value) {
4169 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4173 byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
4176 void Map::set_bit_field2(byte value) {
4177 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}
4190 bool Map::has_non_instance_prototype() {
4191 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4195 void Map::set_function_with_prototype(bool value) {
4196 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4200 bool Map::function_with_prototype() {
4201 return FunctionWithPrototype::decode(bit_field());
void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}
4214 bool Map::is_access_check_needed() {
4215 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}
4227 bool Map::is_extensible() {
4228 return ((1 << kIsExtensible) & bit_field2()) != 0;
4232 void Map::set_is_prototype_map(bool value) {
4233 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4236 bool Map::is_prototype_map() const {
4237 return IsPrototypeMapBits::decode(bit_field2());
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
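
// All of the bit_field3 flags here follow the same BitField pattern:
// decode() reads one flag out of the packed word, update() returns the word
// with just that flag changed, and every setter is a read-modify-write. As
// an illustration, set_dictionary_map(true) expands to roughly:
//
//   uint32_t bits = bit_field3();
//   bits = DictionaryMap::update(bits, true);  // enter dictionary mode
//   bits = IsUnstable::update(bits, true);     // dictionary maps are unstable
//   set_bit_field3(bits);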
4253 Code::Flags Code::flags() {
4254 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4258 void Map::set_owns_descriptors(bool owns_descriptors) {
4259 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4263 bool Map::owns_descriptors() {
4264 return OwnsDescriptors::decode(bit_field3());
4268 void Map::set_has_instance_call_handler() {
4269 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4273 bool Map::has_instance_call_handler() {
4274 return HasInstanceCallHandler::decode(bit_field3());
4278 void Map::deprecate() {
4279 set_bit_field3(Deprecated::update(bit_field3(), true));
4283 bool Map::is_deprecated() {
4284 return Deprecated::decode(bit_field3());
4288 void Map::set_migration_target(bool value) {
4289 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4293 bool Map::is_migration_target() {
4294 return IsMigrationTarget::decode(bit_field3());
4298 void Map::set_is_strong() {
4299 set_bit_field3(IsStrong::update(bit_field3(), true));
4303 bool Map::is_strong() {
4304 return IsStrong::decode(bit_field3());
4308 void Map::set_counter(int value) {
4309 set_bit_field3(Counter::update(bit_field3(), value));
4313 int Map::counter() { return Counter::decode(bit_field3()); }
4316 void Map::mark_unstable() {
4317 set_bit_field3(IsUnstable::update(bit_field3(), true));
4321 bool Map::is_stable() {
4322 return !IsUnstable::decode(bit_field3());
4326 bool Map::has_code_cache() {
4327 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == DATA_CONSTANT) return true;
  }
  return false;
}
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}
4355 bool Map::CanOmitMapChecks() {
4356 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4360 int DependentCode::number_of_entries(DependencyGroup group) {
4361 if (length() == 0) return 0;
4362 return Smi::cast(get(group))->value();
4366 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4367 set(group, Smi::FromInt(value));
4371 void DependentCode::set_object_at(int i, Object* object) {
4372 set(kCodesStartIndex + i, object);
4376 Object* DependentCode::object_at(int i) {
4377 return get(kCodesStartIndex + i);
4381 void DependentCode::clear_at(int i) {
4382 set_undefined(kCodesStartIndex + i);
4386 void DependentCode::copy(int from, int to) {
4387 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
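
// DependentCode is a FixedArray whose first kGroupCount slots hold per-group
// entry counts, with the code entries packed group by group from
// kCodesStartIndex onward. ExtendGroup() moves the first entry of each later
// group to just past that group's end, opening one free slot at the end of
// the requested group. A rough picture for two groups, extending group 0:
//
//   counts [2][1] | a0 a1 b0 __     before ExtendGroup(0)
//   counts [2][1] | a0 a1 __ b0     after: gap at the end of group 0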
4401 void Code::set_flags(Code::Flags flags) {
4402 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4403 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4407 Code::Kind Code::kind() {
4408 return ExtractKindFromFlags(flags());
4412 bool Code::IsCodeStubOrIC() {
4413 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4414 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4415 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4416 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4417 kind() == TO_BOOLEAN_IC;
InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to a code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}
4433 ExtraICState Code::extra_ic_state() {
4434 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4435 return ExtractExtraICStateFromFlags(flags());
4439 Code::StubType Code::type() {
4440 return ExtractTypeFromFlags(flags());
4444 // For initialization.
4445 void Code::set_raw_kind_specific_flags1(int value) {
4446 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4450 void Code::set_raw_kind_specific_flags2(int value) {
4451 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4455 inline bool Code::is_crankshafted() {
4456 return IsCrankshaftedField::decode(
4457 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4461 inline bool Code::is_hydrogen_stub() {
4462 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4466 inline void Code::set_is_crankshafted(bool value) {
4467 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4468 int updated = IsCrankshaftedField::update(previous, value);
4469 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4473 inline bool Code::is_turbofanned() {
4474 return IsTurbofannedField::decode(
4475 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4479 inline void Code::set_is_turbofanned(bool value) {
4480 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4481 int updated = IsTurbofannedField::update(previous, value);
4482 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4486 inline bool Code::can_have_weak_objects() {
4487 DCHECK(kind() == OPTIMIZED_FUNCTION);
4488 return CanHaveWeakObjectsField::decode(
4489 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4493 inline void Code::set_can_have_weak_objects(bool value) {
4494 DCHECK(kind() == OPTIMIZED_FUNCTION);
4495 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4496 int updated = CanHaveWeakObjectsField::update(previous, value);
4497 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4501 bool Code::has_deoptimization_support() {
4502 DCHECK_EQ(FUNCTION, kind());
4503 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4504 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4508 void Code::set_has_deoptimization_support(bool value) {
4509 DCHECK_EQ(FUNCTION, kind());
4510 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4511 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4512 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4516 bool Code::has_debug_break_slots() {
4517 DCHECK_EQ(FUNCTION, kind());
4518 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4519 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4523 void Code::set_has_debug_break_slots(bool value) {
4524 DCHECK_EQ(FUNCTION, kind());
4525 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4526 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4527 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4531 bool Code::is_compiled_optimizable() {
4532 DCHECK_EQ(FUNCTION, kind());
4533 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4534 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4538 void Code::set_compiled_optimizable(bool value) {
4539 DCHECK_EQ(FUNCTION, kind());
4540 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4541 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4542 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4546 bool Code::has_reloc_info_for_serialization() {
4547 DCHECK_EQ(FUNCTION, kind());
4548 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4549 return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
4553 void Code::set_has_reloc_info_for_serialization(bool value) {
4554 DCHECK_EQ(FUNCTION, kind());
4555 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4556 flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
4557 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4561 int Code::allow_osr_at_loop_nesting_level() {
4562 DCHECK_EQ(FUNCTION, kind());
4563 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4564 return AllowOSRAtLoopNestingLevelField::decode(fields);
4568 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4569 DCHECK_EQ(FUNCTION, kind());
4570 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
4571 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4572 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
4573 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4577 int Code::profiler_ticks() {
4578 DCHECK_EQ(FUNCTION, kind());
4579 return ProfilerTicksField::decode(
4580 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4584 void Code::set_profiler_ticks(int ticks) {
4585 if (kind() == FUNCTION) {
4586 unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4587 unsigned updated = ProfilerTicksField::update(previous, ticks);
4588 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4593 int Code::builtin_index() {
4594 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
4598 void Code::set_builtin_index(int index) {
4599 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
4603 unsigned Code::stack_slots() {
4604 DCHECK(is_crankshafted());
4605 return StackSlotsField::decode(
4606 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4610 void Code::set_stack_slots(unsigned slots) {
4611 CHECK(slots <= (1 << kStackSlotsBitCount));
4612 DCHECK(is_crankshafted());
4613 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4614 int updated = StackSlotsField::update(previous, slots);
4615 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4619 unsigned Code::safepoint_table_offset() {
4620 DCHECK(is_crankshafted());
4621 return SafepointTableOffsetField::decode(
4622 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4626 void Code::set_safepoint_table_offset(unsigned offset) {
4627 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4628 DCHECK(is_crankshafted());
4629 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4630 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4631 int updated = SafepointTableOffsetField::update(previous, offset);
4632 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4636 unsigned Code::back_edge_table_offset() {
4637 DCHECK_EQ(FUNCTION, kind());
4638 return BackEdgeTableOffsetField::decode(
4639 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
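
// The table offset is pointer-aligned, so its low kPointerSizeLog2 bits are
// always zero; storing offset >> kPointerSizeLog2 lets the bit field address
// a pointer-size-times-larger range. Round trip on a 64-bit target
// (kPointerSizeLog2 == 3):
//
//   code->set_back_edge_table_offset(128);            // stored as 16
//   DCHECK_EQ(128u, code->back_edge_table_offset());  // read back as 16 << 3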
4653 bool Code::back_edges_patched_for_osr() {
4654 DCHECK_EQ(FUNCTION, kind());
4655 return allow_osr_at_loop_nesting_level() > 0;
4659 uint16_t Code::to_boolean_state() { return extra_ic_state(); }
4662 bool Code::has_function_cache() {
4663 DCHECK(kind() == STUB);
4664 return HasFunctionCacheField::decode(
4665 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4669 void Code::set_has_function_cache(bool flag) {
4670 DCHECK(kind() == STUB);
4671 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4672 int updated = HasFunctionCacheField::update(previous, flag);
4673 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4677 bool Code::marked_for_deoptimization() {
4678 DCHECK(kind() == OPTIMIZED_FUNCTION);
4679 return MarkedForDeoptimizationField::decode(
4680 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4684 void Code::set_marked_for_deoptimization(bool flag) {
4685 DCHECK(kind() == OPTIMIZED_FUNCTION);
4686 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
4687 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4688 int updated = MarkedForDeoptimizationField::update(previous, flag);
4689 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
4704 bool Code::is_keyed_stub() {
4705 return is_keyed_load_stub() || is_keyed_store_stub();
4709 bool Code::is_debug_stub() {
4710 return ic_state() == DEBUG_STUB;
Address Code::constant_pool() {
  Address constant_pool = NULL;
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < instruction_size()) {
      constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
    }
  }
  return constant_pool;
}
Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
                               ExtraICState extra_ic_state, StubType type,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}
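
// Since every component is a disjoint BitField of the same word, a flags
// value can be built, compared and masked in single integer operations. A
// hedged round-trip sketch (this assumes the default arguments declared for
// ComputeFlags in objects.h):
//
//   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
//   DCHECK_EQ(Code::LOAD_IC, Code::ExtractKindFromFlags(flags));
//   DCHECK_EQ(MONOMORPHIC, Code::ExtractICStateFromFlags(flags));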
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}
4747 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
4748 CacheHolderFlag holder) {
4749 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
4753 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4754 return KindField::decode(flags);
4758 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4759 return ICStateField::decode(flags);
4763 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4764 return ExtraICStateField::decode(flags);
4768 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4769 return TypeField::decode(flags);
4773 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
4774 return CacheHolderField::decode(flags);
4778 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
4779 int bits = flags & ~TypeField::kMask;
4780 return static_cast<Flags>(bits);
4784 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
4785 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
4786 return static_cast<Flags>(bits);
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}


Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSObject()) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  if (object->IsFixedArray()) {
    // Contexts of inlined functions are embedded in optimized code.
    Map* map = HeapObject::cast(object)->map();
    Heap* heap = map->GetHeap();
    return FLAG_weak_embedded_objects_in_optimized_code &&
           map == heap->function_context_map();
  }
  return false;
}
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }

 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
4849 Object* Map::prototype() const {
4850 return READ_FIELD(this, kPrototypeOffset);
4854 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
4855 DCHECK(value->IsNull() || value->IsJSReceiver());
4856 WRITE_FIELD(this, kPrototypeOffset, value);
4857 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
4861 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
4862 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
4863 return LayoutDescriptor::cast_gc_safe(layout_desc);
4867 bool Map::HasFastPointerLayout() const {
4868 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
4869 return LayoutDescriptor::IsFastPointerLayout(layout_desc);
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
#endif
  }
}
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(StaticVisitorBase::GetVisitorId(this));
  }
}
4915 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
4916 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}
4927 uint32_t Map::bit_field3() const {
4928 return READ_UINT32_FIELD(this, kBitField3Offset);
4932 LayoutDescriptor* Map::GetLayoutDescriptor() {
4933 return FLAG_unbox_double_fields ? layout_descriptor()
4934 : LayoutDescriptor::FastPointerLayout();
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, the layout descriptor would have to
  // be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}
Object* Map::GetBackPointer() {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetIsolate()->heap()->undefined_value();
}
4963 Map* Map::ElementsTransitionMap() {
4964 return TransitionArray::SearchSpecial(
4965 this, GetHeap()->elements_transition_symbol());
4969 ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
4972 Object* Map::prototype_info() const {
4973 DCHECK(is_prototype_map());
4974 return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
4978 void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
4979 DCHECK(is_prototype_map());
4980 WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
4981 CONDITIONAL_WRITE_BARRIER(
4982 GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
4986 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
4987 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
4988 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
4989 (value->IsMap() && GetBackPointer()->IsUndefined()));
4990 DCHECK(!value->IsMap() ||
4991 Map::cast(value)->GetConstructor() == constructor_or_backpointer());
4992 set_constructor_or_backpointer(value, mode);
4996 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
4997 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
4998 ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
4999 ACCESSORS(Map, constructor_or_backpointer, Object,
5000 kConstructorOrBackPointerOffset)
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}
5014 void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5015 // Never overwrite a back pointer with a constructor.
5016 DCHECK(!constructor_or_backpointer()->IsMap());
5017 set_constructor_or_backpointer(constructor, mode);
5021 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5022 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5023 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5025 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5026 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5027 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5029 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5030 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5032 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5033 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5034 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5035 kExpectedReceiverTypeOffset)
5037 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5038 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5039 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5041 ACCESSORS(Box, value, Object, kValueOffset)
5043 ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
5044 ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
5045 ACCESSORS(PrototypeInfo, constructor_name, Object, kConstructorNameOffset)
5047 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5048 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5050 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5051 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5052 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5054 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5055 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5056 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5057 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5058 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5059 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5060 SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
5061 BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
5062 kCanInterceptSymbolsBit)
5063 BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
5064 BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
5066 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5067 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5069 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5070 SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
5071 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5072 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5074 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5075 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5076 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5077 kPrototypeTemplateOffset)
5078 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5079 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5080 kNamedPropertyHandlerOffset)
5081 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5082 kIndexedPropertyHandlerOffset)
5083 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5084 kInstanceTemplateOffset)
5085 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5086 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5087 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5088 kInstanceCallHandlerOffset)
5089 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5090 kAccessCheckInfoOffset)
5091 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5093 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5094 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5095 kInternalFieldCountOffset)
5097 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5099 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5100 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5101 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5102 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5103 kPretenureCreateCountOffset)
5104 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5105 kDependentCodeOffset)
5106 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5107 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5109 ACCESSORS(Script, source, Object, kSourceOffset)
5110 ACCESSORS(Script, name, Object, kNameOffset)
5111 ACCESSORS(Script, id, Smi, kIdOffset)
5112 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5113 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5114 ACCESSORS(Script, context_data, Object, kContextOffset)
5115 ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5116 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5117 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5118 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5119 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5120 kEvalFrominstructionsOffsetOffset)
5121 ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
5122 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5123 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5124 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}


void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}


Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}


void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}


ScriptOriginOptions Script::origin_options() {
  return ScriptOriginOptions((flags()->value() & kOriginOptionsMask) >>
                             kOriginOptionsShift);
}


void Script::set_origin_options(ScriptOriginOptions origin_options) {
  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
  set_flags(Smi::FromInt((flags()->value() & ~kOriginOptionsMask) |
                         (origin_options.Flags() << kOriginOptionsShift)));
}
5153 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5154 ACCESSORS(DebugInfo, code, Code, kCodeIndex)
5155 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5157 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5158 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5159 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5160 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5162 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5163 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5164 kOptimizedCodeMapOffset)
5165 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5166 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5167 kFeedbackVectorOffset)
5169 SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
5171 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5172 kInstanceClassNameOffset)
5173 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5174 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5175 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5176 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5179 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5180 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5181 kHiddenPrototypeBit)
5182 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5183 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5184 kNeedsAccessCheckBit)
5185 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5186 kReadOnlyPrototypeBit)
5187 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5188 kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
               kNeverCompiled)
5219 #if V8_HOST_ARCH_32_BIT
5220 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5221 SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
5222 kFormalParameterCountOffset)
5223 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5224 kExpectedNofPropertiesOffset)
5225 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5226 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5227 kStartPositionAndTypeOffset)
5228 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5229 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5230 kFunctionTokenPositionOffset)
5231 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5232 kCompilerHintsOffset)
5233 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5234 kOptCountAndBailoutReasonOffset)
5235 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5236 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif
5249 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5250 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
5251 int holder::name() const { \
5252 int value = READ_INT_FIELD(this, offset); \
5253 DCHECK(kHeapObjectTag == 1); \
5254 DCHECK((value & kHeapObjectTag) == 0); \
5255 return value >> 1; \
5257 void holder::set_##name(int value) { \
5258 DCHECK(kHeapObjectTag == 1); \
5259 DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5260 WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag); \
5263 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5264 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5265 INT_ACCESSORS(holder, name, offset)
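
// On 64-bit targets each LO/HI pair below shares one pointer-sized word that
// the GC may scan as if it were a tagged value. Storing the LO half shifted
// left by one keeps the heap tag bit clear (the word parses as a Smi), while
// the HI half occupies the upper, untagged bits. Rough layout of one such
// word on a little-endian target:
//
//   bits 0..31   LO field, stored as (value << 1), tag bit == 0
//   bits 32..63  HI field, plain int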
5268 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5269 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5270 kFormalParameterCountOffset)
5272 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5273 expected_nof_properties,
5274 kExpectedNofPropertiesOffset)
5275 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5277 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5278 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5279 start_position_and_type,
5280 kStartPositionAndTypeOffset)
5282 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5283 function_token_position,
5284 kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
}
5317 LanguageMode SharedFunctionInfo::language_mode() {
5318 STATIC_ASSERT(LANGUAGE_END == 3);
5319 return construct_language_mode(
5320 BooleanBit::get(compiler_hints(), kStrictModeFunction),
5321 BooleanBit::get(compiler_hints(), kStrongModeFunction));
5325 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5326 STATIC_ASSERT(LANGUAGE_END == 3);
5327 // We only allow language mode transitions that set the same language mode
5328 // again or go up in the chain:
5329 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
5330 int hints = compiler_hints();
5331 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
5332 hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
5333 set_compiler_hints(hints);
5337 FunctionKind SharedFunctionInfo::kind() {
5338 return FunctionKindBits::decode(compiler_hints());
5342 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5343 DCHECK(IsValidFunctionKind(kind));
5344 int hints = compiler_hints();
5345 hints = FunctionKindBits::update(hints, kind);
5346 set_compiler_hints(hints);
5350 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
5352 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5353 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
5354 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5355 name_should_print_as_anonymous,
5356 kNameShouldPrintAsAnonymous)
5357 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5358 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5359 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5360 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
5362 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5363 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5364 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5365 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
5367 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
5368 kIsAccessorFunction)
5369 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
5370 kIsDefaultConstructor)
5372 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5373 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5375 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5377 bool Script::HasValidSource() {
5378 Object* src = this->source();
5379 if (!src->IsString()) return true;
5380 String* src_str = String::cast(src);
5381 if (!StringShape(src_str).IsExternal()) return true;
5382 if (src_str->IsOneByteRepresentation()) {
5383 return ExternalOneByteString::cast(src)->resource() != NULL;
5384 } else if (src_str->IsTwoByteRepresentation()) {
5385 return ExternalTwoByteString::cast(src)->resource() != NULL;
5391 void SharedFunctionInfo::DontAdaptArguments() {
5392 DCHECK(code()->kind() == Code::BUILTIN);
5393 set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
5397 int SharedFunctionInfo::start_position() const {
5398 return start_position_and_type() >> kStartPositionShift;
5402 void SharedFunctionInfo::set_start_position(int start_position) {
5403 set_start_position_and_type((start_position << kStartPositionShift)
5404 | (start_position_and_type() & ~kStartPositionMask));
5408 Code* SharedFunctionInfo::code() const {
5409 return Code::cast(READ_FIELD(this, kCodeOffset));
5413 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5414 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5415 WRITE_FIELD(this, kCodeOffset, value);
5416 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

  set_code(value);

  if (is_compiled()) set_never_compiled(false);
}
5436 ScopeInfo* SharedFunctionInfo::scope_info() const {
5437 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
5452 bool SharedFunctionInfo::is_compiled() {
5453 Builtins* builtins = GetIsolate()->builtins();
5454 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
5455 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
5456 return code() != builtins->builtin(Builtins::kCompileLazy);
5460 bool SharedFunctionInfo::is_simple_parameter_list() {
5461 return scope_info()->IsSimpleParameterList();
5465 bool SharedFunctionInfo::IsApiFunction() {
5466 return function_data()->IsFunctionTemplateInfo();
5470 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5471 DCHECK(IsApiFunction());
5472 return FunctionTemplateInfo::cast(function_data());
5476 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5477 return function_data()->IsSmi();
5481 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5482 DCHECK(HasBuiltinFunctionId());
5483 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5487 int SharedFunctionInfo::ic_age() {
5488 return ICAgeBits::decode(counters());
5492 void SharedFunctionInfo::set_ic_age(int ic_age) {
5493 set_counters(ICAgeBits::update(counters(), ic_age));
5497 int SharedFunctionInfo::deopt_count() {
5498 return DeoptCountBits::decode(counters());
5502 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5503 set_counters(DeoptCountBits::update(counters(), deopt_count));
5507 void SharedFunctionInfo::increment_deopt_count() {
5508 int value = counters();
5509 int deopt_count = DeoptCountBits::decode(value);
5510 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5511 set_counters(DeoptCountBits::update(value, deopt_count));
5515 int SharedFunctionInfo::opt_reenable_tries() {
5516 return OptReenableTriesBits::decode(counters());
5520 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5521 set_counters(OptReenableTriesBits::update(counters(), tries));
5525 int SharedFunctionInfo::opt_count() {
5526 return OptCountBits::decode(opt_count_and_bailout_reason());
5530 void SharedFunctionInfo::set_opt_count(int opt_count) {
5531 set_opt_count_and_bailout_reason(
5532 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
5536 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
5537 return static_cast<BailoutReason>(
5538 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5542 bool SharedFunctionInfo::has_deoptimization_support() {
5543 Code* code = this->code();
5544 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
  }
}
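
// The predicate ((tries - 1) & tries) == 0 is the usual power-of-two test,
// so re-enabling fires at tries == 16, 32, 64, ... and the gaps between
// attempts grow exponentially. E.g. 16 & 15 == 0 (re-enable), while
// 18 & 17 != 0 (optimization stays disabled until the next power of two).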
5561 bool SharedFunctionInfo::IsSubjectToDebugging() {
5562 Object* script_obj = script();
5563 if (script_obj->IsUndefined()) return false;
5564 Script* script = Script::cast(script_obj);
5565 Script::Type type = static_cast<Script::Type>(script->type()->value());
5566 return type == Script::TYPE_NORMAL;
5570 bool JSFunction::IsBuiltin() {
5571 return context()->global_object()->IsJSBuiltinsObject();
5575 bool JSFunction::IsSubjectToDebugging() {
5576 return shared()->IsSubjectToDebugging();
5580 bool JSFunction::NeedsArgumentsAdaption() {
5581 return shared()->internal_formal_parameter_count() !=
5582 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5586 bool JSFunction::IsOptimized() {
5587 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5591 bool JSFunction::IsMarkedForOptimization() {
5592 return code() == GetIsolate()->builtins()->builtin(
5593 Builtins::kCompileOptimized);
5597 bool JSFunction::IsMarkedForConcurrentOptimization() {
5598 return code() == GetIsolate()->builtins()->builtin(
5599 Builtins::kCompileOptimizedConcurrent);
bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}
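
// Compilation state is encoded entirely in which code object the function
// points at: kCompileLazy means not yet compiled, kCompileOptimized and
// kCompileOptimizedConcurrent mark it for (concurrent) optimization, and
// kInOptimizationQueue means a concurrent job is pending. Each query above
// is therefore a single pointer comparison, e.g.:
//
//   bool marked = function->IsMarkedForOptimization() ||
//                 function->IsMarkedForConcurrentOptimization();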
5609 bool JSFunction::IsInobjectSlackTrackingInProgress() {
5610 return has_initial_map() &&
5611 initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}
5632 void JSFunction::set_code_no_write_barrier(Code* value) {
5633 DCHECK(!GetHeap()->InNewSpace(value));
5634 Address entry = value->entry();
5635 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
5662 Context* JSFunction::context() {
5663 return Context::cast(READ_FIELD(this, kContextOffset));
5667 JSObject* JSFunction::global_proxy() {
5668 return context()->global_proxy();
5672 void JSFunction::set_context(Object* value) {
5673 DCHECK(value->IsUndefined() || value->IsContext());
5674 WRITE_FIELD(this, kContextOffset, value);
5675 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5678 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5679 kPrototypeOrInitialMapOffset)
5682 Map* JSFunction::initial_map() {
5683 return Map::cast(prototype_or_initial_map());
5687 bool JSFunction::has_initial_map() {
5688 return prototype_or_initial_map()->IsMap();
5692 bool JSFunction::has_instance_prototype() {
5693 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5697 bool JSFunction::has_prototype() {
5698 return map()->has_non_instance_prototype() || has_instance_prototype();
5702 Object* JSFunction::instance_prototype() {
5703 DCHECK(has_instance_prototype());
5704 if (has_initial_map()) return initial_map()->prototype();
5705 // When there is no initial map and the prototype is a JSObject, the
5706 // initial map field is used for the prototype field.
5707 return prototype_or_initial_map();
Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) {
    Object* prototype = map()->GetConstructor();
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype->IsMap());
    return prototype;
  }
  return instance_prototype();
}
5725 bool JSFunction::should_have_prototype() {
5726 return map()->function_with_prototype();
5730 bool JSFunction::is_compiled() {
5731 Builtins* builtins = GetIsolate()->builtins();
5732 return code() != builtins->builtin(Builtins::kCompileLazy) &&
5733 code() != builtins->builtin(Builtins::kCompileOptimized) &&
5734 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
5738 bool JSFunction::is_simple_parameter_list() {
5739 return shared()->is_simple_parameter_list();
5743 FixedArray* JSFunction::literals() {
5744 DCHECK(!shared()->bound());
5745 return literals_or_bindings();
5749 void JSFunction::set_literals(FixedArray* literals) {
5750 DCHECK(!shared()->bound());
5751 set_literals_or_bindings(literals);
5755 FixedArray* JSFunction::function_bindings() {
5756 DCHECK(shared()->bound());
5757 return literals_or_bindings();
5761 void JSFunction::set_function_bindings(FixedArray* bindings) {
5762 DCHECK(shared()->bound());
5763 // Bound function literal may be initialized to the empty fixed array
5764 // before the bindings are set.
5765 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
5766 bindings->map() == GetHeap()->fixed_array_map());
5767 set_literals_or_bindings(bindings);
5771 int JSFunction::NumberOfLiterals() {
5772 DCHECK(!shared()->bound());
5773 return literals()->length();
5777 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
5778 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
5779 return READ_FIELD(this, OffsetOfFunctionWithId(id));
void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}
5791 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
5792 ACCESSORS(JSProxy, hash, Object, kHashOffset)
5793 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
5794 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
5797 void JSProxy::InitializeBody(int object_size, Object* value) {
5798 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5799 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
5800 WRITE_FIELD(this, offset, value);
5805 ACCESSORS(JSCollection, table, Object, kTableOffset)
5808 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
5809 template<class Derived, class TableType> \
5810 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
5811 return type::cast(READ_FIELD(this, offset)); \
5813 template<class Derived, class TableType> \
5814 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
5815 type* value, WriteBarrierMode mode) { \
5816 WRITE_FIELD(this, offset, value); \
5817 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
5820 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
5821 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
5822 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
5824 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
5827 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
5828 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
5831 Address Foreign::foreign_address() {
5832 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
5836 void Foreign::set_foreign_address(Address value) {
5837 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
5841 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
5842 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
5843 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
5844 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
5845 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
5847 bool JSGeneratorObject::is_suspended() {
5848 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
5849 DCHECK_EQ(kGeneratorClosed, 0);
5850 return continuation() > 0;
5853 bool JSGeneratorObject::is_closed() {
5854 return continuation() == kGeneratorClosed;
5857 bool JSGeneratorObject::is_executing() {
5858 return continuation() == kGeneratorExecuting;
5861 ACCESSORS(JSModule, context, Object, kContextOffset)
5862 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
5865 ACCESSORS(JSValue, value, Object, kValueOffset)
5868 HeapNumber* HeapNumber::cast(Object* object) {
5869 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
5870 return reinterpret_cast<HeapNumber*>(object);
5874 const HeapNumber* HeapNumber::cast(const Object* object) {
5875 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
5876 return reinterpret_cast<const HeapNumber*>(object);
5880 ACCESSORS(JSDate, value, Object, kValueOffset)
5881 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
5882 ACCESSORS(JSDate, year, Object, kYearOffset)
5883 ACCESSORS(JSDate, month, Object, kMonthOffset)
5884 ACCESSORS(JSDate, day, Object, kDayOffset)
5885 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
5886 ACCESSORS(JSDate, hour, Object, kHourOffset)
5887 ACCESSORS(JSDate, min, Object, kMinOffset)
5888 ACCESSORS(JSDate, sec, Object, kSecOffset)
5891 SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
5892 ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
5893 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
5894 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
5895 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
5896 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
5899 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
5900 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
5901 INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
5902 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
5903 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
5904 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
5905 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
5906 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
5909 void Code::WipeOutHeader() {
5910 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
5911 WRITE_FIELD(this, kHandlerTableOffset, NULL);
5912 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
5913 // Do not wipe out major/minor keys on a code stub or IC
5914 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
5915 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
5920 Object* Code::type_feedback_info() {
5921 DCHECK(kind() == FUNCTION);
5922 return raw_type_feedback_info();
void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}
5934 uint32_t Code::stub_key() {
5935 DCHECK(IsCodeStubOrIC());
5936 Smi* smi_key = Smi::cast(raw_type_feedback_info());
5937 return static_cast<uint32_t>(smi_key->value());
5941 void Code::set_stub_key(uint32_t key) {
5942 DCHECK(IsCodeStubOrIC());
5943 set_raw_type_feedback_info(Smi::FromInt(key));


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)


void JSArrayBuffer::set_bit_field(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
#if V8_TARGET_LITTLE_ENDIAN
    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
#else
    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
#endif
  }
  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}


uint32_t JSArrayBuffer::bit_field() const {
  return READ_UINT32_FIELD(this, kBitFieldOffset);
}
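

// Note: when pointers are wider than 32 bits, the bit field uses only half
// of its pointer-sized slot. The endian-dependent branch above zeroes
// whichever half kBitFieldOffset does not select, so a read of the full slot
// is deterministic on either target.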


bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }


void JSArrayBuffer::set_is_external(bool value) {
  set_bit_field(IsExternal::update(bit_field(), value));
}


bool JSArrayBuffer::is_neuterable() {
  return IsNeuterable::decode(bit_field());
}


void JSArrayBuffer::set_is_neuterable(bool value) {
  set_bit_field(IsNeuterable::update(bit_field(), value));
}


bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }


void JSArrayBuffer::set_was_neutered(bool value) {
  set_bit_field(WasNeutered::update(bit_field(), value));
}


bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }


void JSArrayBuffer::set_is_shared(bool value) {
  set_bit_field(IsShared::update(bit_field(), value));
}


Object* JSArrayBufferView::byte_offset() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteOffsetOffset));
}


void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteOffsetOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
}


Object* JSArrayBufferView::byte_length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteLengthOffset));
}


void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
}


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
#ifdef VERIFY_HEAP
ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
#endif


bool JSArrayBufferView::WasNeutered() const {
  return JSArrayBuffer::cast(buffer())->was_neutered();
}


Object* JSTypedArray::length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kLengthOffset));
}


uint32_t JSTypedArray::length_value() const {
  if (WasNeutered()) return 0;
  uint32_t index = 0;
  CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
  return index;
}


void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
}


#ifdef VERIFY_HEAP
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif


ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
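

// Layout sketch of the data FixedArray used above (per the indices these
// accessors read): general slots first -- kTagIndex, kSourceIndex,
// kFlagsIndex -- followed by implementation-specific slots from kDataIndex
// on, e.g. kIrregexpCaptureCountIndex for IRREGEXP regexps.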


ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK(!IsSloppyArgumentsElements(kind) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasFastArgumentsElements() {
  return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSlowArgumentsElements() {
  return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return IsSloppyArgumentsElements(GetElementsKind());
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
}


#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)            \
  bool JSObject::HasExternal##Type##Elements() {                          \
    HeapObject* array = elements();                                       \
    DCHECK(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}


#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject())                                        \
      return false;                                                    \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(!IsGlobalObject());
  return NameDictionary::cast(properties());
}


GlobalDictionary* JSObject::global_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(IsGlobalObject());
  return GlobalDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}
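

// Encoding note (implied by the accessors above): the low bits covered by
// kHashNotComputedMask flag an uncomputed hash, and the hash proper is kept
// in the upper bits, hence the `field >> kHashShift` on the fast path.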


bool Name::IsPrivate() {
  return this->IsSymbol() && Symbol::cast(this)->is_private();
}


StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
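

// The two helpers above are the halves of the Jenkins one-at-a-time hash
// (see AddCharacter() below): AddCharacterCore() mixes in one character and
// GetHashCore() applies the final avalanche, substituting kZeroHash so the
// bits under String::kHashBitMask never come out as all zero. Illustrative
// use with hypothetical inputs:
//
//   uint32_t h = seed;
//   h = StringHasher::AddCharacterCore(h, 'a');
//   h = StringHasher::AddCharacterCore(h, 'b');
//   h = StringHasher::GetHashCore(h);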


uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
                                          const uc16* chars, int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    running_hash = AddCharacterCore(running_hash, *chars++);
  }
  return running_hash;
}


uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
                                                 const char* chars,
                                                 int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    uint16_t c = static_cast<uint16_t>(*chars++);
    running_hash = AddCharacterCore(running_hash, c);
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
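

// The bound above is exact decimal-overflow arithmetic: the largest valid
// array index is 2^32 - 2 = 4294967294, and 429496729 * 10 + 4 == 4294967294.
// The term `(d + 3) >> 3` is 0 for digits 0..4 and 1 for digits 5..9, so the
// comparison rejects exactly those cases where `array_index_ * 10 + d` would
// exceed that maximum.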


template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
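

// Illustrative call with a hypothetical one-byte buffer; the seed would
// normally come from the heap (e.g. isolate->heap()->HashSeed()):
//
//   uint32_t field =
//       StringHasher::HashSequentialString(chars, length, seed);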


uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}


void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}


String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}


MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name,
                                                 LanguageMode language_mode) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetProperty(&it, language_mode);
}


Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), object, name, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Isolate* isolate = object->GetIsolate();
    Handle<Name> name = isolate->factory()->Uint32ToString(index);
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetElementAttributes(object, index);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Isolate* isolate = object->GetIsolate();
    Handle<Name> name = isolate->factory()->Uint32ToString(index);
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetOwnElementAttributes(object, index);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}


Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::is_special_data_property() {
  return BooleanBit::get(flag(), kSpecialDataProperty);
}


void AccessorInfo::set_is_special_data_property(bool value) {
  set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
}


template <typename Key>
template <typename Dictionary>
void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
                                        Handle<Object> key,
                                        Handle<Object> value,
                                        PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 3);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  dict->set(index + 2, details.AsSmi());
}
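

// Entry layout (as the three writes above show, with kEntrySize == 3): an
// entry occupies consecutive slots starting at EntryToIndex(entry) --
// [key, value, details-as-Smi].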


template <typename Dictionary>
void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
                                     Handle<Object> key, Handle<Object> value,
                                     PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 2);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  DCHECK(value->IsPropertyCell());
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  PropertyCell::cast(*value)->set_property_details(details);
}
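

// In contrast to the base shape, a global dictionary entry is only
// [key, PropertyCell]: with kEntrySize == 2 there is no details slot, so the
// details are stored inside the PropertyCell value instead.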


bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  DCHECK(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}


bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}


Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}


template <typename Dictionary>
PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  return cell->property_details();
}


template <typename Dictionary>
void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
                                         PropertyDetails value) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  cell->set_property_details(value);
}


template <typename Dictionary>
bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
  DCHECK(dict->ValueAt(entry)->IsPropertyCell());
  return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
}


bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}


Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
                           : *key == other;
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash =
      key->IsWeakCell()
          ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
          : reinterpret_cast<intptr_t>(*key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}
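

// Keys in this shape hash by pointer identity (the address of the object,
// or of the object a WeakCell points to), truncated to 32 bits -- which is
// why both IsMatch() and the hash functions unwrap WeakCells first.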


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}
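

// Worked example: growing an array of old_size == 8 with size_limit == 20
// gives max_slack == 12, and the function returns Min(12, 8 / 4) == 2 extra
// slots -- slack is capped at a quarter of the current size.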


void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
  // If the new array won't fit in some non-trivial fraction of the max old
  // space size, then force it to go dictionary mode.
  uint32_t max_fast_array_size =
      static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
  return new_length >= max_fast_array_size;
}
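

// Worked example (hypothetical configuration): with a 1 GiB max old
// generation and kDoubleSize == 8, the threshold is (2^30 / 8) / 4 == 2^25,
// so requesting a length of 33554432 or more switches the array to
// dictionary (slow) elements.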


bool JSArray::AllowsSetLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasExternalArrayElements());
  return result;
}


void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}


int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when the
  // debugger makes a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}


void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) {
  v->VisitPointer(
      reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
}


template <typename StaticVisitor>
void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() {
  StaticVisitor::VisitPointer(
      reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
}


void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalOneByteStringResource Resource;
  v->VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template <typename StaticVisitor>
void ExternalOneByteString::ExternalOneByteStringIterateBody() {
  typedef v8::String::ExternalOneByteStringResource Resource;
  StaticVisitor::VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
                                                    int start_offset,
                                                    int end_offset,
                                                    ObjectVisitor* v) {
  DCHECK(FLAG_unbox_double_fields);
  DCHECK(IsAligned(start_offset, kPointerSize) &&
         IsAligned(end_offset, kPointerSize));

  LayoutDescriptorHelper helper(object->map());
  DCHECK(!helper.all_fields_tagged());

  for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
    // Visit all tagged fields.
    if (helper.IsTagged(offset)) {
      v->VisitPointer(HeapObject::RawField(object, offset));
    }
  }
}
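

// Note: with unboxed double fields enabled, some pointer-sized slots hold
// raw float64 bits rather than tagged pointers; the IsTagged() check above
// is what keeps the GC visitor from interpreting those as heap references.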


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, end_offset));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
  }
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, object_size));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
  }
}


template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}


void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}


void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}


Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}


class String::SubStringRange::iterator final {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};


String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}


String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}
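

// Since begin()/end() return forward iterators, the range works with
// range-based for (illustrative, assuming the usual (string, first, length)
// constructor):
//
//   for (uc16 c : String::SubStringRange(string, first, length)) {
//     // ... process one code unit ...
//   }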


#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_