// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/types-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
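// A minimal worked example of the round-trip (illustrative, not part of the
// original file): suppose value_ = 0x40000001, i.e. bit 30 (the 31st bit)
// is set.
//
//   int v = 0x40000001 << 1;  // 0x80000002: original bit 31 is dropped
//   v >>= 1;                  // 0xC0000001: bit 30 sign-extended into bit 31
//
// The resulting int has its top two bits equal, so it is a valid 31-bit Smi
// payload, and PropertyDetails(AsSmi()) reproduces the low 31 bits of value_.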
int PropertyDetails::field_width_in_words() const {
  DCHECK(location() == kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
#define TYPE_CHECKER(type, instancetype)                                    \
  bool Object::Is##type() const {                                           \
    return Object::IsHeapObject() &&                                        \
           HeapObject::cast(this)->map()->instance_type() == instancetype;  \
  }


#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }


#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }          \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
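// Illustrative expansion of one of the macros above (the holder and offset
// names are assumed for the example, not taken from this file):
//
//   SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
//
// expands to
//
//   int FixedArrayBase::length() const {
//     Object* value = READ_FIELD(this, kLengthOffset);
//     return Smi::cast(value)->value();
//   }
//   void FixedArrayBase::set_length(int value) {
//     WRITE_FIELD(this, kLengthOffset, Smi::FromInt(value));
//   }
//
// Keeping each accessor a single macro expansion keeps the bodies shallow
// enough for gcc to inline them reliably (see the note at the top of this
// file).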
bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)


#define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
  bool Object::Is##Type() const {                                     \
    return Object::IsHeapObject() &&                                  \
           HeapObject::cast(this)->map() ==                           \
               HeapObject::cast(this)->GetHeap()->type##_map();       \
  }
SIMD128_TYPES(SIMD128_TYPE_CHECKER)
#undef SIMD128_TYPE_CHECKER


bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() const {
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() <= LAST_NAME_TYPE;
}


bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}
bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    Object* temp = first;
    first = second;
    second = temp;
  }
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}


bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);


bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}


template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};


class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};


class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() override {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) override;
  Handle<Object> AsHandle(Isolate* isolate) override;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};


class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};


// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
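// Sketch of the lookup protocol these keys implement (assumed call sites;
// the actual probing loop lives in the hash table implementation):
//
//   OneByteStringKey key(one_byte_vector, isolate->heap()->HashSeed());
//   uint32_t hash = key.Hash();  // chooses the probe start
//   // For each candidate entry the table calls key.IsMatch(candidate) and
//   // compares hashes via key.HashForObject(candidate); on a miss it inserts
//   // key.AsHandle(isolate), the freshly internalized string.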
bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER


bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}


bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
}


bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)


bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsArrayList() const { return IsFixedArray(); }


bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}


bool Object::IsTransitionArray() const {
  return IsFixedArray();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsHandlerTable() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}


bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
          map == heap->catch_context_map() ||
          map == heap->with_context_map() ||
          map == heap->native_context_map() ||
          map == heap->block_context_map() ||
          map == heap->module_context_map() ||
          map == heap->script_context_map());
}


bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}


bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsWeakHashTable() const {
  return IsHashTable();
}


bool Object::IsDictionary() const {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsGlobalDictionary() const { return IsDictionary(); }


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsStringTable() const {
  return IsHashTable();
}


bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}
bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}
bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsMapCache() const {
  return IsHashTable();
}


bool Object::IsObjectHashTable() const {
  return IsHashTable();
}


bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}


bool Object::IsPrimitive() const {
  return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
}
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}
bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsGlobalObjectMap();
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() const {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() const {                                       \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
double Object::Number() {
  DCHECK(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields && IsUninitialized()) {
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}


ElementsKind Object::OptimalElementsKind() {
  if (IsSmi()) return FAST_SMI_ELEMENTS;
  if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
  return FAST_ELEMENTS;
}


bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsNone()) {
    return false;
  } else if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  }
  return true;
}
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name,
                                        LanguageMode language_mode) {
  LookupIterator it(object, name);
  return GetProperty(&it, language_mode);
}


MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return GetProperty(&it, language_mode);
}


MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED);
}


Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
    Isolate* isolate, Handle<Object> receiver) {
  PrototypeIterator iter(isolate, receiver);
  while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return PrototypeIterator::GetCurrent(iter);
    }
    iter.Advance();
  }
  return PrototypeIterator::GetCurrent(iter);
}


MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        const char* name,
                                        LanguageMode language_mode) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(object, str, language_mode);
}
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
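// Usage sketch (illustrative; kFooOffset is an assumed field offset): a
// barriered field store is the raw write plus the conditional barrier, which
// is exactly what the ACCESSORS macro above expands to:
//
//   WRITE_FIELD(obj, kFooOffset, new_value);
//   CONDITIONAL_WRITE_BARRIER(obj->GetHeap(), obj, kFooOffset, new_value,
//                             mode);
//
// Passing SKIP_WRITE_BARRIER asserts that new_value needs no remembering,
// e.g. because it is a Smi or an immortal immovable object.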
#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT8_FIELD(p, offset) \
  (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT8_FIELD(p, offset, value) \
  (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT8_FIELD(p, offset) \
  (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT8_FIELD(p, offset, value) \
  (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT16_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT16_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT16_FIELD(p, offset) \
  (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT16_FIELD(p, offset, value) \
  (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
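// Worked example of the tag arithmetic (illustrative): a HeapObject pointer
// is the real address plus kHeapObjectTag (1), so for an object at 0x1000
// and a field at byte offset 8,
//
//   FIELD_ADDR(p, 8) == (0x1000 + 1) + 8 - 1 == 0x1008
//
// i.e. the macros always land on the untagged, pointer-aligned field address.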
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
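// Illustrative round-trip: during evacuation the collector overwrites the map
// slot of a moved object with a forwarding pointer,
//
//   old_object->set_map_word(MapWord::FromForwardingAddress(new_object));
//
// FromForwardingAddress subtracts kHeapObjectTag, so the stored word has its
// low bit clear and satisfies HAS_SMI_TAG, which is how IsForwardingAddress()
// distinguishes it from an ordinary tagged Map pointer (low bit set).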
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}
Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}
MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


int HeapObject::Size() {
  return SizeFromMap(map());
}
HeapObjectContents HeapObject::ContentType() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return HeapObjectContents::kTaggedValues;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides the
    // map-word).
    if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
      return HeapObjectContents::kRawValues;
    else
      return HeapObjectContents::kTaggedValues;
#if 0
  // TODO(jochen): Enable eventually.
  } else if (type == JS_FUNCTION_TYPE) {
    return HeapObjectContents::kMixedValues;
#endif
  } else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
             type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return HeapObjectContents::kMixedValues;
  } else if (type <= LAST_DATA_TYPE) {
    // TODO(jochen): Why do we claim that Code and Map contain only raw values?
    return HeapObjectContents::kRawValues;
  } else {
    if (FLAG_unbox_double_fields) {
      LayoutDescriptorHelper helper(map());
      if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
    }
    return HeapObjectContents::kTaggedValues;
  }
}
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
bool Simd128Value::Equals(Simd128Value* that) {
#define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
  if (this->Is##Type()) {                                      \
    if (!that->Is##Type()) return false;                       \
    return Type::cast(this)->Equals(Type::cast(that));         \
  }
  SIMD128_TYPES(SIMD128_VALUE)
#undef SIMD128_VALUE
  return false;
}


#define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
  bool Type::Equals(Type* that) {                                     \
    for (int lane = 0; lane < lane_count; ++lane) {                   \
      if (this->get_lane(lane) != that->get_lane(lane)) return false; \
    }                                                                 \
    return true;                                                      \
  }
SIMD128_TYPES(SIMD128_VALUE_EQUALS)
#undef SIMD128_VALUE_EQUALS
#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value =                                                      \
      READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value = READ_##field_type##_FIELD(                           \
      this, kValueOffset + (lane_count - lane - 1) * field_size);
#else
#error Unknown byte ordering
#endif

#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(                                         \
      this, kValueOffset + (lane_count - lane - 1) * field_size, value);
#else
#error Unknown byte ordering
#endif

#define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  lane_type type::get_lane(int lane) const {                              \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    return value;                                                         \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, lane_type value) {                        \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, value)         \
  }
SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
#undef SIMD128_NUMERIC_LANE_FNS


#define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  bool type::get_lane(int lane) const {                                   \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    DCHECK(value == 0 || value == -1);                                    \
    return value != 0;                                                    \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, bool value) {                             \
    DCHECK(lane < lane_count && lane >= 0);                               \
    int32_t int_val = value ? -1 : 0;                                     \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val)       \
  }

SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
#undef SIMD128_BOOLEAN_LANE_FNS

#undef SIMD128_READ_LANE
#undef SIMD128_WRITE_LANE
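// Worked example of the lane addressing above (illustrative): for an Int16x8
// on a big-endian target, get_lane(0) reads the field at
//
//   kValueOffset + (8 - 0 - 1) * kShortSize
//
// i.e. the highest-addressed 16-bit slot, so the lane numbering observed by
// JavaScript code matches the little-endian layout.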
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }


bool AllocationSite::IsMaybeTenure() {
  return pretenure_decision() == kMaybeTenure;
}


bool AllocationSite::PretenuringDecisionMade() {
  return pretenure_decision() != kUndecided;
}
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
ElementsKind AllocationSite::GetElementsKind() {
  DCHECK(!SitePointsToLiteral());
  int value = Smi::cast(transition_info())->value();
  return ElementsKindBits::decode(value);
}


void AllocationSite::SetElementsKind(ElementsKind kind) {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
                      SKIP_WRITE_BARRIER);
}


bool AllocationSite::CanInlineCall() {
  int value = Smi::cast(transition_info())->value();
  return DoNotInlineBit::decode(value) == 0;
}


void AllocationSite::SetDoNotInlineCall() {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
                      SKIP_WRITE_BARRIER);
}


bool AllocationSite::SitePointsToLiteral() {
  // If transition_info is a smi, then it represents an ElementsKind
  // for a constructed array. Otherwise, it must be a boilerplate
  // for an object or array literal.
  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}
inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}


AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
  int value = pretenure_data()->value();
  return PretenureDecisionBits::decode(value);
}


void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int value = pretenure_data()->value();
  set_pretenure_data(
      Smi::FromInt(PretenureDecisionBits::update(value, decision)),
      SKIP_WRITE_BARRIER);
}
bool AllocationSite::deopt_dependent_code() {
  int value = pretenure_data()->value();
  return DeoptDependentCodeBit::decode(value);
}


void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int value = pretenure_data()->value();
  set_pretenure_data(Smi::FromInt(DeoptDependentCodeBit::update(value, deopt)),
                     SKIP_WRITE_BARRIER);
}


int AllocationSite::memento_found_count() {
  int value = pretenure_data()->value();
  return MementoFoundCountBits::decode(value);
}
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}


int AllocationSite::memento_create_count() {
  return pretenure_create_count()->value();
}


void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(Smi::FromInt(count), SKIP_WRITE_BARRIER);
}
inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
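// Decision walk-through (illustrative numbers, assuming kPretenureRatio is
// 0.85 and kPretenureMinimumCreated is 100): a GC that finds 90 of 100
// created mementos yields ratio 0.9 >= 0.85, so an undecided site moves to
// kMaybeTenure, or directly to kTenure (requesting a deopt) when the scavenge
// ran at maximum semi-space capacity; a 50/100 site moves to kDontTenure.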
bool AllocationMemento::IsValid() {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}


AllocationSite* AllocationMemento::GetAllocationSite() {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
ACCESSORS(Oddball, type_of, String, kTypeOfOffset)


byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)


PropertyDetails PropertyCell::property_details() {
  return PropertyDetails(Smi::cast(property_details_raw()));
}


void PropertyCell::set_property_details(PropertyDetails details) {
  set_property_details_raw(details.AsSmi());
}
Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }

void WeakCell::clear() {
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}

void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  Heap* heap = GetHeap();
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we
  // process all weak cells.
  if (heap->InNewSpace(val)) {
    heap->RecordWrite(address(), kValueOffset);
  }
}

bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }

Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }

void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}

void WeakCell::clear_next(Heap* heap) {
  set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
}

bool WeakCell::next_cleared() { return next()->IsTheHole(); }

int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}

int JSObject::GetInternalFieldCount() {
  DCHECK(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->GetInObjectProperties();
}

int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}

Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}

void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}

void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}

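// Layout sketch (illustrative): for an instance with two internal fields and
// one in-object property the body is
//   | map | properties | elements | internal 0 | internal 1 | in-object 0 |
// so internal field i lives at GetHeaderSize() + i * kPointerSize while the
// in-object properties fill the remaining tail of the instance.
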
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  return map()->IsUnboxedDoubleField(index);
}

bool Map::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}

// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}

double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}

void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}

void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
  WRITE_DOUBLE_FIELD(this, index.offset(), value);
}

void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value->IsMutableHeapNumber());
    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}

void JSObject::WriteToField(int descriptor, Object* value) {
  DisallowHeapAllocation no_gc;

  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);

  DCHECK(details.type() == DATA);

  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized()) return;
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAtPut(index, value->Number());
    } else {
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value(value->Number());
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}

int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}

Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}

Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}

void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated =
        map->GetInObjectProperties() - map->unused_property_fields();
    DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}

bool JSObject::HasFastProperties() {
  DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}

bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, GetInObjectProperties());
  int external = NumberOfFields() - GetInObjectProperties();
  return external > limit;
}

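// Worked example: a map with 4 in-object properties and no unused fields has
// limit Max(12, 4) = 12 for a store that may originate from keyed code, so
// the 13th out-of-object field pushes the object toward dictionary mode; a
// CERTAINLY_NOT_STORE_FROM_KEYED store raises the minimum to 128.
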
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

bool Object::ToArrayLength(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}

bool Object::ToArrayIndex(uint32_t* index) {
  return ToArrayLength(index) && *index != kMaxUInt32;
}

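// Examples: Smi 7 yields index 7; the heap number 4294967295.0 is a valid
// array length (kMaxUInt32) but not a valid array index; -1 fails the sign
// check and 1.5 fails the uint32_t round trip, so both are neither.
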
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}

void Object::VerifyApiCallResultType() {
#if DEBUG
  if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
        IsHeapNumber() || IsSimd128Value() || IsUndefined() || IsTrue() ||
        IsFalse() || IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}

Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}

Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}

bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}

void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}

void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}

double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}

uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}

Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
                                     int index) {
  if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  } else {
    return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
  }
}

void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}

void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}

bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}

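// Note: the hole is a distinguished NaN bit pattern (kHoleNanInt64). set()
// above canonicalizes every incoming NaN to the quiet NaN, so no JavaScript
// value can alias the hole representation and is_the_hole() can safely
// compare raw bits.
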
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}

void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}

Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  if (raw->IsSmi()) return raw;
  DCHECK(raw->IsWeakCell());
  return WeakCell::cast(raw)->value();
}

bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}

void WeakFixedArray::Clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
}

int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}

int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}

void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}

template <class T>
T* WeakFixedArray::Iterator::Next() {
  if (list_ != NULL) {
    // Assert that list did not change during iteration.
    DCHECK_EQ(last_used_index_, list_->last_used_index());
    while (index_ < list_->Length()) {
      Object* item = list_->Get(index_++);
      if (item != Empty()) return T::cast(item);
    }
    list_ = NULL;
  }
  return NULL;
}

int ArrayList::Length() {
  if (FixedArray::cast(this)->length() == 0) return 0;
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}

void ArrayList::SetLength(int length) {
  return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
}

Object* ArrayList::Get(int index) {
  return FixedArray::cast(this)->get(kFirstIndex + index);
}

Object** ArrayList::Slot(int index) {
  return data_start() + kFirstIndex + index;
}

void ArrayList::Set(int index, Object* obj) {
  FixedArray::cast(this)->set(kFirstIndex + index, obj);
}

void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined());
  FixedArray::cast(this)
      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}

WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}

AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
      FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
  if (IsHeapNumber()) return kDoubleUnaligned;
  if (IsSimd128Value()) return kSimd128Unaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}

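// On 32-bit hosts kPointerSize is 4 but doubles want 8-byte alignment:
// double arrays get kDoubleAligned payloads, while HeapNumber and SIMD
// values request *un*aligned placement so that their payload, which starts
// after the 4-byte map word, lands on an 8- (or 16-) byte boundary.
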
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}

void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}

void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}

void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}

void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}

void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}

void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}

Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}

Object** FixedArray::RawFieldOfElementAt(int index) {
  return HeapObject::RawField(this, OffsetOfElementAt(index));
}

bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}

int DescriptorArray::number_of_descriptors() {
  DCHECK(length() >= kFirstIndex || IsEmpty());
  int len = length();
  return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
}

int DescriptorArray::number_of_descriptors_storage() {
  int len = length();
  return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
}

int DescriptorArray::NumberOfSlackDescriptors() {
  return number_of_descriptors_storage() - number_of_descriptors();
}

void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}

inline int DescriptorArray::number_of_entries() {
  return number_of_descriptors();
}

bool DescriptorArray::HasEnumCache() {
  return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
}

void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
  set(kEnumCacheIndex, array->get(kEnumCacheIndex));
}

FixedArray* DescriptorArray::GetEnumCache() {
  DCHECK(HasEnumCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
}

bool DescriptorArray::HasEnumIndicesCache() {
  if (IsEmpty()) return false;
  Object* object = get(kEnumCacheIndex);
  if (object->IsSmi()) return false;
  FixedArray* bridge = FixedArray::cast(object);
  return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
}

FixedArray* DescriptorArray::GetEnumIndicesCache() {
  DCHECK(HasEnumIndicesCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
}

Object** DescriptorArray::GetEnumCacheSlot() {
  DCHECK(HasEnumCache());
  return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
                              kEnumCacheOffset);
}

// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
                 int* out_insertion_index) {
  DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
  uint32_t hash = name->Hash();
  int limit = high;

  DCHECK(low <= high);

  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    uint32_t current_hash = entry->Hash();
    if (current_hash != hash) {
      if (out_insertion_index != NULL) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
    if (entry->Equals(name)) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
  return T::kNotFound;
}

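// Usage sketch (hypothetical values): for a DescriptorArray `descs` with
// `nof` descriptors sorted by hash,
//
//   int entry = BinarySearch<ALL_ENTRIES>(descs, name, 0, nof - 1, nof, NULL);
//
// first narrows [low, high] to the leftmost entry whose hash matches, then
// walks forward over hash collisions comparing the actual names, returning
// T::kNotFound (and, if requested, the insertion index) on a miss.
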
// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int len, int valid_entries,
                 int* out_insertion_index) {
  uint32_t hash = name->Hash();
  if (search_mode == ALL_ENTRIES) {
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->Hash();
      if (current_hash > hash) {
        if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
    if (out_insertion_index != NULL) *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK(len >= valid_entries);
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      Name* entry = array->GetKey(number);
      uint32_t current_hash = entry->Hash();
      if (current_hash == hash && entry->Equals(name)) return number;
    }
    return T::kNotFound;
  }
}

template <SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
  if (search_mode == VALID_ENTRIES) {
    SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_DCHECK(array->IsSortedNoDuplicates());
  }

  int nof = array->number_of_entries();
  if (nof == 0) {
    if (out_insertion_index != NULL) *out_insertion_index = 0;
    return T::kNotFound;
  }

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if ((search_mode == ALL_ENTRIES &&
       nof <= kMaxElementsForLinearSearch) ||
      (search_mode == VALID_ENTRIES &&
       valid_entries <= (kMaxElementsForLinearSearch * 3))) {
    return LinearSearch<search_mode>(array, name, nof, valid_entries,
                                     out_insertion_index);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
                                   out_insertion_index);
}

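// The cutoff is a small-array tradeoff: up to 8 entries (24 in VALID_ENTRIES
// mode, where the scan skips the sorted-index indirection) a branch-friendly
// linear scan is cheaper than binary search; beyond that the O(log n) search
// wins.
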
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
}

int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}

PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}

int Map::LastAdded() {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(number_of_own_descriptors > 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK(number <= instance_descriptors()->number_of_descriptors());
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK(length >= 0);
    DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
    DCHECK(length <= NumberOfOwnDescriptors());
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}

FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
        GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else {
    UNREACHABLE();
  }
  return NULL;
}

Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}

Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}

Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}

Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}

int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}

Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}

void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}

void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}

Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}

int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}

Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}

void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}

PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}

PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}

int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return GetDetails(descriptor_number).field_index();
}

HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  Object* value = GetValue(descriptor_number);
  if (value->IsWeakCell()) {
    if (WeakCell::cast(value)->cleared()) return HeapType::None();
    value = WeakCell::cast(value)->value();
  }
  return HeapType::cast(value);
}

Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}

Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  return GetValue(descriptor_number);
}

AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}

void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}

void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
  NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}

void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}

void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}

void DescriptorArray::SwapSortedKeys(int first, int second) {
  int first_key = GetSortedKeyIndex(first);
  SetSortedKey(first, GetSortedKeyIndex(second));
  SetSortedKey(second, first_key);
}

DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  DCHECK(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);
}

DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}

PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }

Object* DescriptorArray::Entry::GetCallbackObject() {
  return descs_->GetValue(index_);
}

int HashTableBase::NumberOfElements() {
  return Smi::cast(get(kNumberOfElementsIndex))->value();
}

int HashTableBase::NumberOfDeletedElements() {
  return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
}

int HashTableBase::Capacity() {
  return Smi::cast(get(kCapacityIndex))->value();
}

void HashTableBase::ElementAdded() {
  SetNumberOfElements(NumberOfElements() + 1);
}

void HashTableBase::ElementRemoved() {
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}

void HashTableBase::ElementsRemoved(int n) {
  SetNumberOfElements(NumberOfElements() - n);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
}

// static
int HashTableBase::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 4;
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  return Max(capacity, kMinCapacity);
}

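// Worked example: at_least_space_for = 5 gives
// RoundUpToPowerOfTwo32(10) = 16, so a freshly sized table is at most half
// full; the 2x headroom keeps probe chains short.
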
bool HashTableBase::IsKey(Object* k) {
  return !k->IsTheHole() && !k->IsUndefined();
}

void HashTableBase::SetNumberOfElements(int nof) {
  set(kNumberOfElementsIndex, Smi::FromInt(nof));
}

void HashTableBase::SetNumberOfDeletedElements(int nod) {
  set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
}

template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}

template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  return FindEntry(isolate, key, HashTable::Hash(key));
}

// Find entry for key otherwise return kNotFound.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
                                              int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}

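// Probe sequence sketch: with capacity 8 and hash 3, FirstProbe yields
// entry 3 and NextProbe advances by an increasing step modulo capacity,
// visiting 3, 4, 6, 1, ... until an undefined sentinel (never stored as a
// key) terminates the search.
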
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}

// ------------------------------------
// Cast operations

CAST_ACCESSOR(AccessorInfo)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(Bool16x8)
CAST_ACCESSOR(Bool32x4)
CAST_ACCESSOR(Bool8x16)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Float32x4)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(Int16x8)
CAST_ACCESSOR(Int32x4)
CAST_ACCESSOR(Int8x16)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Simd128Value)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(Uint16x8)
CAST_ACCESSOR(Uint32x4)
CAST_ACCESSOR(Uint8x16)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)

template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;

template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}

template <class Traits>
const FixedTypedArray<Traits>*
FixedTypedArray<Traits>::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<const FixedTypedArray<Traits>*>(object);
}

#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type)       \
  type* DeoptimizationInputData::name() {                \
    return type::cast(get(k##name##Index));              \
  }                                                      \
  void DeoptimizationInputData::Set##name(type* value) { \
    set(k##name##Index, value);                          \
  }

DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)

#undef DEFINE_DEOPT_ELEMENT_ACCESSORS

#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type* DeoptimizationInputData::name(int i) {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationInputData::Set##name(int i, type* value) { \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)

#undef DEFINE_DEOPT_ENTRY_ACCESSORS

BailoutId DeoptimizationInputData::AstId(int i) {
  return BailoutId(AstIdRaw(i)->value());
}

void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
  SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
}

int DeoptimizationInputData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}

int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }

BailoutId DeoptimizationOutputData::AstId(int index) {
  return BailoutId(Smi::cast(get(index * 2))->value());
}

void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
  set(index * 2, Smi::FromInt(id.ToInt()));
}

Smi* DeoptimizationOutputData::PcAndState(int index) {
  return Smi::cast(get(1 + index * 2));
}

void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
  set(1 + index * 2, offset);
}

void HandlerTable::SetRangeStart(int index, int value) {
  set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeEnd(int index, int value) {
  set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeHandler(int index, int offset,
                                   CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeDepth(int index, int value) {
  set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
}

void HandlerTable::SetReturnOffset(int index, int value) {
  set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
}

void HandlerTable::SetReturnHandler(int index, int offset,
                                    CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
}

#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST

template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}

template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}

SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)

int FreeSpace::Size() { return size(); }

FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}

FreeSpace** FreeSpace::next_address() {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
}

void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}

FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}

uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}

void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}

bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}

bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
}

ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)

bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}

bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}

Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}

Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
  if (name->IsSymbol()) return name;
  return String::Flatten(Handle<String>::cast(name), pretenure);
}

uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}

void String::Set(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  DCHECK(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}

bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}

String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}

template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        break;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}

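// Usage sketch: StringCharacterStream below is the canonical visitor. It
// implements VisitOneByteString/VisitTwoByteString; VisitFlat peels sliced
// strings, hands the flat chunk to the visitor, and returns the ConsString
// (or NULL) that remains for cons-tree iteration:
//
//   ConsString* cons = String::VisitFlat(this, string, offset);
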
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}

template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}

uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}

Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}

uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}

Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}

uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}

uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
}

void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
}

int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}

int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}

String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}

void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}

SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)

String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}

Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}

void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}

String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}

Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}

void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}

bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}

const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}

void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}

void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}

const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}

uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}

const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}

void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}

void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}

const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}

uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}

const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      int start) {
  return GetChars() + start;
}

int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }

void ConsStringIterator::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}

void ConsStringIterator::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}

void ConsStringIterator::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}

void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}

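// Note: frames_ acts as a fixed-size ring buffer; masking with kDepthMask
// wraps the index, so very deep cons trees overwrite their oldest frames and
// the iterator re-descends from the saved root when it runs past the cached
// depth, bounding memory regardless of string shape.
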
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}

StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}

void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}

bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}

void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}

void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}

int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }

byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}

ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}

int ByteArray::ByteArraySize() { return SizeFor(this->length()); }

Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}

byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}

int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}

Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

4069 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}

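// DataPtr() is written to cover both backing-store shapes: for an off-heap
// store, base_pointer() is Smi::FromInt(0) and external_pointer() holds the
// raw address, while for an on-heap store the two values sum to the address
// of the data inside the array object itself; both halves are reinterpreted
// as plain integers before the addition.
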
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case FIXED_##TYPE##_ARRAY_TYPE:                       \
    element_size = size;                                \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return element_size;
}


int FixedTypedArrayBase::DataSize(InstanceType type) {
  if (base_pointer() == Smi::FromInt(0)) return 0;
  return length() * ElementSize(type);
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}

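// Worked example (illustrative): a fixed Float64 array of length 3 needs
// kDataOffset + 3 * 8 bytes of storage, which OBJECT_POINTER_ALIGN then
// rounds up to the next allocation-aligned size.
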
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}


double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  return ptr[index];
}


template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  ptr[index] = value;
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}


template <> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}

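// Illustrative values for the clamped specialization above:
//   from_int(-5)  -> 0
//   from_int(300) -> 0xFF
//   from_int(42)  -> 42
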
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


template <> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and values less than zero, which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}

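// The !(value > 0) test above is deliberate: it is true for NaN as well as
// for all values <= 0, so NaNs clamp to zero without a separate check.
// lrint() rounds in the current rounding mode (round-to-nearest-even by
// default), so e.g. 0.5 -> 0 and 1.5 -> 2, matching the round-half-to-even
// behavior the Uint8ClampedArray spec asks for.
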
template <> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


template <> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}


template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::cast(value)->value();
    cast_value = from_int(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from_double(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined());
  }
  set(index, cast_value);
}

Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}

int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties_or_constructor_function_index() {
  return READ_BYTE_FIELD(this,
                         kInObjectPropertiesOrConstructorFunctionIndexOffset);
}


void Map::set_inobject_properties_or_constructor_function_index(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
                   static_cast<byte>(value));
}


int Map::GetInObjectProperties() {
  DCHECK(IsJSObjectMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetInObjectProperties(int value) {
  DCHECK(IsJSObjectMap());
  set_inobject_properties_or_constructor_function_index(value);
}


int Map::GetConstructorFunctionIndex() {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap());
  set_inobject_properties_or_constructor_function_index(value);
}

int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= GetInObjectProperties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}

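// In-object properties live at the end of the instance, so after the
// adjustment above |index| is zero or negative: e.g. with 4 in-object
// properties, property 3 (the last one) sits at
// instance_size() - 1 * kPointerSize.
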
Handle<Map> Map::CopyInstallDescriptorsForTesting(
    Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> layout_descriptor) {
  return CopyInstallDescriptors(map, new_descriptor, descriptors,
                                layout_descriptor);
}


int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access their
    // length synchronized.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == BYTECODE_ARRAY_TYPE) {
    return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access their
    // length synchronized.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}

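// Maps of fixed-size objects store the size directly, so the early return
// above is the common path; only the variable-sized instance types listed
// here (arrays, sequential strings, free space, code) pay for the type
// dispatch.
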
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}

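// Instance sizes are stored in words (hence the kPointerSizeLog2 shifts
// here and in instance_size()), so the single byte at kInstanceSizeOffset
// can describe objects up to 255 * kPointerSize bytes.
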
void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}

void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}

bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}


bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}


void Map::set_is_hidden_prototype() {
  set_bit_field(bit_field() | (1 << kIsHiddenPrototype));
}


bool Map::is_hidden_prototype() {
  return ((1 << kIsHiddenPrototype) & bit_field()) != 0;
}


void Map::set_has_indexed_interceptor() {
  set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}


bool Map::has_indexed_interceptor() {
  return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}


void Map::set_is_undetectable() {
  set_bit_field(bit_field() | (1 << kIsUndetectable));
}


bool Map::is_undetectable() {
  return ((1 << kIsUndetectable) & bit_field()) != 0;
}


void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }


bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }


void Map::set_has_named_interceptor() {
  set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}


bool Map::has_named_interceptor() {
  return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}

void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}


bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}

void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() const {
  return IsPrototypeMapBits::decode(bit_field2());
}


void Map::set_elements_kind(ElementsKind elements_kind) {
  DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
  DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
  DCHECK(this->elements_kind() == elements_kind);
}


ElementsKind Map::elements_kind() {
  return Map::ElementsKindBits::decode(bit_field2());
}


bool Map::has_fast_smi_elements() {
  return IsFastSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() {
  return IsFastObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() {
  return IsFastSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() {
  return IsFastDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }

bool Map::has_sloppy_arguments_elements() {
  return IsSloppyArgumentsElements(elements_kind());
}

bool Map::has_fixed_typed_array_elements() {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() {
  return IsDictionaryElementsKind(elements_kind());
}


void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}


Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}

void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}


bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}


void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}


void Map::set_is_strong() {
  set_bit_field3(IsStrong::update(bit_field3(), true));
}


bool Map::is_strong() {
  return IsStrong::decode(bit_field3());
}


void Map::set_counter(int value) {
  set_bit_field3(Counter::update(bit_field3(), value));
}


int Map::counter() { return Counter::decode(bit_field3()); }


void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}

bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == DATA_CONSTANT) return true;
  }
  return false;
}


void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() {
  // Only JSObject and subtypes have map transitions and back pointers.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Map::IsPrimitiveMap() {
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSObjectMap() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() {
  InstanceType type = instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}
bool Map::IsJSGlobalProxyMap() {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsGlobalObjectMap() {
  const InstanceType type = instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
}


bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}

int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}


void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}


void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}

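// ExtendGroup makes room for one more entry in |group|: walking the later
// groups from last to first, it moves each group's first element into the
// free slot just past that group's end, shifting every later group over by
// one without disturbing its (unordered) contents.
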
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


bool Code::IsCodeStubOrIC() {
  return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
         kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == TO_BOOLEAN_IC;
}


bool Code::IsJavaScriptCode() {
  if (kind() == FUNCTION || kind() == OPTIMIZED_FUNCTION) {
    return true;
  }
  Handle<Code> interpreter_entry =
      GetIsolate()->builtins()->InterpreterEntryTrampoline();
  return interpreter_entry.location() != nullptr && *interpreter_entry == this;
}

InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to a code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}


ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}

Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}


// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}


inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}


void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return ProfilerTicksField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_profiler_ticks(int ticks) {
  if (kind() == FUNCTION) {
    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
    unsigned updated = ProfilerTicksField::update(previous, ticks);
    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
  }
}


int Code::builtin_index() {
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}


void Code::set_builtin_index(int index) {
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}


unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}


void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}

uint16_t Code::to_boolean_state() { return extra_ic_state(); }


bool Code::has_function_cache() {
  DCHECK(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_has_function_cache(bool flag) {
  DCHECK(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}

bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}


bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
bool Code::is_handler() { return kind() == HANDLER; }
bool Code::is_load_stub() { return kind() == LOAD_IC; }
bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
bool Code::is_store_stub() { return kind() == STORE_IC; }
bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
bool Code::is_call_stub() { return kind() == CALL_IC; }
bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }


bool Code::embeds_maps_weakly() {
  Kind k = kind();
  return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
          k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
         ic_state() == MONOMORPHIC;
}

Address Code::constant_pool() {
  Address constant_pool = NULL;
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < instruction_size()) {
      constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
    }
  }
  return constant_pool;
}


Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
                               ExtraICState extra_ic_state, StubType type,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}

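// The encode/decode helpers are symmetric, so round trips such as
//   Code::Flags f = Code::ComputeFlags(Code::STUB, MONOMORPHIC);
//   DCHECK_EQ(Code::STUB, Code::ExtractKindFromFlags(f));
// hold by construction (illustrative sketch; relies on the default
// arguments ComputeFlags is declared with in objects.h).
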
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}


Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}


Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}


Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}

Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

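// The instruction stream starts kHeaderSize bytes into a Code object, so
// subtracting kHeaderSize from a call target address recovers the object's
// (untagged) start address; FromAddress re-tags it as a heap object pointer.
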
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


bool Code::CanContainWeakObjects() {
  // is_turbofanned() implies !can_have_weak_objects().
  DCHECK(!is_optimized_code() || !is_turbofanned() || !can_have_weak_objects());
  return is_optimized_code() && can_have_weak_objects();
}


bool Code::IsWeakObject(Object* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSObject() || object->IsJSProxy()) {
    // JSProxy is handled like JSObject because it can morph into one.
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  if (object->IsFixedArray()) {
    // Contexts of inlined functions are embedded in optimized code.
    Map* map = HeapObject::cast(object)->map();
    Heap* heap = map->GetHeap();
    return FLAG_weak_embedded_objects_in_optimized_code &&
           map == heap->function_context_map();
  }
  return false;
}

class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};

Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}


LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}


bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
#endif
  }
}


void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(StaticVisitorBase::GetVisitorId(this));
  }
}

ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)


void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

LayoutDescriptor* Map::GetLayoutDescriptor() {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}


void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, the layout descriptor must be updated
  // too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}

Object* Map::GetBackPointer() {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetIsolate()->heap()->undefined_value();
}


Map* Map::ElementsTransitionMap() {
  return TransitionArray::SearchSpecial(
      this, GetHeap()->elements_transition_symbol());
}

ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)


Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}


void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}


void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}


ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)

Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}


void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  DCHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
ACCESSORS(PrototypeInfo, constructor_name, Object, kConstructorNameOffset)

ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
          kScopeInfoOffset)
ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
          kExtensionOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)

Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
void Script::set_hide_source(bool value) {
  set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}
ScriptOriginOptions Script::origin_options() {
  return ScriptOriginOptions((flags()->value() & kOriginOptionsMask) >>
                             kOriginOptionsShift);
}
void Script::set_origin_options(ScriptOriginOptions origin_options) {
  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
  set_flags(Smi::FromInt((flags()->value() & ~kOriginOptionsMask) |
                         (origin_options.Flags() << kOriginOptionsShift)));
}

ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, code, Code, kCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)

SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
               kNeverCompiled)

#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

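// On 64-bit targets, two 32-bit "pseudo smis" share one pointer-sized slot.
// The LO half is the one holding the word's smi tag bit, so its accessor
// below stores the value shifted left by one with the tag bit cleared; the
// whole word then still reads as a smi-tagged value and the GC skips it.
// The HI half carries no tag bit and can hold a raw int, which is why
// PSEUDO_SMI_ACCESSORS_HI is just INT_ACCESSORS.
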
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                  \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
  int holder::name() const {                                           \
    int value = READ_INT_FIELD(this, offset);                          \
    DCHECK(kHeapObjectTag == 1);                                       \
    DCHECK((value & kHeapObjectTag) == 0);                             \
    return value >> 1;                                                 \
  }                                                                    \
  void holder::set_##name(int value) {                                 \
    DCHECK(kHeapObjectTag == 1);                                       \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);     \
  }


#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)                  \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif

BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
}

LanguageMode SharedFunctionInfo::language_mode() {
  STATIC_ASSERT(LANGUAGE_END == 3);
  return construct_language_mode(
      BooleanBit::get(compiler_hints(), kStrictModeFunction),
      BooleanBit::get(compiler_hints(), kStrongModeFunction));
}


void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LANGUAGE_END == 3);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
  hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
  set_compiler_hints(hints);
}

FunctionKind SharedFunctionInfo::kind() {
  return FunctionKindBits::decode(compiler_hints());
}


void SharedFunctionInfo::set_kind(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  int hints = compiler_hints();
  hints = FunctionKindBits::update(hints, kind);
  set_compiler_hints(hints);
}

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
               kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
               kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
               kIsAccessorFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
               kIsDefaultConstructor)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)

bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalOneByteString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}

void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}


Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}

void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
#ifdef DEBUG
  Code::VerifyRecompiledCode(code(), value);
#endif  // DEBUG

  set_code(value);

  if (is_compiled()) set_never_compiled(false);
}

ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


bool SharedFunctionInfo::is_compiled() {
  Builtins* builtins = GetIsolate()->builtins();
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
  return code() != builtins->builtin(Builtins::kCompileLazy);
}

bool SharedFunctionInfo::has_simple_parameters() {
  return scope_info()->HasSimpleParameters();
}


bool SharedFunctionInfo::HasDebugInfo() {
  bool has_debug_info = debug_info()->IsStruct();
  DCHECK(!has_debug_info || HasDebugCode());
  return has_debug_info;
}


DebugInfo* SharedFunctionInfo::GetDebugInfo() {
  DCHECK(HasDebugInfo());
  return DebugInfo::cast(debug_info());
}


bool SharedFunctionInfo::HasDebugCode() {
  return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


bool SharedFunctionInfo::HasBytecodeArray() {
  return function_data()->IsBytecodeArray();
}


BytecodeArray* SharedFunctionInfo::bytecode_array() {
  DCHECK(HasBytecodeArray());
  return BytecodeArray::cast(function_data());
}

5955 int SharedFunctionInfo::ic_age() {
5956 return ICAgeBits::decode(counters());
5960 void SharedFunctionInfo::set_ic_age(int ic_age) {
5961 set_counters(ICAgeBits::update(counters(), ic_age));
5965 int SharedFunctionInfo::deopt_count() {
5966 return DeoptCountBits::decode(counters());
5970 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5971 set_counters(DeoptCountBits::update(counters(), deopt_count));
5975 void SharedFunctionInfo::increment_deopt_count() {
5976 int value = counters();
5977 int deopt_count = DeoptCountBits::decode(value);
5978 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5979 set_counters(DeoptCountBits::update(value, deopt_count));
5983 int SharedFunctionInfo::opt_reenable_tries() {
5984 return OptReenableTriesBits::decode(counters());
5988 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5989 set_counters(OptReenableTriesBits::update(counters(), tries));
5993 int SharedFunctionInfo::opt_count() {
5994 return OptCountBits::decode(opt_count_and_bailout_reason());
5998 void SharedFunctionInfo::set_opt_count(int opt_count) {
5999 set_opt_count_and_bailout_reason(
6000 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6004 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
6005 return static_cast<BailoutReason>(
6006 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6010 bool SharedFunctionInfo::has_deoptimization_support() {
6011 Code* code = this->code();
6012 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6016 void SharedFunctionInfo::TryReenableOptimization() {
6017 int tries = opt_reenable_tries();
6018 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6019 // We reenable optimization whenever the number of tries is a large
6020 // enough power of 2.
6021 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6022 set_optimization_disabled(false);
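// Worked example of the backoff: `tries & (tries - 1)` is zero exactly when
// tries is a power of two, so with the `tries >= 16` floor optimization is
// re-enabled on the 16th, 32nd, 64th, ... attempt. Retries thus become
// exponentially rarer for functions that keep hitting the same bailout.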

void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
  set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
      opt_count_and_bailout_reason(), reason));
}


bool SharedFunctionInfo::IsBuiltin() {
  Object* script_obj = script();
  if (script_obj->IsUndefined()) return true;
  Script* script = Script::cast(script_obj);
  Script::Type type = static_cast<Script::Type>(script->type()->value());
  return type != Script::TYPE_NORMAL;
}


bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }


bool JSFunction::IsBuiltin() { return shared()->IsBuiltin(); }


bool JSFunction::IsSubjectToDebugging() {
  return shared()->IsSubjectToDebugging();
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->internal_formal_parameter_count() !=
         SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
         initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
}

Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}


void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) {
    Object* prototype = map()->GetConstructor();
    // The map must have a prototype in that field, not a back pointer.
    DCHECK(!prototype->IsMap());
    return prototype;
  }
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  Builtins* builtins = GetIsolate()->builtins();
  return code() != builtins->builtin(Builtins::kCompileLazy) &&
         code() != builtins->builtin(Builtins::kCompileOptimized) &&
         code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::has_simple_parameters() {
  return shared()->has_simple_parameters();
}


FixedArray* JSFunction::literals() {
  DCHECK(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  DCHECK(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  DCHECK(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  DCHECK(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  DCHECK(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  DCHECK(!shared()->bound());
  return literals()->length();
}


Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSCollection, table, Object, kTableOffset)


#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)

bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}
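// The two DCHECKs above pin down the encoding this test relies on: the
// closed sentinel is 0 and the executing sentinel is negative, so any
// positive continuation value is the resume point of a suspended generator.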

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


ACCESSORS(JSValue, value, Object, kValueOffset)


HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}


ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)


void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}


Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}


void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


int Code::ExecutableSize() {
  // Check that the assumptions about the layout of the code object hold.
  DCHECK_EQ(static_cast<int>(instruction_start() - address()),
            Code::kHeaderSize);
  return instruction_size() + Code::kHeaderSize;
}


int Code::CodeSize() { return SizeFor(body_size()); }


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)


void JSArrayBuffer::set_bit_field(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
#if V8_TARGET_LITTLE_ENDIAN
    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
#else
    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
#endif
  }
  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}
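// When the 32-bit bit field lives in a pointer-sized slot (64-bit targets),
// the branch above zeroes the half of the slot that kBitFieldOffset does not
// cover (which half that is depends on endianness), so the unused bytes of
// the slot always hold a deterministic value.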

uint32_t JSArrayBuffer::bit_field() const {
  return READ_UINT32_FIELD(this, kBitFieldOffset);
}


bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }


void JSArrayBuffer::set_is_external(bool value) {
  set_bit_field(IsExternal::update(bit_field(), value));
}


bool JSArrayBuffer::is_neuterable() {
  return IsNeuterable::decode(bit_field());
}


void JSArrayBuffer::set_is_neuterable(bool value) {
  set_bit_field(IsNeuterable::update(bit_field(), value));
}


bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }


void JSArrayBuffer::set_was_neutered(bool value) {
  set_bit_field(WasNeutered::update(bit_field(), value));
}


bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }


void JSArrayBuffer::set_is_shared(bool value) {
  set_bit_field(IsShared::update(bit_field(), value));
}


Object* JSArrayBufferView::byte_offset() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteOffsetOffset));
}


void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteOffsetOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
}


Object* JSArrayBufferView::byte_length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kByteLengthOffset));
}


void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
}


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
#ifdef VERIFY_HEAP
ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
#endif


bool JSArrayBufferView::WasNeutered() const {
  return JSArrayBuffer::cast(buffer())->was_neutered();
}


Object* JSTypedArray::length() const {
  if (WasNeutered()) return Smi::FromInt(0);
  return Object::cast(READ_FIELD(this, kLengthOffset));
}


uint32_t JSTypedArray::length_value() const {
  if (WasNeutered()) return 0;
  uint32_t index = 0;
  CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
  return index;
}


void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
}


#ifdef VERIFY_HEAP
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif


ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}

ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK(!IsSloppyArgumentsElements(kind) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasFastArgumentsElements() {
  return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSlowArgumentsElements() {
  return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return IsSloppyArgumentsElements(GetElementsKind());
}


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}


#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject())                                        \
      return false;                                                    \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(!IsGlobalObject());
  return NameDictionary::cast(properties());
}


GlobalDictionary* JSObject::global_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(IsGlobalObject());
  return GlobalDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}

bool Name::IsPrivate() {
  return this->IsSymbol() && Symbol::cast(this)->is_private();
}


StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
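// AddCharacterCore and GetHashCore together implement Bob Jenkins'
// one-at-a-time hash: each character is mixed in with an add, a shift-add
// and a shift-xor, and GetHashCore applies the final avalanche steps. A
// result whose String::kHashBitMask bits are all zero would be
// indistinguishable from "hash not yet computed", so it is remapped to the
// non-zero constant kZeroHash.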

uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
                                          const uc16* chars, int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    running_hash = AddCharacterCore(running_hash, *chars++);
  }
  return running_hash;
}


uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
                                                 const char* chars,
                                                 int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    uint16_t c = static_cast<uint16_t>(*chars++);
    running_hash = AddCharacterCore(running_hash, c);
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
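// The overflow guard above is a precomputed bound rather than a 64-bit
// multiply: 429496729 is floor(2^32 / 10), and ((d + 3) >> 3) evaluates to 0
// for digits 0..4 and to 1 for digits 5..9. This rejects exactly the values
// for which array_index_ * 10 + d would exceed 2^32 - 2, the largest valid
// array index (e.g. 429496729 * 10 + 5 == 2^32 - 1 is correctly rejected,
// while 429496729 * 10 + 4 == 2^32 - 2 is accepted).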

template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
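// Illustrative use, assuming a Heap* is in scope (HashSeed() is the
// isolate-specific seed used for all string hashing): hash a one-byte
// literal and extract the raw hash from the returned hash field.
//
//   uint32_t field = StringHasher::HashSequentialString(
//       reinterpret_cast<const uint8_t*>("foo"), 3, heap->HashSeed());
//   uint32_t hash = field >> Name::kHashShift;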

IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
    : StringHasher(len, seed) {}


uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}


void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}


String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}

MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name,
                                                 LanguageMode language_mode) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetProperty(&it, language_mode);
}


Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), object, name, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Isolate* isolate = object->GetIsolate();
    Handle<Name> name = isolate->factory()->Uint32ToString(index);
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetElementAttributes(object, index);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  // Call the "has" trap on proxies.
  if (object->IsJSProxy()) {
    Isolate* isolate = object->GetIsolate();
    Handle<Name> name = isolate->factory()->Uint32ToString(index);
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }

  Maybe<PropertyAttributes> result = GetOwnElementAttributes(object, index);
  return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
}


Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}


Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::is_special_data_property() {
  return BooleanBit::get(flag(), kSpecialDataProperty);
}


void AccessorInfo::set_is_special_data_property(bool value) {
  set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}


bool AccessorInfo::HasExpectedReceiverType() {
  return expected_receiver_type()->IsFunctionTemplateInfo();
}


Object* AccessorPair::get(AccessorComponent component) {
  return component == ACCESSOR_GETTER ? getter() : setter();
}


void AccessorPair::set(AccessorComponent component, Object* value) {
  if (component == ACCESSOR_GETTER) {
    set_getter(value);
  } else {
    set_setter(value);
  }
}


void AccessorPair::SetComponents(Object* getter, Object* setter) {
  if (!getter->IsNull()) set_getter(getter);
  if (!setter->IsNull()) set_setter(setter);
}


bool AccessorPair::Equals(AccessorPair* pair) {
  return (this == pair) || pair->Equals(getter(), setter());
}


bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
  return (getter() == getter_value) && (setter() == setter_value);
}


bool AccessorPair::ContainsAccessor() {
  return IsJSAccessor(getter()) || IsJSAccessor(setter());
}


bool AccessorPair::IsJSAccessor(Object* obj) {
  return obj->IsSpecFunction() || obj->IsUndefined();
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
}


template <typename Key>
template <typename Dictionary>
void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
                                        Handle<Object> key,
                                        Handle<Object> value,
                                        PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 3);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  dict->set(index + 2, details.AsSmi());
}


template <typename Dictionary>
void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
                                     Handle<Object> key, Handle<Object> value,
                                     PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 2);
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  DCHECK(value->IsPropertyCell());
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  PropertyCell::cast(*value)->set_property_details(details);
}

bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  DCHECK(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}


bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}


Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}


template <typename Dictionary>
PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  return cell->property_details();
}


template <typename Dictionary>
void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
                                         PropertyDetails value) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  cell->set_property_details(value);
}


template <typename Dictionary>
bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
  DCHECK(dict->ValueAt(entry)->IsPropertyCell());
  return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
}


bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}


Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}


Object* OrderedHashMap::ValueAt(int entry) {
  return get(EntryToIndex(entry) + kValueOffset);
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
                           : *key == other;
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash =
      key->IsWeakCell()
          ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
          : reinterpret_cast<intptr_t>(*key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}
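// Keys hash by the identity of the referenced object: if the key (or the
// stored entry, in IsMatch above) is a WeakCell, the hash is taken from the
// cell's target pointer, so an object and its weak wrapper land in the same
// bucket. The pointer is simply truncated to 32 bits, which is adequate for
// a hash value.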

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}


bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }


bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }


bool ScopeInfo::HasSimpleParameters() {
  return HasSimpleParametersField::decode(Flags());
}


#define SCOPE_INFO_FIELD_ACCESSORS(name)                                      \
  void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
  int ScopeInfo::name() {                                                     \
    if (length() > 0) {                                                       \
      return Smi::cast(get(k##name))->value();                                \
    } else {                                                                  \
      return 0;                                                               \
    }                                                                         \
  }
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
#undef SCOPE_INFO_FIELD_ACCESSORS


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}
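// For example, growing an array of old_size 8 with plenty of headroom yields
// Min(max_slack, 8 / 4) == 2 spare slots, while tiny arrays (old_size < 4)
// always get exactly one.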

void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
  // If the new array won't fit in some non-trivial fraction of the max old
  // space size, then force it to go dictionary mode.
  uint32_t max_fast_array_size =
      static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
  return new_length >= max_fast_array_size;
}
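// Illustrative numbers: with a 1 GB max old generation and 8-byte doubles,
// the threshold is (2^30 / 8) / 4 = 2^25, so setting a length of roughly
// 33.5 million or more switches the array to dictionary elements.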

bool JSArray::AllowsSetLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasFixedTypedArrayElements());
  return result;
}


void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}


int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}


void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}

SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


int FixedArray::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return SizeFor(reinterpret_cast<FixedArray*>(object)->synchronized_length());
}


int StructBodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) {
  v->VisitPointer(
      reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
}


template <typename StaticVisitor>
void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() {
  StaticVisitor::VisitPointer(
      reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
}


void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalOneByteStringResource Resource;
  v->VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template <typename StaticVisitor>
void ExternalOneByteString::ExternalOneByteStringIterateBody() {
  typedef v8::String::ExternalOneByteStringResource Resource;
  StaticVisitor::VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
                                                    int start_offset,
                                                    int end_offset,
                                                    ObjectVisitor* v) {
  DCHECK(FLAG_unbox_double_fields);
  DCHECK(IsAligned(start_offset, kPointerSize) &&
         IsAligned(end_offset, kPointerSize));

  LayoutDescriptorHelper helper(object->map());
  DCHECK(!helper.all_fields_tagged());

  for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
    // Visit all tagged fields.
    if (helper.IsTagged(offset)) {
      v->VisitPointer(HeapObject::RawField(object, offset));
    }
  }
}


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, end_offset));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
  }
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, object_size));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
  }
}


template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}


void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}


void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}


Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}


String::SubStringRange::SubStringRange(String* string, int first, int length)
    : string_(string),
      first_(first),
      length_(length == -1 ? string->length() : length) {}


class String::SubStringRange::iterator final {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};


String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}


String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}

#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT8_FIELD
#undef WRITE_UINT8_FIELD
#undef READ_INT8_FIELD
#undef WRITE_INT8_FIELD
#undef READ_UINT16_FIELD
#undef WRITE_UINT16_FIELD
#undef READ_INT16_FIELD
#undef WRITE_INT16_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_INT32_FIELD
#undef WRITE_INT32_FIELD
#undef READ_FLOAT_FIELD
#undef WRITE_FLOAT_FIELD
#undef READ_UINT64_FIELD
#undef WRITE_UINT64_FIELD
#undef READ_INT64_FIELD
#undef WRITE_INT64_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_