// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts-inl.h"
#include "src/conversions-inl.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/types-inl.h"
#include "src/v8memory.h"
namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}

Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
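// For example, if bit 30 of value_ is set, shifting left by one and then
// arithmetically shifting right by one copies bit 30 into bit 31, so the two
// top bits agree and the 31-bit payload round-trips through a Smi.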
int PropertyDetails::field_width_in_words() const {
  DCHECK(location() == kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
#define TYPE_CHECKER(type, instancetype)                                    \
  bool Object::Is##type() const {                                           \
    return Object::IsHeapObject() &&                                        \
           HeapObject::cast(this)->map()->instance_type() == instancetype;  \
  }

#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }

#define INT_ACCESSORS(holder, name, offset)                          \
  int holder::name() const { return READ_INT_FIELD(this, offset); }  \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }

// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }

#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
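// As an illustration, ACCESSORS(JSObject, properties, FixedArray,
// kPropertiesOffset) further below expands to roughly:
//
//   FixedArray* JSObject::properties() const {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kPropertiesOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset, value,
//                               mode);
//   }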
bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}

// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->external_map();
}

bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }

TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)

#define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
  bool Object::Is##Type() const {                                     \
    return Object::IsHeapObject() &&                                  \
           HeapObject::cast(this)->map() ==                           \
               HeapObject::cast(this)->GetHeap()->type##_map();       \
  }
SIMD128_TYPES(SIMD128_TYPE_CHECKER)
#undef SIMD128_TYPE_CHECKER
bool Object::IsString() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}

bool Object::IsName() const {
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() <= LAST_NAME_TYPE;
}

bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}

bool Object::IsCallable() const {
  return Object::IsHeapObject() && HeapObject::cast(this)->map()->is_callable();
}

bool Object::IsConstructor() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->is_constructor();
}

bool Object::IsSpecObject() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() >=
             FIRST_SPEC_OBJECT_TYPE;
}

bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}

bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
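// The single masked compare above checks two properties at once: the
// kIsNotStringMask bits must equal kStringTag (the value is a string) and
// the kIsNotInternalizedMask bit must equal kInternalizedTag (the string is
// internalized).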
bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}

bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}

bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}

bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}

bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}
bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    Object* temp = first;
    first = second;
    second = temp;
  }
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}

Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}

Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}

bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}

bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}

bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}

bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string. Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}

bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}

bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}

bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}

bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}

bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}

StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}

uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}

uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}

STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);

bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}

bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}

bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}

STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);

bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}

STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}

template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}

Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}

Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}

Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};

class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};

class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() override {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) override;
  Handle<Object> AsHandle(Isolate* isolate) override;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};

class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};

// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
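// These keys all follow the same HashTableKey protocol: a table lookup first
// compares hashes via Hash()/HashForObject() and only calls IsMatch() on a
// hash hit; AsHandle() materializes the key as a heap string when it must be
// inserted. A minimal usage sketch (the local variable names are illustrative):
//
//   OneByteStringKey key(one_byte_chars, isolate->heap()->HashSeed());
//   Handle<String> result = StringTable::LookupKey(isolate, &key);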
bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}

TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)

bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}

#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER

bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}
bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() >=
             FIRST_JS_RECEIVER_TYPE;
}

bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
}

bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}

TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSIteratorResult, JS_ITERATOR_RESULT_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)

bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}

bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}

bool Object::IsArrayList() const { return IsFixedArray(); }

bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}

bool Object::IsTransitionArray() const {
  return IsFixedArray();
}

bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }

bool Object::IsLiteralsArray() const { return IsFixedArray(); }
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
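// In other words, a plausible deoptimization input data array has length
// kFirstDeoptEntryIndex + n * kDeoptEntrySize for some n >= 0: a fixed
// header of per-array fields followed by whole deopt entries.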
bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool Object::IsHandlerTable() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}

bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
          map == heap->catch_context_map() ||
          map == heap->with_context_map() ||
          map == heap->native_context_map() ||
          map == heap->block_context_map() ||
          map == heap->module_context_map() ||
          map == heap->script_context_map());
}

bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->native_context_map();
}

bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}

bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->scope_info_map();
}

TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)

template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)

bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}

TYPE_CHECKER(Foreign, FOREIGN_TYPE)

bool Object::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}

TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}

TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)

template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->hash_table_map();
}

bool Object::IsWeakHashTable() const {
  return IsHashTable();
}

bool Object::IsDictionary() const {
  return IsHashTable() &&
         this != HeapObject::cast(this)->GetHeap()->string_table();
}

bool Object::IsNameDictionary() const {
  return IsDictionary();
}

bool Object::IsGlobalDictionary() const { return IsDictionary(); }

bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}

bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}

bool Object::IsStringTable() const {
  return IsHashTable();
}

bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}

int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}
bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}

bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}

bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}

bool Object::IsMapCache() const {
  return IsHashTable();
}

bool Object::IsObjectHashTable() const {
  return IsHashTable();
}

bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}

bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}

bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}

bool Object::IsPrimitive() const {
  return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
}
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}

bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsGlobalObjectMap();
}

TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)

bool Object::IsUndetectableObject() const {
  return IsHeapObject() &&
         HeapObject::cast(this)->map()->is_undetectable();
}

bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}

bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}

#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() const {                                         \
    return Object::IsHeapObject() &&                                      \
           HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}

bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}

bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}

bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}

bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}

bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}

bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}

bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}

double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi()
             ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
             : reinterpret_cast<const HeapNumber*>(this)->value();
}
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}

bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}

Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields && IsUninitialized()) {
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}

ElementsKind Object::OptimalElementsKind() {
  if (IsSmi()) return FAST_SMI_ELEMENTS;
  if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
  return FAST_ELEMENTS;
}

bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsNone()) {
    return false;
  } else if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  }
  return true;
}
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}

MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}

bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}

MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name,
                                        LanguageMode language_mode) {
  LookupIterator it(object, name);
  return GetProperty(&it, language_mode);
}

MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return GetProperty(&it, language_mode);
}

MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED);
}

Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
    Isolate* isolate, Handle<Object> receiver) {
  PrototypeIterator iter(isolate, receiver);
  while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return PrototypeIterator::GetCurrent(iter);
    }
    iter.Advance();
  }
  return PrototypeIterator::GetCurrent(iter);
}

MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        const char* name,
                                        LanguageMode language_mode) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(object, str, language_mode);
}
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
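// The barrier above is two independent halves: RecordWrite on the incremental
// marker keeps concurrent marking consistent with the mutator, while the
// InNewSpace check maintains the old-to-new remembered set so the scavenger
// can find old-space slots that point into new space.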
1249 #define READ_DOUBLE_FIELD(p, offset) \
1250 ReadDoubleValue(FIELD_ADDR_CONST(p, offset))
1252 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1253 WriteDoubleValue(FIELD_ADDR(p, offset), value)
1255 #define READ_INT_FIELD(p, offset) \
1256 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1258 #define WRITE_INT_FIELD(p, offset, value) \
1259 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1261 #define READ_INTPTR_FIELD(p, offset) \
1262 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1264 #define WRITE_INTPTR_FIELD(p, offset, value) \
1265 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1267 #define READ_UINT8_FIELD(p, offset) \
1268 (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))
1270 #define WRITE_UINT8_FIELD(p, offset, value) \
1271 (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)
1273 #define READ_INT8_FIELD(p, offset) \
1274 (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))
1276 #define WRITE_INT8_FIELD(p, offset, value) \
1277 (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)
1279 #define READ_UINT16_FIELD(p, offset) \
1280 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1282 #define WRITE_UINT16_FIELD(p, offset, value) \
1283 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1285 #define READ_INT16_FIELD(p, offset) \
1286 (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))
1288 #define WRITE_INT16_FIELD(p, offset, value) \
1289 (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)
1291 #define READ_UINT32_FIELD(p, offset) \
1292 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1294 #define WRITE_UINT32_FIELD(p, offset, value) \
1295 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1297 #define READ_INT32_FIELD(p, offset) \
1298 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1300 #define WRITE_INT32_FIELD(p, offset, value) \
1301 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1303 #define READ_FLOAT_FIELD(p, offset) \
1304 (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))
1306 #define WRITE_FLOAT_FIELD(p, offset, value) \
1307 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
1309 #define READ_UINT64_FIELD(p, offset) \
1310 (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
1312 #define WRITE_UINT64_FIELD(p, offset, value) \
1313 (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
1315 #define READ_INT64_FIELD(p, offset) \
1316 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1318 #define WRITE_INT64_FIELD(p, offset, value) \
1319 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1321 #define READ_BYTE_FIELD(p, offset) \
1322 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1324 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1325 static_cast<byte>(base::NoBarrier_Load( \
1326 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1328 #define WRITE_BYTE_FIELD(p, offset, value) \
1329 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1331 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1332 base::NoBarrier_Store( \
1333 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1334 static_cast<base::Atomic8>(value));
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}

MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}

Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}

bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}

MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}

HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
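// The encoding trick: a map pointer always carries kHeapObjectTag, so
// FromForwardingAddress subtracts the tag to produce a word that passes
// HAS_SMI_TAG. That lets the GC distinguish "still a map" from "already
// moved, here is the new location" using only the object's first word.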
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif

Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}

Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}

Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}

void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}

Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}

void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}

void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}

// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}

MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}

void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}

MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}

void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
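// The acquire/release pair above is what makes concurrent readers safe: a
// thread that observes the new map word through synchronized_map() is
// guaranteed to also observe the object contents written before the release
// store, e.g. when a string is transitioned in place.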
int HeapObject::Size() {
  return SizeFromMap(map());
}

HeapObjectContents HeapObject::ContentType() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return HeapObjectContents::kTaggedValues;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides the
    // map-word).
    if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
      return HeapObjectContents::kRawValues;
    else
      return HeapObjectContents::kTaggedValues;
#if 0
  // TODO(jochen): Enable eventually.
  } else if (type == JS_FUNCTION_TYPE) {
    return HeapObjectContents::kMixedValues;
#endif
  } else if (type == BYTECODE_ARRAY_TYPE) {
    return HeapObjectContents::kMixedValues;
  } else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
             type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return HeapObjectContents::kMixedValues;
  } else if (type == JS_ARRAY_BUFFER_TYPE) {
    return HeapObjectContents::kMixedValues;
  } else if (type <= LAST_DATA_TYPE) {
    // TODO(jochen): Why do we claim that Code and Map contain only raw values?
    return HeapObjectContents::kRawValues;
  } else {
    if (FLAG_unbox_double_fields) {
      LayoutDescriptorHelper helper(map());
      if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
    }
    return HeapObjectContents::kTaggedValues;
  }
}
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}

void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}

void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}

int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}

int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
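// Worked example: for the IEEE 754 double 1.0 the biased exponent bits are
// 1023, so get_exponent() returns 1023 - kExponentBias = 0; for 2.0 it
// returns 1. get_sign() is nonzero exactly when the sign bit is set, e.g.
// for -1.0.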
bool Simd128Value::Equals(Simd128Value* that) {
#define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
  if (this->Is##Type()) {                                      \
    if (!that->Is##Type()) return false;                       \
    return Type::cast(this)->Equals(Type::cast(that));         \
  }
  SIMD128_TYPES(SIMD128_VALUE)
#undef SIMD128_VALUE
  return false;
}

// static
bool Simd128Value::Equals(Handle<Simd128Value> one, Handle<Simd128Value> two) {
  return one->Equals(*two);
}

#define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
  bool Type::Equals(Type* that) {                                     \
    for (int lane = 0; lane < lane_count; ++lane) {                   \
      if (this->get_lane(lane) != that->get_lane(lane)) return false; \
    }                                                                 \
    return true;                                                      \
  }
SIMD128_TYPES(SIMD128_VALUE_EQUALS)
#undef SIMD128_VALUE_EQUALS

#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value =                                                      \
      READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value = READ_##field_type##_FIELD(                           \
      this, kValueOffset + (lane_count - lane - 1) * field_size);
#else
#error Unknown byte ordering
#endif

#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(                                         \
      this, kValueOffset + (lane_count - lane - 1) * field_size, value);
#else
#error Unknown byte ordering
#endif

#define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  lane_type type::get_lane(int lane) const {                              \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    return value;                                                         \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, lane_type value) {                        \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, value)         \
  }

SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
#undef SIMD128_NUMERIC_LANE_FNS

#define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  bool type::get_lane(int lane) const {                                   \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    DCHECK(value == 0 || value == -1);                                    \
    return value != 0;                                                    \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, bool value) {                             \
    DCHECK(lane < lane_count && lane >= 0);                               \
    int32_t int_val = value ? -1 : 0;                                     \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val)       \
  }

SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
#undef SIMD128_BOOLEAN_LANE_FNS

#undef SIMD128_READ_LANE
#undef SIMD128_WRITE_LANE
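// Note the endianness handling above: lanes are numbered in little-endian
// element order, so on big-endian targets lane i of an N-lane vector lives at
// byte offset (N - i - 1) * field_size from kValueOffset, keeping lane
// numbering identical across platforms.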
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)

Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}

bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}

FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}

bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }

bool AllocationSite::IsMaybeTenure() {
  return pretenure_decision() == kMaybeTenure;
}

bool AllocationSite::PretenuringDecisionMade() {
  return pretenure_decision() != kUndecided;
}

void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}

ElementsKind AllocationSite::GetElementsKind() {
  DCHECK(!SitePointsToLiteral());
  int value = Smi::cast(transition_info())->value();
  return ElementsKindBits::decode(value);
}

void AllocationSite::SetElementsKind(ElementsKind kind) {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
                      SKIP_WRITE_BARRIER);
}

bool AllocationSite::CanInlineCall() {
  int value = Smi::cast(transition_info())->value();
  return DoNotInlineBit::decode(value) == 0;
}

void AllocationSite::SetDoNotInlineCall() {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
                      SKIP_WRITE_BARRIER);
}

bool AllocationSite::SitePointsToLiteral() {
  // If transition_info is a smi, then it represents an ElementsKind
  // for a constructed array. Otherwise, it must be a boilerplate
  // for an object or array literal.
  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (IsFastSmiElementsKind(from) &&
      IsMoreGeneralElementsKindTransition(from, to)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
           type == JS_OBJECT_TYPE ||
           type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
  int value = pretenure_data();
  return PretenureDecisionBits::decode(value);
}

void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}

bool AllocationSite::deopt_dependent_code() {
  int value = pretenure_data();
  return DeoptDependentCodeBit::decode(value);
}

void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}

int AllocationSite::memento_found_count() {
  int value = pretenure_data();
  return MementoFoundCountBits::decode(value);
}

inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}

int AllocationSite::memento_create_count() { return pretenure_create_count(); }

void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}

inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}

inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}

inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
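// Worked example (assuming kPretenureRatio has its usual value of 0.85): if a
// GC cycle created 100 mementos and found 90 of them in new space, the ratio
// 0.9 >= 0.85 moves an undecided site to kMaybeTenure, or straight to kTenure
// (plus a deopt request) when the scavenge ran at maximum semi-space capacity.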
bool AllocationMemento::IsValid() {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}

AllocationSite* AllocationMemento::GetAllocationSite() {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}

void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
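// The scan above only ever generalizes target_kind along the elements-kind
// lattice: SMI kinds widen to DOUBLE when a non-Smi number is seen (if
// allowed) and to plain ELEMENTS for any other heap object, while a hole
// independently switches the kind to its holey variant.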
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}

void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}

void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
2014 void JSObject::initialize_properties() {
2015 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2016 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
2020 void JSObject::initialize_elements() {
2021 FixedArrayBase* elements = map()->GetInitialElements();
2022 WRITE_FIELD(this, kElementsOffset, elements);
2026 ACCESSORS(Oddball, to_string, String, kToStringOffset)
2027 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
2028 ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
2031 byte Oddball::kind() const {
2032 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
2036 void Oddball::set_kind(byte value) {
2037 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
2042 Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
2043 return handle(input->to_number(), input->GetIsolate());
2047 ACCESSORS(Cell, value, Object, kValueOffset)
2048 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
2049 ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
2050 ACCESSORS(PropertyCell, value, Object, kValueOffset)
2053 PropertyDetails PropertyCell::property_details() {
2054 return PropertyDetails(Smi::cast(property_details_raw()));
2055 }
2058 void PropertyCell::set_property_details(PropertyDetails details) {
2059 set_property_details_raw(details.AsSmi());
2060 }
2063 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
2066 void WeakCell::clear() {
2067 DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
2068 WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
2069 }
2072 void WeakCell::initialize(HeapObject* val) {
2073 WRITE_FIELD(this, kValueOffset, val);
2074 Heap* heap = GetHeap();
2075 // We just have to execute the generational barrier here because we never
2076 // mark through a weak cell and collect evacuation candidates when we process
2077 // all weak cells.
2078 if (heap->InNewSpace(val)) {
2079 heap->RecordWrite(address(), kValueOffset);
2080 }
2081 }
2084 bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
2087 Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
2090 void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
2091 WRITE_FIELD(this, kNextOffset, val);
2092 if (mode == UPDATE_WRITE_BARRIER) {
2093 WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
2094 }
2095 }
2098 void WeakCell::clear_next(Heap* heap) {
2099 set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
2100 }
2103 bool WeakCell::next_cleared() { return next()->IsTheHole(); }
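// Hedged usage sketch (hypothetical caller, not part of this file): the GC
// records a dead referent by storing Smi zero, so consumers check cleared()
// before casting the payload.
//
//   WeakCell* cell = ...;
//   if (!cell->cleared()) {
//     HeapObject* target = HeapObject::cast(cell->value());
//     // target is still alive here
//   }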
2106 int JSObject::GetHeaderSize() {
2107 InstanceType type = map()->instance_type();
2108 // Check for the most common kind of JavaScript object before
2109 // falling into the generic switch. This speeds up the internal
2110 // field operations considerably on average.
2111 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2112 switch (type) {
2113 case JS_GENERATOR_OBJECT_TYPE:
2114 return JSGeneratorObject::kSize;
2115 case JS_MODULE_TYPE:
2116 return JSModule::kSize;
2117 case JS_GLOBAL_PROXY_TYPE:
2118 return JSGlobalProxy::kSize;
2119 case JS_GLOBAL_OBJECT_TYPE:
2120 return JSGlobalObject::kSize;
2121 case JS_BUILTINS_OBJECT_TYPE:
2122 return JSBuiltinsObject::kSize;
2123 case JS_FUNCTION_TYPE:
2124 return JSFunction::kSize;
2125 case JS_VALUE_TYPE:
2126 return JSValue::kSize;
2127 case JS_DATE_TYPE:
2128 return JSDate::kSize;
2129 case JS_ARRAY_TYPE:
2130 return JSArray::kSize;
2131 case JS_ARRAY_BUFFER_TYPE:
2132 return JSArrayBuffer::kSize;
2133 case JS_TYPED_ARRAY_TYPE:
2134 return JSTypedArray::kSize;
2135 case JS_DATA_VIEW_TYPE:
2136 return JSDataView::kSize;
2137 case JS_SET_TYPE:
2138 return JSSet::kSize;
2139 case JS_MAP_TYPE:
2140 return JSMap::kSize;
2141 case JS_SET_ITERATOR_TYPE:
2142 return JSSetIterator::kSize;
2143 case JS_MAP_ITERATOR_TYPE:
2144 return JSMapIterator::kSize;
2145 case JS_ITERATOR_RESULT_TYPE:
2146 return JSIteratorResult::kSize;
2147 case JS_WEAK_MAP_TYPE:
2148 return JSWeakMap::kSize;
2149 case JS_WEAK_SET_TYPE:
2150 return JSWeakSet::kSize;
2151 case JS_REGEXP_TYPE:
2152 return JSRegExp::kSize;
2153 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2154 return JSObject::kHeaderSize;
2155 case JS_MESSAGE_OBJECT_TYPE:
2156 return JSMessageObject::kSize;
2157 default:
2158 UNREACHABLE();
2159 return 0;
2160 }
2161 }
2164 int JSObject::GetInternalFieldCount() {
2165 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2166 // Make sure to adjust for the number of in-object properties. These
2167 // properties do contribute to the size, but are not internal fields.
2168 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2169 map()->GetInObjectProperties();
2170 }
2173 int JSObject::GetInternalFieldOffset(int index) {
2174 DCHECK(index < GetInternalFieldCount() && index >= 0);
2175 return GetHeaderSize() + (kPointerSize * index);
2176 }
2179 Object* JSObject::GetInternalField(int index) {
2180 DCHECK(index < GetInternalFieldCount() && index >= 0);
2181 // Internal objects do follow immediately after the header, whereas in-object
2182 // properties are at the end of the object. Therefore there is no need
2183 // to adjust the index here.
2184 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2185 }
2188 void JSObject::SetInternalField(int index, Object* value) {
2189 DCHECK(index < GetInternalFieldCount() && index >= 0);
2190 // Internal objects do follow immediately after the header, whereas in-object
2191 // properties are at the end of the object. Therefore there is no need
2192 // to adjust the index here.
2193 int offset = GetHeaderSize() + (kPointerSize * index);
2194 WRITE_FIELD(this, offset, value);
2195 WRITE_BARRIER(GetHeap(), this, offset, value);
2196 }
2199 void JSObject::SetInternalField(int index, Smi* value) {
2200 DCHECK(index < GetInternalFieldCount() && index >= 0);
2201 // Internal objects do follow immediately after the header, whereas in-object
2202 // properties are at the end of the object. Therefore there is no need
2203 // to adjust the index here.
2204 int offset = GetHeaderSize() + (kPointerSize * index);
2205 WRITE_FIELD(this, offset, value);
2206 }
2209 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2210 if (!FLAG_unbox_double_fields) return false;
2211 return map()->IsUnboxedDoubleField(index);
2212 }
2215 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2216 if (!FLAG_unbox_double_fields) return false;
2217 if (index.is_hidden_field() || !index.is_inobject()) return false;
2218 return !layout_descriptor()->IsTagged(index.property_index());
2219 }
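// Editorial layout sketch assumed by the internal-field accessors above:
//
//   | header (GetHeaderSize() bytes) |
//   | internal field 0               |  <- GetHeaderSize() + 0 * kPointerSize
//   | internal field 1               |  <- GetHeaderSize() + 1 * kPointerSize
//   | ...                            |
//   | in-object properties           |  <- trailing end of the object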
2222 // Access fast-case object properties at index. The use of these routines
2223 // is needed to correctly distinguish between properties stored in-object and
2224 // properties stored in the properties array.
2225 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2226 DCHECK(!IsUnboxedDoubleField(index));
2227 if (index.is_inobject()) {
2228 return READ_FIELD(this, index.offset());
2229 } else {
2230 return properties()->get(index.outobject_array_index());
2231 }
2232 }
2235 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2236 DCHECK(IsUnboxedDoubleField(index));
2237 return READ_DOUBLE_FIELD(this, index.offset());
2238 }
2241 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2242 if (index.is_inobject()) {
2243 int offset = index.offset();
2244 WRITE_FIELD(this, offset, value);
2245 WRITE_BARRIER(GetHeap(), this, offset, value);
2246 } else {
2247 properties()->set(index.outobject_array_index(), value);
2248 }
2249 }
2252 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
2253 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2254 }
2257 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2258 if (IsUnboxedDoubleField(index)) {
2259 DCHECK(value->IsMutableHeapNumber());
2260 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
2261 } else {
2262 RawFastPropertyAtPut(index, value);
2263 }
2264 }
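// Minimal usage sketch (editorial; the descriptor variable is hypothetical):
// a FieldIndex encodes whether the property lives in the object itself or in
// the out-of-object properties array, and RawFastPropertyAt dispatches on it.
//
//   FieldIndex index = FieldIndex::ForDescriptor(object->map(), descriptor);
//   Object* raw = object->RawFastPropertyAt(index);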
2267 void JSObject::WriteToField(int descriptor, Object* value) {
2268 DisallowHeapAllocation no_gc;
2270 DescriptorArray* desc = map()->instance_descriptors();
2271 PropertyDetails details = desc->GetDetails(descriptor);
2273 DCHECK(details.type() == DATA);
2275 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
2276 if (details.representation().IsDouble()) {
2277 // Nothing more to be done.
2278 if (value->IsUninitialized()) return;
2279 if (IsUnboxedDoubleField(index)) {
2280 RawFastDoublePropertyAtPut(index, value->Number());
2281 } else {
2282 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
2283 DCHECK(box->IsMutableHeapNumber());
2284 box->set_value(value->Number());
2285 }
2286 } else {
2287 RawFastPropertyAtPut(index, value);
2288 }
2289 }
2292 int JSObject::GetInObjectPropertyOffset(int index) {
2293 return map()->GetInObjectPropertyOffset(index);
2294 }
2297 Object* JSObject::InObjectPropertyAt(int index) {
2298 int offset = GetInObjectPropertyOffset(index);
2299 return READ_FIELD(this, offset);
2300 }
2303 Object* JSObject::InObjectPropertyAtPut(int index,
2304 Object* value,
2305 WriteBarrierMode mode) {
2306 // Adjust for the number of properties stored in the object.
2307 int offset = GetInObjectPropertyOffset(index);
2308 WRITE_FIELD(this, offset, value);
2309 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2310 return value;
2311 }
2315 void JSObject::InitializeBody(Map* map,
2316 Object* pre_allocated_value,
2317 Object* filler_value) {
2318 DCHECK(!filler_value->IsHeapObject() ||
2319 !GetHeap()->InNewSpace(filler_value));
2320 DCHECK(!pre_allocated_value->IsHeapObject() ||
2321 !GetHeap()->InNewSpace(pre_allocated_value));
2322 int size = map->instance_size();
2323 int offset = kHeaderSize;
2324 if (filler_value != pre_allocated_value) {
2325 int pre_allocated =
2326 map->GetInObjectProperties() - map->unused_property_fields();
2327 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2328 for (int i = 0; i < pre_allocated; i++) {
2329 WRITE_FIELD(this, offset, pre_allocated_value);
2330 offset += kPointerSize;
2331 }
2332 }
2333 while (offset < size) {
2334 WRITE_FIELD(this, offset, filler_value);
2335 offset += kPointerSize;
2336 }
2337 }
2340 bool JSObject::HasFastProperties() {
2341 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2342 return !properties()->IsDictionary();
2343 }
2346 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2347 if (unused_property_fields() != 0) return false;
2348 if (is_prototype_map()) return false;
2349 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2350 int limit = Max(minimum, GetInObjectProperties());
2351 int external = NumberOfFields() - GetInObjectProperties();
2352 return external > limit;
2353 }
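// Worked example of the heuristic above (editorial): for a map with 4
// in-object properties and a store that is certainly not keyed, the limit is
// Max(128, 4) == 128 out-of-object fields; for a possibly-keyed store the
// minimum drops to 12, so dictionary mode is entered much sooner.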
2356 void Struct::InitializeBody(int object_size) {
2357 Object* value = GetHeap()->undefined_value();
2358 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2359 WRITE_FIELD(this, offset, value);
2360 }
2361 }
2364 bool Object::ToArrayLength(uint32_t* index) {
2365 if (IsSmi()) {
2366 int value = Smi::cast(this)->value();
2367 if (value < 0) return false;
2368 *index = value;
2369 return true;
2370 }
2371 if (IsHeapNumber()) {
2372 double value = HeapNumber::cast(this)->value();
2373 uint32_t uint_value = static_cast<uint32_t>(value);
2374 if (value == static_cast<double>(uint_value)) {
2375 *index = uint_value;
2376 return true;
2377 }
2378 }
2379 return false;
2380 }
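// Editorial examples of the conversion above: Smi 7 stores 7 and returns
// true; the heap number 2.5 fails the uint32_t round-trip (2.0 != 2.5) and
// returns false; 4294967295.0 round-trips and is accepted here, although
// ToArrayIndex below rejects it as kMaxUInt32.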
2383 bool Object::ToArrayIndex(uint32_t* index) {
2384 return ToArrayLength(index) && *index != kMaxUInt32;
2385 }
2388 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2389 if (!this->IsJSValue()) return false;
2391 JSValue* js_value = JSValue::cast(this);
2392 if (!js_value->value()->IsString()) return false;
2394 String* str = String::cast(js_value->value());
2395 if (index >= static_cast<uint32_t>(str->length())) return false;
2397 return true;
2398 }
2401 void Object::VerifyApiCallResultType() {
2402 #if DEBUG
2403 if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
2404 IsHeapNumber() || IsSimd128Value() || IsUndefined() || IsTrue() ||
2405 IsFalse() || IsNull())) {
2406 FATAL("API call returned invalid object");
2407 }
2408 #endif  // DEBUG
2409 }
2412 Object* FixedArray::get(int index) const {
2413 SLOW_DCHECK(index >= 0 && index < this->length());
2414 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2415 }
2418 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2419 return handle(array->get(index), array->GetIsolate());
2420 }
2423 bool FixedArray::is_the_hole(int index) {
2424 return get(index) == GetHeap()->the_hole_value();
2425 }
2428 void FixedArray::set(int index, Smi* value) {
2429 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2430 DCHECK(index >= 0 && index < this->length());
2431 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2432 int offset = kHeaderSize + index * kPointerSize;
2433 WRITE_FIELD(this, offset, value);
2434 }
2437 void FixedArray::set(int index, Object* value) {
2438 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2439 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2440 DCHECK(index >= 0 && index < this->length());
2441 int offset = kHeaderSize + index * kPointerSize;
2442 WRITE_FIELD(this, offset, value);
2443 WRITE_BARRIER(GetHeap(), this, offset, value);
2444 }
2447 double FixedDoubleArray::get_scalar(int index) {
2448 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2449 map() != GetHeap()->fixed_array_map());
2450 DCHECK(index >= 0 && index < this->length());
2451 DCHECK(!is_the_hole(index));
2452 return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2453 }
2456 uint64_t FixedDoubleArray::get_representation(int index) {
2457 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2458 map() != GetHeap()->fixed_array_map());
2459 DCHECK(index >= 0 && index < this->length());
2460 int offset = kHeaderSize + index * kDoubleSize;
2461 return READ_UINT64_FIELD(this, offset);
2462 }
2465 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2466 int index) {
2467 if (array->is_the_hole(index)) {
2468 return array->GetIsolate()->factory()->the_hole_value();
2469 } else {
2470 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2471 }
2472 }
2475 void FixedDoubleArray::set(int index, double value) {
2476 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2477 map() != GetHeap()->fixed_array_map());
2478 int offset = kHeaderSize + index * kDoubleSize;
2479 if (std::isnan(value)) {
2480 WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
2481 } else {
2482 WRITE_DOUBLE_FIELD(this, offset, value);
2483 }
2484 DCHECK(!is_the_hole(index));
2485 }
2488 void FixedDoubleArray::set_the_hole(int index) {
2489 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2490 map() != GetHeap()->fixed_array_map());
2491 int offset = kHeaderSize + index * kDoubleSize;
2492 WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
2493 }
2496 bool FixedDoubleArray::is_the_hole(int index) {
2497 return get_representation(index) == kHoleNanInt64;
2498 }
2501 double* FixedDoubleArray::data_start() {
2502 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2503 }
2506 void FixedDoubleArray::FillWithHoles(int from, int to) {
2507 for (int i = from; i < to; i++) {
2508 set_the_hole(i);
2509 }
2510 }
2513 Object* WeakFixedArray::Get(int index) const {
2514 Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
2515 if (raw->IsSmi()) return raw;
2516 DCHECK(raw->IsWeakCell());
2517 return WeakCell::cast(raw)->value();
2518 }
2521 bool WeakFixedArray::IsEmptySlot(int index) const {
2522 DCHECK(index < Length());
2523 return Get(index)->IsSmi();
2524 }
2527 void WeakFixedArray::Clear(int index) {
2528 FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
2529 }
2532 int WeakFixedArray::Length() const {
2533 return FixedArray::cast(this)->length() - kFirstIndex;
2534 }
2537 int WeakFixedArray::last_used_index() const {
2538 return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
2539 }
2542 void WeakFixedArray::set_last_used_index(int index) {
2543 FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
2544 }
2547 template <class T>
2548 T* WeakFixedArray::Iterator::Next() {
2549 if (list_ != NULL) {
2550 // Assert that list did not change during iteration.
2551 DCHECK_EQ(last_used_index_, list_->last_used_index());
2552 while (index_ < list_->Length()) {
2553 Object* item = list_->Get(index_++);
2554 if (item != Empty()) return T::cast(item);
2555 }
2556 list_ = NULL;
2557 }
2558 return NULL;
2559 }
2562 int ArrayList::Length() {
2563 if (FixedArray::cast(this)->length() == 0) return 0;
2564 return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
2565 }
2568 void ArrayList::SetLength(int length) {
2569 return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2570 }
2573 Object* ArrayList::Get(int index) {
2574 return FixedArray::cast(this)->get(kFirstIndex + index);
2575 }
2578 Object** ArrayList::Slot(int index) {
2579 return data_start() + kFirstIndex + index;
2580 }
2583 void ArrayList::Set(int index, Object* obj) {
2584 FixedArray::cast(this)->set(kFirstIndex + index, obj);
2585 }
2588 void ArrayList::Clear(int index, Object* undefined) {
2589 DCHECK(undefined->IsUndefined());
2590 FixedArray::cast(this)
2591 ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
2592 }
2595 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2596 const DisallowHeapAllocation& promise) {
2597 Heap* heap = GetHeap();
2598 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2599 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2600 return UPDATE_WRITE_BARRIER;
2601 }
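// Hedged usage sketch (hypothetical caller): the returned mode is only valid
// while the DisallowHeapAllocation witness is live, since a GC could move the
// receiver out of new space.
//
//   DisallowHeapAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   array->set(i, value, mode);  // barrier may legitimately be skipped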
2604 AllocationAlignment HeapObject::RequiredAlignment() {
2605 #ifdef V8_HOST_ARCH_32_BIT
2606 if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
2607 FixedArrayBase::cast(this)->length() != 0) {
2608 return kDoubleAligned;
2609 }
2610 if (IsHeapNumber()) return kDoubleUnaligned;
2611 if (IsSimd128Value()) return kSimd128Unaligned;
2612 #endif  // V8_HOST_ARCH_32_BIT
2613 return kWordAligned;
2614 }
2617 void FixedArray::set(int index,
2618 Object* value,
2619 WriteBarrierMode mode) {
2620 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2621 DCHECK(index >= 0 && index < this->length());
2622 int offset = kHeaderSize + index * kPointerSize;
2623 WRITE_FIELD(this, offset, value);
2624 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2625 }
2628 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2629 int index,
2630 Object* value) {
2631 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2632 DCHECK(index >= 0 && index < array->length());
2633 int offset = kHeaderSize + index * kPointerSize;
2634 WRITE_FIELD(array, offset, value);
2635 Heap* heap = array->GetHeap();
2636 if (heap->InNewSpace(value)) {
2637 heap->RecordWrite(array->address(), offset);
2638 }
2639 }
2642 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2643 int index,
2644 Object* value) {
2645 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2646 DCHECK(index >= 0 && index < array->length());
2647 DCHECK(!array->GetHeap()->InNewSpace(value));
2648 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2649 }
2652 void FixedArray::set_undefined(int index) {
2653 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2654 DCHECK(index >= 0 && index < this->length());
2655 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2656 WRITE_FIELD(this,
2657 kHeaderSize + index * kPointerSize,
2658 GetHeap()->undefined_value());
2659 }
2662 void FixedArray::set_null(int index) {
2663 DCHECK(index >= 0 && index < this->length());
2664 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2665 WRITE_FIELD(this,
2666 kHeaderSize + index * kPointerSize,
2667 GetHeap()->null_value());
2668 }
2671 void FixedArray::set_the_hole(int index) {
2672 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2673 DCHECK(index >= 0 && index < this->length());
2674 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2675 WRITE_FIELD(this,
2676 kHeaderSize + index * kPointerSize,
2677 GetHeap()->the_hole_value());
2678 }
2681 void FixedArray::FillWithHoles(int from, int to) {
2682 for (int i = from; i < to; i++) {
2683 set_the_hole(i);
2684 }
2685 }
2688 Object** FixedArray::data_start() {
2689 return HeapObject::RawField(this, kHeaderSize);
2690 }
2693 Object** FixedArray::RawFieldOfElementAt(int index) {
2694 return HeapObject::RawField(this, OffsetOfElementAt(index));
2695 }
2698 bool DescriptorArray::IsEmpty() {
2699 DCHECK(length() >= kFirstIndex ||
2700 this == GetHeap()->empty_descriptor_array());
2701 return length() < kFirstIndex;
2702 }
2705 int DescriptorArray::number_of_descriptors() {
2706 DCHECK(length() >= kFirstIndex || IsEmpty());
2707 int len = length();
2708 return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
2709 }
2712 int DescriptorArray::number_of_descriptors_storage() {
2713 int len = length();
2714 return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
2715 }
2718 int DescriptorArray::NumberOfSlackDescriptors() {
2719 return number_of_descriptors_storage() - number_of_descriptors();
2720 }
2723 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2724 WRITE_FIELD(
2725 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2726 }
2729 inline int DescriptorArray::number_of_entries() {
2730 return number_of_descriptors();
2731 }
2734 bool DescriptorArray::HasEnumCache() {
2735 return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
2736 }
2739 void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
2740 set(kEnumCacheIndex, array->get(kEnumCacheIndex));
2741 }
2744 FixedArray* DescriptorArray::GetEnumCache() {
2745 DCHECK(HasEnumCache());
2746 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2747 return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
2748 }
2751 bool DescriptorArray::HasEnumIndicesCache() {
2752 if (IsEmpty()) return false;
2753 Object* object = get(kEnumCacheIndex);
2754 if (object->IsSmi()) return false;
2755 FixedArray* bridge = FixedArray::cast(object);
2756 return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2757 }
2760 FixedArray* DescriptorArray::GetEnumIndicesCache() {
2761 DCHECK(HasEnumIndicesCache());
2762 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2763 return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
2764 }
2767 Object** DescriptorArray::GetEnumCacheSlot() {
2768 DCHECK(HasEnumCache());
2769 return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2770 kEnumCacheOffset);
2771 }
2774 // Perform a binary search in a fixed array. Low and high are entry indices. If
2775 // there are three entries in this array it should be called with low=0 and
2776 // high=2.
2777 template <SearchMode search_mode, typename T>
2778 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2779 int* out_insertion_index) {
2780 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2781 uint32_t hash = name->Hash();
2782 int limit = high;
2784 DCHECK(low <= high);
2786 while (low != high) {
2787 int mid = low + (high - low) / 2;
2788 Name* mid_name = array->GetSortedKey(mid);
2789 uint32_t mid_hash = mid_name->Hash();
2791 if (mid_hash >= hash) {
2792 high = mid;
2793 } else {
2794 low = mid + 1;
2795 }
2796 }
2798 for (; low <= limit; ++low) {
2799 int sort_index = array->GetSortedKeyIndex(low);
2800 Name* entry = array->GetKey(sort_index);
2801 uint32_t current_hash = entry->Hash();
2802 if (current_hash != hash) {
2803 if (out_insertion_index != NULL) {
2804 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2805 }
2806 return T::kNotFound;
2807 }
2808 if (entry->Equals(name)) {
2809 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2810 return sort_index;
2811 }
2812 return T::kNotFound;
2813 }
2814 }
2816 if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
2817 return T::kNotFound;
2818 }
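// Editorial note on the routine above: keys are sorted by hash, and distinct
// names can collide on the same hash, so once the binary search narrows low
// to the first entry with a matching hash, the trailing loop walks the
// collision run comparing actual names until the hash changes.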
2821 // Perform a linear search in this fixed array. len is the number of entry
2822 // indices that are valid.
2823 template <SearchMode search_mode, typename T>
2824 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2825 int* out_insertion_index) {
2826 uint32_t hash = name->Hash();
2827 if (search_mode == ALL_ENTRIES) {
2828 for (int number = 0; number < len; number++) {
2829 int sorted_index = array->GetSortedKeyIndex(number);
2830 Name* entry = array->GetKey(sorted_index);
2831 uint32_t current_hash = entry->Hash();
2832 if (current_hash > hash) {
2833 if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
2834 return T::kNotFound;
2835 }
2836 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2837 }
2838 if (out_insertion_index != NULL) *out_insertion_index = len;
2839 return T::kNotFound;
2840 } else {
2841 DCHECK(len >= valid_entries);
2842 DCHECK_NULL(out_insertion_index);  // Not supported here.
2843 for (int number = 0; number < valid_entries; number++) {
2844 Name* entry = array->GetKey(number);
2845 uint32_t current_hash = entry->Hash();
2846 if (current_hash == hash && entry->Equals(name)) return number;
2847 }
2848 return T::kNotFound;
2849 }
2850 }
2853 template <SearchMode search_mode, typename T>
2854 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2855 if (search_mode == VALID_ENTRIES) {
2856 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2857 } else {
2858 SLOW_DCHECK(array->IsSortedNoDuplicates());
2859 }
2861 int nof = array->number_of_entries();
2862 if (nof == 0) {
2863 if (out_insertion_index != NULL) *out_insertion_index = 0;
2864 return T::kNotFound;
2865 }
2867 // Fast case: do linear search for small arrays.
2868 const int kMaxElementsForLinearSearch = 8;
2869 if ((search_mode == ALL_ENTRIES &&
2870 nof <= kMaxElementsForLinearSearch) ||
2871 (search_mode == VALID_ENTRIES &&
2872 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2873 return LinearSearch<search_mode>(array, name, nof, valid_entries,
2874 out_insertion_index);
2875 }
2877 // Slow case: perform binary search.
2878 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
2879 out_insertion_index);
2880 }
2883 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2884 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2885 }
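// Editorial note: with ALL_ENTRIES the linear path is used up to 8 entries,
// while with VALID_ENTRIES it is kept up to 24 (kMaxElementsForLinearSearch
// * 3) valid entries before falling back to the binary search above.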
2888 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2889 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2890 if (number_of_own_descriptors == 0) return kNotFound;
2892 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2893 int number = cache->Lookup(map, name);
2895 if (number == DescriptorLookupCache::kAbsent) {
2896 number = Search(name, number_of_own_descriptors);
2897 cache->Update(map, name, number);
2898 }
2900 return number;
2901 }
2904 PropertyDetails Map::GetLastDescriptorDetails() {
2905 return instance_descriptors()->GetDetails(LastAdded());
2906 }
2909 int Map::LastAdded() {
2910 int number_of_own_descriptors = NumberOfOwnDescriptors();
2911 DCHECK(number_of_own_descriptors > 0);
2912 return number_of_own_descriptors - 1;
2913 }
2916 int Map::NumberOfOwnDescriptors() {
2917 return NumberOfOwnDescriptorsBits::decode(bit_field3());
2918 }
2921 void Map::SetNumberOfOwnDescriptors(int number) {
2922 DCHECK(number <= instance_descriptors()->number_of_descriptors());
2923 set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
2924 }
2927 int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
2930 void Map::SetEnumLength(int length) {
2931 if (length != kInvalidEnumCacheSentinel) {
2932 DCHECK(length >= 0);
2933 DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
2934 DCHECK(length <= NumberOfOwnDescriptors());
2935 }
2936 set_bit_field3(EnumLengthBits::update(bit_field3(), length));
2937 }
2940 FixedArrayBase* Map::GetInitialElements() {
2941 if (has_fast_smi_or_object_elements() ||
2942 has_fast_double_elements()) {
2943 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2944 return GetHeap()->empty_fixed_array();
2945 } else if (has_fixed_typed_array_elements()) {
2946 FixedTypedArrayBase* empty_array =
2947 GetHeap()->EmptyFixedTypedArrayForMap(this);
2948 DCHECK(!GetHeap()->InNewSpace(empty_array));
2949 return empty_array;
2950 } else {
2951 UNREACHABLE();
2952 }
2953 return NULL;
2954 }
2957 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2958 DCHECK(descriptor_number < number_of_descriptors());
2959 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2960 }
2963 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2964 return GetKeySlot(descriptor_number);
2965 }
2968 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2969 return GetValueSlot(descriptor_number - 1) + 1;
2970 }
2973 Name* DescriptorArray::GetKey(int descriptor_number) {
2974 DCHECK(descriptor_number < number_of_descriptors());
2975 return Name::cast(get(ToKeyIndex(descriptor_number)));
2976 }
2979 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2980 return GetDetails(descriptor_number).pointer();
2981 }
2984 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2985 return GetKey(GetSortedKeyIndex(descriptor_number));
2986 }
2989 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2990 PropertyDetails details = GetDetails(descriptor_index);
2991 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2992 }
2995 void DescriptorArray::SetRepresentation(int descriptor_index,
2996 Representation representation) {
2997 DCHECK(!representation.IsNone());
2998 PropertyDetails details = GetDetails(descriptor_index);
2999 set(ToDetailsIndex(descriptor_index),
3000 details.CopyWithRepresentation(representation).AsSmi());
3001 }
3004 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3005 DCHECK(descriptor_number < number_of_descriptors());
3006 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3007 }
3010 int DescriptorArray::GetValueOffset(int descriptor_number) {
3011 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3012 }
3015 Object* DescriptorArray::GetValue(int descriptor_number) {
3016 DCHECK(descriptor_number < number_of_descriptors());
3017 return get(ToValueIndex(descriptor_number));
3018 }
3021 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3022 set(ToValueIndex(descriptor_index), value);
3023 }
3026 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3027 DCHECK(descriptor_number < number_of_descriptors());
3028 Object* details = get(ToDetailsIndex(descriptor_number));
3029 return PropertyDetails(Smi::cast(details));
3030 }
3033 PropertyType DescriptorArray::GetType(int descriptor_number) {
3034 return GetDetails(descriptor_number).type();
3035 }
3038 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3039 DCHECK(GetDetails(descriptor_number).location() == kField);
3040 return GetDetails(descriptor_number).field_index();
3041 }
3044 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3045 DCHECK(GetDetails(descriptor_number).location() == kField);
3046 Object* value = GetValue(descriptor_number);
3047 if (value->IsWeakCell()) {
3048 if (WeakCell::cast(value)->cleared()) return HeapType::None();
3049 value = WeakCell::cast(value)->value();
3050 }
3051 return HeapType::cast(value);
3052 }
3055 Object* DescriptorArray::GetConstant(int descriptor_number) {
3056 return GetValue(descriptor_number);
3057 }
3060 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3061 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3062 return GetValue(descriptor_number);
3063 }
3066 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3067 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3068 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3069 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3070 }
3073 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3074 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3075 handle(GetValue(descriptor_number), GetIsolate()),
3076 GetDetails(descriptor_number));
3077 }
3080 void DescriptorArray::Set(int descriptor_number,
3081 Descriptor* desc,
3082 const WhitenessWitness&) {
3083 // Range check.
3084 DCHECK(descriptor_number < number_of_descriptors());
3086 NoIncrementalWriteBarrierSet(this,
3087 ToKeyIndex(descriptor_number),
3088 *desc->GetKey());
3089 NoIncrementalWriteBarrierSet(this,
3090 ToValueIndex(descriptor_number),
3091 *desc->GetValue());
3092 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
3093 desc->GetDetails().AsSmi());
3094 }
3097 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3098 // Range check.
3099 DCHECK(descriptor_number < number_of_descriptors());
3101 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3102 set(ToValueIndex(descriptor_number), *desc->GetValue());
3103 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3104 }
3107 void DescriptorArray::Append(Descriptor* desc) {
3108 DisallowHeapAllocation no_gc;
3109 int descriptor_number = number_of_descriptors();
3110 SetNumberOfDescriptors(descriptor_number + 1);
3111 Set(descriptor_number, desc);
3113 uint32_t hash = desc->GetKey()->Hash();
3115 int insertion;
3117 for (insertion = descriptor_number; insertion > 0; --insertion) {
3118 Name* key = GetSortedKey(insertion - 1);
3119 if (key->Hash() <= hash) break;
3120 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3121 }
3123 SetSortedKey(insertion, descriptor_number);
3124 }
3127 void DescriptorArray::SwapSortedKeys(int first, int second) {
3128 int first_key = GetSortedKeyIndex(first);
3129 SetSortedKey(first, GetSortedKeyIndex(second));
3130 SetSortedKey(second, first_key);
3131 }
3134 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3135 : marking_(array->GetHeap()->incremental_marking()) {
3136 marking_->EnterNoMarkingScope();
3137 DCHECK(!marking_->IsMarking() ||
3138 Marking::Color(array) == Marking::WHITE_OBJECT);
3139 }
3142 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3143 marking_->LeaveNoMarkingScope();
3144 }
3147 PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }
3150 Object* DescriptorArray::Entry::GetCallbackObject() {
3151 return descs_->GetValue(index_);
3152 }
3155 int HashTableBase::NumberOfElements() {
3156 return Smi::cast(get(kNumberOfElementsIndex))->value();
3157 }
3160 int HashTableBase::NumberOfDeletedElements() {
3161 return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
3162 }
3165 int HashTableBase::Capacity() {
3166 return Smi::cast(get(kCapacityIndex))->value();
3167 }
3170 void HashTableBase::ElementAdded() {
3171 SetNumberOfElements(NumberOfElements() + 1);
3172 }
3175 void HashTableBase::ElementRemoved() {
3176 SetNumberOfElements(NumberOfElements() - 1);
3177 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
3178 }
3181 void HashTableBase::ElementsRemoved(int n) {
3182 SetNumberOfElements(NumberOfElements() - n);
3183 SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
3184 }
3188 int HashTableBase::ComputeCapacity(int at_least_space_for) {
3189 const int kMinCapacity = 4;
3190 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3191 return Max(capacity, kMinCapacity);
3192 }
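// Worked example of the sizing rule above (editorial): a request for 5
// elements computes RoundUpToPowerOfTwo32(10) == 16, leaving the table at
// most half full; requests for 0 or 1 elements are clamped to the minimum
// capacity of 4.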
3195 bool HashTableBase::IsKey(Object* k) {
3196 return !k->IsTheHole() && !k->IsUndefined();
3197 }
3200 void HashTableBase::SetNumberOfElements(int nof) {
3201 set(kNumberOfElementsIndex, Smi::FromInt(nof));
3202 }
3205 void HashTableBase::SetNumberOfDeletedElements(int nod) {
3206 set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
3207 }
3210 template <typename Derived, typename Shape, typename Key>
3211 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3212 return FindEntry(GetIsolate(), key);
3213 }
3216 template<typename Derived, typename Shape, typename Key>
3217 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3218 return FindEntry(isolate, key, HashTable::Hash(key));
3219 }
3222 // Find entry for key otherwise return kNotFound.
3223 template <typename Derived, typename Shape, typename Key>
3224 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
3225 int32_t hash) {
3226 uint32_t capacity = Capacity();
3227 uint32_t entry = FirstProbe(hash, capacity);
3228 uint32_t count = 1;
3229 // EnsureCapacity will guarantee the hash table is never full.
3230 while (true) {
3231 Object* element = KeyAt(entry);
3232 // Empty entry. Uses raw unchecked accessors because it is called by the
3233 // string table during bootstrapping.
3234 if (element == isolate->heap()->root(Heap::kUndefinedValueRootIndex)) break;
3235 if (element != isolate->heap()->root(Heap::kTheHoleValueRootIndex) &&
3236 Shape::IsMatch(key, element)) return entry;
3237 entry = NextProbe(entry, count++, capacity);
3238 }
3239 return kNotFound;
3240 }
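// Editorial sketch of the probe sequence assumed above: FirstProbe reduces
// the hash modulo the power-of-two capacity and NextProbe adds an increasing
// step, so with capacity 8 and hash h the visited entries are h & 7,
// (h + 1) & 7, (h + 1 + 2) & 7, ... until an undefined key or a match ends
// the loop.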
3243 bool SeededNumberDictionary::requires_slow_elements() {
3244 Object* max_index_object = get(kMaxNumberKeyIndex);
3245 if (!max_index_object->IsSmi()) return false;
3246 return 0 !=
3247 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3248 }
3251 uint32_t SeededNumberDictionary::max_number_key() {
3252 DCHECK(!requires_slow_elements());
3253 Object* max_index_object = get(kMaxNumberKeyIndex);
3254 if (!max_index_object->IsSmi()) return 0;
3255 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3256 return value >> kRequiresSlowElementsTagSize;
3257 }
3260 void SeededNumberDictionary::set_requires_slow_elements() {
3261 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3262 }
3265 // ------------------------------------
3266 // Cast operations
3269 CAST_ACCESSOR(AccessorInfo)
3270 CAST_ACCESSOR(ArrayList)
3271 CAST_ACCESSOR(Bool16x8)
3272 CAST_ACCESSOR(Bool32x4)
3273 CAST_ACCESSOR(Bool8x16)
3274 CAST_ACCESSOR(ByteArray)
3275 CAST_ACCESSOR(BytecodeArray)
3276 CAST_ACCESSOR(Cell)
3277 CAST_ACCESSOR(Code)
3278 CAST_ACCESSOR(CodeCacheHashTable)
3279 CAST_ACCESSOR(CompilationCacheTable)
3280 CAST_ACCESSOR(ConsString)
3281 CAST_ACCESSOR(DeoptimizationInputData)
3282 CAST_ACCESSOR(DeoptimizationOutputData)
3283 CAST_ACCESSOR(DependentCode)
3284 CAST_ACCESSOR(DescriptorArray)
3285 CAST_ACCESSOR(ExternalOneByteString)
3286 CAST_ACCESSOR(ExternalString)
3287 CAST_ACCESSOR(ExternalTwoByteString)
3288 CAST_ACCESSOR(FixedArray)
3289 CAST_ACCESSOR(FixedArrayBase)
3290 CAST_ACCESSOR(FixedDoubleArray)
3291 CAST_ACCESSOR(FixedTypedArrayBase)
3292 CAST_ACCESSOR(Float32x4)
3293 CAST_ACCESSOR(Foreign)
3294 CAST_ACCESSOR(GlobalDictionary)
3295 CAST_ACCESSOR(GlobalObject)
3296 CAST_ACCESSOR(HandlerTable)
3297 CAST_ACCESSOR(HeapObject)
3298 CAST_ACCESSOR(Int16x8)
3299 CAST_ACCESSOR(Int32x4)
3300 CAST_ACCESSOR(Int8x16)
3301 CAST_ACCESSOR(JSArray)
3302 CAST_ACCESSOR(JSArrayBuffer)
3303 CAST_ACCESSOR(JSArrayBufferView)
3304 CAST_ACCESSOR(JSBuiltinsObject)
3305 CAST_ACCESSOR(JSDataView)
3306 CAST_ACCESSOR(JSDate)
3307 CAST_ACCESSOR(JSFunction)
3308 CAST_ACCESSOR(JSFunctionProxy)
3309 CAST_ACCESSOR(JSGeneratorObject)
3310 CAST_ACCESSOR(JSGlobalObject)
3311 CAST_ACCESSOR(JSGlobalProxy)
3312 CAST_ACCESSOR(JSMap)
3313 CAST_ACCESSOR(JSMapIterator)
3314 CAST_ACCESSOR(JSMessageObject)
3315 CAST_ACCESSOR(JSModule)
3316 CAST_ACCESSOR(JSObject)
3317 CAST_ACCESSOR(JSProxy)
3318 CAST_ACCESSOR(JSReceiver)
3319 CAST_ACCESSOR(JSRegExp)
3320 CAST_ACCESSOR(JSSet)
3321 CAST_ACCESSOR(JSSetIterator)
3322 CAST_ACCESSOR(JSIteratorResult)
3323 CAST_ACCESSOR(JSTypedArray)
3324 CAST_ACCESSOR(JSValue)
3325 CAST_ACCESSOR(JSWeakMap)
3326 CAST_ACCESSOR(JSWeakSet)
3327 CAST_ACCESSOR(LayoutDescriptor)
3328 CAST_ACCESSOR(Map)
3329 CAST_ACCESSOR(Name)
3330 CAST_ACCESSOR(NameDictionary)
3331 CAST_ACCESSOR(NormalizedMapCache)
3332 CAST_ACCESSOR(Object)
3333 CAST_ACCESSOR(ObjectHashTable)
3334 CAST_ACCESSOR(Oddball)
3335 CAST_ACCESSOR(OrderedHashMap)
3336 CAST_ACCESSOR(OrderedHashSet)
3337 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3338 CAST_ACCESSOR(PropertyCell)
3339 CAST_ACCESSOR(ScopeInfo)
3340 CAST_ACCESSOR(SeededNumberDictionary)
3341 CAST_ACCESSOR(SeqOneByteString)
3342 CAST_ACCESSOR(SeqString)
3343 CAST_ACCESSOR(SeqTwoByteString)
3344 CAST_ACCESSOR(SharedFunctionInfo)
3345 CAST_ACCESSOR(Simd128Value)
3346 CAST_ACCESSOR(SlicedString)
3347 CAST_ACCESSOR(Smi)
3348 CAST_ACCESSOR(String)
3349 CAST_ACCESSOR(StringTable)
3350 CAST_ACCESSOR(Struct)
3351 CAST_ACCESSOR(Symbol)
3352 CAST_ACCESSOR(Uint16x8)
3353 CAST_ACCESSOR(Uint32x4)
3354 CAST_ACCESSOR(Uint8x16)
3355 CAST_ACCESSOR(UnseededNumberDictionary)
3356 CAST_ACCESSOR(WeakCell)
3357 CAST_ACCESSOR(WeakFixedArray)
3358 CAST_ACCESSOR(WeakHashTable)
3362 template <class Traits>
3363 STATIC_CONST_MEMBER_DEFINITION const InstanceType
3364 FixedTypedArray<Traits>::kInstanceType;
3367 template <class Traits>
3368 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3369 SLOW_DCHECK(object->IsHeapObject() &&
3370 HeapObject::cast(object)->map()->instance_type() ==
3371 Traits::kInstanceType);
3372 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3373 }
3376 template <class Traits>
3377 const FixedTypedArray<Traits>*
3378 FixedTypedArray<Traits>::cast(const Object* object) {
3379 SLOW_DCHECK(object->IsHeapObject() &&
3380 HeapObject::cast(object)->map()->instance_type() ==
3381 Traits::kInstanceType);
3382 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3383 }
3386 #define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
3387 type* DeoptimizationInputData::name() { \
3388 return type::cast(get(k##name##Index)); \
3389 } \
3390 void DeoptimizationInputData::Set##name(type* value) { \
3391 set(k##name##Index, value); \
3392 }
3394 DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
3395 DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
3396 DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
3397 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
3398 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
3399 DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
3400 DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
3401 DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
3403 #undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3406 #define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type) \
3407 type* DeoptimizationInputData::name(int i) { \
3408 return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
3409 } \
3410 void DeoptimizationInputData::Set##name(int i, type* value) { \
3411 set(IndexForEntry(i) + k##name##Offset, value); \
3412 }
3414 DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
3415 DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
3416 DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
3417 DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
3419 #undef DEFINE_DEOPT_ENTRY_ACCESSORS
3422 BailoutId DeoptimizationInputData::AstId(int i) {
3423 return BailoutId(AstIdRaw(i)->value());
3424 }
3427 void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
3428 SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
3429 }
3432 int DeoptimizationInputData::DeoptCount() {
3433 return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
3434 }
3437 int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
3440 BailoutId DeoptimizationOutputData::AstId(int index) {
3441 return BailoutId(Smi::cast(get(index * 2))->value());
3442 }
3445 void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
3446 set(index * 2, Smi::FromInt(id.ToInt()));
3447 }
3450 Smi* DeoptimizationOutputData::PcAndState(int index) {
3451 return Smi::cast(get(1 + index * 2));
3452 }
3455 void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
3456 set(1 + index * 2, offset);
3457 }
3460 Object* LiteralsArray::get(int index) const { return FixedArray::get(index); }
3463 void LiteralsArray::set(int index, Object* value) {
3464 FixedArray::set(index, value);
3465 }
3468 void LiteralsArray::set(int index, Smi* value) {
3469 FixedArray::set(index, value);
3470 }
3473 void LiteralsArray::set(int index, Object* value, WriteBarrierMode mode) {
3474 FixedArray::set(index, value, mode);
3475 }
3478 LiteralsArray* LiteralsArray::cast(Object* object) {
3479 SLOW_DCHECK(object->IsLiteralsArray());
3480 return reinterpret_cast<LiteralsArray*>(object);
3481 }
3484 TypeFeedbackVector* LiteralsArray::feedback_vector() const {
3485 return TypeFeedbackVector::cast(get(kVectorIndex));
3486 }
3489 void LiteralsArray::set_feedback_vector(TypeFeedbackVector* vector) {
3490 set(kVectorIndex, vector);
3491 }
3494 Object* LiteralsArray::literal(int literal_index) const {
3495 return get(kFirstLiteralIndex + literal_index);
3496 }
3499 void LiteralsArray::set_literal(int literal_index, Object* literal) {
3500 set(kFirstLiteralIndex + literal_index, literal);
3501 }
3504 int LiteralsArray::literals_count() const {
3505 return length() - kFirstLiteralIndex;
3506 }
3509 void HandlerTable::SetRangeStart(int index, int value) {
3510 set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
3511 }
3514 void HandlerTable::SetRangeEnd(int index, int value) {
3515 set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
3516 }
3519 void HandlerTable::SetRangeHandler(int index, int offset,
3520 CatchPrediction prediction) {
3521 int value = HandlerOffsetField::encode(offset) |
3522 HandlerPredictionField::encode(prediction);
3523 set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
3524 }
3527 void HandlerTable::SetRangeDepth(int index, int value) {
3528 set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
3529 }
3532 void HandlerTable::SetReturnOffset(int index, int value) {
3533 set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
3534 }
3537 void HandlerTable::SetReturnHandler(int index, int offset,
3538 CatchPrediction prediction) {
3539 int value = HandlerOffsetField::encode(offset) |
3540 HandlerPredictionField::encode(prediction);
3541 set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
3542 }
3545 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3546 STRUCT_LIST(MAKE_STRUCT_CAST)
3547 #undef MAKE_STRUCT_CAST
3550 template <typename Derived, typename Shape, typename Key>
3551 HashTable<Derived, Shape, Key>*
3552 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3553 SLOW_DCHECK(obj->IsHashTable());
3554 return reinterpret_cast<HashTable*>(obj);
3555 }
3558 template <typename Derived, typename Shape, typename Key>
3559 const HashTable<Derived, Shape, Key>*
3560 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3561 SLOW_DCHECK(obj->IsHashTable());
3562 return reinterpret_cast<const HashTable*>(obj);
3563 }
3566 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3567 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3569 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3570 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3572 SMI_ACCESSORS(String, length, kLengthOffset)
3573 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3576 int FreeSpace::Size() { return size(); }
3579 FreeSpace* FreeSpace::next() {
3580 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3581 (!GetHeap()->deserialization_complete() && map() == NULL));
3582 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3583 return reinterpret_cast<FreeSpace*>(
3584 Memory::Address_at(address() + kNextOffset));
3585 }
3588 FreeSpace** FreeSpace::next_address() {
3589 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3590 (!GetHeap()->deserialization_complete() && map() == NULL));
3591 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3592 return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
3593 }
3596 void FreeSpace::set_next(FreeSpace* next) {
3597 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3598 (!GetHeap()->deserialization_complete() && map() == NULL));
3599 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3600 base::NoBarrier_Store(
3601 reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
3602 reinterpret_cast<base::AtomicWord>(next));
3603 }
3606 FreeSpace* FreeSpace::cast(HeapObject* o) {
3607 SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
3608 return reinterpret_cast<FreeSpace*>(o);
3609 }
3612 uint32_t Name::hash_field() {
3613 return READ_UINT32_FIELD(this, kHashFieldOffset);
3614 }
3617 void Name::set_hash_field(uint32_t value) {
3618 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3619 #if V8_HOST_ARCH_64_BIT
3620 #if V8_TARGET_LITTLE_ENDIAN
3621 WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
3622 #else
3623 WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
3624 #endif
3625 #endif
3626 }
3629 bool Name::Equals(Name* other) {
3630 if (other == this) return true;
3631 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3632 this->IsSymbol() || other->IsSymbol()) {
3633 return false;
3634 }
3635 return String::cast(this)->SlowEquals(String::cast(other));
3636 }
3639 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3640 if (one.is_identical_to(two)) return true;
3641 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3642 one->IsSymbol() || two->IsSymbol()) {
3643 return false;
3644 }
3645 return String::SlowEquals(Handle<String>::cast(one),
3646 Handle<String>::cast(two));
3647 }
3650 ACCESSORS(Symbol, name, Object, kNameOffset)
3651 SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
3652 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3655 bool String::Equals(String* other) {
3656 if (other == this) return true;
3657 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3658 return false;
3659 }
3660 return SlowEquals(other);
3661 }
3664 bool String::Equals(Handle<String> one, Handle<String> two) {
3665 if (one.is_identical_to(two)) return true;
3666 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3667 return false;
3668 }
3669 return SlowEquals(one, two);
3670 }
3673 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3674 if (!string->IsConsString()) return string;
3675 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3676 if (cons->IsFlat()) return handle(cons->first());
3677 return SlowFlatten(cons, pretenure);
3678 }
3681 Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
3682 if (name->IsSymbol()) return name;
3683 return String::Flatten(Handle<String>::cast(name), pretenure);
3684 }
3687 uint16_t String::Get(int index) {
3688 DCHECK(index >= 0 && index < length());
3689 switch (StringShape(this).full_representation_tag()) {
3690 case kSeqStringTag | kOneByteStringTag:
3691 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3692 case kSeqStringTag | kTwoByteStringTag:
3693 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3694 case kConsStringTag | kOneByteStringTag:
3695 case kConsStringTag | kTwoByteStringTag:
3696 return ConsString::cast(this)->ConsStringGet(index);
3697 case kExternalStringTag | kOneByteStringTag:
3698 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3699 case kExternalStringTag | kTwoByteStringTag:
3700 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3701 case kSlicedStringTag | kOneByteStringTag:
3702 case kSlicedStringTag | kTwoByteStringTag:
3703 return SlicedString::cast(this)->SlicedStringGet(index);
3704 default:
3705 break;
3706 }
3708 UNREACHABLE();
3709 return 0;
3710 }
3713 void String::Set(int index, uint16_t value) {
3714 DCHECK(index >= 0 && index < length());
3715 DCHECK(StringShape(this).IsSequential());
3717 return this->IsOneByteRepresentation()
3718 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3719 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3720 }
3723 bool String::IsFlat() {
3724 if (!StringShape(this).IsCons()) return true;
3725 return ConsString::cast(this)->second()->length() == 0;
3726 }
3729 String* String::GetUnderlying() {
3730 // Giving direct access to underlying string only makes sense if the
3731 // wrapping string is already flattened.
3732 DCHECK(this->IsFlat());
3733 DCHECK(StringShape(this).IsIndirect());
3734 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3735 const int kUnderlyingOffset = SlicedString::kParentOffset;
3736 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3737 }
3740 template<class Visitor>
3741 ConsString* String::VisitFlat(Visitor* visitor,
3742 String* string,
3743 const int offset) {
3744 int slice_offset = offset;
3745 const int length = string->length();
3746 DCHECK(offset <= length);
3747 while (true) {
3748 int32_t type = string->map()->instance_type();
3749 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3750 case kSeqStringTag | kOneByteStringTag:
3751 visitor->VisitOneByteString(
3752 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3753 length - offset);
3754 return NULL;
3756 case kSeqStringTag | kTwoByteStringTag:
3757 visitor->VisitTwoByteString(
3758 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3759 length - offset);
3760 return NULL;
3762 case kExternalStringTag | kOneByteStringTag:
3763 visitor->VisitOneByteString(
3764 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3765 length - offset);
3766 return NULL;
3768 case kExternalStringTag | kTwoByteStringTag:
3769 visitor->VisitTwoByteString(
3770 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3771 length - offset);
3772 return NULL;
3774 case kSlicedStringTag | kOneByteStringTag:
3775 case kSlicedStringTag | kTwoByteStringTag: {
3776 SlicedString* slicedString = SlicedString::cast(string);
3777 slice_offset += slicedString->offset();
3778 string = slicedString->parent();
3779 continue;
3780 }
3782 case kConsStringTag | kOneByteStringTag:
3783 case kConsStringTag | kTwoByteStringTag:
3784 return ConsString::cast(string);
3786 default:
3787 UNREACHABLE();
3788 return NULL;
3789 }
3790 }
3791 }
3794 template <>
3795 inline Vector<const uint8_t> String::GetCharVector() {
3796 String::FlatContent flat = GetFlatContent();
3797 DCHECK(flat.IsOneByte());
3798 return flat.ToOneByteVector();
3799 }
3802 template <>
3803 inline Vector<const uc16> String::GetCharVector() {
3804 String::FlatContent flat = GetFlatContent();
3805 DCHECK(flat.IsTwoByte());
3806 return flat.ToUC16Vector();
3807 }
3810 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3811 DCHECK(index >= 0 && index < length());
3812 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3813 }
3816 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3817 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3818 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3819 static_cast<byte>(value));
3820 }
3823 Address SeqOneByteString::GetCharsAddress() {
3824 return FIELD_ADDR(this, kHeaderSize);
3825 }
3828 uint8_t* SeqOneByteString::GetChars() {
3829 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3830 }
3833 Address SeqTwoByteString::GetCharsAddress() {
3834 return FIELD_ADDR(this, kHeaderSize);
3835 }
3838 uc16* SeqTwoByteString::GetChars() {
3839 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3840 }
3843 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3844 DCHECK(index >= 0 && index < length());
3845 return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
3846 }
3849 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3850 DCHECK(index >= 0 && index < length());
3851 WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
3852 }
3855 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3856 return SizeFor(length());
3857 }
3860 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3861 return SizeFor(length());
3862 }
3865 String* SlicedString::parent() {
3866 return String::cast(READ_FIELD(this, kParentOffset));
3867 }
3870 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3871 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3872 WRITE_FIELD(this, kParentOffset, parent);
3873 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3874 }
3877 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3880 String* ConsString::first() {
3881 return String::cast(READ_FIELD(this, kFirstOffset));
3882 }
3885 Object* ConsString::unchecked_first() {
3886 return READ_FIELD(this, kFirstOffset);
3887 }
3890 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3891 WRITE_FIELD(this, kFirstOffset, value);
3892 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3893 }
3896 String* ConsString::second() {
3897 return String::cast(READ_FIELD(this, kSecondOffset));
3898 }
3901 Object* ConsString::unchecked_second() {
3902 return READ_FIELD(this, kSecondOffset);
3903 }
3906 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3907 WRITE_FIELD(this, kSecondOffset, value);
3908 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3909 }
3912 bool ExternalString::is_short() {
3913 InstanceType type = map()->instance_type();
3914 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3915 }
3918 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3919 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3920 }
3923 void ExternalOneByteString::update_data_cache() {
3924 if (is_short()) return;
3925 const char** data_field =
3926 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3927 *data_field = resource()->data();
3928 }
3931 void ExternalOneByteString::set_resource(
3932 const ExternalOneByteString::Resource* resource) {
3933 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3934 *reinterpret_cast<const Resource**>(
3935 FIELD_ADDR(this, kResourceOffset)) = resource;
3936 if (resource != NULL) update_data_cache();
3937 }
3940 const uint8_t* ExternalOneByteString::GetChars() {
3941 return reinterpret_cast<const uint8_t*>(resource()->data());
3942 }
3945 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3946 DCHECK(index >= 0 && index < length());
3947 return GetChars()[index];
3948 }
3951 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3952 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3953 }
3956 void ExternalTwoByteString::update_data_cache() {
3957 if (is_short()) return;
3958 const uint16_t** data_field =
3959 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3960 *data_field = resource()->data();
3961 }
3964 void ExternalTwoByteString::set_resource(
3965 const ExternalTwoByteString::Resource* resource) {
3966 *reinterpret_cast<const Resource**>(
3967 FIELD_ADDR(this, kResourceOffset)) = resource;
3968 if (resource != NULL) update_data_cache();
3969 }
3972 const uint16_t* ExternalTwoByteString::GetChars() {
3973 return resource()->data();
3974 }
3977 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3978 DCHECK(index >= 0 && index < length());
3979 return GetChars()[index];
3980 }
3983 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3984 int start) {
3985 return GetChars() + start;
3986 }
3989 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3992 void ConsStringIterator::PushLeft(ConsString* string) {
3993 frames_[depth_++ & kDepthMask] = string;
3994 }
3997 void ConsStringIterator::PushRight(ConsString* string) {
3998 // Inserting the right node replaces the left node.
3999 frames_[(depth_-1) & kDepthMask] = string;
4000 }
4003 void ConsStringIterator::AdjustMaximumDepth() {
4004 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
4005 }
4008 void ConsStringIterator::Pop() {
4009 DCHECK(depth_ > 0);
4010 DCHECK(depth_ <= maximum_depth_);
4011 depth_--;
4012 }
4015 uint16_t StringCharacterStream::GetNext() {
4016 DCHECK(buffer8_ != NULL && end_ != NULL);
4017 // Advance cursor if needed.
4018 if (buffer8_ == end_) HasMore();
4019 DCHECK(buffer8_ < end_);
4020 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
4021 }
4024 StringCharacterStream::StringCharacterStream(String* string, int offset)
4025 : is_one_byte_(false) {
4026 Reset(string, offset);
4027 }
4030 void StringCharacterStream::Reset(String* string, int offset) {
4031 buffer8_ = NULL;
4032 end_ = NULL;
4033 ConsString* cons_string = String::VisitFlat(this, string, offset);
4034 iter_.Reset(cons_string, offset);
4035 if (cons_string != NULL) {
4036 string = iter_.Next(&offset);
4037 if (string != NULL) String::VisitFlat(this, string, offset);
4038 }
4039 }
4042 bool StringCharacterStream::HasMore() {
4043 if (buffer8_ != end_) return true;
4044 int offset;
4045 String* string = iter_.Next(&offset);
4046 DCHECK_EQ(offset, 0);
4047 if (string == NULL) return false;
4048 String::VisitFlat(this, string);
4049 DCHECK(buffer8_ != end_);
4050 return true;
4051 }
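// Minimal usage sketch (hypothetical caller, not part of this file):
//
//   StringCharacterStream stream(string, 0);
//   while (stream.HasMore()) {
//     uint16_t c = stream.GetNext();
//     // consume c
//   }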
4054 void StringCharacterStream::VisitOneByteString(
4055 const uint8_t* chars, int length) {
4056 is_one_byte_ = true;
4057 buffer8_ = chars;
4058 end_ = chars + length;
4059 }
4062 void StringCharacterStream::VisitTwoByteString(
4063 const uint16_t* chars, int length) {
4064 is_one_byte_ = false;
4065 buffer8_ = reinterpret_cast<const uint8_t*>(chars);
4066 end_ = reinterpret_cast<const uint8_t*>(chars + length);
4067 }
4070 int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
4073 byte ByteArray::get(int index) {
4074 DCHECK(index >= 0 && index < this->length());
4075 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4079 void ByteArray::set(int index, byte value) {
4080 DCHECK(index >= 0 && index < this->length());
4081 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4085 int ByteArray::get_int(int index) {
4086 DCHECK(index >= 0 && (index * kIntSize) < this->length());
4087 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
4091 ByteArray* ByteArray::FromDataStartAddress(Address address) {
4092 DCHECK_TAG_ALIGNED(address);
4093 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
4097 int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
4100 Address ByteArray::GetDataStartAddress() {
4101 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
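// Illustrative round trip (not in the original source): GetDataStartAddress
// and FromDataStartAddress are inverses, both adjusting by kHeaderSize and by
// the kHeapObjectTag carried by tagged heap pointers:
//   Address data = byte_array->GetDataStartAddress();
//   DCHECK_EQ(byte_array, ByteArray::FromDataStartAddress(data));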
4105 void BytecodeArray::BytecodeArrayIterateBody(ObjectVisitor* v) {
4106 IteratePointer(v, kConstantPoolOffset);
4110 byte BytecodeArray::get(int index) {
4111 DCHECK(index >= 0 && index < this->length());
4112 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4116 void BytecodeArray::set(int index, byte value) {
4117 DCHECK(index >= 0 && index < this->length());
4118 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4122 void BytecodeArray::set_frame_size(int frame_size) {
4123 DCHECK_GE(frame_size, 0);
4124 DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
4125 WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
4129 int BytecodeArray::frame_size() const {
4130 return READ_INT_FIELD(this, kFrameSizeOffset);
4134 int BytecodeArray::register_count() const {
4135 return frame_size() / kPointerSize;
4139 void BytecodeArray::set_parameter_count(int number_of_parameters) {
4140 DCHECK_GE(number_of_parameters, 0);
4141 // Parameter count is stored as the size on stack of the parameters to allow
4142 // it to be used directly by generated code.
4143 WRITE_INT_FIELD(this, kParameterSizeOffset,
4144 (number_of_parameters << kPointerSizeLog2));
4148 int BytecodeArray::parameter_count() const {
4149 // Parameter count is stored as the size on stack of the parameters to allow
4150 // it to be used directly by generated code.
4151 return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
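// Worked example (illustrative): on a 64-bit target (kPointerSizeLog2 == 3),
// set_parameter_count(2) stores 2 << 3 == 16, i.e. the byte size of the
// parameter area on the stack, and parameter_count() shifts it back down to 2.
// Generated code can therefore read the raw field as a stack size without any
// extra scaling.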
4155 ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
4158 Address BytecodeArray::GetFirstBytecodeAddress() {
4159 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4163 int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4166 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
4169 void* FixedTypedArrayBase::external_pointer() const {
4170 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
4171 return reinterpret_cast<void*>(ptr);
4175 void FixedTypedArrayBase::set_external_pointer(void* value,
4176 WriteBarrierMode mode) {
4177 intptr_t ptr = reinterpret_cast<intptr_t>(value);
4178 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
4182 void* FixedTypedArrayBase::DataPtr() {
4183 return reinterpret_cast<void*>(
4184 reinterpret_cast<intptr_t>(base_pointer()) +
4185 reinterpret_cast<intptr_t>(external_pointer()));
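// DataPtr() adds the two fields as a (base, offset) pair. A sketch of the
// apparent invariant (an assumption inferred from the Smi::FromInt(0) check in
// DataSize below, not stated in this file): with an on-heap backing store,
// base_pointer() is the array itself and external_pointer() holds a small
// offset, while with an off-heap store base_pointer() is Smi zero and
// external_pointer() holds the absolute address of the buffer.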
4189 int FixedTypedArrayBase::ElementSize(InstanceType type) {
4190 int element_size;
4191 switch (type) {
4192 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4193 case FIXED_##TYPE##_ARRAY_TYPE: \
4194 element_size = size; \
4195 break;
4197 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4198 #undef TYPED_ARRAY_CASE
4199 default:
4200 UNREACHABLE();
4201 return 0;
4202 }
4203 return element_size;
4207 int FixedTypedArrayBase::DataSize(InstanceType type) {
4208 if (base_pointer() == Smi::FromInt(0)) return 0;
4209 return length() * ElementSize(type);
4213 int FixedTypedArrayBase::DataSize() {
4214 return DataSize(map()->instance_type());
4218 int FixedTypedArrayBase::size() {
4219 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4223 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4224 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4228 int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
4229 return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
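// Worked example (illustrative): for a FIXED_INT32_ARRAY_TYPE of length 3,
// ElementSize is 4, so the data payload is 12 bytes and TypedArraySize returns
// OBJECT_POINTER_ALIGN(kDataOffset + 12), rounding the total up to the next
// kPointerSize boundary as required for heap object sizes.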
4233 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4236 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4239 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4242 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4245 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4248 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4251 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4254 float Float32ArrayTraits::defaultValue() {
4255 return std::numeric_limits<float>::quiet_NaN();
4259 double Float64ArrayTraits::defaultValue() {
4260 return std::numeric_limits<double>::quiet_NaN();
4264 template <class Traits>
4265 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4266 DCHECK((index >= 0) && (index < this->length()));
4267 ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4268 return ptr[index];
4272 template <class Traits>
4273 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4274 DCHECK((index >= 0) && (index < this->length()));
4275 ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4276 ptr[index] = value;
4280 template <class Traits>
4281 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4282 return static_cast<ElementType>(value);
4286 template <>
4287 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4288 if (value < 0) return 0;
4289 if (value > 0xFF) return 0xFF;
4290 return static_cast<uint8_t>(value);
4294 template <class Traits>
4295 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4296     double value) {
4297 return static_cast<ElementType>(DoubleToInt32(value));
4301 template <>
4302 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4303 // Handle NaNs and values less than zero, which clamp to zero.
4304 if (!(value > 0)) return 0;
4305 if (value > 0xFF) return 0xFF;
4306 return static_cast<uint8_t>(lrint(value));
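// Example values for the clamped conversion above (illustrative):
//   from_double(-1.5) == 0     // !(value > 0) catches negatives...
//   from_double(NAN)  == 0     // ...and NaN, since NaN > 0 is false.
//   from_double(3.5)  == 4     // lrint rounds halfway cases to even.
//   from_double(300)  == 0xFF  // clamped to the maximum.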
4310 template <>
4311 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4312 return static_cast<float>(value);
4316 template <>
4317 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4318 return value;
4322 template <class Traits>
4323 Handle<Object> FixedTypedArray<Traits>::get(
4324     Handle<FixedTypedArray<Traits> > array,
4325     int index) {
4326 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4330 template <class Traits>
4331 void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
4332 ElementType cast_value = Traits::defaultValue();
4333 if (value->IsSmi()) {
4334 int int_value = Smi::cast(value)->value();
4335 cast_value = from_int(int_value);
4336 } else if (value->IsHeapNumber()) {
4337 double double_value = HeapNumber::cast(value)->value();
4338 cast_value = from_double(double_value);
4339 } else {
4340 // Clamp undefined to the default value. All other types have been
4341 // converted to a number type further up in the call chain.
4342 DCHECK(value->IsUndefined());
4343 }
4344 set(index, cast_value);
4348 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4349 return handle(Smi::FromInt(scalar), isolate);
4353 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4354     uint8_t scalar) {
4355 return handle(Smi::FromInt(scalar), isolate);
4359 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4360 return handle(Smi::FromInt(scalar), isolate);
4364 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4365 return handle(Smi::FromInt(scalar), isolate);
4369 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4370 return handle(Smi::FromInt(scalar), isolate);
4374 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4375 return isolate->factory()->NewNumberFromUint(scalar);
4379 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4380 return isolate->factory()->NewNumberFromInt(scalar);
4384 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4385 return isolate->factory()->NewNumber(scalar);
4389 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4390 return isolate->factory()->NewNumber(scalar);
4394 int Map::visitor_id() {
4395 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4399 void Map::set_visitor_id(int id) {
4400 DCHECK(0 <= id && id < 256);
4401 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4405 int Map::instance_size() {
4406 return NOBARRIER_READ_BYTE_FIELD(
4407 this, kInstanceSizeOffset) << kPointerSizeLog2;
4411 int Map::inobject_properties_or_constructor_function_index() {
4412 return READ_BYTE_FIELD(this,
4413 kInObjectPropertiesOrConstructorFunctionIndexOffset);
4417 void Map::set_inobject_properties_or_constructor_function_index(int value) {
4418 DCHECK(0 <= value && value < 256);
4419 WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
4420 static_cast<byte>(value));
4424 int Map::GetInObjectProperties() {
4425 DCHECK(IsJSObjectMap());
4426 return inobject_properties_or_constructor_function_index();
4430 void Map::SetInObjectProperties(int value) {
4431 DCHECK(IsJSObjectMap());
4432 set_inobject_properties_or_constructor_function_index(value);
4436 int Map::GetConstructorFunctionIndex() {
4437 DCHECK(IsPrimitiveMap());
4438 return inobject_properties_or_constructor_function_index();
4442 void Map::SetConstructorFunctionIndex(int value) {
4443 DCHECK(IsPrimitiveMap());
4444 set_inobject_properties_or_constructor_function_index(value);
4448 int Map::GetInObjectPropertyOffset(int index) {
4449 // Adjust for the number of properties stored in the object.
4450 index -= GetInObjectProperties();
4451 DCHECK(index <= 0);
4452 return instance_size() + (index * kPointerSize);
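// Worked example (illustrative): in-object properties occupy the trailing
// words of the instance. With instance_size() == 64, two in-object properties
// and kPointerSize == 8, property 0 lives at 64 + (0 - 2) * 8 == 48 and
// property 1 at 56, i.e. the last two slots of the object.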
4456 Handle<Map> Map::CopyInstallDescriptorsForTesting(
4457     Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
4458     Handle<LayoutDescriptor> layout_descriptor) {
4459 return CopyInstallDescriptors(map, new_descriptor, descriptors,
4460                               layout_descriptor);
4464 int HeapObject::SizeFromMap(Map* map) {
4465 int instance_size = map->instance_size();
4466 if (instance_size != kVariableSizeSentinel) return instance_size;
4467 // Only inline the most frequent cases.
4468 InstanceType instance_type = map->instance_type();
4469 if (instance_type == FIXED_ARRAY_TYPE) {
4470 return FixedArray::BodyDescriptor::SizeOf(map, this);
4471 }
4472 if (instance_type == ONE_BYTE_STRING_TYPE ||
4473     instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4474 // Strings may get concurrently truncated, hence we have to access their
4475 // length in a synchronized way.
4476 return SeqOneByteString::SizeFor(
4477     reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
4478 }
4479 if (instance_type == BYTE_ARRAY_TYPE) {
4480 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4481 }
4482 if (instance_type == BYTECODE_ARRAY_TYPE) {
4483 return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
4484 }
4485 if (instance_type == FREE_SPACE_TYPE) {
4486 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4487 }
4488 if (instance_type == STRING_TYPE ||
4489     instance_type == INTERNALIZED_STRING_TYPE) {
4490 // Strings may get concurrently truncated, hence we have to access their
4491 // length in a synchronized way.
4492 return SeqTwoByteString::SizeFor(
4493     reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
4494 }
4495 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4496 return FixedDoubleArray::SizeFor(
4497     reinterpret_cast<FixedDoubleArray*>(this)->length());
4498 }
4499 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4500     instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4501 return reinterpret_cast<FixedTypedArrayBase*>(
4502     this)->TypedArraySize(instance_type);
4503 }
4504 DCHECK(instance_type == CODE_TYPE);
4505 return reinterpret_cast<Code*>(this)->CodeSize();
4509 void Map::set_instance_size(int value) {
4510 DCHECK_EQ(0, value & (kPointerSize - 1));
4511 value >>= kPointerSizeLog2;
4512 DCHECK(0 <= value && value < 256);
4513 NOBARRIER_WRITE_BYTE_FIELD(
4514 this, kInstanceSizeOffset, static_cast<byte>(value));
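// The instance size is stored in a single byte, scaled down by
// kPointerSizeLog2, which is why the setter asserts pointer alignment and a
// post-shift value below 256. A consequence (illustrative): on a 64-bit
// target the largest representable instance size is 255 << 3 == 2040 bytes.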
4518 void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4521 InstanceType Map::instance_type() {
4522 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4526 void Map::set_instance_type(InstanceType value) {
4527 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4531 int Map::unused_property_fields() {
4532 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4536 void Map::set_unused_property_fields(int value) {
4537 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4541 byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
4544 void Map::set_bit_field(byte value) {
4545 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4549 byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
4552 void Map::set_bit_field2(byte value) {
4553 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4557 void Map::set_non_instance_prototype(bool value) {
4558 if (value) {
4559 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4560 } else {
4561 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4562 }
4566 bool Map::has_non_instance_prototype() {
4567 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4571 void Map::set_is_constructor(bool value) {
4572 if (value) {
4573 set_bit_field(bit_field() | (1 << kIsConstructor));
4574 } else {
4575 set_bit_field(bit_field() & ~(1 << kIsConstructor));
4576 }
4580 bool Map::is_constructor() const {
4581 return ((1 << kIsConstructor) & bit_field()) != 0;
4585 void Map::set_is_hidden_prototype() {
4586 set_bit_field3(IsHiddenPrototype::update(bit_field3(), true));
4590 bool Map::is_hidden_prototype() const {
4591 return IsHiddenPrototype::decode(bit_field3());
4595 void Map::set_has_indexed_interceptor() {
4596 set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
4600 bool Map::has_indexed_interceptor() {
4601 return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
4605 void Map::set_is_undetectable() {
4606 set_bit_field(bit_field() | (1 << kIsUndetectable));
4610 bool Map::is_undetectable() {
4611 return ((1 << kIsUndetectable) & bit_field()) != 0;
4615 void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }
4617 bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }
4620 void Map::set_has_named_interceptor() {
4621 set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
4625 bool Map::has_named_interceptor() {
4626 return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
4630 void Map::set_is_access_check_needed(bool access_check_needed) {
4631 if (access_check_needed) {
4632 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4633 } else {
4634 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4635 }
4639 bool Map::is_access_check_needed() {
4640 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4644 void Map::set_is_extensible(bool value) {
4645 if (value) {
4646 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4647 } else {
4648 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4649 }
4652 bool Map::is_extensible() {
4653 return ((1 << kIsExtensible) & bit_field2()) != 0;
4657 void Map::set_is_prototype_map(bool value) {
4658 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4661 bool Map::is_prototype_map() const {
4662 return IsPrototypeMapBits::decode(bit_field2());
4666 void Map::set_elements_kind(ElementsKind elements_kind) {
4667 DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
4668 DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
4669 set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
4670 DCHECK(this->elements_kind() == elements_kind);
4674 ElementsKind Map::elements_kind() {
4675 return Map::ElementsKindBits::decode(bit_field2());
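// ElementsKindBits follows the BitField pattern used for all of bit_field2
// and bit_field3 in this file: update() packs a value without disturbing the
// neighbouring fields and decode() extracts it again. Minimal sketch:
//   map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
//   DCHECK(map->has_fast_double_elements());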
4679 bool Map::has_fast_smi_elements() {
4680 return IsFastSmiElementsKind(elements_kind());
4683 bool Map::has_fast_object_elements() {
4684 return IsFastObjectElementsKind(elements_kind());
4687 bool Map::has_fast_smi_or_object_elements() {
4688 return IsFastSmiOrObjectElementsKind(elements_kind());
4691 bool Map::has_fast_double_elements() {
4692 return IsFastDoubleElementsKind(elements_kind());
4695 bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
4697 bool Map::has_sloppy_arguments_elements() {
4698 return IsSloppyArgumentsElements(elements_kind());
4701 bool Map::has_fixed_typed_array_elements() {
4702 return IsFixedTypedArrayElementsKind(elements_kind());
4705 bool Map::has_dictionary_elements() {
4706 return IsDictionaryElementsKind(elements_kind());
4710 void Map::set_dictionary_map(bool value) {
4711 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4712 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4713 set_bit_field3(new_bit_field3);
4717 bool Map::is_dictionary_map() {
4718 return DictionaryMap::decode(bit_field3());
4722 Code::Flags Code::flags() {
4723 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4727 void Map::set_owns_descriptors(bool owns_descriptors) {
4728 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4732 bool Map::owns_descriptors() {
4733 return OwnsDescriptors::decode(bit_field3());
4737 void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }
4740 bool Map::is_callable() const {
4741 return ((1 << kIsCallable) & bit_field()) != 0;
4745 void Map::deprecate() {
4746 set_bit_field3(Deprecated::update(bit_field3(), true));
4750 bool Map::is_deprecated() {
4751 return Deprecated::decode(bit_field3());
4755 void Map::set_migration_target(bool value) {
4756 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4760 bool Map::is_migration_target() {
4761 return IsMigrationTarget::decode(bit_field3());
4765 void Map::set_is_strong() {
4766 set_bit_field3(IsStrong::update(bit_field3(), true));
4770 bool Map::is_strong() {
4771 return IsStrong::decode(bit_field3());
4775 void Map::set_counter(int value) {
4776 set_bit_field3(Counter::update(bit_field3(), value));
4780 int Map::counter() { return Counter::decode(bit_field3()); }
4783 void Map::mark_unstable() {
4784 set_bit_field3(IsUnstable::update(bit_field3(), true));
4788 bool Map::is_stable() {
4789 return !IsUnstable::decode(bit_field3());
4793 bool Map::has_code_cache() {
4794 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4798 bool Map::CanBeDeprecated() {
4799 int descriptor = LastAdded();
4800 for (int i = 0; i <= descriptor; i++) {
4801 PropertyDetails details = instance_descriptors()->GetDetails(i);
4802 if (details.representation().IsNone()) return true;
4803 if (details.representation().IsSmi()) return true;
4804 if (details.representation().IsDouble()) return true;
4805 if (details.representation().IsHeapObject()) return true;
4806 if (details.type() == DATA_CONSTANT) return true;
4807 }
4808 return false;
4812 void Map::NotifyLeafMapLayoutChange() {
4813 if (is_stable()) {
4814 mark_unstable();
4815 dependent_code()->DeoptimizeDependentCodeGroup(
4816     GetIsolate(),
4817     DependentCode::kPrototypeCheckGroup);
4822 bool Map::CanTransition() {
4823 // Only JSObject and subtypes have map transitions and back pointers.
4824 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
4825 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4829 bool Map::IsPrimitiveMap() {
4830 STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
4831 return instance_type() <= LAST_PRIMITIVE_TYPE;
4833 bool Map::IsJSObjectMap() {
4834 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
4835 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4837 bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
4838 bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
4839 bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
4840 bool Map::IsJSProxyMap() {
4841 InstanceType type = instance_type();
4842 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
4844 bool Map::IsJSGlobalProxyMap() {
4845 return instance_type() == JS_GLOBAL_PROXY_TYPE;
4847 bool Map::IsJSGlobalObjectMap() {
4848 return instance_type() == JS_GLOBAL_OBJECT_TYPE;
4850 bool Map::IsGlobalObjectMap() {
4851 const InstanceType type = instance_type();
4852 return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
4856 bool Map::CanOmitMapChecks() {
4857 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4861 int DependentCode::number_of_entries(DependencyGroup group) {
4862 if (length() == 0) return 0;
4863 return Smi::cast(get(group))->value();
4867 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4868 set(group, Smi::FromInt(value));
4872 void DependentCode::set_object_at(int i, Object* object) {
4873 set(kCodesStartIndex + i, object);
4877 Object* DependentCode::object_at(int i) {
4878 return get(kCodesStartIndex + i);
4882 void DependentCode::clear_at(int i) {
4883 set_undefined(kCodesStartIndex + i);
4887 void DependentCode::copy(int from, int to) {
4888 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4892 void DependentCode::ExtendGroup(DependencyGroup group) {
4893 GroupStartIndexes starts(this);
4894 for (int g = kGroupCount - 1; g > group; g--) {
4895 if (starts.at(g) < starts.at(g + 1)) {
4896 copy(starts.at(g), starts.at(g + 1));
4902 void Code::set_flags(Code::Flags flags) {
4903 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4904 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4908 Code::Kind Code::kind() {
4909 return ExtractKindFromFlags(flags());
4913 bool Code::IsCodeStubOrIC() {
4914 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4915 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4916 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4917 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4918 kind() == TO_BOOLEAN_IC;
4922 bool Code::IsJavaScriptCode() {
4923 if (kind() == FUNCTION || kind() == OPTIMIZED_FUNCTION) {
4924 return true;
4925 }
4926 Handle<Code> interpreter_entry =
4927     GetIsolate()->builtins()->InterpreterEntryTrampoline();
4928 return interpreter_entry.location() != nullptr && *interpreter_entry == this;
4932 InlineCacheState Code::ic_state() {
4933 InlineCacheState result = ExtractICStateFromFlags(flags());
4934 // Only allow uninitialized or debugger states for non-IC code
4935 // objects. This is used in the debugger to determine whether or not
4936 // a call to a code object has been replaced with a debug break call.
4937 DCHECK(is_inline_cache_stub() ||
4938        result == UNINITIALIZED ||
4939        result == DEBUG_STUB);
4940 return result;
4944 ExtraICState Code::extra_ic_state() {
4945 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4946 return ExtractExtraICStateFromFlags(flags());
4950 Code::StubType Code::type() {
4951 return ExtractTypeFromFlags(flags());
4955 // For initialization.
4956 void Code::set_raw_kind_specific_flags1(int value) {
4957 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4961 void Code::set_raw_kind_specific_flags2(int value) {
4962 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4966 inline bool Code::is_crankshafted() {
4967 return IsCrankshaftedField::decode(
4968 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4972 inline bool Code::is_hydrogen_stub() {
4973 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4977 inline void Code::set_is_crankshafted(bool value) {
4978 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4979 int updated = IsCrankshaftedField::update(previous, value);
4980 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4984 inline bool Code::is_turbofanned() {
4985 return IsTurbofannedField::decode(
4986 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4990 inline void Code::set_is_turbofanned(bool value) {
4991 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4992 int updated = IsTurbofannedField::update(previous, value);
4993 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4997 inline bool Code::can_have_weak_objects() {
4998 DCHECK(kind() == OPTIMIZED_FUNCTION);
4999 return CanHaveWeakObjectsField::decode(
5000 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5004 inline void Code::set_can_have_weak_objects(bool value) {
5005 DCHECK(kind() == OPTIMIZED_FUNCTION);
5006 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5007 int updated = CanHaveWeakObjectsField::update(previous, value);
5008 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5012 bool Code::has_deoptimization_support() {
5013 DCHECK_EQ(FUNCTION, kind());
5014 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5015 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
5019 void Code::set_has_deoptimization_support(bool value) {
5020 DCHECK_EQ(FUNCTION, kind());
5021 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5022 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
5023 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
5027 bool Code::has_debug_break_slots() {
5028 DCHECK_EQ(FUNCTION, kind());
5029 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5030 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
5034 void Code::set_has_debug_break_slots(bool value) {
5035 DCHECK_EQ(FUNCTION, kind());
5036 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5037 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
5038 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
5042 bool Code::has_reloc_info_for_serialization() {
5043 DCHECK_EQ(FUNCTION, kind());
5044 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5045 return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
5049 void Code::set_has_reloc_info_for_serialization(bool value) {
5050 DCHECK_EQ(FUNCTION, kind());
5051 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
5052 flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
5053 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
5057 int Code::allow_osr_at_loop_nesting_level() {
5058 DCHECK_EQ(FUNCTION, kind());
5059 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5060 return AllowOSRAtLoopNestingLevelField::decode(fields);
5064 void Code::set_allow_osr_at_loop_nesting_level(int level) {
5065 DCHECK_EQ(FUNCTION, kind());
5066 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
5067 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5068 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
5069 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5073 int Code::profiler_ticks() {
5074 DCHECK_EQ(FUNCTION, kind());
5075 return ProfilerTicksField::decode(
5076 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5080 void Code::set_profiler_ticks(int ticks) {
5081 if (kind() == FUNCTION) {
5082 unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5083 unsigned updated = ProfilerTicksField::update(previous, ticks);
5084 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5089 int Code::builtin_index() {
5090 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
5094 void Code::set_builtin_index(int index) {
5095 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
5099 unsigned Code::stack_slots() {
5100 DCHECK(is_crankshafted());
5101 return StackSlotsField::decode(
5102 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5106 void Code::set_stack_slots(unsigned slots) {
5107 CHECK(slots <= (1 << kStackSlotsBitCount));
5108 DCHECK(is_crankshafted());
5109 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5110 int updated = StackSlotsField::update(previous, slots);
5111 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5115 unsigned Code::safepoint_table_offset() {
5116 DCHECK(is_crankshafted());
5117 return SafepointTableOffsetField::decode(
5118 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5122 void Code::set_safepoint_table_offset(unsigned offset) {
5123 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5124 DCHECK(is_crankshafted());
5125 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5126 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5127 int updated = SafepointTableOffsetField::update(previous, offset);
5128 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5132 unsigned Code::back_edge_table_offset() {
5133 DCHECK_EQ(FUNCTION, kind());
5134 return BackEdgeTableOffsetField::decode(
5135 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5139 void Code::set_back_edge_table_offset(unsigned offset) {
5140 DCHECK_EQ(FUNCTION, kind());
5141 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5142 offset = offset >> kPointerSizeLog2;
5143 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5144 int updated = BackEdgeTableOffsetField::update(previous, offset);
5145 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
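// Because the offset is pointer-aligned, it is stored scaled down by
// kPointerSizeLog2 to fit the BackEdgeTableOffsetField, and the getter above
// scales it back up. Illustrative round trip on a 64-bit target:
//   code->set_back_edge_table_offset(128);  // stores 128 >> 3 == 16
//   DCHECK_EQ(128u, code->back_edge_table_offset());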
5149 bool Code::back_edges_patched_for_osr() {
5150 DCHECK_EQ(FUNCTION, kind());
5151 return allow_osr_at_loop_nesting_level() > 0;
5155 uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5158 bool Code::has_function_cache() {
5159 DCHECK(kind() == STUB);
5160 return HasFunctionCacheField::decode(
5161 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5165 void Code::set_has_function_cache(bool flag) {
5166 DCHECK(kind() == STUB);
5167 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5168 int updated = HasFunctionCacheField::update(previous, flag);
5169 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5173 bool Code::marked_for_deoptimization() {
5174 DCHECK(kind() == OPTIMIZED_FUNCTION);
5175 return MarkedForDeoptimizationField::decode(
5176 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5180 void Code::set_marked_for_deoptimization(bool flag) {
5181 DCHECK(kind() == OPTIMIZED_FUNCTION);
5182 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5183 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5184 int updated = MarkedForDeoptimizationField::update(previous, flag);
5185 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5189 bool Code::is_inline_cache_stub() {
5190 Kind kind = this->kind();
5191 switch (kind) {
5192 #define CASE(name) case name: return true;
5193 IC_KIND_LIST(CASE)
5194 #undef CASE
5195 default: return false;
5196 }
5200 bool Code::is_keyed_stub() {
5201 return is_keyed_load_stub() || is_keyed_store_stub();
5205 bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
5206 bool Code::is_handler() { return kind() == HANDLER; }
5207 bool Code::is_load_stub() { return kind() == LOAD_IC; }
5208 bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
5209 bool Code::is_store_stub() { return kind() == STORE_IC; }
5210 bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
5211 bool Code::is_call_stub() { return kind() == CALL_IC; }
5212 bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
5213 bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
5214 bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
5215 bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
5216 bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
5219 bool Code::embeds_maps_weakly() {
5220 Kind k = kind();
5221 return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
5222         k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
5223        ic_state() == MONOMORPHIC;
5227 Address Code::constant_pool() {
5228 Address constant_pool = NULL;
5229 if (FLAG_enable_embedded_constant_pool) {
5230 int offset = constant_pool_offset();
5231 if (offset < instruction_size()) {
5232 constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5233 }
5234 }
5235 return constant_pool;
5239 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5240 ExtraICState extra_ic_state, StubType type,
5241 CacheHolderFlag holder) {
5242 // Compute the bit mask.
5243 unsigned int bits = KindField::encode(kind)
5244 | ICStateField::encode(ic_state)
5245 | TypeField::encode(type)
5246 | ExtraICStateField::encode(extra_ic_state)
5247 | CacheHolderField::encode(holder);
5248 return static_cast<Flags>(bits);
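// Round-trip sketch (illustrative; assumes the default arguments that
// objects.h declares for ComputeFlags): each Extract*FromFlags helper below
// simply decodes the field that ComputeFlags encoded.
//   Code::Flags f = Code::ComputeFlags(Code::STUB, MONOMORPHIC);
//   DCHECK_EQ(Code::STUB, Code::ExtractKindFromFlags(f));
//   DCHECK_EQ(MONOMORPHIC, Code::ExtractICStateFromFlags(f));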
5252 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5253                                           ExtraICState extra_ic_state,
5254                                           CacheHolderFlag holder,
5255                                           StubType type) {
5256 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
5260 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5261 CacheHolderFlag holder) {
5262 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
5266 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5267 return KindField::decode(flags);
5271 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5272 return ICStateField::decode(flags);
5276 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5277 return ExtraICStateField::decode(flags);
5281 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5282 return TypeField::decode(flags);
5286 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5287 return CacheHolderField::decode(flags);
5291 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5292 int bits = flags & ~TypeField::kMask;
5293 return static_cast<Flags>(bits);
5297 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5298 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5299 return static_cast<Flags>(bits);
5303 Code* Code::GetCodeFromTargetAddress(Address address) {
5304 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5305 // GetCodeFromTargetAddress might be called when marking objects during mark
5306 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5307 // Code::cast. Code::cast does not work when the object's map is
5308 // marked.
5309 Code* result = reinterpret_cast<Code*>(code);
5310 return result;
5314 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5315 return HeapObject::
5316     FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5320 bool Code::CanContainWeakObjects() {
5321 // is_turbofanned() implies !can_have_weak_objects().
5322 DCHECK(!is_optimized_code() || !is_turbofanned() || !can_have_weak_objects());
5323 return is_optimized_code() && can_have_weak_objects();
5327 bool Code::IsWeakObject(Object* object) {
5328 return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
5332 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5333 if (object->IsMap()) {
5334 return Map::cast(object)->CanTransition() &&
5335        FLAG_weak_embedded_maps_in_optimized_code;
5336 }
5337 if (object->IsCell()) {
5338 object = Cell::cast(object)->value();
5339 } else if (object->IsPropertyCell()) {
5340 object = PropertyCell::cast(object)->value();
5341 }
5342 if (object->IsJSObject() || object->IsJSProxy()) {
5343 // JSProxy is handled like JSObject because it can morph into one.
5344 return FLAG_weak_embedded_objects_in_optimized_code;
5345 }
5346 if (object->IsFixedArray()) {
5347 // Contexts of inlined functions are embedded in optimized code.
5348 Map* map = HeapObject::cast(object)->map();
5349 Heap* heap = map->GetHeap();
5350 return FLAG_weak_embedded_objects_in_optimized_code &&
5351        map == heap->function_context_map();
5352 }
5353 return false;
5357 class Code::FindAndReplacePattern {
5358  public:
5359   FindAndReplacePattern() : count_(0) { }
5360   void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5361     DCHECK(count_ < kMaxCount);
5362     find_[count_] = map_to_find;
5363     replace_[count_] = obj_to_replace;
5364     ++count_;
5365   }
5366  private:
5367   static const int kMaxCount = 4;
5368   int count_;
5369   Handle<Map> find_[kMaxCount];
5370   Handle<Object> replace_[kMaxCount];
5371   friend class Code;
5372 };
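// Usage sketch (illustrative): a pattern collects up to kMaxCount
// map -> object substitutions, which Code::FindAndReplace (declared in
// objects.h) then applies to the maps embedded in a code object:
//   Code::FindAndReplacePattern pattern;
//   pattern.Add(map_to_find, object_to_replace);  // both are Handles
//   code->FindAndReplace(pattern);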
5375 Object* Map::prototype() const {
5376 return READ_FIELD(this, kPrototypeOffset);
5380 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5381 DCHECK(value->IsNull() || value->IsJSReceiver());
5382 WRITE_FIELD(this, kPrototypeOffset, value);
5383 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5387 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5388 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5389 return LayoutDescriptor::cast_gc_safe(layout_desc);
5393 bool Map::HasFastPointerLayout() const {
5394 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5395 return LayoutDescriptor::IsFastPointerLayout(layout_desc);
5399 void Map::UpdateDescriptors(DescriptorArray* descriptors,
5400                             LayoutDescriptor* layout_desc) {
5401 set_instance_descriptors(descriptors);
5402 if (FLAG_unbox_double_fields) {
5403 if (layout_descriptor()->IsSlowLayout()) {
5404 set_layout_descriptor(layout_desc);
5405 }
5406 #ifdef VERIFY_HEAP
5407 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5408 if (FLAG_verify_heap) {
5409 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5410 CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
5411 }
5412 #else
5413 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5414 DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
5415 #endif
5420 void Map::InitializeDescriptors(DescriptorArray* descriptors,
5421                                 LayoutDescriptor* layout_desc) {
5422 int len = descriptors->number_of_descriptors();
5423 set_instance_descriptors(descriptors);
5424 SetNumberOfOwnDescriptors(len);
5426 if (FLAG_unbox_double_fields) {
5427 set_layout_descriptor(layout_desc);
5428 #ifdef VERIFY_HEAP
5429 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5430 if (FLAG_verify_heap) {
5431 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5432 }
5433 #else
5434 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5435 #endif
5436 set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
5441 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5442 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5445 void Map::set_bit_field3(uint32_t bits) {
5446 if (kInt32Size != kPointerSize) {
5447 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5448 }
5449 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5453 uint32_t Map::bit_field3() const {
5454 return READ_UINT32_FIELD(this, kBitField3Offset);
5458 LayoutDescriptor* Map::GetLayoutDescriptor() {
5459 return FLAG_unbox_double_fields ? layout_descriptor()
5460 : LayoutDescriptor::FastPointerLayout();
5464 void Map::AppendDescriptor(Descriptor* desc) {
5465 DescriptorArray* descriptors = instance_descriptors();
5466 int number_of_own_descriptors = NumberOfOwnDescriptors();
5467 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5468 descriptors->Append(desc);
5469 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5471 // This function does not support appending double field descriptors and
5472 // it should never try to (otherwise, layout descriptor must be updated too).
5473 #ifdef DEBUG
5474 PropertyDetails details = desc->GetDetails();
5475 CHECK(details.type() != DATA || !details.representation().IsDouble());
5476 #endif
5480 Object* Map::GetBackPointer() {
5481 Object* object = constructor_or_backpointer();
5482 if (object->IsMap()) {
5483 return object;
5484 }
5485 return GetIsolate()->heap()->undefined_value();
5489 Map* Map::ElementsTransitionMap() {
5490 return TransitionArray::SearchSpecial(
5491 this, GetHeap()->elements_transition_symbol());
5495 ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5498 Object* Map::prototype_info() const {
5499 DCHECK(is_prototype_map());
5500 return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
5504 void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
5505 DCHECK(is_prototype_map());
5506 WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
5507 CONDITIONAL_WRITE_BARRIER(
5508 GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
5512 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5513 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5514 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5515 (value->IsMap() && GetBackPointer()->IsUndefined()));
5516 DCHECK(!value->IsMap() ||
5517 Map::cast(value)->GetConstructor() == constructor_or_backpointer());
5518 set_constructor_or_backpointer(value, mode);
5522 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5523 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5524 ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
5525 ACCESSORS(Map, constructor_or_backpointer, Object,
5526 kConstructorOrBackPointerOffset)
5529 Object* Map::GetConstructor() const {
5530 Object* maybe_constructor = constructor_or_backpointer();
5531 // Follow any back pointers.
5532 while (maybe_constructor->IsMap()) {
5533 maybe_constructor =
5534     Map::cast(maybe_constructor)->constructor_or_backpointer();
5535 }
5536 return maybe_constructor;
5540 void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5541 // Never overwrite a back pointer with a constructor.
5542 DCHECK(!constructor_or_backpointer()->IsMap());
5543 set_constructor_or_backpointer(constructor, mode);
5547 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5548 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5549 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5551 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5552 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5553 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5555 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5556 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5558 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5559 SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
5560 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5561 kExpectedReceiverTypeOffset)
5563 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5564 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5565 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5567 ACCESSORS(Box, value, Object, kValueOffset)
5569 ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
5570 SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
5571 ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
5572 ACCESSORS(PrototypeInfo, constructor_name, Object, kConstructorNameOffset)
5574 ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
5575 kScopeInfoOffset)
5576 ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
5577 kExtensionOffset)
5579 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5580 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5582 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5583 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5584 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5586 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5587 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5588 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5589 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5590 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5591 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5592 SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
5593 BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
5594 kCanInterceptSymbolsBit)
5595 BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
5596 BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
5598 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5599 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5601 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5602 SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
5603 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5604 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5606 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5607 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5608 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5609 kPrototypeTemplateOffset)
5610 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5611 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5612 kNamedPropertyHandlerOffset)
5613 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5614 kIndexedPropertyHandlerOffset)
5615 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5616 kInstanceTemplateOffset)
5617 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5618 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5619 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5620 kInstanceCallHandlerOffset)
5621 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5622 kAccessCheckInfoOffset)
5623 SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)
5625 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5626 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5627 kInternalFieldCountOffset)
5629 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5631 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5632 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5633 SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
5634 SMI_ACCESSORS(AllocationSite, pretenure_create_count,
5635 kPretenureCreateCountOffset)
5636 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5637 kDependentCodeOffset)
5638 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5639 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5641 ACCESSORS(Script, source, Object, kSourceOffset)
5642 ACCESSORS(Script, name, Object, kNameOffset)
5643 SMI_ACCESSORS(Script, id, kIdOffset)
5644 SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
5645 SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
5646 ACCESSORS(Script, context_data, Object, kContextOffset)
5647 ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5648 SMI_ACCESSORS(Script, type, kTypeOffset)
5649 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5650 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5651 SMI_ACCESSORS(Script, eval_from_instructions_offset,
5652 kEvalFrominstructionsOffsetOffset)
5653 ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
5654 SMI_ACCESSORS(Script, flags, kFlagsOffset)
5655 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5656 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5658 Script::CompilationType Script::compilation_type() {
5659 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5660 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5662 void Script::set_compilation_type(CompilationType type) {
5663 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5664 type == COMPILATION_TYPE_EVAL));
5666 bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
5667 void Script::set_hide_source(bool value) {
5668 set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
5670 Script::CompilationState Script::compilation_state() {
5671 return BooleanBit::get(flags(), kCompilationStateBit) ?
5672 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5674 void Script::set_compilation_state(CompilationState state) {
5675 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5676 state == COMPILATION_STATE_COMPILED));
5678 ScriptOriginOptions Script::origin_options() {
5679 return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
5680 kOriginOptionsShift);
5682 void Script::set_origin_options(ScriptOriginOptions origin_options) {
5683 DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
5684 set_flags((flags() & ~kOriginOptionsMask) |
5685 (origin_options.Flags() << kOriginOptionsShift));
5689 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5690 ACCESSORS(DebugInfo, code, Code, kCodeIndex)
5691 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5693 SMI_ACCESSORS(BreakPointInfo, code_position, kCodePositionIndex)
5694 SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
5695 SMI_ACCESSORS(BreakPointInfo, statement_position, kStatementPositionIndex)
5696 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5698 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5699 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5700 kOptimizedCodeMapOffset)
5701 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5702 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5703 kFeedbackVectorOffset)
5704 #if TRACE_MAPS
5705 SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
5706 #endif
5707 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5708 kInstanceClassNameOffset)
5709 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5710 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5711 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5712 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5715 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5716 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5717 kHiddenPrototypeBit)
5718 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5719 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5720 kNeedsAccessCheckBit)
5721 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5722 kReadOnlyPrototypeBit)
5723 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5724 kRemovePrototypeBit)
5725 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5726 kDoNotCacheBit)
5727 BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
5728 BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
5729 kAcceptAnyReceiver)
5730 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5731 kIsExpressionBit)
5732 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5733 kIsTopLevelBit)
5736 kAllowLazyCompilation)
5737 BOOL_ACCESSORS(SharedFunctionInfo,
5738 compiler_hints,
5739 allows_lazy_compilation_without_context,
5740 kAllowLazyCompilationWithoutContext)
5741 BOOL_ACCESSORS(SharedFunctionInfo,
5742 compiler_hints,
5743 uses_arguments,
5744 kUsesArguments)
5745 BOOL_ACCESSORS(SharedFunctionInfo,
5746 compiler_hints,
5747 has_duplicate_parameters,
5748 kHasDuplicateParameters)
5749 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5750 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
5751 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
5752 kNeverCompiled)
5755 #if V8_HOST_ARCH_32_BIT
5756 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5757 SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
5758 kFormalParameterCountOffset)
5759 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5760 kExpectedNofPropertiesOffset)
5761 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5762 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5763 kStartPositionAndTypeOffset)
5764 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5765 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5766 kFunctionTokenPositionOffset)
5767 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5768 kCompilerHintsOffset)
5769 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5770 kOptCountAndBailoutReasonOffset)
5771 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5772 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5773 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5775 #else
5777 #if V8_TARGET_LITTLE_ENDIAN
5778 #define PSEUDO_SMI_LO_ALIGN 0
5779 #define PSEUDO_SMI_HI_ALIGN kIntSize
5780 #else
5781 #define PSEUDO_SMI_LO_ALIGN kIntSize
5782 #define PSEUDO_SMI_HI_ALIGN 0
5783 #endif
5785 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5786 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
5787 int holder::name() const { \
5788 int value = READ_INT_FIELD(this, offset); \
5789 DCHECK(kHeapObjectTag == 1); \
5790 DCHECK((value & kHeapObjectTag) == 0); \
5791 return value >> 1; \
5793 void holder::set_##name(int value) { \
5794 DCHECK(kHeapObjectTag == 1); \
5795 DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5796 WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag); \
5799 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5800 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5801 INT_ACCESSORS(holder, name, offset)
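// Rationale for the pseudo-Smi scheme above (sketch): on 64-bit targets two
// ints share one pointer-sized slot. The LO half covers the slot's tag bit,
// so it is stored as (value << 1) & ~kHeapObjectTag; the low bit stays 0 and
// the GC treats the whole slot as a Smi rather than a pointer. The HI half
// lives in the tag-free upper bytes and needs no encoding. Example:
//   shared->set_length(5);           // LO accessor writes 5 << 1 == 10
//   DCHECK_EQ(5, shared->length());  // the getter shifts the 10 back down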
5804 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5805 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5806 kFormalParameterCountOffset)
5808 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5809 expected_nof_properties,
5810 kExpectedNofPropertiesOffset)
5811 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5813 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5814 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5815 start_position_and_type,
5816 kStartPositionAndTypeOffset)
5818 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5819 function_token_position,
5820 kFunctionTokenPositionOffset)
5821 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5822 compiler_hints,
5823 kCompilerHintsOffset)
5825 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5826 opt_count_and_bailout_reason,
5827 kOptCountAndBailoutReasonOffset)
5828 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5830 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5831 ast_node_count,
5832 kAstNodeCountOffset)
5833 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5834 profiler_ticks,
5835 kProfilerTicksOffset)
5837 #endif
5840 BOOL_GETTER(SharedFunctionInfo,
5841 compiler_hints,
5842 optimization_disabled,
5843 kOptimizationDisabled)
5846 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5847 set_compiler_hints(BooleanBit::set(compiler_hints(),
5848 kOptimizationDisabled,
5849 disable));
5853 LanguageMode SharedFunctionInfo::language_mode() {
5854 STATIC_ASSERT(LANGUAGE_END == 3);
5855 return construct_language_mode(
5856 BooleanBit::get(compiler_hints(), kStrictModeFunction),
5857 BooleanBit::get(compiler_hints(), kStrongModeFunction));
5861 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5862 STATIC_ASSERT(LANGUAGE_END == 3);
5863 // We only allow language mode transitions that set the same language mode
5864 // again or go up in the chain:
5865 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
5866 int hints = compiler_hints();
5867 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
5868 hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
5869 set_compiler_hints(hints);
5873 FunctionKind SharedFunctionInfo::kind() {
5874 return FunctionKindBits::decode(compiler_hints());
5878 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5879 DCHECK(IsValidFunctionKind(kind));
5880 int hints = compiler_hints();
5881 hints = FunctionKindBits::update(hints, kind);
5882 set_compiler_hints(hints);
5886 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
5887 kNeedsHomeObject)
5888 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5889 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
5890 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5891 name_should_print_as_anonymous,
5892 kNameShouldPrintAsAnonymous)
5893 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5894 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5895 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5896 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
5897 kDontCrankshaft)
5898 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5899 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5900 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5901 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
5902 kIsConciseMethod)
5903 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
5904 kIsAccessorFunction)
5905 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
5906 kIsDefaultConstructor)
5908 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5909 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5911 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5913 bool Script::HasValidSource() {
5914 Object* src = this->source();
5915 if (!src->IsString()) return true;
5916 String* src_str = String::cast(src);
5917 if (!StringShape(src_str).IsExternal()) return true;
5918 if (src_str->IsOneByteRepresentation()) {
5919 return ExternalOneByteString::cast(src)->resource() != NULL;
5920 } else if (src_str->IsTwoByteRepresentation()) {
5921 return ExternalTwoByteString::cast(src)->resource() != NULL;
5922 }
5923 return true;
5927 void SharedFunctionInfo::DontAdaptArguments() {
5928 DCHECK(code()->kind() == Code::BUILTIN);
5929 set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
5930 }
5933 int SharedFunctionInfo::start_position() const {
5934 return start_position_and_type() >> kStartPositionShift;
5935 }
5938 void SharedFunctionInfo::set_start_position(int start_position) {
5939 set_start_position_and_type((start_position << kStartPositionShift)
5940 | (start_position_and_type() & ~kStartPositionMask));
5941 }
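// A usage sketch (hypothetical values; assumes a SharedFunctionInfo* |shared|
// is at hand): only the bits at and above kStartPositionShift change, while
// the type bits below the shift are preserved by the mask.
//
//   shared->set_start_position(42);
//   DCHECK_EQ(42, shared->start_position());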
5944 Code* SharedFunctionInfo::code() const {
5945 return Code::cast(READ_FIELD(this, kCodeOffset));
5946 }
5949 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5950 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5951 WRITE_FIELD(this, kCodeOffset, value);
5952 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5953 }
5956 void SharedFunctionInfo::ReplaceCode(Code* value) {
5957 // If the GC metadata field is already used then the function was
5958 // enqueued as a code flushing candidate and we remove it now.
5959 if (code()->gc_metadata() != NULL) {
5960 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5961 flusher->EvictCandidate(this);
5962 }
5964 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5965 #ifdef DEBUG
5966 Code::VerifyRecompiledCode(code(), value);
5967 #endif  // DEBUG
5969 set_code(value);
5971 if (is_compiled()) set_never_compiled(false);
5972 }
5975 ScopeInfo* SharedFunctionInfo::scope_info() const {
5976 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5977 }
5980 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5981 WriteBarrierMode mode) {
5982 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5983 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5984 this,
5985 kScopeInfoOffset,
5986 reinterpret_cast<Object*>(value),
5987 mode);
5988 }
5991 bool SharedFunctionInfo::is_compiled() {
5992 Builtins* builtins = GetIsolate()->builtins();
5993 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
5994 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
5995 return code() != builtins->builtin(Builtins::kCompileLazy);
5996 }
5999 bool SharedFunctionInfo::has_simple_parameters() {
6000 return scope_info()->HasSimpleParameters();
6001 }
6004 bool SharedFunctionInfo::HasDebugInfo() {
6005 bool has_debug_info = debug_info()->IsStruct();
6006 DCHECK(!has_debug_info || HasDebugCode());
6007 return has_debug_info;
6008 }
6011 DebugInfo* SharedFunctionInfo::GetDebugInfo() {
6012 DCHECK(HasDebugInfo());
6013 return DebugInfo::cast(debug_info());
6014 }
6017 bool SharedFunctionInfo::HasDebugCode() {
6018 return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
6019 }
6022 bool SharedFunctionInfo::IsApiFunction() {
6023 return function_data()->IsFunctionTemplateInfo();
6024 }
6027 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
6028 DCHECK(IsApiFunction());
6029 return FunctionTemplateInfo::cast(function_data());
6030 }
6033 bool SharedFunctionInfo::HasBuiltinFunctionId() {
6034 return function_data()->IsSmi();
6035 }
6038 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
6039 DCHECK(HasBuiltinFunctionId());
6040 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
6041 }
6044 bool SharedFunctionInfo::HasBytecodeArray() {
6045 return function_data()->IsBytecodeArray();
6046 }
6049 BytecodeArray* SharedFunctionInfo::bytecode_array() {
6050 DCHECK(HasBytecodeArray());
6051 return BytecodeArray::cast(function_data());
6052 }
6055 int SharedFunctionInfo::ic_age() {
6056 return ICAgeBits::decode(counters());
6057 }
6060 void SharedFunctionInfo::set_ic_age(int ic_age) {
6061 set_counters(ICAgeBits::update(counters(), ic_age));
6062 }
6065 int SharedFunctionInfo::deopt_count() {
6066 return DeoptCountBits::decode(counters());
6067 }
6070 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
6071 set_counters(DeoptCountBits::update(counters(), deopt_count));
6072 }
6075 void SharedFunctionInfo::increment_deopt_count() {
6076 int value = counters();
6077 int deopt_count = DeoptCountBits::decode(value);
6078 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6079 set_counters(DeoptCountBits::update(value, deopt_count));
6080 }
6083 int SharedFunctionInfo::opt_reenable_tries() {
6084 return OptReenableTriesBits::decode(counters());
6085 }
6088 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6089 set_counters(OptReenableTriesBits::update(counters(), tries));
6090 }
6093 int SharedFunctionInfo::opt_count() {
6094 return OptCountBits::decode(opt_count_and_bailout_reason());
6095 }
6098 void SharedFunctionInfo::set_opt_count(int opt_count) {
6099 set_opt_count_and_bailout_reason(
6100 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6101 }
6104 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
6105 return static_cast<BailoutReason>(
6106 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6107 }
6110 bool SharedFunctionInfo::has_deoptimization_support() {
6111 Code* code = this->code();
6112 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6113 }
6116 void SharedFunctionInfo::TryReenableOptimization() {
6117 int tries = opt_reenable_tries();
6118 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6119 // We reenable optimization whenever the number of tries is a large
6120 // enough power of 2.
6121 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6122 set_optimization_disabled(false);
6123 set_opt_count(0);
6124 set_deopt_count(0);
6125 }
6126 }
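// The predicate above fires for tries == 16, 32, 64, ...: a power of two has
// a single bit set, so (tries - 1) & tries == 0. A standalone sketch of the
// same test (hypothetical helper, not part of this file):
//
//   bool IsReenableThreshold(int tries) {
//     return tries >= 16 && ((tries - 1) & tries) == 0;
//   }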
6129 void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
6130 set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
6131 opt_count_and_bailout_reason(), reason));
6132 }
6135 bool SharedFunctionInfo::IsBuiltin() {
6136 Object* script_obj = script();
6137 if (script_obj->IsUndefined()) return true;
6138 Script* script = Script::cast(script_obj);
6139 Script::Type type = static_cast<Script::Type>(script->type());
6140 return type != Script::TYPE_NORMAL;
6141 }
6144 bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }
6147 bool JSFunction::IsBuiltin() { return shared()->IsBuiltin(); }
6150 bool JSFunction::IsSubjectToDebugging() {
6151 return shared()->IsSubjectToDebugging();
6152 }
6155 bool JSFunction::NeedsArgumentsAdaption() {
6156 return shared()->internal_formal_parameter_count() !=
6157 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
6158 }
6161 bool JSFunction::IsOptimized() {
6162 return code()->kind() == Code::OPTIMIZED_FUNCTION;
6163 }
6166 bool JSFunction::IsMarkedForOptimization() {
6167 return code() == GetIsolate()->builtins()->builtin(
6168 Builtins::kCompileOptimized);
6169 }
6172 bool JSFunction::IsMarkedForConcurrentOptimization() {
6173 return code() == GetIsolate()->builtins()->builtin(
6174 Builtins::kCompileOptimizedConcurrent);
6175 }
6178 bool JSFunction::IsInOptimizationQueue() {
6179 return code() == GetIsolate()->builtins()->builtin(
6180 Builtins::kInOptimizationQueue);
6181 }
6184 bool JSFunction::IsInobjectSlackTrackingInProgress() {
6185 return has_initial_map() &&
6186 initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
6187 }
6190 Code* JSFunction::code() {
6191 return Code::cast(
6192 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6193 }
6196 void JSFunction::set_code(Code* value) {
6197 DCHECK(!GetHeap()->InNewSpace(value));
6198 Address entry = value->entry();
6199 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6200 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6201 this,
6202 HeapObject::RawField(this, kCodeEntryOffset),
6203 value);
6204 }
6207 void JSFunction::set_code_no_write_barrier(Code* value) {
6208 DCHECK(!GetHeap()->InNewSpace(value));
6209 Address entry = value->entry();
6210 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6211 }
6214 void JSFunction::ReplaceCode(Code* code) {
6215 bool was_optimized = IsOptimized();
6216 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6218 if (was_optimized && is_optimized) {
6219 shared()->EvictFromOptimizedCodeMap(this->code(),
6220 "Replacing with another optimized code");
6221 }
6223 set_code(code);
6225 // Add/remove the function from the list of optimized functions for this
6226 // context based on the state change.
6227 if (!was_optimized && is_optimized) {
6228 context()->native_context()->AddOptimizedFunction(this);
6229 }
6230 if (was_optimized && !is_optimized) {
6231 // TODO(titzer): linear in the number of optimized functions; fix!
6232 context()->native_context()->RemoveOptimizedFunction(this);
6233 }
6234 }
6237 Context* JSFunction::context() {
6238 return Context::cast(READ_FIELD(this, kContextOffset));
6239 }
6242 JSObject* JSFunction::global_proxy() {
6243 return context()->global_proxy();
6244 }
6247 void JSFunction::set_context(Object* value) {
6248 DCHECK(value->IsUndefined() || value->IsContext());
6249 WRITE_FIELD(this, kContextOffset, value);
6250 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6251 }
6253 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6254 kPrototypeOrInitialMapOffset)
6257 Map* JSFunction::initial_map() {
6258 return Map::cast(prototype_or_initial_map());
6259 }
6262 bool JSFunction::has_initial_map() {
6263 return prototype_or_initial_map()->IsMap();
6264 }
6267 bool JSFunction::has_instance_prototype() {
6268 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6269 }
6272 bool JSFunction::has_prototype() {
6273 return map()->has_non_instance_prototype() || has_instance_prototype();
6274 }
6277 Object* JSFunction::instance_prototype() {
6278 DCHECK(has_instance_prototype());
6279 if (has_initial_map()) return initial_map()->prototype();
6280 // When there is no initial map and the prototype is a JSObject, the
6281 // initial map field is used for the prototype field.
6282 return prototype_or_initial_map();
6283 }
6286 Object* JSFunction::prototype() {
6287 DCHECK(has_prototype());
6288 // If the function's prototype property has been set to a non-JSObject
6289 // value, that value is stored in the constructor field of the map.
6290 if (map()->has_non_instance_prototype()) {
6291 Object* prototype = map()->GetConstructor();
6292 // The map must have a prototype in that field, not a back pointer.
6293 DCHECK(!prototype->IsMap());
6294 return prototype;
6295 }
6296 return instance_prototype();
6297 }
6300 bool JSFunction::is_compiled() {
6301 Builtins* builtins = GetIsolate()->builtins();
6302 return code() != builtins->builtin(Builtins::kCompileLazy) &&
6303 code() != builtins->builtin(Builtins::kCompileOptimized) &&
6304 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6305 }
6308 bool JSFunction::has_simple_parameters() {
6309 return shared()->has_simple_parameters();
6310 }
6313 LiteralsArray* JSFunction::literals() {
6314 DCHECK(!shared()->bound());
6315 return LiteralsArray::cast(literals_or_bindings());
6316 }
6319 void JSFunction::set_literals(LiteralsArray* literals) {
6320 DCHECK(!shared()->bound());
6321 set_literals_or_bindings(literals);
6322 }
6325 FixedArray* JSFunction::function_bindings() {
6326 DCHECK(shared()->bound());
6327 return literals_or_bindings();
6328 }
6331 void JSFunction::set_function_bindings(FixedArray* bindings) {
6332 DCHECK(shared()->bound());
6333 // Bound function literal may be initialized to the empty fixed array
6334 // before the bindings are set.
6335 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6336 bindings->map() == GetHeap()->fixed_array_map());
6337 set_literals_or_bindings(bindings);
6338 }
6341 int JSFunction::NumberOfLiterals() {
6342 DCHECK(!shared()->bound());
6343 return literals()->length();
6344 }
6347 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6348 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6349 ACCESSORS(JSFunctionProxy, call_trap, JSReceiver, kCallTrapOffset)
6350 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6353 void JSProxy::InitializeBody(int object_size, Object* value) {
6354 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6355 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6356 WRITE_FIELD(this, offset, value);
6357 }
6358 }
6361 ACCESSORS(JSCollection, table, Object, kTableOffset)
6364 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6365 template<class Derived, class TableType> \
6366 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6367 return type::cast(READ_FIELD(this, offset)); \
6368 } \
6369 template<class Derived, class TableType> \
6370 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6371 type* value, WriteBarrierMode mode) { \
6372 WRITE_FIELD(this, offset, value); \
6373 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6374 }
6376 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6377 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6378 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6380 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
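// For reference, the ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object,
// kTableOffset) instantiation above expands to roughly the following
// (a sketch, modulo formatting):
//
//   template<class Derived, class TableType>
//   Object* OrderedHashTableIterator<Derived, TableType>::table() const {
//     return Object::cast(READ_FIELD(this, kTableOffset));
//   }
//
// plus the matching set_table() with its conditional write barrier.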
6383 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6384 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6387 Address Foreign::foreign_address() {
6388 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6389 }
6392 void Foreign::set_foreign_address(Address value) {
6393 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6394 }
6397 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6398 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6399 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6400 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6401 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6403 bool JSGeneratorObject::is_suspended() {
6404 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6405 DCHECK_EQ(kGeneratorClosed, 0);
6406 return continuation() > 0;
6407 }
6409 bool JSGeneratorObject::is_closed() {
6410 return continuation() == kGeneratorClosed;
6411 }
6413 bool JSGeneratorObject::is_executing() {
6414 return continuation() == kGeneratorExecuting;
6415 }
6417 ACCESSORS(JSModule, context, Object, kContextOffset)
6418 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6421 ACCESSORS(JSValue, value, Object, kValueOffset)
6424 HeapNumber* HeapNumber::cast(Object* object) {
6425 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6426 return reinterpret_cast<HeapNumber*>(object);
6427 }
6430 const HeapNumber* HeapNumber::cast(const Object* object) {
6431 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6432 return reinterpret_cast<const HeapNumber*>(object);
6433 }
6436 ACCESSORS(JSDate, value, Object, kValueOffset)
6437 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6438 ACCESSORS(JSDate, year, Object, kYearOffset)
6439 ACCESSORS(JSDate, month, Object, kMonthOffset)
6440 ACCESSORS(JSDate, day, Object, kDayOffset)
6441 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6442 ACCESSORS(JSDate, hour, Object, kHourOffset)
6443 ACCESSORS(JSDate, min, Object, kMinOffset)
6444 ACCESSORS(JSDate, sec, Object, kSecOffset)
6447 SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
6448 ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
6449 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6450 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6451 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6452 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6455 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6456 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6457 INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
6458 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6459 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6460 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6461 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6462 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6465 void Code::WipeOutHeader() {
6466 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6467 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6468 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6469 // Do not wipe out major/minor keys on a code stub or IC
6470 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6471 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6472 }
6473 WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
6474 WRITE_FIELD(this, kGCMetadataOffset, NULL);
6475 }
6478 Object* Code::type_feedback_info() {
6479 DCHECK(kind() == FUNCTION);
6480 return raw_type_feedback_info();
6481 }
6484 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6485 DCHECK(kind() == FUNCTION);
6486 set_raw_type_feedback_info(value, mode);
6487 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6488 value, mode);
6489 }
6492 uint32_t Code::stub_key() {
6493 DCHECK(IsCodeStubOrIC());
6494 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6495 return static_cast<uint32_t>(smi_key->value());
6496 }
6499 void Code::set_stub_key(uint32_t key) {
6500 DCHECK(IsCodeStubOrIC());
6501 set_raw_type_feedback_info(Smi::FromInt(key));
6502 }
6505 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6506 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6509 byte* Code::instruction_start() {
6510 return FIELD_ADDR(this, kHeaderSize);
6511 }
6514 byte* Code::instruction_end() {
6515 return instruction_start() + instruction_size();
6516 }
6519 int Code::body_size() {
6520 return RoundUp(instruction_size(), kObjectAlignment);
6521 }
6524 ByteArray* Code::unchecked_relocation_info() {
6525 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6526 }
6529 byte* Code::relocation_start() {
6530 return unchecked_relocation_info()->GetDataStartAddress();
6531 }
6534 int Code::relocation_size() {
6535 return unchecked_relocation_info()->length();
6536 }
6539 byte* Code::entry() {
6540 return instruction_start();
6541 }
6544 bool Code::contains(byte* inner_pointer) {
6545 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6546 }
6549 int Code::ExecutableSize() {
6550 // Check that the assumptions about the layout of the code object hold.
6551 DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6552 Code::kHeaderSize);
6553 return instruction_size() + Code::kHeaderSize;
6554 }
6557 int Code::CodeSize() { return SizeFor(body_size()); }
6560 ACCESSORS(JSArray, length, Object, kLengthOffset)
6563 void* JSArrayBuffer::backing_store() const {
6564 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6565 return reinterpret_cast<void*>(ptr);
6566 }
6569 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6570 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6571 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6572 }
6575 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6578 void JSArrayBuffer::set_bit_field(uint32_t bits) {
6579 if (kInt32Size != kPointerSize) {
6580 #if V8_TARGET_LITTLE_ENDIAN
6581 WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
6582 #else
6583 WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
6584 #endif
6585 }
6586 WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
6587 }
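// Note: on targets where kInt32Size != kPointerSize the 32-bit field shares
// a pointer-sized slot, so the unused half is zeroed first; which half that
// is depends on endianness. E.g. on little-endian x64 the bits live at
// kBitFieldSlot (== kBitFieldOffset) and the word at
// kBitFieldSlot + kInt32Size is the one being cleared.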
6590 uint32_t JSArrayBuffer::bit_field() const {
6591 return READ_UINT32_FIELD(this, kBitFieldOffset);
6592 }
6595 bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
6598 void JSArrayBuffer::set_is_external(bool value) {
6599 set_bit_field(IsExternal::update(bit_field(), value));
6600 }
6603 bool JSArrayBuffer::is_neuterable() {
6604 return IsNeuterable::decode(bit_field());
6605 }
6608 void JSArrayBuffer::set_is_neuterable(bool value) {
6609 set_bit_field(IsNeuterable::update(bit_field(), value));
6610 }
6613 bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
6616 void JSArrayBuffer::set_was_neutered(bool value) {
6617 set_bit_field(WasNeutered::update(bit_field(), value));
6618 }
6621 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
6624 void JSArrayBuffer::set_is_shared(bool value) {
6625 set_bit_field(IsShared::update(bit_field(), value));
6626 }
6630 template <typename StaticVisitor>
6631 void JSArrayBuffer::JSArrayBufferIterateBody(Heap* heap, HeapObject* obj) {
6632 StaticVisitor::VisitPointers(
6633 heap, obj,
6634 HeapObject::RawField(obj, JSArrayBuffer::BodyDescriptor::kStartOffset),
6635 HeapObject::RawField(obj,
6636 JSArrayBuffer::kByteLengthOffset + kPointerSize));
6637 StaticVisitor::VisitPointers(
6638 heap, obj, HeapObject::RawField(obj, JSArrayBuffer::kSize),
6639 HeapObject::RawField(obj, JSArrayBuffer::kSizeWithInternalFields));
6640 }
6643 void JSArrayBuffer::JSArrayBufferIterateBody(HeapObject* obj,
6644 ObjectVisitor* v) {
6645 v->VisitPointers(
6646 HeapObject::RawField(obj, JSArrayBuffer::BodyDescriptor::kStartOffset),
6647 HeapObject::RawField(obj,
6648 JSArrayBuffer::kByteLengthOffset + kPointerSize));
6649 v->VisitPointers(
6650 HeapObject::RawField(obj, JSArrayBuffer::kSize),
6651 HeapObject::RawField(obj, JSArrayBuffer::kSizeWithInternalFields));
6652 }
6655 Object* JSArrayBufferView::byte_offset() const {
6656 if (WasNeutered()) return Smi::FromInt(0);
6657 return Object::cast(READ_FIELD(this, kByteOffsetOffset));
6658 }
6661 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
6662 WRITE_FIELD(this, kByteOffsetOffset, value);
6663 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
6664 }
6667 Object* JSArrayBufferView::byte_length() const {
6668 if (WasNeutered()) return Smi::FromInt(0);
6669 return Object::cast(READ_FIELD(this, kByteLengthOffset));
6670 }
6673 void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
6674 WRITE_FIELD(this, kByteLengthOffset, value);
6675 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
6676 }
6679 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6680 #ifdef VERIFY_HEAP
6681 ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
6682 ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
6683 #endif
6686 bool JSArrayBufferView::WasNeutered() const {
6687 return JSArrayBuffer::cast(buffer())->was_neutered();
6688 }
6691 Object* JSTypedArray::length() const {
6692 if (WasNeutered()) return Smi::FromInt(0);
6693 return Object::cast(READ_FIELD(this, kLengthOffset));
6694 }
6697 uint32_t JSTypedArray::length_value() const {
6698 if (WasNeutered()) return 0;
6699 uint32_t index = 0;
6700 CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
6701 return index;
6702 }
6705 void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
6706 WRITE_FIELD(this, kLengthOffset, value);
6707 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
6708 }
6711 #ifdef VERIFY_HEAP
6712 ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
6713 #endif
6716 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6719 JSRegExp::Type JSRegExp::TypeTag() {
6720 Object* data = this->data();
6721 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6722 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6723 return static_cast<JSRegExp::Type>(smi->value());
6724 }
6727 int JSRegExp::CaptureCount() {
6728 switch (TypeTag()) {
6729 case ATOM:
6730 return 0;
6731 case IRREGEXP:
6732 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6733 default:
6734 UNREACHABLE();
6735 return -1;
6736 }
6737 }
6740 JSRegExp::Flags JSRegExp::GetFlags() {
6741 DCHECK(this->data()->IsFixedArray());
6742 Object* data = this->data();
6743 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6744 return Flags(smi->value());
6745 }
6748 String* JSRegExp::Pattern() {
6749 DCHECK(this->data()->IsFixedArray());
6750 Object* data = this->data();
6751 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6752 return pattern;
6753 }
6756 Object* JSRegExp::DataAt(int index) {
6757 DCHECK(TypeTag() != NOT_COMPILED);
6758 return FixedArray::cast(data())->get(index);
6759 }
6762 void JSRegExp::SetDataAt(int index, Object* value) {
6763 DCHECK(TypeTag() != NOT_COMPILED);
6764 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6765 FixedArray::cast(data())->set(index, value);
6766 }
6769 ElementsKind JSObject::GetElementsKind() {
6770 ElementsKind kind = map()->elements_kind();
6771 #if VERIFY_HEAP && DEBUG
6772 FixedArrayBase* fixed_array =
6773 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6775 // If a GC was caused while constructing this object, the elements
6776 // pointer may point to a one pointer filler map.
6777 if (ElementsAreSafeToExamine()) {
6778 Map* map = fixed_array->map();
6779 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6780 (map == GetHeap()->fixed_array_map() ||
6781 map == GetHeap()->fixed_cow_array_map())) ||
6782 (IsFastDoubleElementsKind(kind) &&
6783 (fixed_array->IsFixedDoubleArray() ||
6784 fixed_array == GetHeap()->empty_fixed_array())) ||
6785 (kind == DICTIONARY_ELEMENTS &&
6786 fixed_array->IsFixedArray() &&
6787 fixed_array->IsDictionary()) ||
6788 (kind > DICTIONARY_ELEMENTS));
6789 DCHECK(!IsSloppyArgumentsElements(kind) ||
6790 (elements()->IsFixedArray() && elements()->length() >= 2));
6791 }
6792 #endif
6793 return kind;
6794 }
6797 bool JSObject::HasFastObjectElements() {
6798 return IsFastObjectElementsKind(GetElementsKind());
6799 }
6802 bool JSObject::HasFastSmiElements() {
6803 return IsFastSmiElementsKind(GetElementsKind());
6804 }
6807 bool JSObject::HasFastSmiOrObjectElements() {
6808 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6809 }
6812 bool JSObject::HasFastDoubleElements() {
6813 return IsFastDoubleElementsKind(GetElementsKind());
6814 }
6817 bool JSObject::HasFastHoleyElements() {
6818 return IsFastHoleyElementsKind(GetElementsKind());
6819 }
6822 bool JSObject::HasFastElements() {
6823 return IsFastElementsKind(GetElementsKind());
6824 }
6827 bool JSObject::HasDictionaryElements() {
6828 return GetElementsKind() == DICTIONARY_ELEMENTS;
6829 }
6832 bool JSObject::HasFastArgumentsElements() {
6833 return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
6834 }
6837 bool JSObject::HasSlowArgumentsElements() {
6838 return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
6839 }
6842 bool JSObject::HasSloppyArgumentsElements() {
6843 return IsSloppyArgumentsElements(GetElementsKind());
6844 }
6847 bool JSObject::HasFixedTypedArrayElements() {
6848 HeapObject* array = elements();
6849 DCHECK(array != NULL);
6850 return array->IsFixedTypedArrayBase();
6851 }
6854 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6855 bool JSObject::HasFixed##Type##Elements() { \
6856 HeapObject* array = elements(); \
6857 DCHECK(array != NULL); \
6858 if (!array->IsHeapObject()) \
6859 return false; \
6860 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6861 }
6863 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6865 #undef FIXED_TYPED_ELEMENTS_CHECK
6868 bool JSObject::HasNamedInterceptor() {
6869 return map()->has_named_interceptor();
6870 }
6873 bool JSObject::HasIndexedInterceptor() {
6874 return map()->has_indexed_interceptor();
6875 }
6878 NameDictionary* JSObject::property_dictionary() {
6879 DCHECK(!HasFastProperties());
6880 DCHECK(!IsGlobalObject());
6881 return NameDictionary::cast(properties());
6882 }
6885 GlobalDictionary* JSObject::global_dictionary() {
6886 DCHECK(!HasFastProperties());
6887 DCHECK(IsGlobalObject());
6888 return GlobalDictionary::cast(properties());
6889 }
6892 SeededNumberDictionary* JSObject::element_dictionary() {
6893 DCHECK(HasDictionaryElements());
6894 return SeededNumberDictionary::cast(elements());
6895 }
6898 bool Name::IsHashFieldComputed(uint32_t field) {
6899 return (field & kHashNotComputedMask) == 0;
6900 }
6903 bool Name::HasHashCode() {
6904 return IsHashFieldComputed(hash_field());
6905 }
6908 uint32_t Name::Hash() {
6909 // Fast case: has hash code already been computed?
6910 uint32_t field = hash_field();
6911 if (IsHashFieldComputed(field)) return field >> kHashShift;
6912 // Slow case: compute hash code and set it. Has to be a string.
6913 return String::cast(this)->ComputeAndSetHash();
6914 }
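// A usage sketch (assumes a Name* |name| whose hash may not have been
// computed yet):
//
//   uint32_t hash = name->Hash();  // computes and caches on the first call
//   DCHECK(name->HasHashCode());   // later calls take the fast path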
6917 bool Name::IsPrivate() {
6918 return this->IsSymbol() && Symbol::cast(this)->is_private();
6919 }
6922 StringHasher::StringHasher(int length, uint32_t seed)
6923 : length_(length),
6924 raw_running_hash_(seed),
6925 array_index_(0),
6926 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6927 is_first_char_(true) {
6928 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6929 }
6932 bool StringHasher::has_trivial_hash() {
6933 return length_ > String::kMaxHashCalcLength;
6934 }
6937 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6938 running_hash += c;
6939 running_hash += (running_hash << 10);
6940 running_hash ^= (running_hash >> 6);
6941 return running_hash;
6942 }
6945 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6946 running_hash += (running_hash << 3);
6947 running_hash ^= (running_hash >> 11);
6948 running_hash += (running_hash << 15);
6949 if ((running_hash & String::kHashBitMask) == 0) {
6950 return kZeroHash;
6951 }
6952 return running_hash;
6953 }
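// AddCharacterCore() and GetHashCore() together implement Jenkins'
// one-at-a-time hash. A self-contained sketch of the same algorithm
// (the kZeroHash substitution above is V8-specific):
//
//   uint32_t OneAtATime(const uint8_t* data, int n, uint32_t seed) {
//     uint32_t h = seed;
//     for (int i = 0; i < n; i++) {
//       h += data[i];
//       h += h << 10;
//       h ^= h >> 6;
//     }
//     h += h << 3;
//     h ^= h >> 11;
//     h += h << 15;
//     return h;
//   }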
6956 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
6957 const uc16* chars, int length) {
6958 DCHECK_NOT_NULL(chars);
6959 DCHECK(length >= 0);
6960 for (int i = 0; i < length; ++i) {
6961 running_hash = AddCharacterCore(running_hash, *chars++);
6962 }
6963 return running_hash;
6964 }
6967 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
6968 const char* chars,
6969 int length) {
6970 DCHECK_NOT_NULL(chars);
6971 DCHECK(length >= 0);
6972 for (int i = 0; i < length; ++i) {
6973 uint16_t c = static_cast<uint16_t>(*chars++);
6974 running_hash = AddCharacterCore(running_hash, c);
6975 }
6976 return running_hash;
6977 }
6980 void StringHasher::AddCharacter(uint16_t c) {
6981 // Use the Jenkins one-at-a-time hash function to update the hash
6982 // for the given character.
6983 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6984 }
6987 bool StringHasher::UpdateIndex(uint16_t c) {
6988 DCHECK(is_array_index_);
6989 if (c < '0' || c > '9') {
6990 is_array_index_ = false;
6991 return false;
6992 }
6993 int d = c - '0';
6994 if (is_first_char_) {
6995 is_first_char_ = false;
6996 if (c == '0' && length_ > 1) {
6997 is_array_index_ = false;
6998 return false;
6999 }
7000 }
7001 if (array_index_ > 429496729U - ((d + 3) >> 3)) {
7002 is_array_index_ = false;
7003 return false;
7004 }
7005 array_index_ = array_index_ * 10 + d;
7006 return true;
7007 }
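// The guard above keeps array_index_ * 10 + d within the maximal array index
// 2^32 - 2 = 4294967294: 429496729 == 4294967294 / 10, and ((d + 3) >> 3) is
// 0 for digits 0..4 and 1 for digits 5..9, rejecting exactly the boundary
// values (e.g. 429496729 * 10 + 5 == 4294967295) that would overflow the
// valid range.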
7010 template<typename Char>
7011 inline void StringHasher::AddCharacters(const Char* chars, int length) {
7012 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
7013 int i = 0;
7014 if (is_array_index_) {
7015 for (; i < length; i++) {
7016 AddCharacter(chars[i]);
7017 if (!UpdateIndex(chars[i])) {
7018 i++;
7019 break;
7020 }
7021 }
7022 }
7023 for (; i < length; i++) {
7024 DCHECK(!is_array_index_);
7025 AddCharacter(chars[i]);
7026 }
7027 }
7030 template <typename schar>
7031 uint32_t StringHasher::HashSequentialString(const schar* chars,
7032 int length,
7033 uint32_t seed) {
7034 StringHasher hasher(length, seed);
7035 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
7036 return hasher.GetHashField();
7037 }
7040 IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
7041 : StringHasher(len, seed) {}
7044 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
7045 IteratingStringHasher hasher(string->length(), seed);
7046 // Nothing to do.
7047 if (hasher.has_trivial_hash()) return hasher.GetHashField();
7048 ConsString* cons_string = String::VisitFlat(&hasher, string);
7049 if (cons_string == nullptr) return hasher.GetHashField();
7050 hasher.VisitConsString(cons_string);
7051 return hasher.GetHashField();
7052 }
7055 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
7056 int length) {
7057 AddCharacters(chars, length);
7058 }
7061 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
7062 int length) {
7063 AddCharacters(chars, length);
7064 }
7067 bool Name::AsArrayIndex(uint32_t* index) {
7068 return IsString() && String::cast(this)->AsArrayIndex(index);
7069 }
7072 bool String::AsArrayIndex(uint32_t* index) {
7073 uint32_t field = hash_field();
7074 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
7075 return false;
7076 }
7077 return SlowAsArrayIndex(index);
7078 }
7081 void String::SetForwardedInternalizedString(String* canonical) {
7082 DCHECK(IsInternalizedString());
7083 DCHECK(HasHashCode());
7084 if (canonical == this) return; // No need to forward.
7085 DCHECK(SlowEquals(canonical));
7086 DCHECK(canonical->IsInternalizedString());
7087 DCHECK(canonical->HasHashCode());
7088 WRITE_FIELD(this, kHashFieldSlot, canonical);
7089 // Setting the hash field to a tagged value sets the LSB, causing the hash
7090 // code to be interpreted as uninitialized. We use this fact to recognize
7091 // that we have a forwarded string.
7092 DCHECK(!HasHashCode());
7093 }
7096 String* String::GetForwardedInternalizedString() {
7097 DCHECK(IsInternalizedString());
7098 if (HasHashCode()) return this;
7099 String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
7100 DCHECK(canonical->IsInternalizedString());
7101 DCHECK(SlowEquals(canonical));
7102 DCHECK(canonical->HasHashCode());
7103 return canonical;
7104 }
7108 Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y,
7109 Strength strength) {
7110 Maybe<ComparisonResult> result = Compare(x, y, strength);
7111 if (result.IsJust()) {
7112 switch (result.FromJust()) {
7113 case ComparisonResult::kGreaterThan:
7114 return Just(true);
7115 case ComparisonResult::kLessThan:
7116 case ComparisonResult::kEqual:
7117 case ComparisonResult::kUndefined:
7118 return Just(false);
7119 }
7120 }
7121 return Nothing<bool>();
7122 }
7126 Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y,
7127 Strength strength) {
7128 Maybe<ComparisonResult> result = Compare(x, y, strength);
7129 if (result.IsJust()) {
7130 switch (result.FromJust()) {
7131 case ComparisonResult::kEqual:
7132 case ComparisonResult::kGreaterThan:
7133 return Just(true);
7134 case ComparisonResult::kLessThan:
7135 case ComparisonResult::kUndefined:
7136 return Just(false);
7137 }
7138 }
7139 return Nothing<bool>();
7140 }
7144 Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y,
7145 Strength strength) {
7146 Maybe<ComparisonResult> result = Compare(x, y, strength);
7147 if (result.IsJust()) {
7148 switch (result.FromJust()) {
7149 case ComparisonResult::kLessThan:
7150 return Just(true);
7151 case ComparisonResult::kEqual:
7152 case ComparisonResult::kGreaterThan:
7153 case ComparisonResult::kUndefined:
7154 return Just(false);
7155 }
7156 }
7157 return Nothing<bool>();
7158 }
7162 Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y,
7163 Strength strength) {
7164 Maybe<ComparisonResult> result = Compare(x, y, strength);
7165 if (result.IsJust()) {
7166 switch (result.FromJust()) {
7167 case ComparisonResult::kEqual:
7168 case ComparisonResult::kLessThan:
7169 return Just(true);
7170 case ComparisonResult::kGreaterThan:
7171 case ComparisonResult::kUndefined:
7172 return Just(false);
7173 }
7174 }
7175 return Nothing<bool>();
7176 }
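// All four predicates share one pattern: Nothing<bool>() means Compare()
// failed (e.g. an exception is pending) and the caller must bail out. A
// caller sketch (hypothetical handles |x| and |y|):
//
//   Maybe<bool> result = Object::LessThan(x, y, Strength::WEAK);
//   if (result.IsNothing()) return;  // propagate the pending exception
//   bool x_less_than_y = result.FromJust();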
7179 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
7180 Handle<Name> name,
7181 LanguageMode language_mode) {
7182 LookupIterator it =
7183 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7184 return GetProperty(&it, language_mode);
7185 }
7188 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
7189 Handle<Name> name) {
7190 // Call the "has" trap on proxies.
7191 if (object->IsJSProxy()) {
7192 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7193 return JSProxy::HasPropertyWithHandler(proxy, name);
7194 }
7196 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
7197 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7198 }
7201 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7202 Handle<Name> name) {
7203 // Call the "has" trap on proxies.
7204 if (object->IsJSProxy()) {
7205 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7206 return JSProxy::HasPropertyWithHandler(proxy, name);
7207 }
7209 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
7210 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7211 }
7214 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
7215 Handle<JSReceiver> object, Handle<Name> name) {
7216 LookupIterator it =
7217 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7218 return GetPropertyAttributes(&it);
7219 }
7222 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7223 Handle<JSReceiver> object, Handle<Name> name) {
7224 LookupIterator it = LookupIterator::PropertyOrElement(
7225 name->GetIsolate(), object, name, LookupIterator::HIDDEN);
7226 return GetPropertyAttributes(&it);
7227 }
7230 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7231 // Call the "has" trap on proxies.
7232 if (object->IsJSProxy()) {
7233 Isolate* isolate = object->GetIsolate();
7234 Handle<Name> name = isolate->factory()->Uint32ToString(index);
7235 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7236 return JSProxy::HasPropertyWithHandler(proxy, name);
7237 }
7239 Maybe<PropertyAttributes> result = GetElementAttributes(object, index);
7240 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7241 }
7244 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
7245 uint32_t index) {
7246 // Call the "has" trap on proxies.
7247 if (object->IsJSProxy()) {
7248 Isolate* isolate = object->GetIsolate();
7249 Handle<Name> name = isolate->factory()->Uint32ToString(index);
7250 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7251 return JSProxy::HasPropertyWithHandler(proxy, name);
7252 }
7254 Maybe<PropertyAttributes> result = GetOwnElementAttributes(object, index);
7255 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7256 }
7259 Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
7260 Handle<JSReceiver> object, uint32_t index) {
7261 Isolate* isolate = object->GetIsolate();
7262 LookupIterator it(isolate, object, index);
7263 return GetPropertyAttributes(&it);
7264 }
7267 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
7268 Handle<JSReceiver> object, uint32_t index) {
7269 Isolate* isolate = object->GetIsolate();
7270 LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
7271 return GetPropertyAttributes(&it);
7272 }
7275 bool JSGlobalObject::IsDetached() {
7276 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7277 }
7280 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
7281 const PrototypeIterator iter(this->GetIsolate(),
7282 const_cast<JSGlobalProxy*>(this));
7283 return iter.GetCurrent() != global;
7284 }
7287 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
7288 return object->IsJSProxy()
7289 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
7290 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
7291 }
7294 Object* JSReceiver::GetIdentityHash() {
7295 return IsJSProxy()
7296 ? JSProxy::cast(this)->GetIdentityHash()
7297 : JSObject::cast(this)->GetIdentityHash();
7298 }
7301 bool AccessorInfo::all_can_read() {
7302 return BooleanBit::get(flag(), kAllCanReadBit);
7303 }
7306 void AccessorInfo::set_all_can_read(bool value) {
7307 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7308 }
7311 bool AccessorInfo::all_can_write() {
7312 return BooleanBit::get(flag(), kAllCanWriteBit);
7313 }
7316 void AccessorInfo::set_all_can_write(bool value) {
7317 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7318 }
7321 bool AccessorInfo::is_special_data_property() {
7322 return BooleanBit::get(flag(), kSpecialDataProperty);
7323 }
7326 void AccessorInfo::set_is_special_data_property(bool value) {
7327 set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
7328 }
7331 PropertyAttributes AccessorInfo::property_attributes() {
7332 return AttributesField::decode(static_cast<uint32_t>(flag()));
7333 }
7336 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7337 set_flag(AttributesField::update(flag(), attributes));
7338 }
7341 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7342 if (!HasExpectedReceiverType()) return true;
7343 if (!receiver->IsJSObject()) return false;
7344 return FunctionTemplateInfo::cast(expected_receiver_type())
7345 ->IsTemplateFor(JSObject::cast(receiver)->map());
7346 }
7349 bool AccessorInfo::HasExpectedReceiverType() {
7350 return expected_receiver_type()->IsFunctionTemplateInfo();
7351 }
7354 Object* AccessorPair::get(AccessorComponent component) {
7355 return component == ACCESSOR_GETTER ? getter() : setter();
7356 }
7359 void AccessorPair::set(AccessorComponent component, Object* value) {
7360 if (component == ACCESSOR_GETTER) {
7361 set_getter(value);
7362 } else {
7363 set_setter(value);
7364 }
7365 }
7368 void AccessorPair::SetComponents(Object* getter, Object* setter) {
7369 if (!getter->IsNull()) set_getter(getter);
7370 if (!setter->IsNull()) set_setter(setter);
7371 }
7374 bool AccessorPair::Equals(AccessorPair* pair) {
7375 return (this == pair) || pair->Equals(getter(), setter());
7376 }
7379 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7380 return (getter() == getter_value) && (setter() == setter_value);
7381 }
7384 bool AccessorPair::ContainsAccessor() {
7385 return IsJSAccessor(getter()) || IsJSAccessor(setter());
7386 }
7389 bool AccessorPair::IsJSAccessor(Object* obj) {
7390 return obj->IsCallable() || obj->IsUndefined();
7391 }
7394 template<typename Derived, typename Shape, typename Key>
7395 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7396 Handle<Object> key,
7397 Handle<Object> value) {
7398 this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7399 }
7402 template<typename Derived, typename Shape, typename Key>
7403 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7404 Handle<Object> key,
7405 Handle<Object> value,
7406 PropertyDetails details) {
7407 Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
7408 }
7411 template <typename Key>
7412 template <typename Dictionary>
7413 void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
7414 Handle<Object> key,
7415 Handle<Object> value,
7416 PropertyDetails details) {
7417 STATIC_ASSERT(Dictionary::kEntrySize == 3);
7418 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7419 int index = dict->EntryToIndex(entry);
7420 DisallowHeapAllocation no_gc;
7421 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7422 dict->set(index, *key, mode);
7423 dict->set(index + 1, *value, mode);
7424 dict->set(index + 2, details.AsSmi());
7425 }
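// With kEntrySize == 3 each entry occupies three consecutive slots,
// [key, value, details], so EntryToIndex(entry) points at the key slot
// (roughly kElementsStartIndex + entry * kEntrySize) and the details Smi
// lands at index + 2, as written above.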
7428 template <typename Dictionary>
7429 void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
7430 Handle<Object> key, Handle<Object> value,
7431 PropertyDetails details) {
7432 STATIC_ASSERT(Dictionary::kEntrySize == 2);
7433 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7434 DCHECK(value->IsPropertyCell());
7435 int index = dict->EntryToIndex(entry);
7436 DisallowHeapAllocation no_gc;
7437 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7438 dict->set(index, *key, mode);
7439 dict->set(index + 1, *value, mode);
7440 PropertyCell::cast(*value)->set_property_details(details);
7441 }
7444 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7445 DCHECK(other->IsNumber());
7446 return key == static_cast<uint32_t>(other->Number());
7447 }
7450 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7451 return ComputeIntegerHash(key, 0);
7452 }
7455 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7456 Object* other) {
7457 DCHECK(other->IsNumber());
7458 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7459 }
7462 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7463 return ComputeIntegerHash(key, seed);
7464 }
7467 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7468 uint32_t seed,
7469 Object* other) {
7470 DCHECK(other->IsNumber());
7471 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7472 }
7475 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7476 return isolate->factory()->NewNumberFromUint(key);
7477 }
7480 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7481 // We know that all entries in a hash table had their hash keys created.
7482 // Use that knowledge to have fast failure.
7483 if (key->Hash() != Name::cast(other)->Hash()) return false;
7484 return key->Equals(Name::cast(other));
7485 }
7488 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7489 return key->Hash();
7490 }
7493 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7494 return Name::cast(other)->Hash();
7495 }
7498 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7499 Handle<Name> key) {
7500 DCHECK(key->IsUniqueName());
7501 return key;
7502 }
7505 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7506 Handle<NameDictionary> dictionary) {
7507 return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7508 }
7511 template <typename Dictionary>
7512 PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7513 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7514 Object* raw_value = dict->ValueAt(entry);
7515 DCHECK(raw_value->IsPropertyCell());
7516 PropertyCell* cell = PropertyCell::cast(raw_value);
7517 return cell->property_details();
7518 }
7521 template <typename Dictionary>
7522 void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7523 PropertyDetails value) {
7524 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7525 Object* raw_value = dict->ValueAt(entry);
7526 DCHECK(raw_value->IsPropertyCell());
7527 PropertyCell* cell = PropertyCell::cast(raw_value);
7528 cell->set_property_details(value);
7529 }
7532 template <typename Dictionary>
7533 bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7534 DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7535 return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
7536 }
7539 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7540 return key->SameValue(other);
7541 }
7544 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7545 return Smi::cast(key->GetHash())->value();
7546 }
7549 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7550 Object* other) {
7551 return Smi::cast(other->GetHash())->value();
7552 }
7555 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7556 Handle<Object> key) {
7557 return key;
7558 }
7561 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7562 Handle<ObjectHashTable> table, Handle<Object> key) {
7563 return DerivedHashTable::Shrink(table, key);
7564 }
7567 Object* OrderedHashMap::ValueAt(int entry) {
7568 return get(EntryToIndex(entry) + kValueOffset);
7569 }
7572 template <int entrysize>
7573 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7574 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7575 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7576 : *key == other;
7577 }
7580 template <int entrysize>
7581 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7582 intptr_t hash =
7583 key->IsWeakCell()
7584 ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7585 : reinterpret_cast<intptr_t>(*key);
7586 return (uint32_t)(hash & 0xFFFFFFFF);
7587 }
7590 template <int entrysize>
7591 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7592 Object* other) {
7593 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7594 intptr_t hash = reinterpret_cast<intptr_t>(other);
7595 return (uint32_t)(hash & 0xFFFFFFFF);
7596 }
7599 template <int entrysize>
7600 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7601 Handle<Object> key) {
7602 return key;
7603 }
7606 bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
7609 bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
7612 bool ScopeInfo::HasSimpleParameters() {
7613 return HasSimpleParametersField::decode(Flags());
7614 }
7617 #define SCOPE_INFO_FIELD_ACCESSORS(name) \
7618 void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
7619 int ScopeInfo::name() { \
7620 if (length() > 0) { \
7621 return Smi::cast(get(k##name))->value(); \
7622 } else { \
7623 return 0; \
7624 } \
7625 }
7626 FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
7627 #undef SCOPE_INFO_FIELD_ACCESSORS
7630 void Map::ClearCodeCache(Heap* heap) {
7631 // No write barrier is needed since empty_fixed_array is not in new space.
7632 // Please note this function is used during marking:
7633 // - MarkCompactCollector::MarkUnmarkedObject
7634 // - IncrementalMarking::Step
7635 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7636 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7637 }
7640 int Map::SlackForArraySize(int old_size, int size_limit) {
7641 const int max_slack = size_limit - old_size;
7642 CHECK_LE(0, max_slack);
7643 if (old_size < 4) {
7644 DCHECK_LE(1, max_slack);
7645 return 1;
7646 }
7647 return Min(max_slack, old_size / 4);
7648 }
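// E.g. an object with old_size == 8 growing toward size_limit == 20 gets
// Min(20 - 8, 8 / 4) == 2 slots of slack, while tiny objects (old_size < 4)
// always get exactly one slot.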
7651 void JSArray::set_length(Smi* length) {
7652 // Don't need a write barrier for a Smi.
7653 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7654 }
7657 bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
7658 // If the new array won't fit in some non-trivial fraction of the max old
7659 // space size, then force it to go dictionary mode.
7660 uint32_t max_fast_array_size =
7661 static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
7662 return new_length >= max_fast_array_size;
7663 }
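// E.g. with a 1 GB max old generation and kDoubleSize == 8, the cutoff is
// (2^30 / 8) / 4 == 2^25, so lengths of 32M elements and up go to
// dictionary (slow) mode.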
7666 bool JSArray::AllowsSetLength() {
7667 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7668 DCHECK(result == !HasFixedTypedArrayElements());
7669 return result;
7670 }
7673 void JSArray::SetContent(Handle<JSArray> array,
7674 Handle<FixedArrayBase> storage) {
7675 EnsureCanContainElements(array, storage, storage->length(),
7676 ALLOW_COPIED_DOUBLE_ELEMENTS);
7678 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7679 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7680 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7681 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7682 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7683 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7684 array->set_elements(*storage);
7685 array->set_length(Smi::FromInt(storage->length()));
7686 }
7689 int TypeFeedbackInfo::ic_total_count() {
7690 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7691 return ICTotalCountField::decode(current);
7692 }
7695 void TypeFeedbackInfo::set_ic_total_count(int count) {
7696 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7697 value = ICTotalCountField::update(value,
7698 ICTotalCountField::decode(count));
7699 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7700 }
7703 int TypeFeedbackInfo::ic_with_type_info_count() {
7704 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7705 return ICsWithTypeInfoCountField::decode(current);
7706 }
7709 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7710 if (delta == 0) return;
7711 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7712 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7713 // We can get negative count here when the type-feedback info is
7714 // shared between two code objects. This can only happen when
7715 // the debugger made a shallow copy of the code object (see Heap::CopyCode).
7716 // Since we do not optimize when the debugger is active, we can skip
7717 // this counter update.
7718 if (new_count >= 0) {
7719 new_count &= ICsWithTypeInfoCountField::kMask;
7720 value = ICsWithTypeInfoCountField::update(value, new_count);
7721 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7722 }
7723 }
7726 int TypeFeedbackInfo::ic_generic_count() {
7727 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7728 }
7731 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7732 if (delta == 0) return;
7733 int new_count = ic_generic_count() + delta;
7734 if (new_count >= 0) {
7735 new_count &= ~Smi::kMinValue;
7736 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7737 }
7738 }
7741 void TypeFeedbackInfo::initialize_storage() {
7742 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7743 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7744 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7745 }
7748 void TypeFeedbackInfo::change_own_type_change_checksum() {
7749 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7750 int checksum = OwnTypeChangeChecksum::decode(value);
7751 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7752 value = OwnTypeChangeChecksum::update(value, checksum);
7753 // Ensure packed bit field is in Smi range.
7754 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7755 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7756 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7757 }
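// A rough reading of the two fix-ups above: OR-ing in Smi::kMinValue forces
// an overflowing value negative (a valid Smi), and masking with
// ~Smi::kMinValue brings an underflowing value back into range; in both
// cases the low-order checksum bits are preserved.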
7760 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7761 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7762 int mask = (1 << kTypeChangeChecksumBits) - 1;
7763 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7764 // Ensure packed bit field is in Smi range.
7765 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7766 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7767 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7768 }
7771 int TypeFeedbackInfo::own_type_change_checksum() {
7772 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7773 return OwnTypeChangeChecksum::decode(value);
7774 }
7777 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7778 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7779 int mask = (1 << kTypeChangeChecksumBits) - 1;
7780 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7781 }
7784 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7787 Relocatable::Relocatable(Isolate* isolate) {
7788 isolate_ = isolate;
7789 prev_ = isolate->relocatable_top();
7790 isolate->set_relocatable_top(this);
7791 }
7794 Relocatable::~Relocatable() {
7795 DCHECK_EQ(isolate_->relocatable_top(), this);
7796 isolate_->set_relocatable_top(prev_);
7797 }
7801 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7802 return map->instance_size();
7803 }
7807 int FixedArray::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7808 return SizeFor(reinterpret_cast<FixedArray*>(object)->synchronized_length());
7809 }
7813 int StructBodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7814 return map->instance_size();
7815 }
7818 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7819 v->VisitExternalReference(
7820 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7821 }
7824 template<typename StaticVisitor>
7825 void Foreign::ForeignIterateBody() {
7826 StaticVisitor::VisitExternalReference(
7827 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7828 }
7831 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) {
7832 v->VisitPointer(
7833 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
7834 }
7837 template <typename StaticVisitor>
7838 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() {
7839 StaticVisitor::VisitPointer(
7840 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
7841 }
7844 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7845 typedef v8::String::ExternalOneByteStringResource Resource;
7846 v->VisitExternalOneByteString(
7847 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7848 }
7851 template <typename StaticVisitor>
7852 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7853 typedef v8::String::ExternalOneByteStringResource Resource;
7854 StaticVisitor::VisitExternalOneByteString(
7855 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7856 }
7859 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7860 typedef v8::String::ExternalStringResource Resource;
7861 v->VisitExternalTwoByteString(
7862 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7863 }
7866 template<typename StaticVisitor>
7867 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7868 typedef v8::String::ExternalStringResource Resource;
7869 StaticVisitor::VisitExternalTwoByteString(
7870 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7871 }
7874 static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
7875 int start_offset,
7876 int end_offset,
7877 ObjectVisitor* v) {
7878 DCHECK(FLAG_unbox_double_fields);
7879 DCHECK(IsAligned(start_offset, kPointerSize) &&
7880 IsAligned(end_offset, kPointerSize));
7882 LayoutDescriptorHelper helper(object->map());
7883 DCHECK(!helper.all_fields_tagged());
7885 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
7886 // Visit all tagged fields.
7887 if (helper.IsTagged(offset)) {
7888 v->VisitPointer(HeapObject::RawField(object, offset));
7889 }
7890 }
7891 }
7894 template<int start_offset, int end_offset, int size>
7895 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7896 HeapObject* obj,
7897 ObjectVisitor* v) {
7898 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7899 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7900 HeapObject::RawField(obj, end_offset));
7901 } else {
7902 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
7903 }
7904 }
7907 template<int start_offset>
7908 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7909 int object_size,
7910 ObjectVisitor* v) {
7911 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7912 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7913 HeapObject::RawField(obj, object_size));
7914 } else {
7915 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
7916 }
7917 }
7920 template<class Derived, class TableType>
7921 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7922 TableType* table(TableType::cast(this->table()));
7923 int index = Smi::cast(this->index())->value();
7924 Object* key = table->KeyAt(index);
7925 DCHECK(!key->IsTheHole());
7926 return key;
7927 }
7930 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7931 array->set(0, CurrentKey());
7932 }
7935 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7936 array->set(0, CurrentKey());
7937 array->set(1, CurrentValue());
7938 }
7941 Object* JSMapIterator::CurrentValue() {
7942 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7943 int index = Smi::cast(this->index())->value();
7944 Object* value = table->ValueAt(index);
7945 DCHECK(!value->IsTheHole());
7946 return value;
7947 }
7950 ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)
7951 ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
7954 String::SubStringRange::SubStringRange(String* string, int first, int length)
7955 : string_(string),
7956 first_(first),
7957 length_(length == -1 ? string->length() : length) {}
7960 class String::SubStringRange::iterator final {
7961 public:
7962 typedef std::forward_iterator_tag iterator_category;
7963 typedef int difference_type;
7964 typedef uc16 value_type;
7965 typedef uc16* pointer;
7966 typedef uc16& reference;
7968 iterator(const iterator& other)
7969 : content_(other.content_), offset_(other.offset_) {}
7971 uc16 operator*() { return content_.Get(offset_); }
7972 bool operator==(const iterator& other) const {
7973 return content_.UsesSameString(other.content_) && offset_ == other.offset_;
7974 }
7975 bool operator!=(const iterator& other) const {
7976 return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
7977 }
7978 iterator& operator++() {
7979 ++offset_;
7980 return *this;
7981 }
7982 iterator operator++(int);
7984 private:
7985 friend class String;
7986 iterator(String* from, int offset)
7987 : content_(from->GetFlatContent()), offset_(offset) {}
7988 String::FlatContent content_;
7989 int offset_;
7990 };
7993 String::SubStringRange::iterator String::SubStringRange::begin() {
7994 return String::SubStringRange::iterator(string_, first_);
7995 }
7998 String::SubStringRange::iterator String::SubStringRange::end() {
7999 return String::SubStringRange::iterator(string_, first_ + length_);
8000 }
8003 #undef TYPE_CHECKER
8004 #undef CAST_ACCESSOR
8005 #undef INT_ACCESSORS
8006 #undef ACCESSORS
8007 #undef SMI_ACCESSORS
8008 #undef SYNCHRONIZED_SMI_ACCESSORS
8009 #undef NOBARRIER_SMI_ACCESSORS
8010 #undef BOOL_GETTER
8011 #undef BOOL_ACCESSORS
8012 #undef FIELD_ADDR
8013 #undef FIELD_ADDR_CONST
8014 #undef READ_FIELD
8015 #undef NOBARRIER_READ_FIELD
8016 #undef WRITE_FIELD
8017 #undef NOBARRIER_WRITE_FIELD
8018 #undef WRITE_BARRIER
8019 #undef CONDITIONAL_WRITE_BARRIER
8020 #undef READ_DOUBLE_FIELD
8021 #undef WRITE_DOUBLE_FIELD
8022 #undef READ_INT_FIELD
8023 #undef WRITE_INT_FIELD
8024 #undef READ_INTPTR_FIELD
8025 #undef WRITE_INTPTR_FIELD
8026 #undef READ_UINT8_FIELD
8027 #undef WRITE_UINT8_FIELD
8028 #undef READ_INT8_FIELD
8029 #undef WRITE_INT8_FIELD
8030 #undef READ_UINT16_FIELD
8031 #undef WRITE_UINT16_FIELD
8032 #undef READ_INT16_FIELD
8033 #undef WRITE_INT16_FIELD
8034 #undef READ_UINT32_FIELD
8035 #undef WRITE_UINT32_FIELD
8036 #undef READ_INT32_FIELD
8037 #undef WRITE_INT32_FIELD
8038 #undef READ_FLOAT_FIELD
8039 #undef WRITE_FLOAT_FIELD
8040 #undef READ_UINT64_FIELD
8041 #undef WRITE_UINT64_FIELD
8042 #undef READ_INT64_FIELD
8043 #undef WRITE_INT64_FIELD
8044 #undef READ_BYTE_FIELD
8045 #undef WRITE_BYTE_FIELD
8046 #undef NOBARRIER_READ_BYTE_FIELD
8047 #undef NOBARRIER_WRITE_BYTE_FIELD
8049 } // namespace internal
8050 }  // namespace v8
8052 #endif // V8_OBJECTS_INL_H_