1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts.h"
18 #include "src/conversions-inl.h"
19 #include "src/factory.h"
20 #include "src/field-index-inl.h"
21 #include "src/heap/heap-inl.h"
22 #include "src/heap/heap.h"
23 #include "src/isolate.h"
24 #include "src/layout-descriptor-inl.h"
25 #include "src/lookup.h"
26 #include "src/objects.h"
27 #include "src/property.h"
28 #include "src/prototype.h"
29 #include "src/transitions-inl.h"
30 #include "src/type-feedback-vector-inl.h"
31 #include "src/types-inl.h"
32 #include "src/v8memory.h"
37 PropertyDetails::PropertyDetails(Smi* smi) {
38 value_ = smi->value();
42 Smi* PropertyDetails::AsSmi() const {
43 // Ensure the upper 2 bits have the same value by sign extending it. This is
44 // necessary to be able to use the 31st bit of the property details.
45 int value = value_ << 1;
46 return Smi::FromInt(value >> 1);
50 int PropertyDetails::field_width_in_words() const {
51 DCHECK(location() == kField);
52 if (!FLAG_unbox_double_fields) return 1;
53 if (kDoubleSize == kPointerSize) return 1;
54 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
58 #define TYPE_CHECKER(type, instancetype) \
59 bool Object::Is##type() const { \
60 return Object::IsHeapObject() && \
61 HeapObject::cast(this)->map()->instance_type() == instancetype; \
65 #define CAST_ACCESSOR(type) \
66 type* type::cast(Object* object) { \
67 SLOW_DCHECK(object->Is##type()); \
68 return reinterpret_cast<type*>(object); \
70 const type* type::cast(const Object* object) { \
71 SLOW_DCHECK(object->Is##type()); \
72 return reinterpret_cast<const type*>(object); \
76 #define INT_ACCESSORS(holder, name, offset) \
77 int holder::name() const { return READ_INT_FIELD(this, offset); } \
78 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
81 #define ACCESSORS(holder, name, type, offset) \
82 type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
83 void holder::set_##name(type* value, WriteBarrierMode mode) { \
84 WRITE_FIELD(this, offset, value); \
85 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
89 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
90 #define ACCESSORS_TO_SMI(holder, name, offset) \
91 Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
92 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
93 WRITE_FIELD(this, offset, value); \
97 // Getter that returns a Smi as an int and writes an int as a Smi.
98 #define SMI_ACCESSORS(holder, name, offset) \
99 int holder::name() const { \
100 Object* value = READ_FIELD(this, offset); \
101 return Smi::cast(value)->value(); \
103 void holder::set_##name(int value) { \
104 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
107 #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \
108 int holder::synchronized_##name() const { \
109 Object* value = ACQUIRE_READ_FIELD(this, offset); \
110 return Smi::cast(value)->value(); \
112 void holder::synchronized_set_##name(int value) { \
113 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \
117 int holder::nobarrier_##name() const { \
118 Object* value = NOBARRIER_READ_FIELD(this, offset); \
119 return Smi::cast(value)->value(); \
121 void holder::nobarrier_set_##name(int value) { \
122 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
125 #define BOOL_GETTER(holder, field, name, offset) \
126 bool holder::name() const { \
127 return BooleanBit::get(field(), offset); \
131 #define BOOL_ACCESSORS(holder, field, name, offset) \
132 bool holder::name() const { \
133 return BooleanBit::get(field(), offset); \
135 void holder::set_##name(bool value) { \
136 set_##field(BooleanBit::set(field(), offset, value)); \
140 bool Object::IsFixedArrayBase() const {
141 return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
145 // External objects are not extensible, so the map check is enough.
146 bool Object::IsExternal() const {
147 return Object::IsHeapObject() &&
148 HeapObject::cast(this)->map() ==
149 HeapObject::cast(this)->GetHeap()->external_map();
153 bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }
156 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
157 TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
158 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
159 TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)
162 #define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
163 bool Object::Is##Type() const { \
164 return Object::IsHeapObject() && \
165 HeapObject::cast(this)->map() == \
166 HeapObject::cast(this)->GetHeap()->type##_map(); \
168 SIMD128_TYPES(SIMD128_TYPE_CHECKER)
169 #undef SIMD128_TYPE_CHECKER
172 bool Object::IsString() const {
173 return Object::IsHeapObject()
174 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
178 bool Object::IsName() const {
179 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
180 return Object::IsHeapObject() &&
181 HeapObject::cast(this)->map()->instance_type() <= LAST_NAME_TYPE;
185 bool Object::IsUniqueName() const {
186 return IsInternalizedString() || IsSymbol();
190 bool Object::IsSpecObject() const {
191 return Object::IsHeapObject()
192 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
196 bool Object::IsSpecFunction() const {
197 if (!Object::IsHeapObject()) return false;
198 InstanceType type = HeapObject::cast(this)->map()->instance_type();
199 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
203 bool Object::IsTemplateInfo() const {
204 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
208 bool Object::IsInternalizedString() const {
209 if (!this->IsHeapObject()) return false;
210 uint32_t type = HeapObject::cast(this)->map()->instance_type();
211 STATIC_ASSERT(kNotInternalizedTag != 0);
212 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
213 (kStringTag | kInternalizedTag);
217 bool Object::IsConsString() const {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();
223 bool Object::IsSlicedString() const {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();
229 bool Object::IsSeqString() const {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();
235 bool Object::IsSeqOneByteString() const {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsOneByteRepresentation();
242 bool Object::IsSeqTwoByteString() const {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();
249 bool Object::IsExternalString() const {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();
255 bool Object::IsExternalOneByteString() const {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsOneByteRepresentation();
262 bool Object::IsExternalTwoByteString() const {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();
269 bool Object::HasValidElements() {
270 // Dictionary is covered under FixedArray.
271 return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
275 bool Object::KeyEquals(Object* second) {
276 Object* first = this;
277 if (second->IsNumber()) {
278 if (first->IsNumber()) return first->Number() == second->Number();
279 Object* temp = first;
283 if (first->IsNumber()) {
284 DCHECK_LE(0, first->Number());
285 uint32_t expected = static_cast<uint32_t>(first->Number());
287 return Name::cast(second)->AsArrayIndex(&index) && index == expected;
289 return Name::cast(first)->Equals(Name::cast(second));
293 Handle<Object> Object::NewStorageFor(Isolate* isolate,
294 Handle<Object> object,
295 Representation representation) {
296 if (representation.IsSmi() && object->IsUninitialized()) {
297 return handle(Smi::FromInt(0), isolate);
299 if (!representation.IsDouble()) return object;
301 if (object->IsUninitialized()) {
303 } else if (object->IsMutableHeapNumber()) {
304 value = HeapNumber::cast(*object)->value();
306 value = object->Number();
308 return isolate->factory()->NewHeapNumber(value, MUTABLE);
312 Handle<Object> Object::WrapForRead(Isolate* isolate,
313 Handle<Object> object,
314 Representation representation) {
315 DCHECK(!object->IsUninitialized());
316 if (!representation.IsDouble()) {
317 DCHECK(object->FitsRepresentation(representation));
320 return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
324 StringShape::StringShape(const String* str)
325 : type_(str->map()->instance_type()) {
327 DCHECK((type_ & kIsNotStringMask) == kStringTag);
331 StringShape::StringShape(Map* map)
332 : type_(map->instance_type()) {
334 DCHECK((type_ & kIsNotStringMask) == kStringTag);
338 StringShape::StringShape(InstanceType t)
339 : type_(static_cast<uint32_t>(t)) {
341 DCHECK((type_ & kIsNotStringMask) == kStringTag);
345 bool StringShape::IsInternalized() {
347 STATIC_ASSERT(kNotInternalizedTag != 0);
348 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
349 (kStringTag | kInternalizedTag);
353 bool String::IsOneByteRepresentation() const {
354 uint32_t type = map()->instance_type();
355 return (type & kStringEncodingMask) == kOneByteStringTag;
359 bool String::IsTwoByteRepresentation() const {
360 uint32_t type = map()->instance_type();
361 return (type & kStringEncodingMask) == kTwoByteStringTag;
365 bool String::IsOneByteRepresentationUnderneath() {
366 uint32_t type = map()->instance_type();
367 STATIC_ASSERT(kIsIndirectStringTag != 0);
368 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
370 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
371 case kOneByteStringTag:
373 case kTwoByteStringTag:
375 default: // Cons or sliced string. Need to go deeper.
376 return GetUnderlying()->IsOneByteRepresentation();
381 bool String::IsTwoByteRepresentationUnderneath() {
382 uint32_t type = map()->instance_type();
383 STATIC_ASSERT(kIsIndirectStringTag != 0);
384 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
386 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
387 case kOneByteStringTag:
389 case kTwoByteStringTag:
391 default: // Cons or sliced string. Need to go deeper.
392 return GetUnderlying()->IsTwoByteRepresentation();
397 bool String::HasOnlyOneByteChars() {
398 uint32_t type = map()->instance_type();
399 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
400 IsOneByteRepresentation();
404 bool StringShape::IsCons() {
405 return (type_ & kStringRepresentationMask) == kConsStringTag;
409 bool StringShape::IsSliced() {
410 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
414 bool StringShape::IsIndirect() {
415 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
419 bool StringShape::IsExternal() {
420 return (type_ & kStringRepresentationMask) == kExternalStringTag;
424 bool StringShape::IsSequential() {
425 return (type_ & kStringRepresentationMask) == kSeqStringTag;
429 StringRepresentationTag StringShape::representation_tag() {
430 uint32_t tag = (type_ & kStringRepresentationMask);
431 return static_cast<StringRepresentationTag>(tag);
435 uint32_t StringShape::encoding_tag() {
436 return type_ & kStringEncodingMask;
440 uint32_t StringShape::full_representation_tag() {
441 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
445 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
446 Internals::kFullStringRepresentationMask);
448 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
449 Internals::kStringEncodingMask);
452 bool StringShape::IsSequentialOneByte() {
453 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
457 bool StringShape::IsSequentialTwoByte() {
458 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
462 bool StringShape::IsExternalOneByte() {
463 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
467 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
468 Internals::kExternalOneByteRepresentationTag);
470 STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);
473 bool StringShape::IsExternalTwoByte() {
474 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
478 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
479 Internals::kExternalTwoByteRepresentationTag);
481 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
484 uc32 FlatStringReader::Get(int index) {
486 return Get<uint8_t>(index);
488 return Get<uc16>(index);
493 template <typename Char>
494 Char FlatStringReader::Get(int index) {
495 DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
496 DCHECK(0 <= index && index <= length_);
497 if (sizeof(Char) == 1) {
498 return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
500 return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
505 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
506 return key->AsHandle(isolate);
510 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
512 return key->AsHandle(isolate);
516 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
518 return key->AsHandle(isolate);
521 template <typename Char>
522 class SequentialStringKey : public HashTableKey {
524 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
525 : string_(string), hash_field_(0), seed_(seed) { }
527 uint32_t Hash() override {
528 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
532 uint32_t result = hash_field_ >> String::kHashShift;
533 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
538 uint32_t HashForObject(Object* other) override {
539 return String::cast(other)->Hash();
542 Vector<const Char> string_;
543 uint32_t hash_field_;
548 class OneByteStringKey : public SequentialStringKey<uint8_t> {
550 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
551 : SequentialStringKey<uint8_t>(str, seed) { }
553 bool IsMatch(Object* string) override {
554 return String::cast(string)->IsOneByteEqualTo(string_);
557 Handle<Object> AsHandle(Isolate* isolate) override;
561 class SeqOneByteSubStringKey : public HashTableKey {
563 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
564 : string_(string), from_(from), length_(length) {
565 DCHECK(string_->IsSeqOneByteString());
568 uint32_t Hash() override {
569 DCHECK(length_ >= 0);
570 DCHECK(from_ + length_ <= string_->length());
571 const uint8_t* chars = string_->GetChars() + from_;
572 hash_field_ = StringHasher::HashSequentialString(
573 chars, length_, string_->GetHeap()->HashSeed());
574 uint32_t result = hash_field_ >> String::kHashShift;
575 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
579 uint32_t HashForObject(Object* other) override {
580 return String::cast(other)->Hash();
583 bool IsMatch(Object* string) override;
584 Handle<Object> AsHandle(Isolate* isolate) override;
587 Handle<SeqOneByteString> string_;
590 uint32_t hash_field_;
594 class TwoByteStringKey : public SequentialStringKey<uc16> {
596 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
597 : SequentialStringKey<uc16>(str, seed) { }
599 bool IsMatch(Object* string) override {
600 return String::cast(string)->IsTwoByteEqualTo(string_);
603 Handle<Object> AsHandle(Isolate* isolate) override;
607 // Utf8StringKey carries a vector of chars as key.
608 class Utf8StringKey : public HashTableKey {
610 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
611 : string_(string), hash_field_(0), seed_(seed) { }
613 bool IsMatch(Object* string) override {
614 return String::cast(string)->IsUtf8EqualTo(string_);
617 uint32_t Hash() override {
618 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
619 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
620 uint32_t result = hash_field_ >> String::kHashShift;
621 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
625 uint32_t HashForObject(Object* other) override {
626 return String::cast(other)->Hash();
629 Handle<Object> AsHandle(Isolate* isolate) override {
630 if (hash_field_ == 0) Hash();
631 return isolate->factory()->NewInternalizedStringFromUtf8(
632 string_, chars_, hash_field_);
635 Vector<const char> string_;
636 uint32_t hash_field_;
637 int chars_; // Caches the number of characters when computing the hash code.
642 bool Object::IsNumber() const {
643 return IsSmi() || IsHeapNumber();
647 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
648 TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
649 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
652 bool Object::IsFiller() const {
653 if (!Object::IsHeapObject()) return false;
654 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
655 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
660 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
661 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
663 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
664 #undef TYPED_ARRAY_TYPE_CHECKER
667 bool Object::IsFixedTypedArrayBase() const {
668 if (!Object::IsHeapObject()) return false;
670 InstanceType instance_type =
671 HeapObject::cast(this)->map()->instance_type();
672 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
673 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
677 bool Object::IsJSReceiver() const {
678 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
679 return IsHeapObject() &&
680 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
684 bool Object::IsJSObject() const {
685 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
686 return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
690 bool Object::IsJSProxy() const {
691 if (!Object::IsHeapObject()) return false;
692 return HeapObject::cast(this)->map()->IsJSProxyMap();
696 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
697 TYPE_CHECKER(JSSet, JS_SET_TYPE)
698 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
699 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
700 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
701 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
702 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
703 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
704 TYPE_CHECKER(Map, MAP_TYPE)
705 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
706 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
707 TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
710 bool Object::IsJSWeakCollection() const {
711 return IsJSWeakMap() || IsJSWeakSet();
715 bool Object::IsDescriptorArray() const {
716 return IsFixedArray();
720 bool Object::IsArrayList() const { return IsFixedArray(); }
723 bool Object::IsLayoutDescriptor() const {
724 return IsSmi() || IsFixedTypedArrayBase();
728 bool Object::IsTransitionArray() const {
729 return IsFixedArray();
733 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
736 bool Object::IsDeoptimizationInputData() const {
737 // Must be a fixed array.
738 if (!IsFixedArray()) return false;
740 // There's no sure way to detect the difference between a fixed array and
741 // a deoptimization data array. Since this is used for asserts we can
742 // check that the length is zero or else the fixed size plus a multiple of
744 int length = FixedArray::cast(this)->length();
745 if (length == 0) return true;
747 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
748 return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
752 bool Object::IsDeoptimizationOutputData() const {
753 if (!IsFixedArray()) return false;
754 // There's actually no way to see the difference between a fixed array and
755 // a deoptimization data array. Since this is used for asserts we can check
756 // that the length is plausible though.
757 if (FixedArray::cast(this)->length() % 2 != 0) return false;
762 bool Object::IsHandlerTable() const {
763 if (!IsFixedArray()) return false;
764 // There's actually no way to see the difference between a fixed array and
765 // a handler table array.
770 bool Object::IsDependentCode() const {
771 if (!IsFixedArray()) return false;
772 // There's actually no way to see the difference between a fixed array and
773 // a dependent codes array.
778 bool Object::IsContext() const {
779 if (!Object::IsHeapObject()) return false;
780 Map* map = HeapObject::cast(this)->map();
781 Heap* heap = map->GetHeap();
782 return (map == heap->function_context_map() ||
783 map == heap->catch_context_map() ||
784 map == heap->with_context_map() ||
785 map == heap->native_context_map() ||
786 map == heap->block_context_map() ||
787 map == heap->module_context_map() ||
788 map == heap->script_context_map());
792 bool Object::IsNativeContext() const {
793 return Object::IsHeapObject() &&
794 HeapObject::cast(this)->map() ==
795 HeapObject::cast(this)->GetHeap()->native_context_map();
799 bool Object::IsScriptContextTable() const {
800 if (!Object::IsHeapObject()) return false;
801 Map* map = HeapObject::cast(this)->map();
802 Heap* heap = map->GetHeap();
803 return map == heap->script_context_table_map();
807 bool Object::IsScopeInfo() const {
808 return Object::IsHeapObject() &&
809 HeapObject::cast(this)->map() ==
810 HeapObject::cast(this)->GetHeap()->scope_info_map();
814 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
817 template <> inline bool Is<JSFunction>(Object* obj) {
818 return obj->IsJSFunction();
822 TYPE_CHECKER(Code, CODE_TYPE)
823 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
824 TYPE_CHECKER(Cell, CELL_TYPE)
825 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
826 TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
827 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
828 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
829 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
830 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
831 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
832 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
835 bool Object::IsStringWrapper() const {
836 return IsJSValue() && JSValue::cast(this)->value()->IsString();
840 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
843 bool Object::IsBoolean() const {
844 return IsOddball() &&
845 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
849 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
850 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
851 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
852 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
855 bool Object::IsJSArrayBufferView() const {
856 return IsJSDataView() || IsJSTypedArray();
860 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
863 template <> inline bool Is<JSArray>(Object* obj) {
864 return obj->IsJSArray();
868 bool Object::IsHashTable() const {
869 return Object::IsHeapObject() &&
870 HeapObject::cast(this)->map() ==
871 HeapObject::cast(this)->GetHeap()->hash_table_map();
875 bool Object::IsWeakHashTable() const {
876 return IsHashTable();
880 bool Object::IsDictionary() const {
881 return IsHashTable() &&
882 this != HeapObject::cast(this)->GetHeap()->string_table();
886 bool Object::IsNameDictionary() const {
887 return IsDictionary();
891 bool Object::IsGlobalDictionary() const { return IsDictionary(); }
894 bool Object::IsSeededNumberDictionary() const {
895 return IsDictionary();
899 bool Object::IsUnseededNumberDictionary() const {
900 return IsDictionary();
904 bool Object::IsStringTable() const {
905 return IsHashTable();
909 bool Object::IsNormalizedMapCache() const {
910 return NormalizedMapCache::IsNormalizedMapCache(this);
914 int NormalizedMapCache::GetIndex(Handle<Map> map) {
915 return map->Hash() % NormalizedMapCache::kEntries;
919 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
920 if (!obj->IsFixedArray()) return false;
921 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
925 if (FLAG_verify_heap) {
926 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
927 NormalizedMapCacheVerify();
934 bool Object::IsCompilationCacheTable() const {
935 return IsHashTable();
939 bool Object::IsCodeCacheHashTable() const {
940 return IsHashTable();
944 bool Object::IsPolymorphicCodeCacheHashTable() const {
945 return IsHashTable();
949 bool Object::IsMapCache() const {
950 return IsHashTable();
954 bool Object::IsObjectHashTable() const {
955 return IsHashTable();
959 bool Object::IsOrderedHashTable() const {
960 return IsHeapObject() &&
961 HeapObject::cast(this)->map() ==
962 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
966 bool Object::IsOrderedHashSet() const {
967 return IsOrderedHashTable();
971 bool Object::IsOrderedHashMap() const {
972 return IsOrderedHashTable();
976 bool Object::IsPrimitive() const {
977 return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
981 bool Object::IsJSGlobalProxy() const {
982 bool result = IsHeapObject() &&
983 (HeapObject::cast(this)->map()->instance_type() ==
984 JS_GLOBAL_PROXY_TYPE);
986 HeapObject::cast(this)->map()->is_access_check_needed());
991 bool Object::IsGlobalObject() const {
992 if (!IsHeapObject()) return false;
993 return HeapObject::cast(this)->map()->IsGlobalObjectMap();
997 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
998 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
1001 bool Object::IsUndetectableObject() const {
1002 return IsHeapObject()
1003 && HeapObject::cast(this)->map()->is_undetectable();
1007 bool Object::IsAccessCheckNeeded() const {
1008 if (!IsHeapObject()) return false;
1009 if (IsJSGlobalProxy()) {
1010 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
1011 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
1012 return proxy->IsDetachedFrom(global);
1014 return HeapObject::cast(this)->map()->is_access_check_needed();
1018 bool Object::IsStruct() const {
1019 if (!IsHeapObject()) return false;
1020 switch (HeapObject::cast(this)->map()->instance_type()) {
1021 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1022 STRUCT_LIST(MAKE_STRUCT_CASE)
1023 #undef MAKE_STRUCT_CASE
1024 default: return false;
1029 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1030 bool Object::Is##Name() const { \
1031 return Object::IsHeapObject() \
1032 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1034 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1035 #undef MAKE_STRUCT_PREDICATE
1038 bool Object::IsUndefined() const {
1039 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1043 bool Object::IsNull() const {
1044 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1048 bool Object::IsTheHole() const {
1049 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1053 bool Object::IsException() const {
1054 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1058 bool Object::IsUninitialized() const {
1059 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1063 bool Object::IsTrue() const {
1064 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1068 bool Object::IsFalse() const {
1069 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1073 bool Object::IsArgumentsMarker() const {
1074 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1078 double Object::Number() {
1081 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1082 : reinterpret_cast<HeapNumber*>(this)->value();
1086 bool Object::IsNaN() const {
1087 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1091 bool Object::IsMinusZero() const {
1092 return this->IsHeapNumber() &&
1093 i::IsMinusZero(HeapNumber::cast(this)->value());
1097 Representation Object::OptimalRepresentation() {
1098 if (!FLAG_track_fields) return Representation::Tagged();
1100 return Representation::Smi();
1101 } else if (FLAG_track_double_fields && IsHeapNumber()) {
1102 return Representation::Double();
1103 } else if (FLAG_track_computed_fields && IsUninitialized()) {
1104 return Representation::None();
1105 } else if (FLAG_track_heap_object_fields) {
1106 DCHECK(IsHeapObject());
1107 return Representation::HeapObject();
1109 return Representation::Tagged();
1114 ElementsKind Object::OptimalElementsKind() {
1115 if (IsSmi()) return FAST_SMI_ELEMENTS;
1116 if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1117 return FAST_ELEMENTS;
1121 bool Object::FitsRepresentation(Representation representation) {
1122 if (FLAG_track_fields && representation.IsNone()) {
1124 } else if (FLAG_track_fields && representation.IsSmi()) {
1126 } else if (FLAG_track_double_fields && representation.IsDouble()) {
1127 return IsMutableHeapNumber() || IsNumber();
1128 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
1129 return IsHeapObject();
1136 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1137 Handle<Object> object) {
1139 isolate, object, handle(isolate->context()->native_context(), isolate));
1144 MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
1145 if (input->IsName()) return Handle<Name>::cast(input);
1146 return ToString(isolate, input);
1151 MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
1152 ToPrimitiveHint hint) {
1153 if (input->IsPrimitive()) return input;
1154 return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
1158 bool Object::HasSpecificClassOf(String* name) {
1159 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1163 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1165 LanguageMode language_mode) {
1166 LookupIterator it(object, name);
1167 return GetProperty(&it, language_mode);
1171 MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
1173 LanguageMode language_mode) {
1174 LookupIterator it(isolate, object, index);
1175 return GetProperty(&it, language_mode);
1179 MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
1180 uint32_t index, Handle<Object> value,
1181 LanguageMode language_mode) {
1182 LookupIterator it(isolate, object, index);
1183 return SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED);
1187 Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
1188 Isolate* isolate, Handle<Object> receiver) {
1189 PrototypeIterator iter(isolate, receiver);
1190 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
1191 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
1192 return PrototypeIterator::GetCurrent(iter);
1196 return PrototypeIterator::GetCurrent(iter);
1200 MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
1202 LanguageMode language_mode) {
1203 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1204 return GetProperty(object, str, language_mode);
1208 #define FIELD_ADDR(p, offset) \
1209 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1211 #define FIELD_ADDR_CONST(p, offset) \
1212 (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
1214 #define READ_FIELD(p, offset) \
1215 (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
1217 #define ACQUIRE_READ_FIELD(p, offset) \
1218 reinterpret_cast<Object*>(base::Acquire_Load( \
1219 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1221 #define NOBARRIER_READ_FIELD(p, offset) \
1222 reinterpret_cast<Object*>(base::NoBarrier_Load( \
1223 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1225 #define WRITE_FIELD(p, offset, value) \
1226 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1228 #define RELEASE_WRITE_FIELD(p, offset, value) \
1229 base::Release_Store( \
1230 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1231 reinterpret_cast<base::AtomicWord>(value));
1233 #define NOBARRIER_WRITE_FIELD(p, offset, value) \
1234 base::NoBarrier_Store( \
1235 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1236 reinterpret_cast<base::AtomicWord>(value));
1238 #define WRITE_BARRIER(heap, object, offset, value) \
1239 heap->incremental_marking()->RecordWrite( \
1240 object, HeapObject::RawField(object, offset), value); \
1241 if (heap->InNewSpace(value)) { \
1242 heap->RecordWrite(object->address(), offset); \
1245 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1246 if (mode != SKIP_WRITE_BARRIER) { \
1247 if (mode == UPDATE_WRITE_BARRIER) { \
1248 heap->incremental_marking()->RecordWrite( \
1249 object, HeapObject::RawField(object, offset), value); \
1251 if (heap->InNewSpace(value)) { \
1252 heap->RecordWrite(object->address(), offset); \
1256 #define READ_DOUBLE_FIELD(p, offset) \
1257 ReadDoubleValue(FIELD_ADDR_CONST(p, offset))
1259 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1260 WriteDoubleValue(FIELD_ADDR(p, offset), value)
1262 #define READ_INT_FIELD(p, offset) \
1263 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1265 #define WRITE_INT_FIELD(p, offset, value) \
1266 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1268 #define READ_INTPTR_FIELD(p, offset) \
1269 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1271 #define WRITE_INTPTR_FIELD(p, offset, value) \
1272 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1274 #define READ_UINT8_FIELD(p, offset) \
1275 (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))
1277 #define WRITE_UINT8_FIELD(p, offset, value) \
1278 (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)
1280 #define READ_INT8_FIELD(p, offset) \
1281 (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))
1283 #define WRITE_INT8_FIELD(p, offset, value) \
1284 (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)
1286 #define READ_UINT16_FIELD(p, offset) \
1287 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1289 #define WRITE_UINT16_FIELD(p, offset, value) \
1290 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1292 #define READ_INT16_FIELD(p, offset) \
1293 (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))
1295 #define WRITE_INT16_FIELD(p, offset, value) \
1296 (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)
1298 #define READ_UINT32_FIELD(p, offset) \
1299 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1301 #define WRITE_UINT32_FIELD(p, offset, value) \
1302 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1304 #define READ_INT32_FIELD(p, offset) \
1305 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1307 #define WRITE_INT32_FIELD(p, offset, value) \
1308 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1310 #define READ_FLOAT_FIELD(p, offset) \
1311 (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))
1313 #define WRITE_FLOAT_FIELD(p, offset, value) \
1314 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
1316 #define READ_UINT64_FIELD(p, offset) \
1317 (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
1319 #define WRITE_UINT64_FIELD(p, offset, value) \
1320 (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
1322 #define READ_INT64_FIELD(p, offset) \
1323 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1325 #define WRITE_INT64_FIELD(p, offset, value) \
1326 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1328 #define READ_BYTE_FIELD(p, offset) \
1329 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1331 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1332 static_cast<byte>(base::NoBarrier_Load( \
1333 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1335 #define WRITE_BYTE_FIELD(p, offset, value) \
1336 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1338 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1339 base::NoBarrier_Store( \
1340 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1341 static_cast<base::Atomic8>(value));
1343 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1344 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1348 MapWord MapWord::FromMap(const Map* map) {
1349 return MapWord(reinterpret_cast<uintptr_t>(map));
1353 Map* MapWord::ToMap() {
1354 return reinterpret_cast<Map*>(value_);
1358 bool MapWord::IsForwardingAddress() {
1359 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1363 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1364 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1365 return MapWord(reinterpret_cast<uintptr_t>(raw));
1369 HeapObject* MapWord::ToForwardingAddress() {
1370 DCHECK(IsForwardingAddress());
1371 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1376 void HeapObject::VerifyObjectField(int offset) {
1377 VerifyPointer(READ_FIELD(this, offset));
1380 void HeapObject::VerifySmiField(int offset) {
1381 CHECK(READ_FIELD(this, offset)->IsSmi());
1386 Heap* HeapObject::GetHeap() const {
1388 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1389 SLOW_DCHECK(heap != NULL);
1394 Isolate* HeapObject::GetIsolate() const {
1395 return GetHeap()->isolate();
1399 Map* HeapObject::map() const {
1401 // Clear mark potentially added by PathTracer.
1402 uintptr_t raw_value =
1403 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1404 return MapWord::FromRawValue(raw_value).ToMap();
1406 return map_word().ToMap();
1411 void HeapObject::set_map(Map* value) {
1412 set_map_word(MapWord::FromMap(value));
1413 if (value != NULL) {
1414 // TODO(1600) We are passing NULL as a slot because maps can never be on
1415 // evacuation candidate.
1416 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1421 Map* HeapObject::synchronized_map() {
1422 return synchronized_map_word().ToMap();
1426 void HeapObject::synchronized_set_map(Map* value) {
1427 synchronized_set_map_word(MapWord::FromMap(value));
1428 if (value != NULL) {
1429 // TODO(1600) We are passing NULL as a slot because maps can never be on
1430 // evacuation candidate.
1431 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1436 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1437 synchronized_set_map_word(MapWord::FromMap(value));
1441 // Unsafe accessor omitting write barrier.
1442 void HeapObject::set_map_no_write_barrier(Map* value) {
1443 set_map_word(MapWord::FromMap(value));
1447 MapWord HeapObject::map_word() const {
1449 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1453 void HeapObject::set_map_word(MapWord map_word) {
1454 NOBARRIER_WRITE_FIELD(
1455 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1459 MapWord HeapObject::synchronized_map_word() const {
1461 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1465 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1466 RELEASE_WRITE_FIELD(
1467 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1471 int HeapObject::Size() {
1472 return SizeFromMap(map());
1476 HeapObjectContents HeapObject::ContentType() {
1477 InstanceType type = map()->instance_type();
1478 if (type <= LAST_NAME_TYPE) {
1479 if (type == SYMBOL_TYPE) {
1480 return HeapObjectContents::kTaggedValues;
1482 DCHECK(type < FIRST_NONSTRING_TYPE);
1483 // There are four string representations: sequential strings, external
1484 // strings, cons strings, and sliced strings.
1485 // Only the former two contain raw values and no heap pointers (besides the
1487 if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
1488 return HeapObjectContents::kRawValues;
1490 return HeapObjectContents::kTaggedValues;
1492 // TODO(jochen): Enable eventually.
1493 } else if (type == JS_FUNCTION_TYPE) {
1494 return HeapObjectContents::kMixedValues;
1496 } else if (type == BYTECODE_ARRAY_TYPE) {
1497 return HeapObjectContents::kMixedValues;
1498 } else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
1499 type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
1500 return HeapObjectContents::kMixedValues;
1501 } else if (type <= LAST_DATA_TYPE) {
1502 // TODO(jochen): Why do we claim that Code and Map contain only raw values?
1503 return HeapObjectContents::kRawValues;
1505 if (FLAG_unbox_double_fields) {
1506 LayoutDescriptorHelper helper(map());
1507 if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
1509 return HeapObjectContents::kTaggedValues;
1514 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1515 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1516 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1520 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1521 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1525 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1526 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1530 double HeapNumber::value() const {
1531 return READ_DOUBLE_FIELD(this, kValueOffset);
1535 void HeapNumber::set_value(double value) {
1536 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1540 int HeapNumber::get_exponent() {
1541 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1542 kExponentShift) - kExponentBias;
1546 int HeapNumber::get_sign() {
1547 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1551 bool Simd128Value::Equals(Simd128Value* that) {
1552 #define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
1553 if (this->Is##Type()) { \
1554 if (!that->Is##Type()) return false; \
1555 return Type::cast(this)->Equals(Type::cast(that)); \
1557 SIMD128_TYPES(SIMD128_VALUE)
1558 #undef SIMD128_VALUE
1563 #define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
1564 bool Type::Equals(Type* that) { \
1565 for (int lane = 0; lane < lane_count; ++lane) { \
1566 if (this->get_lane(lane) != that->get_lane(lane)) return false; \
1570 SIMD128_TYPES(SIMD128_VALUE_EQUALS)
1571 #undef SIMD128_VALUE_EQUALS
1574 #if defined(V8_TARGET_LITTLE_ENDIAN)
1575 #define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1577 READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
1578 #elif defined(V8_TARGET_BIG_ENDIAN)
1579 #define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1580 lane_type value = READ_##field_type##_FIELD( \
1581 this, kValueOffset + (lane_count - lane - 1) * field_size);
1583 #error Unknown byte ordering
1586 #if defined(V8_TARGET_LITTLE_ENDIAN)
1587 #define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
1588 WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
1589 #elif defined(V8_TARGET_BIG_ENDIAN)
1590 #define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
1591 WRITE_##field_type##_FIELD( \
1592 this, kValueOffset + (lane_count - lane - 1) * field_size, value);
1594 #error Unknown byte ordering
1597 #define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
1599 lane_type type::get_lane(int lane) const { \
1600 DCHECK(lane < lane_count && lane >= 0); \
1601 SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1605 void type::set_lane(int lane, lane_type value) { \
1606 DCHECK(lane < lane_count && lane >= 0); \
1607 SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
1610 SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
1611 SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
1612 SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
1613 SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
1614 SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
1615 SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
1616 SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
1617 #undef SIMD128_NUMERIC_LANE_FNS
1620 #define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
1622 bool type::get_lane(int lane) const { \
1623 DCHECK(lane < lane_count && lane >= 0); \
1624 SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1625 DCHECK(value == 0 || value == -1); \
1626 return value != 0; \
1629 void type::set_lane(int lane, bool value) { \
1630 DCHECK(lane < lane_count && lane >= 0); \
1631 int32_t int_val = value ? -1 : 0; \
1632 SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val) \
1635 SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
1636 SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
1637 SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
1638 #undef SIMD128_BOOLEAN_LANE_FNS
1640 #undef SIMD128_READ_LANE
1641 #undef SIMD128_WRITE_LANE
1644 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1647 Object** FixedArray::GetFirstElementAddress() {
1648 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1652 bool FixedArray::ContainsOnlySmisOrHoles() {
1653 Object* the_hole = GetHeap()->the_hole_value();
1654 Object** current = GetFirstElementAddress();
1655 for (int i = 0; i < length(); ++i) {
1656 Object* candidate = *current++;
1657 if (!candidate->IsSmi() && candidate != the_hole) return false;
1663 FixedArrayBase* JSObject::elements() const {
1664 Object* array = READ_FIELD(this, kElementsOffset);
1665 return static_cast<FixedArrayBase*>(array);
1669 void AllocationSite::Initialize() {
1670 set_transition_info(Smi::FromInt(0));
1671 SetElementsKind(GetInitialFastElementsKind());
1672 set_nested_site(Smi::FromInt(0));
1673 set_pretenure_data(Smi::FromInt(0));
1674 set_pretenure_create_count(Smi::FromInt(0));
1675 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1676 SKIP_WRITE_BARRIER);
1680 bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }
1683 bool AllocationSite::IsMaybeTenure() {
1684 return pretenure_decision() == kMaybeTenure;
1688 bool AllocationSite::PretenuringDecisionMade() {
1689 return pretenure_decision() != kUndecided;
1693 void AllocationSite::MarkZombie() {
1694 DCHECK(!IsZombie());
1696 set_pretenure_decision(kZombie);
1700 ElementsKind AllocationSite::GetElementsKind() {
1701 DCHECK(!SitePointsToLiteral());
1702 int value = Smi::cast(transition_info())->value();
1703 return ElementsKindBits::decode(value);
1707 void AllocationSite::SetElementsKind(ElementsKind kind) {
1708 int value = Smi::cast(transition_info())->value();
1709 set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
1710 SKIP_WRITE_BARRIER);
1714 bool AllocationSite::CanInlineCall() {
1715 int value = Smi::cast(transition_info())->value();
1716 return DoNotInlineBit::decode(value) == 0;
1720 void AllocationSite::SetDoNotInlineCall() {
1721 int value = Smi::cast(transition_info())->value();
1722 set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
1723 SKIP_WRITE_BARRIER);
1727 bool AllocationSite::SitePointsToLiteral() {
1728 // If transition_info is a smi, then it represents an ElementsKind
1729 // for a constructed array. Otherwise, it must be a boilerplate
1730 // for an object or array literal.
1731 return transition_info()->IsJSArray() || transition_info()->IsJSObject();
1735 // Heuristic: We only need to create allocation site info if the boilerplate
1736 // elements kind is the initial elements kind.
1737 AllocationSiteMode AllocationSite::GetMode(
1738 ElementsKind boilerplate_elements_kind) {
1739 if (FLAG_pretenuring_call_new ||
1740 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1741 return TRACK_ALLOCATION_SITE;
1744 return DONT_TRACK_ALLOCATION_SITE;
1748 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1750 if (FLAG_pretenuring_call_new ||
1751 (IsFastSmiElementsKind(from) &&
1752 IsMoreGeneralElementsKindTransition(from, to))) {
1753 return TRACK_ALLOCATION_SITE;
1756 return DONT_TRACK_ALLOCATION_SITE;
1760 inline bool AllocationSite::CanTrack(InstanceType type) {
1761 if (FLAG_allocation_site_pretenuring) {
1762 return type == JS_ARRAY_TYPE ||
1763 type == JS_OBJECT_TYPE ||
1764 type < FIRST_NONSTRING_TYPE;
1766 return type == JS_ARRAY_TYPE;
1770 AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
1771 int value = pretenure_data()->value();
1772 return PretenureDecisionBits::decode(value);
1776 void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
1777 int value = pretenure_data()->value();
1779 Smi::FromInt(PretenureDecisionBits::update(value, decision)),
1780 SKIP_WRITE_BARRIER);
1784 bool AllocationSite::deopt_dependent_code() {
1785 int value = pretenure_data()->value();
1786 return DeoptDependentCodeBit::decode(value);
1790 void AllocationSite::set_deopt_dependent_code(bool deopt) {
1791 int value = pretenure_data()->value();
1792 set_pretenure_data(Smi::FromInt(DeoptDependentCodeBit::update(value, deopt)),
1793 SKIP_WRITE_BARRIER);
1797 int AllocationSite::memento_found_count() {
1798 int value = pretenure_data()->value();
1799 return MementoFoundCountBits::decode(value);
1803 inline void AllocationSite::set_memento_found_count(int count) {
1804 int value = pretenure_data()->value();
1805 // Verify that we can count more mementos than we can possibly find in one
1806 // new space collection.
1807 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1808 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1809 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1810 DCHECK(count < MementoFoundCountBits::kMax);
1812 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1813 SKIP_WRITE_BARRIER);
1817 int AllocationSite::memento_create_count() {
1818 return pretenure_create_count()->value();
1822 void AllocationSite::set_memento_create_count(int count) {
1823 set_pretenure_create_count(Smi::FromInt(count), SKIP_WRITE_BARRIER);
1827 inline bool AllocationSite::IncrementMementoFoundCount() {
1828 if (IsZombie()) return false;
1830 int value = memento_found_count();
1831 set_memento_found_count(value + 1);
1832 return memento_found_count() == kPretenureMinimumCreated;
1836 inline void AllocationSite::IncrementMementoCreateCount() {
1837 DCHECK(FLAG_allocation_site_pretenuring);
1838 int value = memento_create_count();
1839 set_memento_create_count(value + 1);
1843 inline bool AllocationSite::MakePretenureDecision(
1844 PretenureDecision current_decision,
1846 bool maximum_size_scavenge) {
1847 // Here we just allow state transitions from undecided or maybe tenure
1848 // to don't tenure, maybe tenure, or tenure.
1849 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1850 if (ratio >= kPretenureRatio) {
1851 // We just transition into tenure state when the semi-space was at
1852 // maximum capacity.
1853 if (maximum_size_scavenge) {
1854 set_deopt_dependent_code(true);
1855 set_pretenure_decision(kTenure);
1856 // Currently we just need to deopt when we make a state transition to
1860 set_pretenure_decision(kMaybeTenure);
1862 set_pretenure_decision(kDontTenure);
1869 inline bool AllocationSite::DigestPretenuringFeedback(
1870 bool maximum_size_scavenge) {
1872 int create_count = memento_create_count();
1873 int found_count = memento_found_count();
1874 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1876 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1877 static_cast<double>(found_count) / create_count : 0.0;
1878 PretenureDecision current_decision = pretenure_decision();
1880 if (minimum_mementos_created) {
1881 deopt = MakePretenureDecision(
1882 current_decision, ratio, maximum_size_scavenge);
1885 if (FLAG_trace_pretenuring_statistics) {
1887 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1888 static_cast<void*>(this), create_count, found_count, ratio,
1889 PretenureDecisionName(current_decision),
1890 PretenureDecisionName(pretenure_decision()));
1893 // Clear feedback calculation fields until the next gc.
1894 set_memento_found_count(0);
1895 set_memento_create_count(0);
1900 bool AllocationMemento::IsValid() {
1901 return allocation_site()->IsAllocationSite() &&
1902 !AllocationSite::cast(allocation_site())->IsZombie();
1906 AllocationSite* AllocationMemento::GetAllocationSite() {
1908 return AllocationSite::cast(allocation_site());
1912 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1913 JSObject::ValidateElements(object);
1914 ElementsKind elements_kind = object->map()->elements_kind();
1915 if (!IsFastObjectElementsKind(elements_kind)) {
1916 if (IsFastHoleyElementsKind(elements_kind)) {
1917 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1919 TransitionElementsKind(object, FAST_ELEMENTS);
1925 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1928 EnsureElementsMode mode) {
1929 ElementsKind current_kind = object->map()->elements_kind();
1930 ElementsKind target_kind = current_kind;
1932 DisallowHeapAllocation no_allocation;
1933 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1934 bool is_holey = IsFastHoleyElementsKind(current_kind);
1935 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1936 Heap* heap = object->GetHeap();
1937 Object* the_hole = heap->the_hole_value();
1938 for (uint32_t i = 0; i < count; ++i) {
1939 Object* current = *objects++;
1940 if (current == the_hole) {
1942 target_kind = GetHoleyElementsKind(target_kind);
1943 } else if (!current->IsSmi()) {
1944 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1945 if (IsFastSmiElementsKind(target_kind)) {
1947 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1949 target_kind = FAST_DOUBLE_ELEMENTS;
1952 } else if (is_holey) {
1953 target_kind = FAST_HOLEY_ELEMENTS;
1956 target_kind = FAST_ELEMENTS;
1961 if (target_kind != current_kind) {
1962 TransitionElementsKind(object, target_kind);
1967 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1968 Handle<FixedArrayBase> elements,
1970 EnsureElementsMode mode) {
1971 Heap* heap = object->GetHeap();
1972 if (elements->map() != heap->fixed_double_array_map()) {
1973 DCHECK(elements->map() == heap->fixed_array_map() ||
1974 elements->map() == heap->fixed_cow_array_map());
1975 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1976 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1979 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1980 EnsureCanContainElements(object, objects, length, mode);
1984 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1985 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1986 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1987 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1988 Handle<FixedDoubleArray> double_array =
1989 Handle<FixedDoubleArray>::cast(elements);
1990 for (uint32_t i = 0; i < length; ++i) {
1991 if (double_array->is_the_hole(i)) {
1992 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1996 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
2001 void JSObject::SetMapAndElements(Handle<JSObject> object,
2002 Handle<Map> new_map,
2003 Handle<FixedArrayBase> value) {
2004 JSObject::MigrateToMap(object, new_map);
2005 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
2006 (*value == object->GetHeap()->empty_fixed_array())) ==
2007 (value->map() == object->GetHeap()->fixed_array_map() ||
2008 value->map() == object->GetHeap()->fixed_cow_array_map()));
2009 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
2010 (object->map()->has_fast_double_elements() ==
2011 value->IsFixedDoubleArray()));
2012 object->set_elements(*value);
2016 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
2017 WRITE_FIELD(this, kElementsOffset, value);
2018 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
2022 void JSObject::initialize_properties() {
2023 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2024 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
2028 void JSObject::initialize_elements() {
2029 FixedArrayBase* elements = map()->GetInitialElements();
2030 WRITE_FIELD(this, kElementsOffset, elements);
2034 ACCESSORS(Oddball, to_string, String, kToStringOffset)
2035 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
2036 ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
2039 byte Oddball::kind() const {
2040 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
2044 void Oddball::set_kind(byte value) {
2045 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
2049 ACCESSORS(Cell, value, Object, kValueOffset)
2050 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
2051 ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
2052 ACCESSORS(PropertyCell, value, Object, kValueOffset)
2055 PropertyDetails PropertyCell::property_details() {
2056 return PropertyDetails(Smi::cast(property_details_raw()));
2060 void PropertyCell::set_property_details(PropertyDetails details) {
2061 set_property_details_raw(details.AsSmi());
2065 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
2068 void WeakCell::clear() {
2069 DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
2070 WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
2074 void WeakCell::initialize(HeapObject* val) {
2075 WRITE_FIELD(this, kValueOffset, val);
2076 Heap* heap = GetHeap();
2077 // We just have to execute the generational barrier here because we never
2078 // mark through a weak cell and collect evacuation candidates when we process
2080 if (heap->InNewSpace(val)) {
2081 heap->RecordWrite(address(), kValueOffset);
2086 bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
2089 Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
2092 void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
2093 WRITE_FIELD(this, kNextOffset, val);
2094 if (mode == UPDATE_WRITE_BARRIER) {
2095 WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
2100 void WeakCell::clear_next(Heap* heap) {
2101 set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
2105 bool WeakCell::next_cleared() { return next()->IsTheHole(); }
2108 int JSObject::GetHeaderSize() {
2109 InstanceType type = map()->instance_type();
2110 // Check for the most common kind of JavaScript object before
2111 // falling into the generic switch. This speeds up the internal
2112 // field operations considerably on average.
2113 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2115 case JS_GENERATOR_OBJECT_TYPE:
2116 return JSGeneratorObject::kSize;
2117 case JS_MODULE_TYPE:
2118 return JSModule::kSize;
2119 case JS_GLOBAL_PROXY_TYPE:
2120 return JSGlobalProxy::kSize;
2121 case JS_GLOBAL_OBJECT_TYPE:
2122 return JSGlobalObject::kSize;
2123 case JS_BUILTINS_OBJECT_TYPE:
2124 return JSBuiltinsObject::kSize;
2125 case JS_FUNCTION_TYPE:
2126 return JSFunction::kSize;
2128 return JSValue::kSize;
2130 return JSDate::kSize;
2132 return JSArray::kSize;
2133 case JS_ARRAY_BUFFER_TYPE:
2134 return JSArrayBuffer::kSize;
2135 case JS_TYPED_ARRAY_TYPE:
2136 return JSTypedArray::kSize;
2137 case JS_DATA_VIEW_TYPE:
2138 return JSDataView::kSize;
2140 return JSSet::kSize;
2142 return JSMap::kSize;
2143 case JS_SET_ITERATOR_TYPE:
2144 return JSSetIterator::kSize;
2145 case JS_MAP_ITERATOR_TYPE:
2146 return JSMapIterator::kSize;
2147 case JS_WEAK_MAP_TYPE:
2148 return JSWeakMap::kSize;
2149 case JS_WEAK_SET_TYPE:
2150 return JSWeakSet::kSize;
2151 case JS_REGEXP_TYPE:
2152 return JSRegExp::kSize;
2153 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2154 return JSObject::kHeaderSize;
2155 case JS_MESSAGE_OBJECT_TYPE:
2156 return JSMessageObject::kSize;
2164 int JSObject::GetInternalFieldCount() {
2165 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2166 // Make sure to adjust for the number of in-object properties. These
2167 // properties do contribute to the size, but are not internal fields.
2168 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2169 map()->GetInObjectProperties();
2173 int JSObject::GetInternalFieldOffset(int index) {
2174 DCHECK(index < GetInternalFieldCount() && index >= 0);
2175 return GetHeaderSize() + (kPointerSize * index);
2179 Object* JSObject::GetInternalField(int index) {
2180 DCHECK(index < GetInternalFieldCount() && index >= 0);
2181 // Internal objects do follow immediately after the header, whereas in-object
2182 // properties are at the end of the object. Therefore there is no need
2183 // to adjust the index here.
2184 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2188 void JSObject::SetInternalField(int index, Object* value) {
2189 DCHECK(index < GetInternalFieldCount() && index >= 0);
2190 // Internal objects do follow immediately after the header, whereas in-object
2191 // properties are at the end of the object. Therefore there is no need
2192 // to adjust the index here.
2193 int offset = GetHeaderSize() + (kPointerSize * index);
2194 WRITE_FIELD(this, offset, value);
2195 WRITE_BARRIER(GetHeap(), this, offset, value);
2199 void JSObject::SetInternalField(int index, Smi* value) {
2200 DCHECK(index < GetInternalFieldCount() && index >= 0);
2201 // Internal objects do follow immediately after the header, whereas in-object
2202 // properties are at the end of the object. Therefore there is no need
2203 // to adjust the index here.
2204 int offset = GetHeaderSize() + (kPointerSize * index);
2205 WRITE_FIELD(this, offset, value);
2209 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2210 if (!FLAG_unbox_double_fields) return false;
2211 return map()->IsUnboxedDoubleField(index);
2215 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2216 if (!FLAG_unbox_double_fields) return false;
2217 if (index.is_hidden_field() || !index.is_inobject()) return false;
2218 return !layout_descriptor()->IsTagged(index.property_index());
2222 // Access fast-case object properties at index. The use of these routines
2223 // is needed to correctly distinguish between properties stored in-object and
2224 // properties stored in the properties array.
2225 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2226 DCHECK(!IsUnboxedDoubleField(index));
2227 if (index.is_inobject()) {
2228 return READ_FIELD(this, index.offset());
2230 return properties()->get(index.outobject_array_index());
2235 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2236 DCHECK(IsUnboxedDoubleField(index));
2237 return READ_DOUBLE_FIELD(this, index.offset());
2241 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2242 if (index.is_inobject()) {
2243 int offset = index.offset();
2244 WRITE_FIELD(this, offset, value);
2245 WRITE_BARRIER(GetHeap(), this, offset, value);
2247 properties()->set(index.outobject_array_index(), value);
2252 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
2253 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2257 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2258 if (IsUnboxedDoubleField(index)) {
2259 DCHECK(value->IsMutableHeapNumber());
2260 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
2262 RawFastPropertyAtPut(index, value);
2267 void JSObject::WriteToField(int descriptor, Object* value) {
2268 DisallowHeapAllocation no_gc;
2270 DescriptorArray* desc = map()->instance_descriptors();
2271 PropertyDetails details = desc->GetDetails(descriptor);
2273 DCHECK(details.type() == DATA);
2275 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
2276 if (details.representation().IsDouble()) {
2277 // Nothing more to be done.
2278 if (value->IsUninitialized()) return;
2279 if (IsUnboxedDoubleField(index)) {
2280 RawFastDoublePropertyAtPut(index, value->Number());
2282 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
2283 DCHECK(box->IsMutableHeapNumber());
2284 box->set_value(value->Number());
2287 RawFastPropertyAtPut(index, value);
2292 int JSObject::GetInObjectPropertyOffset(int index) {
2293 return map()->GetInObjectPropertyOffset(index);
2297 Object* JSObject::InObjectPropertyAt(int index) {
2298 int offset = GetInObjectPropertyOffset(index);
2299 return READ_FIELD(this, offset);
2303 Object* JSObject::InObjectPropertyAtPut(int index,
2305 WriteBarrierMode mode) {
2306 // Adjust for the number of properties stored in the object.
2307 int offset = GetInObjectPropertyOffset(index);
2308 WRITE_FIELD(this, offset, value);
2309 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2315 void JSObject::InitializeBody(Map* map,
2316 Object* pre_allocated_value,
2317 Object* filler_value) {
2318 DCHECK(!filler_value->IsHeapObject() ||
2319 !GetHeap()->InNewSpace(filler_value));
2320 DCHECK(!pre_allocated_value->IsHeapObject() ||
2321 !GetHeap()->InNewSpace(pre_allocated_value));
2322 int size = map->instance_size();
2323 int offset = kHeaderSize;
2324 if (filler_value != pre_allocated_value) {
2326 map->GetInObjectProperties() - map->unused_property_fields();
2327 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2328 for (int i = 0; i < pre_allocated; i++) {
2329 WRITE_FIELD(this, offset, pre_allocated_value);
2330 offset += kPointerSize;
2333 while (offset < size) {
2334 WRITE_FIELD(this, offset, filler_value);
2335 offset += kPointerSize;
2340 bool JSObject::HasFastProperties() {
2341 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2342 return !properties()->IsDictionary();
2346 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2347 if (unused_property_fields() != 0) return false;
2348 if (is_prototype_map()) return false;
2349 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2350 int limit = Max(minimum, GetInObjectProperties());
2351 int external = NumberOfFields() - GetInObjectProperties();
2352 return external > limit;
2356 void Struct::InitializeBody(int object_size) {
2357 Object* value = GetHeap()->undefined_value();
2358 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2359 WRITE_FIELD(this, offset, value);
2364 bool Object::ToArrayLength(uint32_t* index) {
2366 int value = Smi::cast(this)->value();
2367 if (value < 0) return false;
2371 if (IsHeapNumber()) {
2372 double value = HeapNumber::cast(this)->value();
2373 uint32_t uint_value = static_cast<uint32_t>(value);
2374 if (value == static_cast<double>(uint_value)) {
2375 *index = uint_value;
2383 bool Object::ToArrayIndex(uint32_t* index) {
2384 return ToArrayLength(index) && *index != kMaxUInt32;
2388 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2389 if (!this->IsJSValue()) return false;
2391 JSValue* js_value = JSValue::cast(this);
2392 if (!js_value->value()->IsString()) return false;
2394 String* str = String::cast(js_value->value());
2395 if (index >= static_cast<uint32_t>(str->length())) return false;
2401 void Object::VerifyApiCallResultType() {
2403 if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
2404 IsHeapNumber() || IsSimd128Value() || IsUndefined() || IsTrue() ||
2405 IsFalse() || IsNull())) {
2406 FATAL("API call returned invalid object");
2412 Object* FixedArray::get(int index) const {
2413 SLOW_DCHECK(index >= 0 && index < this->length());
2414 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2418 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2419 return handle(array->get(index), array->GetIsolate());
2423 bool FixedArray::is_the_hole(int index) {
2424 return get(index) == GetHeap()->the_hole_value();
2428 void FixedArray::set(int index, Smi* value) {
2429 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2430 DCHECK(index >= 0 && index < this->length());
2431 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2432 int offset = kHeaderSize + index * kPointerSize;
2433 WRITE_FIELD(this, offset, value);
2437 void FixedArray::set(int index, Object* value) {
2438 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2439 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2440 DCHECK(index >= 0 && index < this->length());
2441 int offset = kHeaderSize + index * kPointerSize;
2442 WRITE_FIELD(this, offset, value);
2443 WRITE_BARRIER(GetHeap(), this, offset, value);
2447 double FixedDoubleArray::get_scalar(int index) {
2448 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2449 map() != GetHeap()->fixed_array_map());
2450 DCHECK(index >= 0 && index < this->length());
2451 DCHECK(!is_the_hole(index));
2452 return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2456 uint64_t FixedDoubleArray::get_representation(int index) {
2457 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2458 map() != GetHeap()->fixed_array_map());
2459 DCHECK(index >= 0 && index < this->length());
2460 int offset = kHeaderSize + index * kDoubleSize;
2461 return READ_UINT64_FIELD(this, offset);
2465 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2467 if (array->is_the_hole(index)) {
2468 return array->GetIsolate()->factory()->the_hole_value();
2470 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2475 void FixedDoubleArray::set(int index, double value) {
2476 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2477 map() != GetHeap()->fixed_array_map());
2478 int offset = kHeaderSize + index * kDoubleSize;
2479 if (std::isnan(value)) {
2480 WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
2482 WRITE_DOUBLE_FIELD(this, offset, value);
2484 DCHECK(!is_the_hole(index));
2488 void FixedDoubleArray::set_the_hole(int index) {
2489 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2490 map() != GetHeap()->fixed_array_map());
2491 int offset = kHeaderSize + index * kDoubleSize;
2492 WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
2496 bool FixedDoubleArray::is_the_hole(int index) {
2497 return get_representation(index) == kHoleNanInt64;
2501 double* FixedDoubleArray::data_start() {
2502 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2506 void FixedDoubleArray::FillWithHoles(int from, int to) {
2507 for (int i = from; i < to; i++) {
2513 Object* WeakFixedArray::Get(int index) const {
2514 Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
2515 if (raw->IsSmi()) return raw;
2516 DCHECK(raw->IsWeakCell());
2517 return WeakCell::cast(raw)->value();
2521 bool WeakFixedArray::IsEmptySlot(int index) const {
2522 DCHECK(index < Length());
2523 return Get(index)->IsSmi();
2527 void WeakFixedArray::Clear(int index) {
2528 FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
2532 int WeakFixedArray::Length() const {
2533 return FixedArray::cast(this)->length() - kFirstIndex;
2537 int WeakFixedArray::last_used_index() const {
2538 return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
2542 void WeakFixedArray::set_last_used_index(int index) {
2543 FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
2548 T* WeakFixedArray::Iterator::Next() {
2549 if (list_ != NULL) {
2550 // Assert that list did not change during iteration.
2551 DCHECK_EQ(last_used_index_, list_->last_used_index());
2552 while (index_ < list_->Length()) {
2553 Object* item = list_->Get(index_++);
2554 if (item != Empty()) return T::cast(item);
2562 int ArrayList::Length() {
2563 if (FixedArray::cast(this)->length() == 0) return 0;
2564 return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
2568 void ArrayList::SetLength(int length) {
2569 return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2573 Object* ArrayList::Get(int index) {
2574 return FixedArray::cast(this)->get(kFirstIndex + index);
2578 Object** ArrayList::Slot(int index) {
2579 return data_start() + kFirstIndex + index;
2583 void ArrayList::Set(int index, Object* obj) {
2584 FixedArray::cast(this)->set(kFirstIndex + index, obj);
2588 void ArrayList::Clear(int index, Object* undefined) {
2589 DCHECK(undefined->IsUndefined());
2590 FixedArray::cast(this)
2591 ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
2595 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2596 const DisallowHeapAllocation& promise) {
2597 Heap* heap = GetHeap();
2598 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2599 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2600 return UPDATE_WRITE_BARRIER;
2604 AllocationAlignment HeapObject::RequiredAlignment() {
2605 #ifdef V8_HOST_ARCH_32_BIT
2606 if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
2607 FixedArrayBase::cast(this)->length() != 0) {
2608 return kDoubleAligned;
2610 if (IsHeapNumber()) return kDoubleUnaligned;
2611 if (IsSimd128Value()) return kSimd128Unaligned;
2612 #endif // V8_HOST_ARCH_32_BIT
2613 return kWordAligned;
2617 void FixedArray::set(int index,
2619 WriteBarrierMode mode) {
2620 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2621 DCHECK(index >= 0 && index < this->length());
2622 int offset = kHeaderSize + index * kPointerSize;
2623 WRITE_FIELD(this, offset, value);
2624 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2628 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2631 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2632 DCHECK(index >= 0 && index < array->length());
2633 int offset = kHeaderSize + index * kPointerSize;
2634 WRITE_FIELD(array, offset, value);
2635 Heap* heap = array->GetHeap();
2636 if (heap->InNewSpace(value)) {
2637 heap->RecordWrite(array->address(), offset);
2642 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2645 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2646 DCHECK(index >= 0 && index < array->length());
2647 DCHECK(!array->GetHeap()->InNewSpace(value));
2648 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2652 void FixedArray::set_undefined(int index) {
2653 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2654 DCHECK(index >= 0 && index < this->length());
2655 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2657 kHeaderSize + index * kPointerSize,
2658 GetHeap()->undefined_value());
2662 void FixedArray::set_null(int index) {
2663 DCHECK(index >= 0 && index < this->length());
2664 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2666 kHeaderSize + index * kPointerSize,
2667 GetHeap()->null_value());
2671 void FixedArray::set_the_hole(int index) {
2672 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2673 DCHECK(index >= 0 && index < this->length());
2674 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2676 kHeaderSize + index * kPointerSize,
2677 GetHeap()->the_hole_value());
2681 void FixedArray::FillWithHoles(int from, int to) {
2682 for (int i = from; i < to; i++) {
2688 Object** FixedArray::data_start() {
2689 return HeapObject::RawField(this, kHeaderSize);
2693 Object** FixedArray::RawFieldOfElementAt(int index) {
2694 return HeapObject::RawField(this, OffsetOfElementAt(index));
2698 bool DescriptorArray::IsEmpty() {
2699 DCHECK(length() >= kFirstIndex ||
2700 this == GetHeap()->empty_descriptor_array());
2701 return length() < kFirstIndex;
2705 int DescriptorArray::number_of_descriptors() {
2706 DCHECK(length() >= kFirstIndex || IsEmpty());
2708 return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
2712 int DescriptorArray::number_of_descriptors_storage() {
2714 return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
2718 int DescriptorArray::NumberOfSlackDescriptors() {
2719 return number_of_descriptors_storage() - number_of_descriptors();
2723 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2725 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2729 inline int DescriptorArray::number_of_entries() {
2730 return number_of_descriptors();
2734 bool DescriptorArray::HasEnumCache() {
2735 return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
2739 void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
2740 set(kEnumCacheIndex, array->get(kEnumCacheIndex));
2744 FixedArray* DescriptorArray::GetEnumCache() {
2745 DCHECK(HasEnumCache());
2746 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2747 return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
2751 bool DescriptorArray::HasEnumIndicesCache() {
2752 if (IsEmpty()) return false;
2753 Object* object = get(kEnumCacheIndex);
2754 if (object->IsSmi()) return false;
2755 FixedArray* bridge = FixedArray::cast(object);
2756 return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2760 FixedArray* DescriptorArray::GetEnumIndicesCache() {
2761 DCHECK(HasEnumIndicesCache());
2762 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2763 return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
2767 Object** DescriptorArray::GetEnumCacheSlot() {
2768 DCHECK(HasEnumCache());
2769 return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2774 // Perform a binary search in a fixed array. Low and high are entry indices. If
2775 // there are three entries in this array it should be called with low=0 and
2777 template <SearchMode search_mode, typename T>
2778 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2779 int* out_insertion_index) {
2780 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2781 uint32_t hash = name->Hash();
2784 DCHECK(low <= high);
2786 while (low != high) {
2787 int mid = (low + high) / 2;
2788 Name* mid_name = array->GetSortedKey(mid);
2789 uint32_t mid_hash = mid_name->Hash();
2791 if (mid_hash >= hash) {
2798 for (; low <= limit; ++low) {
2799 int sort_index = array->GetSortedKeyIndex(low);
2800 Name* entry = array->GetKey(sort_index);
2801 uint32_t current_hash = entry->Hash();
2802 if (current_hash != hash) {
2803 if (out_insertion_index != NULL) {
2804 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2806 return T::kNotFound;
2808 if (entry->Equals(name)) {
2809 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2812 return T::kNotFound;
2816 if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
2817 return T::kNotFound;
2821 // Perform a linear search in this fixed array. len is the number of entry
2822 // indices that are valid.
2823 template <SearchMode search_mode, typename T>
2824 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2825 int* out_insertion_index) {
2826 uint32_t hash = name->Hash();
2827 if (search_mode == ALL_ENTRIES) {
2828 for (int number = 0; number < len; number++) {
2829 int sorted_index = array->GetSortedKeyIndex(number);
2830 Name* entry = array->GetKey(sorted_index);
2831 uint32_t current_hash = entry->Hash();
2832 if (current_hash > hash) {
2833 if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
2834 return T::kNotFound;
2836 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2838 if (out_insertion_index != NULL) *out_insertion_index = len;
2839 return T::kNotFound;
2841 DCHECK(len >= valid_entries);
2842 DCHECK_NULL(out_insertion_index); // Not supported here.
2843 for (int number = 0; number < valid_entries; number++) {
2844 Name* entry = array->GetKey(number);
2845 uint32_t current_hash = entry->Hash();
2846 if (current_hash == hash && entry->Equals(name)) return number;
2848 return T::kNotFound;
2853 template <SearchMode search_mode, typename T>
2854 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2855 if (search_mode == VALID_ENTRIES) {
2856 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2858 SLOW_DCHECK(array->IsSortedNoDuplicates());
2861 int nof = array->number_of_entries();
2863 if (out_insertion_index != NULL) *out_insertion_index = 0;
2864 return T::kNotFound;
2867 // Fast case: do linear search for small arrays.
2868 const int kMaxElementsForLinearSearch = 8;
2869 if ((search_mode == ALL_ENTRIES &&
2870 nof <= kMaxElementsForLinearSearch) ||
2871 (search_mode == VALID_ENTRIES &&
2872 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2873 return LinearSearch<search_mode>(array, name, nof, valid_entries,
2874 out_insertion_index);
2877 // Slow case: perform binary search.
2878 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
2879 out_insertion_index);
2883 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2884 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2888 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2889 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2890 if (number_of_own_descriptors == 0) return kNotFound;
2892 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2893 int number = cache->Lookup(map, name);
2895 if (number == DescriptorLookupCache::kAbsent) {
2896 number = Search(name, number_of_own_descriptors);
2897 cache->Update(map, name, number);
2904 PropertyDetails Map::GetLastDescriptorDetails() {
2905 return instance_descriptors()->GetDetails(LastAdded());
2909 int Map::LastAdded() {
2910 int number_of_own_descriptors = NumberOfOwnDescriptors();
2911 DCHECK(number_of_own_descriptors > 0);
2912 return number_of_own_descriptors - 1;
2916 int Map::NumberOfOwnDescriptors() {
2917 return NumberOfOwnDescriptorsBits::decode(bit_field3());
2921 void Map::SetNumberOfOwnDescriptors(int number) {
2922 DCHECK(number <= instance_descriptors()->number_of_descriptors());
2923 set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
2927 int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
2930 void Map::SetEnumLength(int length) {
2931 if (length != kInvalidEnumCacheSentinel) {
2932 DCHECK(length >= 0);
2933 DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
2934 DCHECK(length <= NumberOfOwnDescriptors());
2936 set_bit_field3(EnumLengthBits::update(bit_field3(), length));
2940 FixedArrayBase* Map::GetInitialElements() {
2941 if (has_fast_smi_or_object_elements() ||
2942 has_fast_double_elements()) {
2943 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2944 return GetHeap()->empty_fixed_array();
2945 } else if (has_fixed_typed_array_elements()) {
2946 FixedTypedArrayBase* empty_array =
2947 GetHeap()->EmptyFixedTypedArrayForMap(this);
2948 DCHECK(!GetHeap()->InNewSpace(empty_array));
2957 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2958 DCHECK(descriptor_number < number_of_descriptors());
2959 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2963 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2964 return GetKeySlot(descriptor_number);
2968 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2969 return GetValueSlot(descriptor_number - 1) + 1;
2973 Name* DescriptorArray::GetKey(int descriptor_number) {
2974 DCHECK(descriptor_number < number_of_descriptors());
2975 return Name::cast(get(ToKeyIndex(descriptor_number)));
2979 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2980 return GetDetails(descriptor_number).pointer();
2984 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2985 return GetKey(GetSortedKeyIndex(descriptor_number));
2989 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2990 PropertyDetails details = GetDetails(descriptor_index);
2991 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2995 void DescriptorArray::SetRepresentation(int descriptor_index,
2996 Representation representation) {
2997 DCHECK(!representation.IsNone());
2998 PropertyDetails details = GetDetails(descriptor_index);
2999 set(ToDetailsIndex(descriptor_index),
3000 details.CopyWithRepresentation(representation).AsSmi());
3004 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3005 DCHECK(descriptor_number < number_of_descriptors());
3006 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3010 int DescriptorArray::GetValueOffset(int descriptor_number) {
3011 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3015 Object* DescriptorArray::GetValue(int descriptor_number) {
3016 DCHECK(descriptor_number < number_of_descriptors());
3017 return get(ToValueIndex(descriptor_number));
3021 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3022 set(ToValueIndex(descriptor_index), value);
3026 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3027 DCHECK(descriptor_number < number_of_descriptors());
3028 Object* details = get(ToDetailsIndex(descriptor_number));
3029 return PropertyDetails(Smi::cast(details));
3033 PropertyType DescriptorArray::GetType(int descriptor_number) {
3034 return GetDetails(descriptor_number).type();
3038 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3039 DCHECK(GetDetails(descriptor_number).location() == kField);
3040 return GetDetails(descriptor_number).field_index();
3044 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3045 DCHECK(GetDetails(descriptor_number).location() == kField);
3046 Object* value = GetValue(descriptor_number);
3047 if (value->IsWeakCell()) {
3048 if (WeakCell::cast(value)->cleared()) return HeapType::None();
3049 value = WeakCell::cast(value)->value();
3051 return HeapType::cast(value);
3055 Object* DescriptorArray::GetConstant(int descriptor_number) {
3056 return GetValue(descriptor_number);
3060 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3061 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3062 return GetValue(descriptor_number);
3066 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3067 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3068 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3069 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3073 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3074 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3075 handle(GetValue(descriptor_number), GetIsolate()),
3076 GetDetails(descriptor_number));
3080 void DescriptorArray::Set(int descriptor_number,
3082 const WhitenessWitness&) {
3084 DCHECK(descriptor_number < number_of_descriptors());
3086 NoIncrementalWriteBarrierSet(this,
3087 ToKeyIndex(descriptor_number),
3089 NoIncrementalWriteBarrierSet(this,
3090 ToValueIndex(descriptor_number),
3092 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
3093 desc->GetDetails().AsSmi());
3097 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3099 DCHECK(descriptor_number < number_of_descriptors());
3101 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3102 set(ToValueIndex(descriptor_number), *desc->GetValue());
3103 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3107 void DescriptorArray::Append(Descriptor* desc) {
3108 DisallowHeapAllocation no_gc;
3109 int descriptor_number = number_of_descriptors();
3110 SetNumberOfDescriptors(descriptor_number + 1);
3111 Set(descriptor_number, desc);
3113 uint32_t hash = desc->GetKey()->Hash();
3117 for (insertion = descriptor_number; insertion > 0; --insertion) {
3118 Name* key = GetSortedKey(insertion - 1);
3119 if (key->Hash() <= hash) break;
3120 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3123 SetSortedKey(insertion, descriptor_number);
3127 void DescriptorArray::SwapSortedKeys(int first, int second) {
3128 int first_key = GetSortedKeyIndex(first);
3129 SetSortedKey(first, GetSortedKeyIndex(second));
3130 SetSortedKey(second, first_key);
3134 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3135 : marking_(array->GetHeap()->incremental_marking()) {
3136 marking_->EnterNoMarkingScope();
3137 DCHECK(!marking_->IsMarking() ||
3138 Marking::Color(array) == Marking::WHITE_OBJECT);
3142 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3143 marking_->LeaveNoMarkingScope();
3147 PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }
3150 Object* DescriptorArray::Entry::GetCallbackObject() {
3151 return descs_->GetValue(index_);
3155 int HashTableBase::NumberOfElements() {
3156 return Smi::cast(get(kNumberOfElementsIndex))->value();
3160 int HashTableBase::NumberOfDeletedElements() {
3161 return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
3165 int HashTableBase::Capacity() {
3166 return Smi::cast(get(kCapacityIndex))->value();
3170 void HashTableBase::ElementAdded() {
3171 SetNumberOfElements(NumberOfElements() + 1);
3175 void HashTableBase::ElementRemoved() {
3176 SetNumberOfElements(NumberOfElements() - 1);
3177 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
3181 void HashTableBase::ElementsRemoved(int n) {
3182 SetNumberOfElements(NumberOfElements() - n);
3183 SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
3188 int HashTableBase::ComputeCapacity(int at_least_space_for) {
3189 const int kMinCapacity = 4;
3190 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3191 return Max(capacity, kMinCapacity);
3195 bool HashTableBase::IsKey(Object* k) {
3196 return !k->IsTheHole() && !k->IsUndefined();
3200 void HashTableBase::SetNumberOfElements(int nof) {
3201 set(kNumberOfElementsIndex, Smi::FromInt(nof));
3205 void HashTableBase::SetNumberOfDeletedElements(int nod) {
3206 set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
3210 template <typename Derived, typename Shape, typename Key>
3211 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3212 return FindEntry(GetIsolate(), key);
3216 template<typename Derived, typename Shape, typename Key>
3217 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3218 return FindEntry(isolate, key, HashTable::Hash(key));
3222 // Find entry for key otherwise return kNotFound.
3223 template <typename Derived, typename Shape, typename Key>
3224 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
3226 uint32_t capacity = Capacity();
3227 uint32_t entry = FirstProbe(hash, capacity);
3229 // EnsureCapacity will guarantee the hash table is never full.
3231 Object* element = KeyAt(entry);
3232 // Empty entry. Uses raw unchecked accessors because it is called by the
3233 // string table during bootstrapping.
3234 if (element == isolate->heap()->root(Heap::kUndefinedValueRootIndex)) break;
3235 if (element != isolate->heap()->root(Heap::kTheHoleValueRootIndex) &&
3236 Shape::IsMatch(key, element)) return entry;
3237 entry = NextProbe(entry, count++, capacity);
3243 bool SeededNumberDictionary::requires_slow_elements() {
3244 Object* max_index_object = get(kMaxNumberKeyIndex);
3245 if (!max_index_object->IsSmi()) return false;
3247 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3251 uint32_t SeededNumberDictionary::max_number_key() {
3252 DCHECK(!requires_slow_elements());
3253 Object* max_index_object = get(kMaxNumberKeyIndex);
3254 if (!max_index_object->IsSmi()) return 0;
3255 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3256 return value >> kRequiresSlowElementsTagSize;
3260 void SeededNumberDictionary::set_requires_slow_elements() {
3261 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3265 // ------------------------------------
3269 CAST_ACCESSOR(AccessorInfo)
3270 CAST_ACCESSOR(ArrayList)
3271 CAST_ACCESSOR(Bool16x8)
3272 CAST_ACCESSOR(Bool32x4)
3273 CAST_ACCESSOR(Bool8x16)
3274 CAST_ACCESSOR(ByteArray)
3275 CAST_ACCESSOR(BytecodeArray)
3278 CAST_ACCESSOR(CodeCacheHashTable)
3279 CAST_ACCESSOR(CompilationCacheTable)
3280 CAST_ACCESSOR(ConsString)
3281 CAST_ACCESSOR(DeoptimizationInputData)
3282 CAST_ACCESSOR(DeoptimizationOutputData)
3283 CAST_ACCESSOR(DependentCode)
3284 CAST_ACCESSOR(DescriptorArray)
3285 CAST_ACCESSOR(ExternalOneByteString)
3286 CAST_ACCESSOR(ExternalString)
3287 CAST_ACCESSOR(ExternalTwoByteString)
3288 CAST_ACCESSOR(FixedArray)
3289 CAST_ACCESSOR(FixedArrayBase)
3290 CAST_ACCESSOR(FixedDoubleArray)
3291 CAST_ACCESSOR(FixedTypedArrayBase)
3292 CAST_ACCESSOR(Float32x4)
3293 CAST_ACCESSOR(Foreign)
3294 CAST_ACCESSOR(GlobalDictionary)
3295 CAST_ACCESSOR(GlobalObject)
3296 CAST_ACCESSOR(HandlerTable)
3297 CAST_ACCESSOR(HeapObject)
3298 CAST_ACCESSOR(Int16x8)
3299 CAST_ACCESSOR(Int32x4)
3300 CAST_ACCESSOR(Int8x16)
3301 CAST_ACCESSOR(JSArray)
3302 CAST_ACCESSOR(JSArrayBuffer)
3303 CAST_ACCESSOR(JSArrayBufferView)
3304 CAST_ACCESSOR(JSBuiltinsObject)
3305 CAST_ACCESSOR(JSDataView)
3306 CAST_ACCESSOR(JSDate)
3307 CAST_ACCESSOR(JSFunction)
3308 CAST_ACCESSOR(JSFunctionProxy)
3309 CAST_ACCESSOR(JSGeneratorObject)
3310 CAST_ACCESSOR(JSGlobalObject)
3311 CAST_ACCESSOR(JSGlobalProxy)
3312 CAST_ACCESSOR(JSMap)
3313 CAST_ACCESSOR(JSMapIterator)
3314 CAST_ACCESSOR(JSMessageObject)
3315 CAST_ACCESSOR(JSModule)
3316 CAST_ACCESSOR(JSObject)
3317 CAST_ACCESSOR(JSProxy)
3318 CAST_ACCESSOR(JSReceiver)
3319 CAST_ACCESSOR(JSRegExp)
3320 CAST_ACCESSOR(JSSet)
3321 CAST_ACCESSOR(JSSetIterator)
3322 CAST_ACCESSOR(JSTypedArray)
3323 CAST_ACCESSOR(JSValue)
3324 CAST_ACCESSOR(JSWeakMap)
3325 CAST_ACCESSOR(JSWeakSet)
3326 CAST_ACCESSOR(LayoutDescriptor)
3329 CAST_ACCESSOR(NameDictionary)
3330 CAST_ACCESSOR(NormalizedMapCache)
3331 CAST_ACCESSOR(Object)
3332 CAST_ACCESSOR(ObjectHashTable)
3333 CAST_ACCESSOR(Oddball)
3334 CAST_ACCESSOR(OrderedHashMap)
3335 CAST_ACCESSOR(OrderedHashSet)
3336 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3337 CAST_ACCESSOR(PropertyCell)
3338 CAST_ACCESSOR(ScopeInfo)
3339 CAST_ACCESSOR(SeededNumberDictionary)
3340 CAST_ACCESSOR(SeqOneByteString)
3341 CAST_ACCESSOR(SeqString)
3342 CAST_ACCESSOR(SeqTwoByteString)
3343 CAST_ACCESSOR(SharedFunctionInfo)
3344 CAST_ACCESSOR(Simd128Value)
3345 CAST_ACCESSOR(SlicedString)
3347 CAST_ACCESSOR(String)
3348 CAST_ACCESSOR(StringTable)
3349 CAST_ACCESSOR(Struct)
3350 CAST_ACCESSOR(Symbol)
3351 CAST_ACCESSOR(Uint16x8)
3352 CAST_ACCESSOR(Uint32x4)
3353 CAST_ACCESSOR(Uint8x16)
3354 CAST_ACCESSOR(UnseededNumberDictionary)
3355 CAST_ACCESSOR(WeakCell)
3356 CAST_ACCESSOR(WeakFixedArray)
3357 CAST_ACCESSOR(WeakHashTable)
3361 template <class Traits>
3362 STATIC_CONST_MEMBER_DEFINITION const InstanceType
3363 FixedTypedArray<Traits>::kInstanceType;
3366 template <class Traits>
3367 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3368 SLOW_DCHECK(object->IsHeapObject() &&
3369 HeapObject::cast(object)->map()->instance_type() ==
3370 Traits::kInstanceType);
3371 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3375 template <class Traits>
3376 const FixedTypedArray<Traits>*
3377 FixedTypedArray<Traits>::cast(const Object* object) {
3378 SLOW_DCHECK(object->IsHeapObject() &&
3379 HeapObject::cast(object)->map()->instance_type() ==
3380 Traits::kInstanceType);
3381 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3385 #define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
3386 type* DeoptimizationInputData::name() { \
3387 return type::cast(get(k##name##Index)); \
3389 void DeoptimizationInputData::Set##name(type* value) { \
3390 set(k##name##Index, value); \
3393 DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
3394 DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
3395 DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
3396 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
3397 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
3398 DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
3399 DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
3400 DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
3402 #undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3405 #define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type) \
3406 type* DeoptimizationInputData::name(int i) { \
3407 return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
3409 void DeoptimizationInputData::Set##name(int i, type* value) { \
3410 set(IndexForEntry(i) + k##name##Offset, value); \
3413 DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
3414 DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
3415 DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
3416 DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
3418 #undef DEFINE_DEOPT_ENTRY_ACCESSORS
3421 BailoutId DeoptimizationInputData::AstId(int i) {
3422 return BailoutId(AstIdRaw(i)->value());
3426 void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
3427 SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
3431 int DeoptimizationInputData::DeoptCount() {
3432 return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
3436 int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
3439 BailoutId DeoptimizationOutputData::AstId(int index) {
3440 return BailoutId(Smi::cast(get(index * 2))->value());
3444 void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
3445 set(index * 2, Smi::FromInt(id.ToInt()));
3449 Smi* DeoptimizationOutputData::PcAndState(int index) {
3450 return Smi::cast(get(1 + index * 2));
3454 void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
3455 set(1 + index * 2, offset);
3459 void HandlerTable::SetRangeStart(int index, int value) {
3460 set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
3464 void HandlerTable::SetRangeEnd(int index, int value) {
3465 set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
3469 void HandlerTable::SetRangeHandler(int index, int offset,
3470 CatchPrediction prediction) {
3471 int value = HandlerOffsetField::encode(offset) |
3472 HandlerPredictionField::encode(prediction);
3473 set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
3477 void HandlerTable::SetRangeDepth(int index, int value) {
3478 set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
3482 void HandlerTable::SetReturnOffset(int index, int value) {
3483 set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
3487 void HandlerTable::SetReturnHandler(int index, int offset,
3488 CatchPrediction prediction) {
3489 int value = HandlerOffsetField::encode(offset) |
3490 HandlerPredictionField::encode(prediction);
3491 set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
3495 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3496 STRUCT_LIST(MAKE_STRUCT_CAST)
3497 #undef MAKE_STRUCT_CAST
3500 template <typename Derived, typename Shape, typename Key>
3501 HashTable<Derived, Shape, Key>*
3502 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3503 SLOW_DCHECK(obj->IsHashTable());
3504 return reinterpret_cast<HashTable*>(obj);
3508 template <typename Derived, typename Shape, typename Key>
3509 const HashTable<Derived, Shape, Key>*
3510 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3511 SLOW_DCHECK(obj->IsHashTable());
3512 return reinterpret_cast<const HashTable*>(obj);
3516 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3517 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3519 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3520 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3522 SMI_ACCESSORS(String, length, kLengthOffset)
3523 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3526 int FreeSpace::Size() { return size(); }
3529 FreeSpace* FreeSpace::next() {
3530 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3531 (!GetHeap()->deserialization_complete() && map() == NULL));
3532 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3533 return reinterpret_cast<FreeSpace*>(
3534 Memory::Address_at(address() + kNextOffset));
3538 FreeSpace** FreeSpace::next_address() {
3539 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3540 (!GetHeap()->deserialization_complete() && map() == NULL));
3541 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3542 return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
3546 void FreeSpace::set_next(FreeSpace* next) {
3547 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3548 (!GetHeap()->deserialization_complete() && map() == NULL));
3549 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3550 base::NoBarrier_Store(
3551 reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
3552 reinterpret_cast<base::AtomicWord>(next));
3556 FreeSpace* FreeSpace::cast(HeapObject* o) {
3557 SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
3558 return reinterpret_cast<FreeSpace*>(o);
3562 uint32_t Name::hash_field() {
3563 return READ_UINT32_FIELD(this, kHashFieldOffset);
3567 void Name::set_hash_field(uint32_t value) {
3568 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3569 #if V8_HOST_ARCH_64_BIT
3570 #if V8_TARGET_LITTLE_ENDIAN
3571 WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
3573 WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
3579 bool Name::Equals(Name* other) {
3580 if (other == this) return true;
3581 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3582 this->IsSymbol() || other->IsSymbol()) {
3585 return String::cast(this)->SlowEquals(String::cast(other));
3589 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3590 if (one.is_identical_to(two)) return true;
3591 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3592 one->IsSymbol() || two->IsSymbol()) {
3595 return String::SlowEquals(Handle<String>::cast(one),
3596 Handle<String>::cast(two));
3600 ACCESSORS(Symbol, name, Object, kNameOffset)
3601 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3602 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3605 bool String::Equals(String* other) {
3606 if (other == this) return true;
3607 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3610 return SlowEquals(other);
3614 bool String::Equals(Handle<String> one, Handle<String> two) {
3615 if (one.is_identical_to(two)) return true;
3616 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3619 return SlowEquals(one, two);
3623 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3624 if (!string->IsConsString()) return string;
3625 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3626 if (cons->IsFlat()) return handle(cons->first());
3627 return SlowFlatten(cons, pretenure);
3631 Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
3632 if (name->IsSymbol()) return name;
3633 return String::Flatten(Handle<String>::cast(name));
3637 uint16_t String::Get(int index) {
3638 DCHECK(index >= 0 && index < length());
3639 switch (StringShape(this).full_representation_tag()) {
3640 case kSeqStringTag | kOneByteStringTag:
3641 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3642 case kSeqStringTag | kTwoByteStringTag:
3643 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3644 case kConsStringTag | kOneByteStringTag:
3645 case kConsStringTag | kTwoByteStringTag:
3646 return ConsString::cast(this)->ConsStringGet(index);
3647 case kExternalStringTag | kOneByteStringTag:
3648 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3649 case kExternalStringTag | kTwoByteStringTag:
3650 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3651 case kSlicedStringTag | kOneByteStringTag:
3652 case kSlicedStringTag | kTwoByteStringTag:
3653 return SlicedString::cast(this)->SlicedStringGet(index);
3663 void String::Set(int index, uint16_t value) {
3664 DCHECK(index >= 0 && index < length());
3665 DCHECK(StringShape(this).IsSequential());
3667 return this->IsOneByteRepresentation()
3668 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3669 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3673 bool String::IsFlat() {
3674 if (!StringShape(this).IsCons()) return true;
3675 return ConsString::cast(this)->second()->length() == 0;
3679 String* String::GetUnderlying() {
3680 // Giving direct access to underlying string only makes sense if the
3681 // wrapping string is already flattened.
3682 DCHECK(this->IsFlat());
3683 DCHECK(StringShape(this).IsIndirect());
3684 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3685 const int kUnderlyingOffset = SlicedString::kParentOffset;
3686 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3690 template<class Visitor>
3691 ConsString* String::VisitFlat(Visitor* visitor,
3694 int slice_offset = offset;
3695 const int length = string->length();
3696 DCHECK(offset <= length);
3698 int32_t type = string->map()->instance_type();
3699 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3700 case kSeqStringTag | kOneByteStringTag:
3701 visitor->VisitOneByteString(
3702 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3706 case kSeqStringTag | kTwoByteStringTag:
3707 visitor->VisitTwoByteString(
3708 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3712 case kExternalStringTag | kOneByteStringTag:
3713 visitor->VisitOneByteString(
3714 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3718 case kExternalStringTag | kTwoByteStringTag:
3719 visitor->VisitTwoByteString(
3720 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3724 case kSlicedStringTag | kOneByteStringTag:
3725 case kSlicedStringTag | kTwoByteStringTag: {
3726 SlicedString* slicedString = SlicedString::cast(string);
3727 slice_offset += slicedString->offset();
3728 string = slicedString->parent();
3732 case kConsStringTag | kOneByteStringTag:
3733 case kConsStringTag | kTwoByteStringTag:
3734 return ConsString::cast(string);
3745 inline Vector<const uint8_t> String::GetCharVector() {
3746 String::FlatContent flat = GetFlatContent();
3747 DCHECK(flat.IsOneByte());
3748 return flat.ToOneByteVector();
3753 inline Vector<const uc16> String::GetCharVector() {
3754 String::FlatContent flat = GetFlatContent();
3755 DCHECK(flat.IsTwoByte());
3756 return flat.ToUC16Vector();
3760 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3761 DCHECK(index >= 0 && index < length());
3762 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3766 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3767 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3768 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3769 static_cast<byte>(value));
3773 Address SeqOneByteString::GetCharsAddress() {
3774 return FIELD_ADDR(this, kHeaderSize);
3778 uint8_t* SeqOneByteString::GetChars() {
3779 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3783 Address SeqTwoByteString::GetCharsAddress() {
3784 return FIELD_ADDR(this, kHeaderSize);
3788 uc16* SeqTwoByteString::GetChars() {
3789 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3793 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3794 DCHECK(index >= 0 && index < length());
3795 return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
3799 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3800 DCHECK(index >= 0 && index < length());
3801 WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
3805 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3806 return SizeFor(length());
3810 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3811 return SizeFor(length());
3815 String* SlicedString::parent() {
3816 return String::cast(READ_FIELD(this, kParentOffset));
3820 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3821 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3822 WRITE_FIELD(this, kParentOffset, parent);
3823 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3827 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3830 String* ConsString::first() {
3831 return String::cast(READ_FIELD(this, kFirstOffset));
3835 Object* ConsString::unchecked_first() {
3836 return READ_FIELD(this, kFirstOffset);
3840 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3841 WRITE_FIELD(this, kFirstOffset, value);
3842 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3846 String* ConsString::second() {
3847 return String::cast(READ_FIELD(this, kSecondOffset));
3851 Object* ConsString::unchecked_second() {
3852 return READ_FIELD(this, kSecondOffset);
3856 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3857 WRITE_FIELD(this, kSecondOffset, value);
3858 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3862 bool ExternalString::is_short() {
3863 InstanceType type = map()->instance_type();
3864 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3868 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3869 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3873 void ExternalOneByteString::update_data_cache() {
3874 if (is_short()) return;
3875 const char** data_field =
3876 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3877 *data_field = resource()->data();
3881 void ExternalOneByteString::set_resource(
3882 const ExternalOneByteString::Resource* resource) {
3883 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3884 *reinterpret_cast<const Resource**>(
3885 FIELD_ADDR(this, kResourceOffset)) = resource;
3886 if (resource != NULL) update_data_cache();
3890 const uint8_t* ExternalOneByteString::GetChars() {
3891 return reinterpret_cast<const uint8_t*>(resource()->data());
3895 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3896 DCHECK(index >= 0 && index < length());
3897 return GetChars()[index];
3901 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3902 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3906 void ExternalTwoByteString::update_data_cache() {
3907 if (is_short()) return;
3908 const uint16_t** data_field =
3909 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3910 *data_field = resource()->data();
3914 void ExternalTwoByteString::set_resource(
3915 const ExternalTwoByteString::Resource* resource) {
3916 *reinterpret_cast<const Resource**>(
3917 FIELD_ADDR(this, kResourceOffset)) = resource;
3918 if (resource != NULL) update_data_cache();
3922 const uint16_t* ExternalTwoByteString::GetChars() {
3923 return resource()->data();
3927 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3928 DCHECK(index >= 0 && index < length());
3929 return GetChars()[index];
3933 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3935 return GetChars() + start;
3939 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3942 void ConsStringIterator::PushLeft(ConsString* string) {
3943 frames_[depth_++ & kDepthMask] = string;
3947 void ConsStringIterator::PushRight(ConsString* string) {
3949 frames_[(depth_-1) & kDepthMask] = string;
3953 void ConsStringIterator::AdjustMaximumDepth() {
3954 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3958 void ConsStringIterator::Pop() {
3960 DCHECK(depth_ <= maximum_depth_);
3965 uint16_t StringCharacterStream::GetNext() {
3966 DCHECK(buffer8_ != NULL && end_ != NULL);
3967 // Advance cursor if needed.
3968 if (buffer8_ == end_) HasMore();
3969 DCHECK(buffer8_ < end_);
3970 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3974 StringCharacterStream::StringCharacterStream(String* string, int offset)
3975 : is_one_byte_(false) {
3976 Reset(string, offset);
3980 void StringCharacterStream::Reset(String* string, int offset) {
3983 ConsString* cons_string = String::VisitFlat(this, string, offset);
3984 iter_.Reset(cons_string, offset);
3985 if (cons_string != NULL) {
3986 string = iter_.Next(&offset);
3987 if (string != NULL) String::VisitFlat(this, string, offset);
3992 bool StringCharacterStream::HasMore() {
3993 if (buffer8_ != end_) return true;
3995 String* string = iter_.Next(&offset);
3996 DCHECK_EQ(offset, 0);
3997 if (string == NULL) return false;
3998 String::VisitFlat(this, string);
3999 DCHECK(buffer8_ != end_);
4004 void StringCharacterStream::VisitOneByteString(
4005 const uint8_t* chars, int length) {
4006 is_one_byte_ = true;
4008 end_ = chars + length;
4012 void StringCharacterStream::VisitTwoByteString(
4013 const uint16_t* chars, int length) {
4014 is_one_byte_ = false;
4016 end_ = reinterpret_cast<const uint8_t*>(chars + length);
4020 int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
4023 byte ByteArray::get(int index) {
4024 DCHECK(index >= 0 && index < this->length());
4025 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4029 void ByteArray::set(int index, byte value) {
4030 DCHECK(index >= 0 && index < this->length());
4031 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4035 int ByteArray::get_int(int index) {
4036 DCHECK(index >= 0 && (index * kIntSize) < this->length());
4037 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
4041 ByteArray* ByteArray::FromDataStartAddress(Address address) {
4042 DCHECK_TAG_ALIGNED(address);
4043 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
4047 int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
4050 Address ByteArray::GetDataStartAddress() {
4051 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4055 void BytecodeArray::BytecodeArrayIterateBody(ObjectVisitor* v) {
4056 IteratePointer(v, kConstantPoolOffset);
4060 byte BytecodeArray::get(int index) {
4061 DCHECK(index >= 0 && index < this->length());
4062 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4066 void BytecodeArray::set(int index, byte value) {
4067 DCHECK(index >= 0 && index < this->length());
4068 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4072 void BytecodeArray::set_frame_size(int frame_size) {
4073 DCHECK_GE(frame_size, 0);
4074 DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
4075 WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
4079 int BytecodeArray::frame_size() const {
4080 return READ_INT_FIELD(this, kFrameSizeOffset);
4084 void BytecodeArray::set_parameter_count(int number_of_parameters) {
4085 DCHECK_GE(number_of_parameters, 0);
4086 // Parameter count is stored as the size on stack of the parameters to allow
4087 // it to be used directly by generated code.
4088 WRITE_INT_FIELD(this, kParameterSizeOffset,
4089 (number_of_parameters << kPointerSizeLog2));
4093 int BytecodeArray::parameter_count() const {
4094 // Parameter count is stored as the size on stack of the parameters to allow
4095 // it to be used directly by generated code.
4096 return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
4100 ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
4103 Address BytecodeArray::GetFirstBytecodeAddress() {
4104 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4108 int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4111 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
4114 void* FixedTypedArrayBase::external_pointer() const {
4115 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
4116 return reinterpret_cast<void*>(ptr);
4120 void FixedTypedArrayBase::set_external_pointer(void* value,
4121 WriteBarrierMode mode) {
4122 intptr_t ptr = reinterpret_cast<intptr_t>(value);
4123 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
4127 void* FixedTypedArrayBase::DataPtr() {
4128 return reinterpret_cast<void*>(
4129 reinterpret_cast<intptr_t>(base_pointer()) +
4130 reinterpret_cast<intptr_t>(external_pointer()));
4134 int FixedTypedArrayBase::ElementSize(InstanceType type) {
4137 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4138 case FIXED_##TYPE##_ARRAY_TYPE: \
4139 element_size = size; \
4142 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4143 #undef TYPED_ARRAY_CASE
4148 return element_size;
4152 int FixedTypedArrayBase::DataSize(InstanceType type) {
4153 if (base_pointer() == Smi::FromInt(0)) return 0;
4154 return length() * ElementSize(type);
4158 int FixedTypedArrayBase::DataSize() {
4159 return DataSize(map()->instance_type());
4163 int FixedTypedArrayBase::size() {
4164 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4168 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4169 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4173 int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
4174 return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
4178 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4181 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4184 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4187 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4190 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4193 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4196 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4199 float Float32ArrayTraits::defaultValue() {
4200 return std::numeric_limits<float>::quiet_NaN();
4204 double Float64ArrayTraits::defaultValue() {
4205 return std::numeric_limits<double>::quiet_NaN();
4209 template <class Traits>
4210 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4211 DCHECK((index >= 0) && (index < this->length()));
4212 ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4217 template <class Traits>
4218 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4219 DCHECK((index >= 0) && (index < this->length()));
4220 ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4225 template <class Traits>
4226 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4227 return static_cast<ElementType>(value);
4232 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4233 if (value < 0) return 0;
4234 if (value > 0xFF) return 0xFF;
4235 return static_cast<uint8_t>(value);
4239 template <class Traits>
4240 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4242 return static_cast<ElementType>(DoubleToInt32(value));
4247 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4248 // Handle NaNs and less than zero values which clamp to zero.
4249 if (!(value > 0)) return 0;
4250 if (value > 0xFF) return 0xFF;
4251 return static_cast<uint8_t>(lrint(value));
4256 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4257 return static_cast<float>(value);
4262 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4267 template <class Traits>
4268 Handle<Object> FixedTypedArray<Traits>::get(
4269 Handle<FixedTypedArray<Traits> > array,
4271 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4275 template <class Traits>
4276 void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
4277 ElementType cast_value = Traits::defaultValue();
4278 if (value->IsSmi()) {
4279 int int_value = Smi::cast(value)->value();
4280 cast_value = from_int(int_value);
4281 } else if (value->IsHeapNumber()) {
4282 double double_value = HeapNumber::cast(value)->value();
4283 cast_value = from_double(double_value);
4285 // Clamp undefined to the default value. All other types have been
4286 // converted to a number type further up in the call chain.
4287 DCHECK(value->IsUndefined());
4289 set(index, cast_value);
4293 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4294 return handle(Smi::FromInt(scalar), isolate);
4298 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4300 return handle(Smi::FromInt(scalar), isolate);
4304 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4305 return handle(Smi::FromInt(scalar), isolate);
4309 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4310 return handle(Smi::FromInt(scalar), isolate);
4314 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4315 return handle(Smi::FromInt(scalar), isolate);
4319 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4320 return isolate->factory()->NewNumberFromUint(scalar);
4324 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4325 return isolate->factory()->NewNumberFromInt(scalar);
4329 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4330 return isolate->factory()->NewNumber(scalar);
4334 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4335 return isolate->factory()->NewNumber(scalar);
4339 int Map::visitor_id() {
4340 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4344 void Map::set_visitor_id(int id) {
4345 DCHECK(0 <= id && id < 256);
4346 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4350 int Map::instance_size() {
4351 return NOBARRIER_READ_BYTE_FIELD(
4352 this, kInstanceSizeOffset) << kPointerSizeLog2;
4356 int Map::inobject_properties_or_constructor_function_index() {
4357 return READ_BYTE_FIELD(this,
4358 kInObjectPropertiesOrConstructorFunctionIndexOffset);
4362 void Map::set_inobject_properties_or_constructor_function_index(int value) {
4363 DCHECK(0 <= value && value < 256);
4364 WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
4365 static_cast<byte>(value));
4369 int Map::GetInObjectProperties() {
4370 DCHECK(IsJSObjectMap());
4371 return inobject_properties_or_constructor_function_index();
4375 void Map::SetInObjectProperties(int value) {
4376 DCHECK(IsJSObjectMap());
4377 set_inobject_properties_or_constructor_function_index(value);
4381 int Map::GetConstructorFunctionIndex() {
4382 DCHECK(IsPrimitiveMap());
4383 return inobject_properties_or_constructor_function_index();
4387 void Map::SetConstructorFunctionIndex(int value) {
4388 DCHECK(IsPrimitiveMap());
4389 set_inobject_properties_or_constructor_function_index(value);
4393 int Map::GetInObjectPropertyOffset(int index) {
4394 // Adjust for the number of properties stored in the object.
4395 index -= GetInObjectProperties();
4397 return instance_size() + (index * kPointerSize);
4401 Handle<Map> Map::CopyInstallDescriptorsForTesting(
4402 Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
4403 Handle<LayoutDescriptor> layout_descriptor) {
4404 return CopyInstallDescriptors(map, new_descriptor, descriptors,
4409 int HeapObject::SizeFromMap(Map* map) {
4410 int instance_size = map->instance_size();
4411 if (instance_size != kVariableSizeSentinel) return instance_size;
4412 // Only inline the most frequent cases.
4413 InstanceType instance_type = map->instance_type();
4414 if (instance_type == FIXED_ARRAY_TYPE) {
4415 return FixedArray::BodyDescriptor::SizeOf(map, this);
4417 if (instance_type == ONE_BYTE_STRING_TYPE ||
4418 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4419 // Strings may get concurrently truncated, hence we have to access its
4420 // length synchronized.
4421 return SeqOneByteString::SizeFor(
4422 reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
4424 if (instance_type == BYTE_ARRAY_TYPE) {
4425 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4427 if (instance_type == BYTECODE_ARRAY_TYPE) {
4428 return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
4430 if (instance_type == FREE_SPACE_TYPE) {
4431 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4433 if (instance_type == STRING_TYPE ||
4434 instance_type == INTERNALIZED_STRING_TYPE) {
4435 // Strings may get concurrently truncated, hence we have to access its
4436 // length synchronized.
4437 return SeqTwoByteString::SizeFor(
4438 reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
4440 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4441 return FixedDoubleArray::SizeFor(
4442 reinterpret_cast<FixedDoubleArray*>(this)->length());
4444 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4445 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4446 return reinterpret_cast<FixedTypedArrayBase*>(
4447 this)->TypedArraySize(instance_type);
4449 DCHECK(instance_type == CODE_TYPE);
4450 return reinterpret_cast<Code*>(this)->CodeSize();
4454 void Map::set_instance_size(int value) {
4455 DCHECK_EQ(0, value & (kPointerSize - 1));
4456 value >>= kPointerSizeLog2;
4457 DCHECK(0 <= value && value < 256);
4458 NOBARRIER_WRITE_BYTE_FIELD(
4459 this, kInstanceSizeOffset, static_cast<byte>(value));
4463 void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4466 InstanceType Map::instance_type() {
4467 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4471 void Map::set_instance_type(InstanceType value) {
4472 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4476 int Map::unused_property_fields() {
4477 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4481 void Map::set_unused_property_fields(int value) {
4482 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4486 byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
4489 void Map::set_bit_field(byte value) {
4490 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4494 byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
4497 void Map::set_bit_field2(byte value) {
4498 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4502 void Map::set_non_instance_prototype(bool value) {
4504 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4506 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4511 bool Map::has_non_instance_prototype() {
4512 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4516 void Map::set_function_with_prototype(bool value) {
4517 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4521 bool Map::function_with_prototype() {
4522 return FunctionWithPrototype::decode(bit_field());
4526 void Map::set_is_hidden_prototype() {
4527 set_bit_field(bit_field() | (1 << kIsHiddenPrototype));
4531 bool Map::is_hidden_prototype() {
4532 return ((1 << kIsHiddenPrototype) & bit_field()) != 0;
4536 void Map::set_has_indexed_interceptor() {
4537 set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
4541 bool Map::has_indexed_interceptor() {
4542 return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
4546 void Map::set_is_undetectable() {
4547 set_bit_field(bit_field() | (1 << kIsUndetectable));
4551 bool Map::is_undetectable() {
4552 return ((1 << kIsUndetectable) & bit_field()) != 0;
4556 void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }
4558 bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }
4561 void Map::set_has_named_interceptor() {
4562 set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
4566 bool Map::has_named_interceptor() {
4567 return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
4571 void Map::set_is_access_check_needed(bool access_check_needed) {
4572 if (access_check_needed) {
4573 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4575 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4580 bool Map::is_access_check_needed() {
4581 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4585 void Map::set_is_extensible(bool value) {
4587 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4589 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4593 bool Map::is_extensible() {
4594 return ((1 << kIsExtensible) & bit_field2()) != 0;
4598 void Map::set_is_prototype_map(bool value) {
4599 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4602 bool Map::is_prototype_map() const {
4603 return IsPrototypeMapBits::decode(bit_field2());
4607 void Map::set_elements_kind(ElementsKind elements_kind) {
4608 DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
4609 DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
4610 set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
4611 DCHECK(this->elements_kind() == elements_kind);
4615 ElementsKind Map::elements_kind() {
4616 return Map::ElementsKindBits::decode(bit_field2());
4620 bool Map::has_fast_smi_elements() {
4621 return IsFastSmiElementsKind(elements_kind());
4624 bool Map::has_fast_object_elements() {
4625 return IsFastObjectElementsKind(elements_kind());
4628 bool Map::has_fast_smi_or_object_elements() {
4629 return IsFastSmiOrObjectElementsKind(elements_kind());
4632 bool Map::has_fast_double_elements() {
4633 return IsFastDoubleElementsKind(elements_kind());
4636 bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
4638 bool Map::has_sloppy_arguments_elements() {
4639 return IsSloppyArgumentsElements(elements_kind());
4642 bool Map::has_fixed_typed_array_elements() {
4643 return IsFixedTypedArrayElementsKind(elements_kind());
4646 bool Map::has_dictionary_elements() {
4647 return IsDictionaryElementsKind(elements_kind());
4651 void Map::set_dictionary_map(bool value) {
4652 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4653 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4654 set_bit_field3(new_bit_field3);
4658 bool Map::is_dictionary_map() {
4659 return DictionaryMap::decode(bit_field3());
4663 Code::Flags Code::flags() {
4664 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4668 void Map::set_owns_descriptors(bool owns_descriptors) {
4669 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4673 bool Map::owns_descriptors() {
4674 return OwnsDescriptors::decode(bit_field3());
4678 void Map::set_has_instance_call_handler() {
4679 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4683 bool Map::has_instance_call_handler() {
4684 return HasInstanceCallHandler::decode(bit_field3());
4688 void Map::deprecate() {
4689 set_bit_field3(Deprecated::update(bit_field3(), true));
4693 bool Map::is_deprecated() {
4694 return Deprecated::decode(bit_field3());
4698 void Map::set_migration_target(bool value) {
4699 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4703 bool Map::is_migration_target() {
4704 return IsMigrationTarget::decode(bit_field3());
4708 void Map::set_is_strong() {
4709 set_bit_field3(IsStrong::update(bit_field3(), true));
4713 bool Map::is_strong() {
4714 return IsStrong::decode(bit_field3());
4718 void Map::set_counter(int value) {
4719 set_bit_field3(Counter::update(bit_field3(), value));
4723 int Map::counter() { return Counter::decode(bit_field3()); }
4726 void Map::mark_unstable() {
4727 set_bit_field3(IsUnstable::update(bit_field3(), true));
4731 bool Map::is_stable() {
4732 return !IsUnstable::decode(bit_field3());
4736 bool Map::has_code_cache() {
4737 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4741 bool Map::CanBeDeprecated() {
4742 int descriptor = LastAdded();
4743 for (int i = 0; i <= descriptor; i++) {
4744 PropertyDetails details = instance_descriptors()->GetDetails(i);
4745 if (details.representation().IsNone()) return true;
4746 if (details.representation().IsSmi()) return true;
4747 if (details.representation().IsDouble()) return true;
4748 if (details.representation().IsHeapObject()) return true;
4749 if (details.type() == DATA_CONSTANT) return true;
4755 void Map::NotifyLeafMapLayoutChange() {
4758 dependent_code()->DeoptimizeDependentCodeGroup(
4760 DependentCode::kPrototypeCheckGroup);
4765 bool Map::CanTransition() {
4766 // Only JSObject and subtypes have map transitions and back pointers.
4767 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
4768 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4772 bool Map::IsPrimitiveMap() {
4773 STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
4774 return instance_type() <= LAST_PRIMITIVE_TYPE;
4776 bool Map::IsJSObjectMap() {
4777 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
4778 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4780 bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
4781 bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
4782 bool Map::IsJSProxyMap() {
4783 InstanceType type = instance_type();
4784 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
4786 bool Map::IsJSGlobalProxyMap() {
4787 return instance_type() == JS_GLOBAL_PROXY_TYPE;
4789 bool Map::IsJSGlobalObjectMap() {
4790 return instance_type() == JS_GLOBAL_OBJECT_TYPE;
4792 bool Map::IsGlobalObjectMap() {
4793 const InstanceType type = instance_type();
4794 return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
4798 bool Map::CanOmitMapChecks() {
4799 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4803 int DependentCode::number_of_entries(DependencyGroup group) {
4804 if (length() == 0) return 0;
4805 return Smi::cast(get(group))->value();
4809 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4810 set(group, Smi::FromInt(value));
4814 void DependentCode::set_object_at(int i, Object* object) {
4815 set(kCodesStartIndex + i, object);
4819 Object* DependentCode::object_at(int i) {
4820 return get(kCodesStartIndex + i);
4824 void DependentCode::clear_at(int i) {
4825 set_undefined(kCodesStartIndex + i);
4829 void DependentCode::copy(int from, int to) {
4830 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4834 void DependentCode::ExtendGroup(DependencyGroup group) {
4835 GroupStartIndexes starts(this);
4836 for (int g = kGroupCount - 1; g > group; g--) {
4837 if (starts.at(g) < starts.at(g + 1)) {
4838 copy(starts.at(g), starts.at(g + 1));
4844 void Code::set_flags(Code::Flags flags) {
4845 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4846 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4850 Code::Kind Code::kind() {
4851 return ExtractKindFromFlags(flags());
4855 bool Code::IsCodeStubOrIC() {
4856 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4857 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4858 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4859 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4860 kind() == TO_BOOLEAN_IC;
4864 bool Code::IsJavaScriptCode() {
4865 if (kind() == FUNCTION || kind() == OPTIMIZED_FUNCTION) {
4868 Handle<Code> interpreter_entry =
4869 GetIsolate()->builtins()->InterpreterEntryTrampoline();
4870 return interpreter_entry.location() != nullptr && *interpreter_entry == this;
4874 InlineCacheState Code::ic_state() {
4875 InlineCacheState result = ExtractICStateFromFlags(flags());
4876 // Only allow uninitialized or debugger states for non-IC code
4877 // objects. This is used in the debugger to determine whether or not
4878 // a call to code object has been replaced with a debug break call.
4879 DCHECK(is_inline_cache_stub() ||
4880 result == UNINITIALIZED ||
4881 result == DEBUG_STUB);
4886 ExtraICState Code::extra_ic_state() {
4887 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4888 return ExtractExtraICStateFromFlags(flags());
4892 Code::StubType Code::type() {
4893 return ExtractTypeFromFlags(flags());
4897 // For initialization.
4898 void Code::set_raw_kind_specific_flags1(int value) {
4899 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4903 void Code::set_raw_kind_specific_flags2(int value) {
4904 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4908 inline bool Code::is_crankshafted() {
4909 return IsCrankshaftedField::decode(
4910 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4914 inline bool Code::is_hydrogen_stub() {
4915 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4919 inline void Code::set_is_crankshafted(bool value) {
4920 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4921 int updated = IsCrankshaftedField::update(previous, value);
4922 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4926 inline bool Code::is_turbofanned() {
4927 return IsTurbofannedField::decode(
4928 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4932 inline void Code::set_is_turbofanned(bool value) {
4933 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4934 int updated = IsTurbofannedField::update(previous, value);
4935 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4939 inline bool Code::can_have_weak_objects() {
4940 DCHECK(kind() == OPTIMIZED_FUNCTION);
4941 return CanHaveWeakObjectsField::decode(
4942 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4946 inline void Code::set_can_have_weak_objects(bool value) {
4947 DCHECK(kind() == OPTIMIZED_FUNCTION);
4948 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4949 int updated = CanHaveWeakObjectsField::update(previous, value);
4950 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4954 bool Code::has_deoptimization_support() {
4955 DCHECK_EQ(FUNCTION, kind());
4956 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4957 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4961 void Code::set_has_deoptimization_support(bool value) {
4962 DCHECK_EQ(FUNCTION, kind());
4963 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4964 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4965 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4969 bool Code::has_debug_break_slots() {
4970 DCHECK_EQ(FUNCTION, kind());
4971 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4972 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4976 void Code::set_has_debug_break_slots(bool value) {
4977 DCHECK_EQ(FUNCTION, kind());
4978 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4979 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4980 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4984 bool Code::has_reloc_info_for_serialization() {
4985 DCHECK_EQ(FUNCTION, kind());
4986 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4987 return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
4991 void Code::set_has_reloc_info_for_serialization(bool value) {
4992 DCHECK_EQ(FUNCTION, kind());
4993 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4994 flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
4995 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4999 int Code::allow_osr_at_loop_nesting_level() {
5000 DCHECK_EQ(FUNCTION, kind());
5001 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5002 return AllowOSRAtLoopNestingLevelField::decode(fields);
5006 void Code::set_allow_osr_at_loop_nesting_level(int level) {
5007 DCHECK_EQ(FUNCTION, kind());
5008 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
5009 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5010 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
5011 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5015 int Code::profiler_ticks() {
5016 DCHECK_EQ(FUNCTION, kind());
5017 return ProfilerTicksField::decode(
5018 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5022 void Code::set_profiler_ticks(int ticks) {
5023 if (kind() == FUNCTION) {
5024 unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5025 unsigned updated = ProfilerTicksField::update(previous, ticks);
5026 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5031 int Code::builtin_index() {
5032 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
5036 void Code::set_builtin_index(int index) {
5037 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
5041 unsigned Code::stack_slots() {
5042 DCHECK(is_crankshafted());
5043 return StackSlotsField::decode(
5044 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5048 void Code::set_stack_slots(unsigned slots) {
5049 CHECK(slots <= (1 << kStackSlotsBitCount));
5050 DCHECK(is_crankshafted());
5051 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5052 int updated = StackSlotsField::update(previous, slots);
5053 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5057 unsigned Code::safepoint_table_offset() {
5058 DCHECK(is_crankshafted());
5059 return SafepointTableOffsetField::decode(
5060 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5064 void Code::set_safepoint_table_offset(unsigned offset) {
5065 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5066 DCHECK(is_crankshafted());
5067 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5068 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5069 int updated = SafepointTableOffsetField::update(previous, offset);
5070 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5074 unsigned Code::back_edge_table_offset() {
5075 DCHECK_EQ(FUNCTION, kind());
5076 return BackEdgeTableOffsetField::decode(
5077 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5081 void Code::set_back_edge_table_offset(unsigned offset) {
5082 DCHECK_EQ(FUNCTION, kind());
5083 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5084 offset = offset >> kPointerSizeLog2;
5085 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5086 int updated = BackEdgeTableOffsetField::update(previous, offset);
5087 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5091 bool Code::back_edges_patched_for_osr() {
5092 DCHECK_EQ(FUNCTION, kind());
5093 return allow_osr_at_loop_nesting_level() > 0;
5097 uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5100 bool Code::has_function_cache() {
5101 DCHECK(kind() == STUB);
5102 return HasFunctionCacheField::decode(
5103 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5107 void Code::set_has_function_cache(bool flag) {
5108 DCHECK(kind() == STUB);
5109 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5110 int updated = HasFunctionCacheField::update(previous, flag);
5111 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5115 bool Code::marked_for_deoptimization() {
5116 DCHECK(kind() == OPTIMIZED_FUNCTION);
5117 return MarkedForDeoptimizationField::decode(
5118 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5122 void Code::set_marked_for_deoptimization(bool flag) {
5123 DCHECK(kind() == OPTIMIZED_FUNCTION);
5124 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5125 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5126 int updated = MarkedForDeoptimizationField::update(previous, flag);
5127 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5131 bool Code::is_inline_cache_stub() {
5132 Kind kind = this->kind();
5134 #define CASE(name) case name: return true;
5137 default: return false;
5142 bool Code::is_keyed_stub() {
5143 return is_keyed_load_stub() || is_keyed_store_stub();
5147 bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
5148 bool Code::is_handler() { return kind() == HANDLER; }
5149 bool Code::is_load_stub() { return kind() == LOAD_IC; }
5150 bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
5151 bool Code::is_store_stub() { return kind() == STORE_IC; }
5152 bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
5153 bool Code::is_call_stub() { return kind() == CALL_IC; }
5154 bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
5155 bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
5156 bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
5157 bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
5158 bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
5161 bool Code::embeds_maps_weakly() {
5163 return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
5164 k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
5165 ic_state() == MONOMORPHIC;
5169 Address Code::constant_pool() {
5170 Address constant_pool = NULL;
5171 if (FLAG_enable_embedded_constant_pool) {
5172 int offset = constant_pool_offset();
5173 if (offset < instruction_size()) {
5174 constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5177 return constant_pool;
5181 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5182 ExtraICState extra_ic_state, StubType type,
5183 CacheHolderFlag holder) {
5184 // Compute the bit mask.
5185 unsigned int bits = KindField::encode(kind)
5186 | ICStateField::encode(ic_state)
5187 | TypeField::encode(type)
5188 | ExtraICStateField::encode(extra_ic_state)
5189 | CacheHolderField::encode(holder);
5190 return static_cast<Flags>(bits);
5194 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5195 ExtraICState extra_ic_state,
5196 CacheHolderFlag holder,
5198 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
5202 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5203 CacheHolderFlag holder) {
5204 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
5208 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5209 return KindField::decode(flags);
5213 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5214 return ICStateField::decode(flags);
5218 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5219 return ExtraICStateField::decode(flags);
5223 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5224 return TypeField::decode(flags);
5228 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5229 return CacheHolderField::decode(flags);
5233 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5234 int bits = flags & ~TypeField::kMask;
5235 return static_cast<Flags>(bits);
5239 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5240 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5241 return static_cast<Flags>(bits);
5245 Code* Code::GetCodeFromTargetAddress(Address address) {
5246 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5247 // GetCodeFromTargetAddress might be called when marking objects during mark
5248 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5249 // Code::cast. Code::cast does not work when the object's map is
5251 Code* result = reinterpret_cast<Code*>(code);
5256 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5258 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5262 bool Code::CanContainWeakObjects() {
5263 // is_turbofanned() implies !can_have_weak_objects().
5264 DCHECK(!is_optimized_code() || !is_turbofanned() || !can_have_weak_objects());
5265 return is_optimized_code() && can_have_weak_objects();
5269 bool Code::IsWeakObject(Object* object) {
5270 return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
5274 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5275 if (object->IsMap()) {
5276 return Map::cast(object)->CanTransition() &&
5277 FLAG_weak_embedded_maps_in_optimized_code;
5279 if (object->IsCell()) {
5280 object = Cell::cast(object)->value();
5281 } else if (object->IsPropertyCell()) {
5282 object = PropertyCell::cast(object)->value();
5284 if (object->IsJSObject() || object->IsJSProxy()) {
5285 // JSProxy is handled like JSObject because it can morph into one.
5286 return FLAG_weak_embedded_objects_in_optimized_code;
5288 if (object->IsFixedArray()) {
5289 // Contexts of inlined functions are embedded in optimized code.
5290 Map* map = HeapObject::cast(object)->map();
5291 Heap* heap = map->GetHeap();
5292 return FLAG_weak_embedded_objects_in_optimized_code &&
5293 map == heap->function_context_map();
5299 class Code::FindAndReplacePattern {
5301 FindAndReplacePattern() : count_(0) { }
5302 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5303 DCHECK(count_ < kMaxCount);
5304 find_[count_] = map_to_find;
5305 replace_[count_] = obj_to_replace;
5309 static const int kMaxCount = 4;
5311 Handle<Map> find_[kMaxCount];
5312 Handle<Object> replace_[kMaxCount];
5317 Object* Map::prototype() const {
5318 return READ_FIELD(this, kPrototypeOffset);
5322 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5323 DCHECK(value->IsNull() || value->IsJSReceiver());
5324 WRITE_FIELD(this, kPrototypeOffset, value);
5325 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5329 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5330 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5331 return LayoutDescriptor::cast_gc_safe(layout_desc);
5335 bool Map::HasFastPointerLayout() const {
5336 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5337 return LayoutDescriptor::IsFastPointerLayout(layout_desc);
5341 void Map::UpdateDescriptors(DescriptorArray* descriptors,
5342 LayoutDescriptor* layout_desc) {
5343 set_instance_descriptors(descriptors);
5344 if (FLAG_unbox_double_fields) {
5345 if (layout_descriptor()->IsSlowLayout()) {
5346 set_layout_descriptor(layout_desc);
5349 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5350 if (FLAG_verify_heap) {
5351 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5352 CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5355 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5356 DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5362 void Map::InitializeDescriptors(DescriptorArray* descriptors,
5363 LayoutDescriptor* layout_desc) {
5364 int len = descriptors->number_of_descriptors();
5365 set_instance_descriptors(descriptors);
5366 SetNumberOfOwnDescriptors(len);
5368 if (FLAG_unbox_double_fields) {
5369 set_layout_descriptor(layout_desc);
5371 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5372 if (FLAG_verify_heap) {
5373 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5376 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5378 set_visitor_id(StaticVisitorBase::GetVisitorId(this));
5383 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5384 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5387 void Map::set_bit_field3(uint32_t bits) {
5388 if (kInt32Size != kPointerSize) {
5389 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5391 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5395 uint32_t Map::bit_field3() const {
5396 return READ_UINT32_FIELD(this, kBitField3Offset);
5400 LayoutDescriptor* Map::GetLayoutDescriptor() {
5401 return FLAG_unbox_double_fields ? layout_descriptor()
5402 : LayoutDescriptor::FastPointerLayout();
5406 void Map::AppendDescriptor(Descriptor* desc) {
5407 DescriptorArray* descriptors = instance_descriptors();
5408 int number_of_own_descriptors = NumberOfOwnDescriptors();
5409 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5410 descriptors->Append(desc);
5411 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5413 // This function does not support appending double field descriptors and
5414 // it should never try to (otherwise, layout descriptor must be updated too).
5416 PropertyDetails details = desc->GetDetails();
5417 CHECK(details.type() != DATA || !details.representation().IsDouble());
5422 Object* Map::GetBackPointer() {
5423 Object* object = constructor_or_backpointer();
5424 if (object->IsMap()) {
5427 return GetIsolate()->heap()->undefined_value();
5431 Map* Map::ElementsTransitionMap() {
5432 return TransitionArray::SearchSpecial(
5433 this, GetHeap()->elements_transition_symbol());
5437 ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5440 Object* Map::prototype_info() const {
5441 DCHECK(is_prototype_map());
5442 return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
5446 void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
5447 DCHECK(is_prototype_map());
5448 WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
5449 CONDITIONAL_WRITE_BARRIER(
5450 GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
5454 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5455 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5456 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5457 (value->IsMap() && GetBackPointer()->IsUndefined()));
5458 DCHECK(!value->IsMap() ||
5459 Map::cast(value)->GetConstructor() == constructor_or_backpointer());
5460 set_constructor_or_backpointer(value, mode);
5464 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5465 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5466 ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
5467 ACCESSORS(Map, constructor_or_backpointer, Object,
5468 kConstructorOrBackPointerOffset)
5471 Object* Map::GetConstructor() const {
5472 Object* maybe_constructor = constructor_or_backpointer();
5473 // Follow any back pointers.
5474 while (maybe_constructor->IsMap()) {
5476 Map::cast(maybe_constructor)->constructor_or_backpointer();
5478 return maybe_constructor;
5482 void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5483 // Never overwrite a back pointer with a constructor.
5484 DCHECK(!constructor_or_backpointer()->IsMap());
5485 set_constructor_or_backpointer(constructor, mode);
5489 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5490 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5491 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5493 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5494 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5495 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5497 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5498 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5500 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5501 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5502 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5503 kExpectedReceiverTypeOffset)
5505 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5506 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5507 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5509 ACCESSORS(Box, value, Object, kValueOffset)
5511 ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
5512 SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
5513 ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
5514 ACCESSORS(PrototypeInfo, constructor_name, Object, kConstructorNameOffset)
5516 ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
5518 ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
5521 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5522 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5524 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5525 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5526 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5528 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5529 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5530 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5531 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5532 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5533 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5534 SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
5535 BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
5536 kCanInterceptSymbolsBit)
5537 BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
5538 BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
5540 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5541 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5543 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5544 SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
5545 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5546 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5548 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5549 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5550 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5551 kPrototypeTemplateOffset)
5552 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5553 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5554 kNamedPropertyHandlerOffset)
5555 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5556 kIndexedPropertyHandlerOffset)
5557 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5558 kInstanceTemplateOffset)
5559 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5560 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5561 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5562 kInstanceCallHandlerOffset)
5563 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5564 kAccessCheckInfoOffset)
5565 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5567 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5568 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5569 kInternalFieldCountOffset)
5571 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5573 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5574 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5575 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5576 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5577 kPretenureCreateCountOffset)
5578 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5579 kDependentCodeOffset)
5580 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5581 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5583 ACCESSORS(Script, source, Object, kSourceOffset)
5584 ACCESSORS(Script, name, Object, kNameOffset)
5585 ACCESSORS(Script, id, Smi, kIdOffset)
5586 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5587 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5588 ACCESSORS(Script, context_data, Object, kContextOffset)
5589 ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5590 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5591 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5592 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5593 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5594 kEvalFrominstructionsOffsetOffset)
5595 ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
5596 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5597 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5598 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5600 Script::CompilationType Script::compilation_type() {
5601 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5602 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5604 void Script::set_compilation_type(CompilationType type) {
5605 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5606 type == COMPILATION_TYPE_EVAL));
5608 bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
5609 void Script::set_hide_source(bool value) {
5610 set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
5612 Script::CompilationState Script::compilation_state() {
5613 return BooleanBit::get(flags(), kCompilationStateBit) ?
5614 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5616 void Script::set_compilation_state(CompilationState state) {
5617 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5618 state == COMPILATION_STATE_COMPILED));
5620 ScriptOriginOptions Script::origin_options() {
5621 return ScriptOriginOptions((flags()->value() & kOriginOptionsMask) >>
5622 kOriginOptionsShift);
5624 void Script::set_origin_options(ScriptOriginOptions origin_options) {
5625 DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
5626 set_flags(Smi::FromInt((flags()->value() & ~kOriginOptionsMask) |
5627 (origin_options.Flags() << kOriginOptionsShift)));
5631 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5632 ACCESSORS(DebugInfo, code, Code, kCodeIndex)
5633 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5635 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5636 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5637 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5638 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5640 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5641 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5642 kOptimizedCodeMapOffset)
5643 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5644 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5645 kFeedbackVectorOffset)
5647 SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
5649 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5650 kInstanceClassNameOffset)
5651 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5652 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5653 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5654 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5657 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5658 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5659 kHiddenPrototypeBit)
5660 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5661 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5662 kNeedsAccessCheckBit)
5663 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5664 kReadOnlyPrototypeBit)
5665 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5666 kRemovePrototypeBit)
5667 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5669 BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
5670 BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
5672 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5674 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5677 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
5678 kAllowLazyCompilation)
5679 BOOL_ACCESSORS(SharedFunctionInfo,
5681 allows_lazy_compilation_without_context,
5682 kAllowLazyCompilationWithoutContext)
5683 BOOL_ACCESSORS(SharedFunctionInfo,
5687 BOOL_ACCESSORS(SharedFunctionInfo,
5689 has_duplicate_parameters,
5690 kHasDuplicateParameters)
5691 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5692 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
5693 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
5697 #if V8_HOST_ARCH_32_BIT
5698 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5699 SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
5700 kFormalParameterCountOffset)
5701 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5702 kExpectedNofPropertiesOffset)
5703 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5704 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5705 kStartPositionAndTypeOffset)
5706 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5707 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5708 kFunctionTokenPositionOffset)
5709 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5710 kCompilerHintsOffset)
5711 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5712 kOptCountAndBailoutReasonOffset)
5713 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5714 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5715 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5719 #if V8_TARGET_LITTLE_ENDIAN
5720 #define PSEUDO_SMI_LO_ALIGN 0
5721 #define PSEUDO_SMI_HI_ALIGN kIntSize
5723 #define PSEUDO_SMI_LO_ALIGN kIntSize
5724 #define PSEUDO_SMI_HI_ALIGN 0
5727 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5728 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
5729 int holder::name() const { \
5730 int value = READ_INT_FIELD(this, offset); \
5731 DCHECK(kHeapObjectTag == 1); \
5732 DCHECK((value & kHeapObjectTag) == 0); \
5733 return value >> 1; \
5735 void holder::set_##name(int value) { \
5736 DCHECK(kHeapObjectTag == 1); \
5737 DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5738 WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag); \
5741 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5742 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5743 INT_ACCESSORS(holder, name, offset)
5746 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5747 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5748 kFormalParameterCountOffset)
5750 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5751 expected_nof_properties,
5752 kExpectedNofPropertiesOffset)
5753 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5755 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5756 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5757 start_position_and_type,
5758 kStartPositionAndTypeOffset)
5760 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5761 function_token_position,
5762 kFunctionTokenPositionOffset)
5763 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5765 kCompilerHintsOffset)
5767 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5768 opt_count_and_bailout_reason,
5769 kOptCountAndBailoutReasonOffset)
5770 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5772 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5774 kAstNodeCountOffset)
5775 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5777 kProfilerTicksOffset)
5782 BOOL_GETTER(SharedFunctionInfo,
5784 optimization_disabled,
5785 kOptimizationDisabled)
5788 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5789 set_compiler_hints(BooleanBit::set(compiler_hints(),
5790 kOptimizationDisabled,
5795 LanguageMode SharedFunctionInfo::language_mode() {
5796 STATIC_ASSERT(LANGUAGE_END == 3);
5797 return construct_language_mode(
5798 BooleanBit::get(compiler_hints(), kStrictModeFunction),
5799 BooleanBit::get(compiler_hints(), kStrongModeFunction));
5803 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5804 STATIC_ASSERT(LANGUAGE_END == 3);
5805 // We only allow language mode transitions that set the same language mode
5806 // again or go up in the chain:
5807 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
5808 int hints = compiler_hints();
5809 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
5810 hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
5811 set_compiler_hints(hints);
5815 FunctionKind SharedFunctionInfo::kind() {
5816 return FunctionKindBits::decode(compiler_hints());
5820 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5821 DCHECK(IsValidFunctionKind(kind));
5822 int hints = compiler_hints();
5823 hints = FunctionKindBits::update(hints, kind);
5824 set_compiler_hints(hints);
5828 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
5830 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5831 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
5832 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5833 name_should_print_as_anonymous,
5834 kNameShouldPrintAsAnonymous)
5835 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5836 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5837 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5838 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
5840 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5841 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5842 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5843 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
5845 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
5846 kIsAccessorFunction)
5847 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
5848 kIsDefaultConstructor)
5850 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5851 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5853 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5855 bool Script::HasValidSource() {
5856 Object* src = this->source();
5857 if (!src->IsString()) return true;
5858 String* src_str = String::cast(src);
5859 if (!StringShape(src_str).IsExternal()) return true;
5860 if (src_str->IsOneByteRepresentation()) {
5861 return ExternalOneByteString::cast(src)->resource() != NULL;
5862 } else if (src_str->IsTwoByteRepresentation()) {
5863 return ExternalTwoByteString::cast(src)->resource() != NULL;
5869 void SharedFunctionInfo::DontAdaptArguments() {
5870 DCHECK(code()->kind() == Code::BUILTIN);
5871 set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
5875 int SharedFunctionInfo::start_position() const {
5876 return start_position_and_type() >> kStartPositionShift;
5880 void SharedFunctionInfo::set_start_position(int start_position) {
5881 set_start_position_and_type((start_position << kStartPositionShift)
5882 | (start_position_and_type() & ~kStartPositionMask));
5886 Code* SharedFunctionInfo::code() const {
5887 return Code::cast(READ_FIELD(this, kCodeOffset));
5891 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5892 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5893 WRITE_FIELD(this, kCodeOffset, value);
5894 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5898 void SharedFunctionInfo::ReplaceCode(Code* value) {
5899 // If the GC metadata field is already used then the function was
5900 // enqueued as a code flushing candidate and we remove it now.
5901 if (code()->gc_metadata() != NULL) {
5902 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5903 flusher->EvictCandidate(this);
5906 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5908 Code::VerifyRecompiledCode(code(), value);
5913 if (is_compiled()) set_never_compiled(false);
5917 ScopeInfo* SharedFunctionInfo::scope_info() const {
5918 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5922 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5923 WriteBarrierMode mode) {
5924 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5925 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5928 reinterpret_cast<Object*>(value),
5933 bool SharedFunctionInfo::is_compiled() {
5934 Builtins* builtins = GetIsolate()->builtins();
5935 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
5936 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
5937 return code() != builtins->builtin(Builtins::kCompileLazy);
5941 bool SharedFunctionInfo::has_simple_parameters() {
5942 return scope_info()->HasSimpleParameters();
5946 bool SharedFunctionInfo::HasDebugInfo() {
5947 bool has_debug_info = debug_info()->IsStruct();
5948 DCHECK(!has_debug_info || HasDebugCode());
5949 return has_debug_info;
5953 DebugInfo* SharedFunctionInfo::GetDebugInfo() {
5954 DCHECK(HasDebugInfo());
5955 return DebugInfo::cast(debug_info());
5959 bool SharedFunctionInfo::HasDebugCode() {
5960 return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
5964 bool SharedFunctionInfo::IsApiFunction() {
5965 return function_data()->IsFunctionTemplateInfo();
5969 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5970 DCHECK(IsApiFunction());
5971 return FunctionTemplateInfo::cast(function_data());
5975 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5976 return function_data()->IsSmi();
5980 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5981 DCHECK(HasBuiltinFunctionId());
5982 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5986 bool SharedFunctionInfo::HasBytecodeArray() {
5987 return function_data()->IsBytecodeArray();
5991 BytecodeArray* SharedFunctionInfo::bytecode_array() {
5992 DCHECK(HasBytecodeArray());
5993 return BytecodeArray::cast(function_data());
5997 int SharedFunctionInfo::ic_age() {
5998 return ICAgeBits::decode(counters());
6002 void SharedFunctionInfo::set_ic_age(int ic_age) {
6003 set_counters(ICAgeBits::update(counters(), ic_age));
6007 int SharedFunctionInfo::deopt_count() {
6008 return DeoptCountBits::decode(counters());
6012 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
6013 set_counters(DeoptCountBits::update(counters(), deopt_count));
6017 void SharedFunctionInfo::increment_deopt_count() {
6018 int value = counters();
6019 int deopt_count = DeoptCountBits::decode(value);
6020 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6021 set_counters(DeoptCountBits::update(value, deopt_count));
6025 int SharedFunctionInfo::opt_reenable_tries() {
6026 return OptReenableTriesBits::decode(counters());
6030 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6031 set_counters(OptReenableTriesBits::update(counters(), tries));
6035 int SharedFunctionInfo::opt_count() {
6036 return OptCountBits::decode(opt_count_and_bailout_reason());
6040 void SharedFunctionInfo::set_opt_count(int opt_count) {
6041 set_opt_count_and_bailout_reason(
6042 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6046 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
6047 return static_cast<BailoutReason>(
6048 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6052 bool SharedFunctionInfo::has_deoptimization_support() {
6053 Code* code = this->code();
6054 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6058 void SharedFunctionInfo::TryReenableOptimization() {
6059 int tries = opt_reenable_tries();
6060 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6061 // We reenable optimization whenever the number of tries is a large
6062 // enough power of 2.
6063 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6064 set_optimization_disabled(false);
6071 void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
6072 set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
6073 opt_count_and_bailout_reason(), reason));
6077 bool SharedFunctionInfo::IsBuiltin() {
6078 Object* script_obj = script();
6079 if (script_obj->IsUndefined()) return true;
6080 Script* script = Script::cast(script_obj);
6081 Script::Type type = static_cast<Script::Type>(script->type()->value());
6082 return type != Script::TYPE_NORMAL;
6086 bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }
6089 bool JSFunction::IsBuiltin() { return shared()->IsBuiltin(); }
6092 bool JSFunction::IsSubjectToDebugging() {
6093 return shared()->IsSubjectToDebugging();
6097 bool JSFunction::NeedsArgumentsAdaption() {
6098 return shared()->internal_formal_parameter_count() !=
6099 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
6103 bool JSFunction::IsOptimized() {
6104 return code()->kind() == Code::OPTIMIZED_FUNCTION;
6108 bool JSFunction::IsMarkedForOptimization() {
6109 return code() == GetIsolate()->builtins()->builtin(
6110 Builtins::kCompileOptimized);
6114 bool JSFunction::IsMarkedForConcurrentOptimization() {
6115 return code() == GetIsolate()->builtins()->builtin(
6116 Builtins::kCompileOptimizedConcurrent);
6120 bool JSFunction::IsInOptimizationQueue() {
6121 return code() == GetIsolate()->builtins()->builtin(
6122 Builtins::kInOptimizationQueue);
6126 bool JSFunction::IsInobjectSlackTrackingInProgress() {
6127 return has_initial_map() &&
6128 initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
6132 Code* JSFunction::code() {
6134 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6138 void JSFunction::set_code(Code* value) {
6139 DCHECK(!GetHeap()->InNewSpace(value));
6140 Address entry = value->entry();
6141 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6142 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6144 HeapObject::RawField(this, kCodeEntryOffset),
6149 void JSFunction::set_code_no_write_barrier(Code* value) {
6150 DCHECK(!GetHeap()->InNewSpace(value));
6151 Address entry = value->entry();
6152 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6156 void JSFunction::ReplaceCode(Code* code) {
6157 bool was_optimized = IsOptimized();
6158 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6160 if (was_optimized && is_optimized) {
6161 shared()->EvictFromOptimizedCodeMap(this->code(),
6162 "Replacing with another optimized code");
6167 // Add/remove the function from the list of optimized functions for this
6168 // context based on the state change.
6169 if (!was_optimized && is_optimized) {
6170 context()->native_context()->AddOptimizedFunction(this);
6172 if (was_optimized && !is_optimized) {
6173 // TODO(titzer): linear in the number of optimized functions; fix!
6174 context()->native_context()->RemoveOptimizedFunction(this);
6179 Context* JSFunction::context() {
6180 return Context::cast(READ_FIELD(this, kContextOffset));
6184 JSObject* JSFunction::global_proxy() {
6185 return context()->global_proxy();
6189 void JSFunction::set_context(Object* value) {
6190 DCHECK(value->IsUndefined() || value->IsContext());
6191 WRITE_FIELD(this, kContextOffset, value);
6192 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6195 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6196 kPrototypeOrInitialMapOffset)
6199 Map* JSFunction::initial_map() {
6200 return Map::cast(prototype_or_initial_map());
6204 bool JSFunction::has_initial_map() {
6205 return prototype_or_initial_map()->IsMap();
6209 bool JSFunction::has_instance_prototype() {
6210 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6214 bool JSFunction::has_prototype() {
6215 return map()->has_non_instance_prototype() || has_instance_prototype();
6219 Object* JSFunction::instance_prototype() {
6220 DCHECK(has_instance_prototype());
6221 if (has_initial_map()) return initial_map()->prototype();
6222 // When there is no initial map and the prototype is a JSObject, the
6223 // initial map field is used for the prototype field.
6224 return prototype_or_initial_map();
6228 Object* JSFunction::prototype() {
6229 DCHECK(has_prototype());
6230 // If the function's prototype property has been set to a non-JSObject
6231 // value, that value is stored in the constructor field of the map.
6232 if (map()->has_non_instance_prototype()) {
6233 Object* prototype = map()->GetConstructor();
6234 // The map must have a prototype in that field, not a back pointer.
6235 DCHECK(!prototype->IsMap());
6238 return instance_prototype();
6242 bool JSFunction::should_have_prototype() {
6243 return map()->function_with_prototype();
6247 bool JSFunction::is_compiled() {
6248 Builtins* builtins = GetIsolate()->builtins();
6249 return code() != builtins->builtin(Builtins::kCompileLazy) &&
6250 code() != builtins->builtin(Builtins::kCompileOptimized) &&
6251 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6255 bool JSFunction::has_simple_parameters() {
6256 return shared()->has_simple_parameters();
6260 FixedArray* JSFunction::literals() {
6261 DCHECK(!shared()->bound());
6262 return literals_or_bindings();
6266 void JSFunction::set_literals(FixedArray* literals) {
6267 DCHECK(!shared()->bound());
6268 set_literals_or_bindings(literals);
6272 FixedArray* JSFunction::function_bindings() {
6273 DCHECK(shared()->bound());
6274 return literals_or_bindings();
6278 void JSFunction::set_function_bindings(FixedArray* bindings) {
6279 DCHECK(shared()->bound());
6280 // Bound function literal may be initialized to the empty fixed array
6281 // before the bindings are set.
6282 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6283 bindings->map() == GetHeap()->fixed_array_map());
6284 set_literals_or_bindings(bindings);
6288 int JSFunction::NumberOfLiterals() {
6289 DCHECK(!shared()->bound());
6290 return literals()->length();
6294 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6295 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6296 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6297 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6300 void JSProxy::InitializeBody(int object_size, Object* value) {
6301 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6302 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6303 WRITE_FIELD(this, offset, value);
6308 ACCESSORS(JSCollection, table, Object, kTableOffset)
6311 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6312 template<class Derived, class TableType> \
6313 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6314 return type::cast(READ_FIELD(this, offset)); \
6316 template<class Derived, class TableType> \
6317 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6318 type* value, WriteBarrierMode mode) { \
6319 WRITE_FIELD(this, offset, value); \
6320 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6323 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6324 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6325 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6327 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6330 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6331 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6334 Address Foreign::foreign_address() {
6335 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6339 void Foreign::set_foreign_address(Address value) {
6340 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6344 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6345 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6346 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6347 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6348 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6350 bool JSGeneratorObject::is_suspended() {
6351 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6352 DCHECK_EQ(kGeneratorClosed, 0);
6353 return continuation() > 0;
6356 bool JSGeneratorObject::is_closed() {
6357 return continuation() == kGeneratorClosed;
6360 bool JSGeneratorObject::is_executing() {
6361 return continuation() == kGeneratorExecuting;
6364 ACCESSORS(JSModule, context, Object, kContextOffset)
6365 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6368 ACCESSORS(JSValue, value, Object, kValueOffset)
6371 HeapNumber* HeapNumber::cast(Object* object) {
6372 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6373 return reinterpret_cast<HeapNumber*>(object);
6377 const HeapNumber* HeapNumber::cast(const Object* object) {
6378 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6379 return reinterpret_cast<const HeapNumber*>(object);
6383 ACCESSORS(JSDate, value, Object, kValueOffset)
6384 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6385 ACCESSORS(JSDate, year, Object, kYearOffset)
6386 ACCESSORS(JSDate, month, Object, kMonthOffset)
6387 ACCESSORS(JSDate, day, Object, kDayOffset)
6388 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6389 ACCESSORS(JSDate, hour, Object, kHourOffset)
6390 ACCESSORS(JSDate, min, Object, kMinOffset)
6391 ACCESSORS(JSDate, sec, Object, kSecOffset)
6394 SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
6395 ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
6396 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6397 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6398 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6399 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6402 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6403 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6404 INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
6405 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6406 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6407 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6408 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6409 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6412 void Code::WipeOutHeader() {
6413 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6414 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6415 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6416 // Do not wipe out major/minor keys on a code stub or IC
6417 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6418 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6423 Object* Code::type_feedback_info() {
6424 DCHECK(kind() == FUNCTION);
6425 return raw_type_feedback_info();
6429 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6430 DCHECK(kind() == FUNCTION);
6431 set_raw_type_feedback_info(value, mode);
6432 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6437 uint32_t Code::stub_key() {
6438 DCHECK(IsCodeStubOrIC());
6439 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6440 return static_cast<uint32_t>(smi_key->value());
6444 void Code::set_stub_key(uint32_t key) {
6445 DCHECK(IsCodeStubOrIC());
6446 set_raw_type_feedback_info(Smi::FromInt(key));
6450 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6451 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6454 byte* Code::instruction_start() {
6455 return FIELD_ADDR(this, kHeaderSize);
6459 byte* Code::instruction_end() {
6460 return instruction_start() + instruction_size();
6464 int Code::body_size() {
6465 return RoundUp(instruction_size(), kObjectAlignment);
6469 ByteArray* Code::unchecked_relocation_info() {
6470 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6474 byte* Code::relocation_start() {
6475 return unchecked_relocation_info()->GetDataStartAddress();
6479 int Code::relocation_size() {
6480 return unchecked_relocation_info()->length();
6484 byte* Code::entry() {
6485 return instruction_start();
6489 bool Code::contains(byte* inner_pointer) {
6490 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6494 int Code::ExecutableSize() {
6495 // Check that the assumptions about the layout of the code object holds.
6496 DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6498 return instruction_size() + Code::kHeaderSize;
6502 int Code::CodeSize() { return SizeFor(body_size()); }
6505 ACCESSORS(JSArray, length, Object, kLengthOffset)
6508 void* JSArrayBuffer::backing_store() const {
6509 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6510 return reinterpret_cast<void*>(ptr);
6514 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6515 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6516 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6520 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6523 void JSArrayBuffer::set_bit_field(uint32_t bits) {
6524 if (kInt32Size != kPointerSize) {
6525 #if V8_TARGET_LITTLE_ENDIAN
6526 WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
6528 WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
6531 WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
6535 uint32_t JSArrayBuffer::bit_field() const {
6536 return READ_UINT32_FIELD(this, kBitFieldOffset);
6540 bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
6543 void JSArrayBuffer::set_is_external(bool value) {
6544 set_bit_field(IsExternal::update(bit_field(), value));
6548 bool JSArrayBuffer::is_neuterable() {
6549 return IsNeuterable::decode(bit_field());
6553 void JSArrayBuffer::set_is_neuterable(bool value) {
6554 set_bit_field(IsNeuterable::update(bit_field(), value));
6558 bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
6561 void JSArrayBuffer::set_was_neutered(bool value) {
6562 set_bit_field(WasNeutered::update(bit_field(), value));
6566 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
6569 void JSArrayBuffer::set_is_shared(bool value) {
6570 set_bit_field(IsShared::update(bit_field(), value));
6574 Object* JSArrayBufferView::byte_offset() const {
6575 if (WasNeutered()) return Smi::FromInt(0);
6576 return Object::cast(READ_FIELD(this, kByteOffsetOffset));
6580 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
6581 WRITE_FIELD(this, kByteOffsetOffset, value);
6582 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
6586 Object* JSArrayBufferView::byte_length() const {
6587 if (WasNeutered()) return Smi::FromInt(0);
6588 return Object::cast(READ_FIELD(this, kByteLengthOffset));
6592 void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
6593 WRITE_FIELD(this, kByteLengthOffset, value);
6594 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
6598 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6600 ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
6601 ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
6605 bool JSArrayBufferView::WasNeutered() const {
6606 return JSArrayBuffer::cast(buffer())->was_neutered();
6610 Object* JSTypedArray::length() const {
6611 if (WasNeutered()) return Smi::FromInt(0);
6612 return Object::cast(READ_FIELD(this, kLengthOffset));
6616 uint32_t JSTypedArray::length_value() const {
6617 if (WasNeutered()) return 0;
6619 CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
6624 void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
6625 WRITE_FIELD(this, kLengthOffset, value);
6626 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
6631 ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
6635 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6638 JSRegExp::Type JSRegExp::TypeTag() {
6639 Object* data = this->data();
6640 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6641 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6642 return static_cast<JSRegExp::Type>(smi->value());
6646 int JSRegExp::CaptureCount() {
6647 switch (TypeTag()) {
6651 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6659 JSRegExp::Flags JSRegExp::GetFlags() {
6660 DCHECK(this->data()->IsFixedArray());
6661 Object* data = this->data();
6662 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6663 return Flags(smi->value());
6667 String* JSRegExp::Pattern() {
6668 DCHECK(this->data()->IsFixedArray());
6669 Object* data = this->data();
6670 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6675 Object* JSRegExp::DataAt(int index) {
6676 DCHECK(TypeTag() != NOT_COMPILED);
6677 return FixedArray::cast(data())->get(index);
6681 void JSRegExp::SetDataAt(int index, Object* value) {
6682 DCHECK(TypeTag() != NOT_COMPILED);
6683 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6684 FixedArray::cast(data())->set(index, value);
6688 ElementsKind JSObject::GetElementsKind() {
6689 ElementsKind kind = map()->elements_kind();
6690 #if VERIFY_HEAP && DEBUG
6691 FixedArrayBase* fixed_array =
6692 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6694 // If a GC was caused while constructing this object, the elements
6695 // pointer may point to a one pointer filler map.
6696 if (ElementsAreSafeToExamine()) {
6697 Map* map = fixed_array->map();
6698 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6699 (map == GetHeap()->fixed_array_map() ||
6700 map == GetHeap()->fixed_cow_array_map())) ||
6701 (IsFastDoubleElementsKind(kind) &&
6702 (fixed_array->IsFixedDoubleArray() ||
6703 fixed_array == GetHeap()->empty_fixed_array())) ||
6704 (kind == DICTIONARY_ELEMENTS &&
6705 fixed_array->IsFixedArray() &&
6706 fixed_array->IsDictionary()) ||
6707 (kind > DICTIONARY_ELEMENTS));
6708 DCHECK(!IsSloppyArgumentsElements(kind) ||
6709 (elements()->IsFixedArray() && elements()->length() >= 2));
6716 bool JSObject::HasFastObjectElements() {
6717 return IsFastObjectElementsKind(GetElementsKind());
6721 bool JSObject::HasFastSmiElements() {
6722 return IsFastSmiElementsKind(GetElementsKind());
6726 bool JSObject::HasFastSmiOrObjectElements() {
6727 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6731 bool JSObject::HasFastDoubleElements() {
6732 return IsFastDoubleElementsKind(GetElementsKind());
6736 bool JSObject::HasFastHoleyElements() {
6737 return IsFastHoleyElementsKind(GetElementsKind());
6741 bool JSObject::HasFastElements() {
6742 return IsFastElementsKind(GetElementsKind());
6746 bool JSObject::HasDictionaryElements() {
6747 return GetElementsKind() == DICTIONARY_ELEMENTS;
6751 bool JSObject::HasFastArgumentsElements() {
6752 return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
6756 bool JSObject::HasSlowArgumentsElements() {
6757 return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
6761 bool JSObject::HasSloppyArgumentsElements() {
6762 return IsSloppyArgumentsElements(GetElementsKind());
6766 bool JSObject::HasFixedTypedArrayElements() {
6767 HeapObject* array = elements();
6768 DCHECK(array != NULL);
6769 return array->IsFixedTypedArrayBase();
6773 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6774 bool JSObject::HasFixed##Type##Elements() { \
6775 HeapObject* array = elements(); \
6776 DCHECK(array != NULL); \
6777 if (!array->IsHeapObject()) \
6779 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6782 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6784 #undef FIXED_TYPED_ELEMENTS_CHECK
6787 bool JSObject::HasNamedInterceptor() {
6788 return map()->has_named_interceptor();
6792 bool JSObject::HasIndexedInterceptor() {
6793 return map()->has_indexed_interceptor();
6797 NameDictionary* JSObject::property_dictionary() {
6798 DCHECK(!HasFastProperties());
6799 DCHECK(!IsGlobalObject());
6800 return NameDictionary::cast(properties());
6804 GlobalDictionary* JSObject::global_dictionary() {
6805 DCHECK(!HasFastProperties());
6806 DCHECK(IsGlobalObject());
6807 return GlobalDictionary::cast(properties());
6811 SeededNumberDictionary* JSObject::element_dictionary() {
6812 DCHECK(HasDictionaryElements());
6813 return SeededNumberDictionary::cast(elements());
6817 bool Name::IsHashFieldComputed(uint32_t field) {
6818 return (field & kHashNotComputedMask) == 0;
6822 bool Name::HasHashCode() {
6823 return IsHashFieldComputed(hash_field());
6827 uint32_t Name::Hash() {
6828 // Fast case: has hash code already been computed?
6829 uint32_t field = hash_field();
6830 if (IsHashFieldComputed(field)) return field >> kHashShift;
6831 // Slow case: compute hash code and set it. Has to be a string.
6832 return String::cast(this)->ComputeAndSetHash();
6836 bool Name::IsPrivate() {
6837 return this->IsSymbol() && Symbol::cast(this)->is_private();
6841 StringHasher::StringHasher(int length, uint32_t seed)
6843 raw_running_hash_(seed),
6845 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6846 is_first_char_(true) {
6847 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6851 bool StringHasher::has_trivial_hash() {
6852 return length_ > String::kMaxHashCalcLength;
6856 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6858 running_hash += (running_hash << 10);
6859 running_hash ^= (running_hash >> 6);
6860 return running_hash;
6864 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6865 running_hash += (running_hash << 3);
6866 running_hash ^= (running_hash >> 11);
6867 running_hash += (running_hash << 15);
6868 if ((running_hash & String::kHashBitMask) == 0) {
6871 return running_hash;
6875 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
6876 const uc16* chars, int length) {
6877 DCHECK_NOT_NULL(chars);
6878 DCHECK(length >= 0);
6879 for (int i = 0; i < length; ++i) {
6880 running_hash = AddCharacterCore(running_hash, *chars++);
6882 return running_hash;
6886 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
6889 DCHECK_NOT_NULL(chars);
6890 DCHECK(length >= 0);
6891 for (int i = 0; i < length; ++i) {
6892 uint16_t c = static_cast<uint16_t>(*chars++);
6893 running_hash = AddCharacterCore(running_hash, c);
6895 return running_hash;
6899 void StringHasher::AddCharacter(uint16_t c) {
6900 // Use the Jenkins one-at-a-time hash function to update the hash
6901 // for the given character.
6902 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6906 bool StringHasher::UpdateIndex(uint16_t c) {
6907 DCHECK(is_array_index_);
6908 if (c < '0' || c > '9') {
6909 is_array_index_ = false;
6913 if (is_first_char_) {
6914 is_first_char_ = false;
6915 if (c == '0' && length_ > 1) {
6916 is_array_index_ = false;
6920 if (array_index_ > 429496729U - ((d + 3) >> 3)) {
6921 is_array_index_ = false;
6924 array_index_ = array_index_ * 10 + d;
6929 template<typename Char>
6930 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6931 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6933 if (is_array_index_) {
6934 for (; i < length; i++) {
6935 AddCharacter(chars[i]);
6936 if (!UpdateIndex(chars[i])) {
6942 for (; i < length; i++) {
6943 DCHECK(!is_array_index_);
6944 AddCharacter(chars[i]);
6949 template <typename schar>
6950 uint32_t StringHasher::HashSequentialString(const schar* chars,
6953 StringHasher hasher(length, seed);
6954 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6955 return hasher.GetHashField();
6959 IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
6960 : StringHasher(len, seed) {}
6963 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6964 IteratingStringHasher hasher(string->length(), seed);
6966 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6967 ConsString* cons_string = String::VisitFlat(&hasher, string);
6968 if (cons_string == nullptr) return hasher.GetHashField();
6969 hasher.VisitConsString(cons_string);
6970 return hasher.GetHashField();
6974 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6976 AddCharacters(chars, length);
6980 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6982 AddCharacters(chars, length);
6986 bool Name::AsArrayIndex(uint32_t* index) {
6987 return IsString() && String::cast(this)->AsArrayIndex(index);
6991 bool String::AsArrayIndex(uint32_t* index) {
6992 uint32_t field = hash_field();
6993 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6996 return SlowAsArrayIndex(index);
7000 void String::SetForwardedInternalizedString(String* canonical) {
7001 DCHECK(IsInternalizedString());
7002 DCHECK(HasHashCode());
7003 if (canonical == this) return; // No need to forward.
7004 DCHECK(SlowEquals(canonical));
7005 DCHECK(canonical->IsInternalizedString());
7006 DCHECK(canonical->HasHashCode());
7007 WRITE_FIELD(this, kHashFieldSlot, canonical);
7008 // Setting the hash field to a tagged value sets the LSB, causing the hash
7009 // code to be interpreted as uninitialized. We use this fact to recognize
7010 // that we have a forwarded string.
7011 DCHECK(!HasHashCode());
7015 String* String::GetForwardedInternalizedString() {
7016 DCHECK(IsInternalizedString());
7017 if (HasHashCode()) return this;
7018 String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
7019 DCHECK(canonical->IsInternalizedString());
7020 DCHECK(SlowEquals(canonical));
7021 DCHECK(canonical->HasHashCode());
7026 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
7028 LanguageMode language_mode) {
7030 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7031 return GetProperty(&it, language_mode);
7035 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
7036 Handle<Name> name) {
7037 // Call the "has" trap on proxies.
7038 if (object->IsJSProxy()) {
7039 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7040 return JSProxy::HasPropertyWithHandler(proxy, name);
7043 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
7044 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7048 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7049 Handle<Name> name) {
7050 // Call the "has" trap on proxies.
7051 if (object->IsJSProxy()) {
7052 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7053 return JSProxy::HasPropertyWithHandler(proxy, name);
7056 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
7057 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7061 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
7062 Handle<JSReceiver> object, Handle<Name> name) {
7064 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7065 return GetPropertyAttributes(&it);
7069 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7070 Handle<JSReceiver> object, Handle<Name> name) {
7071 LookupIterator it = LookupIterator::PropertyOrElement(
7072 name->GetIsolate(), object, name, LookupIterator::HIDDEN);
7073 return GetPropertyAttributes(&it);
7077 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7078 // Call the "has" trap on proxies.
7079 if (object->IsJSProxy()) {
7080 Isolate* isolate = object->GetIsolate();
7081 Handle<Name> name = isolate->factory()->Uint32ToString(index);
7082 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7083 return JSProxy::HasPropertyWithHandler(proxy, name);
7086 Maybe<PropertyAttributes> result = GetElementAttributes(object, index);
7087 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7091 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
7093 // Call the "has" trap on proxies.
7094 if (object->IsJSProxy()) {
7095 Isolate* isolate = object->GetIsolate();
7096 Handle<Name> name = isolate->factory()->Uint32ToString(index);
7097 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7098 return JSProxy::HasPropertyWithHandler(proxy, name);
7101 Maybe<PropertyAttributes> result = GetOwnElementAttributes(object, index);
7102 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7106 Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
7107 Handle<JSReceiver> object, uint32_t index) {
7108 Isolate* isolate = object->GetIsolate();
7109 LookupIterator it(isolate, object, index);
7110 return GetPropertyAttributes(&it);
7114 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
7115 Handle<JSReceiver> object, uint32_t index) {
7116 Isolate* isolate = object->GetIsolate();
7117 LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
7118 return GetPropertyAttributes(&it);
7122 bool JSGlobalObject::IsDetached() {
7123 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7127 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
7128 const PrototypeIterator iter(this->GetIsolate(),
7129 const_cast<JSGlobalProxy*>(this));
7130 return iter.GetCurrent() != global;
7134 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
7135 return object->IsJSProxy()
7136 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
7137 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
7141 Object* JSReceiver::GetIdentityHash() {
7143 ? JSProxy::cast(this)->GetIdentityHash()
7144 : JSObject::cast(this)->GetIdentityHash();
7148 bool AccessorInfo::all_can_read() {
7149 return BooleanBit::get(flag(), kAllCanReadBit);
7153 void AccessorInfo::set_all_can_read(bool value) {
7154 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7158 bool AccessorInfo::all_can_write() {
7159 return BooleanBit::get(flag(), kAllCanWriteBit);
7163 void AccessorInfo::set_all_can_write(bool value) {
7164 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7168 bool AccessorInfo::is_special_data_property() {
7169 return BooleanBit::get(flag(), kSpecialDataProperty);
7173 void AccessorInfo::set_is_special_data_property(bool value) {
7174 set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
7178 PropertyAttributes AccessorInfo::property_attributes() {
7179 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
7183 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7184 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
7188 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7189 if (!HasExpectedReceiverType()) return true;
7190 if (!receiver->IsJSObject()) return false;
7191 return FunctionTemplateInfo::cast(expected_receiver_type())
7192 ->IsTemplateFor(JSObject::cast(receiver)->map());
7196 bool AccessorInfo::HasExpectedReceiverType() {
7197 return expected_receiver_type()->IsFunctionTemplateInfo();
7201 Object* AccessorPair::get(AccessorComponent component) {
7202 return component == ACCESSOR_GETTER ? getter() : setter();
7206 void AccessorPair::set(AccessorComponent component, Object* value) {
7207 if (component == ACCESSOR_GETTER) {
7215 void AccessorPair::SetComponents(Object* getter, Object* setter) {
7216 if (!getter->IsNull()) set_getter(getter);
7217 if (!setter->IsNull()) set_setter(setter);
7221 bool AccessorPair::Equals(AccessorPair* pair) {
7222 return (this == pair) || pair->Equals(getter(), setter());
7226 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7227 return (getter() == getter_value) && (setter() == setter_value);
7231 bool AccessorPair::ContainsAccessor() {
7232 return IsJSAccessor(getter()) || IsJSAccessor(setter());
7236 bool AccessorPair::IsJSAccessor(Object* obj) {
7237 return obj->IsSpecFunction() || obj->IsUndefined();
7241 template<typename Derived, typename Shape, typename Key>
7242 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7244 Handle<Object> value) {
7245 this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7249 template<typename Derived, typename Shape, typename Key>
7250 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7252 Handle<Object> value,
7253 PropertyDetails details) {
7254 Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
7258 template <typename Key>
7259 template <typename Dictionary>
7260 void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
7262 Handle<Object> value,
7263 PropertyDetails details) {
7264 STATIC_ASSERT(Dictionary::kEntrySize == 3);
7265 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7266 int index = dict->EntryToIndex(entry);
7267 DisallowHeapAllocation no_gc;
7268 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7269 dict->set(index, *key, mode);
7270 dict->set(index + 1, *value, mode);
7271 dict->set(index + 2, details.AsSmi());
7275 template <typename Dictionary>
7276 void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
7277 Handle<Object> key, Handle<Object> value,
7278 PropertyDetails details) {
7279 STATIC_ASSERT(Dictionary::kEntrySize == 2);
7280 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7281 DCHECK(value->IsPropertyCell());
7282 int index = dict->EntryToIndex(entry);
7283 DisallowHeapAllocation no_gc;
7284 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7285 dict->set(index, *key, mode);
7286 dict->set(index + 1, *value, mode);
7287 PropertyCell::cast(*value)->set_property_details(details);
7291 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7292 DCHECK(other->IsNumber());
7293 return key == static_cast<uint32_t>(other->Number());
7297 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7298 return ComputeIntegerHash(key, 0);
7302 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7304 DCHECK(other->IsNumber());
7305 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7309 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7310 return ComputeIntegerHash(key, seed);
7314 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7317 DCHECK(other->IsNumber());
7318 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7322 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7323 return isolate->factory()->NewNumberFromUint(key);
7327 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7328 // We know that all entries in a hash table had their hash keys created.
7329 // Use that knowledge to have fast failure.
7330 if (key->Hash() != Name::cast(other)->Hash()) return false;
7331 return key->Equals(Name::cast(other));
7335 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7340 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7341 return Name::cast(other)->Hash();
7345 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7347 DCHECK(key->IsUniqueName());
7352 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7353 Handle<NameDictionary> dictionary) {
7354 return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7358 template <typename Dictionary>
7359 PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7360 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7361 Object* raw_value = dict->ValueAt(entry);
7362 DCHECK(raw_value->IsPropertyCell());
7363 PropertyCell* cell = PropertyCell::cast(raw_value);
7364 return cell->property_details();
7368 template <typename Dictionary>
7369 void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7370 PropertyDetails value) {
7371 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7372 Object* raw_value = dict->ValueAt(entry);
7373 DCHECK(raw_value->IsPropertyCell());
7374 PropertyCell* cell = PropertyCell::cast(raw_value);
7375 cell->set_property_details(value);
7379 template <typename Dictionary>
7380 bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7381 DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7382 return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
7386 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7387 return key->SameValue(other);
7391 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7392 return Smi::cast(key->GetHash())->value();
7396 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7398 return Smi::cast(other->GetHash())->value();
7402 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7403 Handle<Object> key) {
7408 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7409 Handle<ObjectHashTable> table, Handle<Object> key) {
7410 return DerivedHashTable::Shrink(table, key);
7414 Object* OrderedHashMap::ValueAt(int entry) {
7415 return get(EntryToIndex(entry) + kValueOffset);
7419 template <int entrysize>
7420 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7421 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7422 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7427 template <int entrysize>
7428 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7431 ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7432 : reinterpret_cast<intptr_t>(*key);
7433 return (uint32_t)(hash & 0xFFFFFFFF);
7437 template <int entrysize>
7438 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7440 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7441 intptr_t hash = reinterpret_cast<intptr_t>(other);
7442 return (uint32_t)(hash & 0xFFFFFFFF);
7446 template <int entrysize>
7447 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7448 Handle<Object> key) {
7453 bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
7456 bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
7459 bool ScopeInfo::HasSimpleParameters() {
7460 return HasSimpleParametersField::decode(Flags());
7464 #define SCOPE_INFO_FIELD_ACCESSORS(name) \
7465 void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
7466 int ScopeInfo::name() { \
7467 if (length() > 0) { \
7468 return Smi::cast(get(k##name))->value(); \
7473 FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
7474 #undef SCOPE_INFO_FIELD_ACCESSORS
7477 void Map::ClearCodeCache(Heap* heap) {
7478 // No write barrier is needed since empty_fixed_array is not in new space.
7479 // Please note this function is used during marking:
7480 // - MarkCompactCollector::MarkUnmarkedObject
7481 // - IncrementalMarking::Step
7482 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7483 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7487 int Map::SlackForArraySize(int old_size, int size_limit) {
7488 const int max_slack = size_limit - old_size;
7489 CHECK_LE(0, max_slack);
7491 DCHECK_LE(1, max_slack);
7494 return Min(max_slack, old_size / 4);
7498 void JSArray::set_length(Smi* length) {
7499 // Don't need a write barrier for a Smi.
7500 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7504 bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
7505 // If the new array won't fit in a some non-trivial fraction of the max old
7506 // space size, then force it to go dictionary mode.
7507 uint32_t max_fast_array_size =
7508 static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
7509 return new_length >= max_fast_array_size;
7513 bool JSArray::AllowsSetLength() {
7514 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7515 DCHECK(result == !HasFixedTypedArrayElements());
7520 void JSArray::SetContent(Handle<JSArray> array,
7521 Handle<FixedArrayBase> storage) {
7522 EnsureCanContainElements(array, storage, storage->length(),
7523 ALLOW_COPIED_DOUBLE_ELEMENTS);
7525 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7526 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7527 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7528 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7529 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7530 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7531 array->set_elements(*storage);
7532 array->set_length(Smi::FromInt(storage->length()));
7536 int TypeFeedbackInfo::ic_total_count() {
7537 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7538 return ICTotalCountField::decode(current);
7542 void TypeFeedbackInfo::set_ic_total_count(int count) {
7543 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7544 value = ICTotalCountField::update(value,
7545 ICTotalCountField::decode(count));
7546 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7550 int TypeFeedbackInfo::ic_with_type_info_count() {
7551 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7552 return ICsWithTypeInfoCountField::decode(current);
7556 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7557 if (delta == 0) return;
7558 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7559 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7560 // We can get negative count here when the type-feedback info is
7561 // shared between two code objects. The can only happen when
7562 // the debugger made a shallow copy of code object (see Heap::CopyCode).
7563 // Since we do not optimize when the debugger is active, we can skip
7564 // this counter update.
7565 if (new_count >= 0) {
7566 new_count &= ICsWithTypeInfoCountField::kMask;
7567 value = ICsWithTypeInfoCountField::update(value, new_count);
7568 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7573 int TypeFeedbackInfo::ic_generic_count() {
7574 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7578 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7579 if (delta == 0) return;
7580 int new_count = ic_generic_count() + delta;
7581 if (new_count >= 0) {
7582 new_count &= ~Smi::kMinValue;
7583 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7588 void TypeFeedbackInfo::initialize_storage() {
7589 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7590 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7591 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7595 void TypeFeedbackInfo::change_own_type_change_checksum() {
7596 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7597 int checksum = OwnTypeChangeChecksum::decode(value);
7598 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7599 value = OwnTypeChangeChecksum::update(value, checksum);
7600 // Ensure packed bit field is in Smi range.
7601 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7602 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7603 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7607 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7608 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7609 int mask = (1 << kTypeChangeChecksumBits) - 1;
7610 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7611 // Ensure packed bit field is in Smi range.
7612 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7613 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7614 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7618 int TypeFeedbackInfo::own_type_change_checksum() {
7619 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7620 return OwnTypeChangeChecksum::decode(value);
7624 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7625 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7626 int mask = (1 << kTypeChangeChecksumBits) - 1;
7627 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7631 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7634 Relocatable::Relocatable(Isolate* isolate) {
7636 prev_ = isolate->relocatable_top();
7637 isolate->set_relocatable_top(this);
7641 Relocatable::~Relocatable() {
7642 DCHECK_EQ(isolate_->relocatable_top(), this);
7643 isolate_->set_relocatable_top(prev_);
7648 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7649 return map->instance_size();
7654 int FixedArray::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7655 return SizeFor(reinterpret_cast<FixedArray*>(object)->synchronized_length());
7660 int StructBodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7661 return map->instance_size();
7665 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7666 v->VisitExternalReference(
7667 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7671 template<typename StaticVisitor>
7672 void Foreign::ForeignIterateBody() {
7673 StaticVisitor::VisitExternalReference(
7674 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7678 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) {
7680 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
7684 template <typename StaticVisitor>
7685 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() {
7686 StaticVisitor::VisitPointer(
7687 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
7691 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7692 typedef v8::String::ExternalOneByteStringResource Resource;
7693 v->VisitExternalOneByteString(
7694 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7698 template <typename StaticVisitor>
7699 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7700 typedef v8::String::ExternalOneByteStringResource Resource;
7701 StaticVisitor::VisitExternalOneByteString(
7702 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7706 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7707 typedef v8::String::ExternalStringResource Resource;
7708 v->VisitExternalTwoByteString(
7709 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7713 template<typename StaticVisitor>
7714 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7715 typedef v8::String::ExternalStringResource Resource;
7716 StaticVisitor::VisitExternalTwoByteString(
7717 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7721 static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
7725 DCHECK(FLAG_unbox_double_fields);
7726 DCHECK(IsAligned(start_offset, kPointerSize) &&
7727 IsAligned(end_offset, kPointerSize));
7729 LayoutDescriptorHelper helper(object->map());
7730 DCHECK(!helper.all_fields_tagged());
7732 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
7733 // Visit all tagged fields.
7734 if (helper.IsTagged(offset)) {
7735 v->VisitPointer(HeapObject::RawField(object, offset));
7741 template<int start_offset, int end_offset, int size>
7742 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7745 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7746 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7747 HeapObject::RawField(obj, end_offset));
7749 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
7754 template<int start_offset>
7755 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7758 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7759 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7760 HeapObject::RawField(obj, object_size));
7762 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
7767 template<class Derived, class TableType>
7768 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7769 TableType* table(TableType::cast(this->table()));
7770 int index = Smi::cast(this->index())->value();
7771 Object* key = table->KeyAt(index);
7772 DCHECK(!key->IsTheHole());
7777 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7778 array->set(0, CurrentKey());
7782 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7783 array->set(0, CurrentKey());
7784 array->set(1, CurrentValue());
7788 Object* JSMapIterator::CurrentValue() {
7789 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7790 int index = Smi::cast(this->index())->value();
7791 Object* value = table->ValueAt(index);
7792 DCHECK(!value->IsTheHole());
7797 String::SubStringRange::SubStringRange(String* string, int first, int length)
7800 length_(length == -1 ? string->length() : length) {}
7803 class String::SubStringRange::iterator final {
7805 typedef std::forward_iterator_tag iterator_category;
7806 typedef int difference_type;
7807 typedef uc16 value_type;
7808 typedef uc16* pointer;
7809 typedef uc16& reference;
7811 iterator(const iterator& other)
7812 : content_(other.content_), offset_(other.offset_) {}
7814 uc16 operator*() { return content_.Get(offset_); }
7815 bool operator==(const iterator& other) const {
7816 return content_.UsesSameString(other.content_) && offset_ == other.offset_;
7818 bool operator!=(const iterator& other) const {
7819 return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
7821 iterator& operator++() {
7825 iterator operator++(int);
7828 friend class String;
7829 iterator(String* from, int offset)
7830 : content_(from->GetFlatContent()), offset_(offset) {}
7831 String::FlatContent content_;
7836 String::SubStringRange::iterator String::SubStringRange::begin() {
7837 return String::SubStringRange::iterator(string_, first_);
7841 String::SubStringRange::iterator String::SubStringRange::end() {
7842 return String::SubStringRange::iterator(string_, first_ + length_);
7847 #undef CAST_ACCESSOR
7848 #undef INT_ACCESSORS
7850 #undef ACCESSORS_TO_SMI
7851 #undef SMI_ACCESSORS
7852 #undef SYNCHRONIZED_SMI_ACCESSORS
7853 #undef NOBARRIER_SMI_ACCESSORS
7855 #undef BOOL_ACCESSORS
7857 #undef FIELD_ADDR_CONST
7859 #undef NOBARRIER_READ_FIELD
7861 #undef NOBARRIER_WRITE_FIELD
7862 #undef WRITE_BARRIER
7863 #undef CONDITIONAL_WRITE_BARRIER
7864 #undef READ_DOUBLE_FIELD
7865 #undef WRITE_DOUBLE_FIELD
7866 #undef READ_INT_FIELD
7867 #undef WRITE_INT_FIELD
7868 #undef READ_INTPTR_FIELD
7869 #undef WRITE_INTPTR_FIELD
7870 #undef READ_UINT8_FIELD
7871 #undef WRITE_UINT8_FIELD
7872 #undef READ_INT8_FIELD
7873 #undef WRITE_INT8_FIELD
7874 #undef READ_UINT16_FIELD
7875 #undef WRITE_UINT16_FIELD
7876 #undef READ_INT16_FIELD
7877 #undef WRITE_INT16_FIELD
7878 #undef READ_UINT32_FIELD
7879 #undef WRITE_UINT32_FIELD
7880 #undef READ_INT32_FIELD
7881 #undef WRITE_INT32_FIELD
7882 #undef READ_FLOAT_FIELD
7883 #undef WRITE_FLOAT_FIELD
7884 #undef READ_UINT64_FIELD
7885 #undef WRITE_UINT64_FIELD
7886 #undef READ_INT64_FIELD
7887 #undef WRITE_INT64_FIELD
7888 #undef READ_BYTE_FIELD
7889 #undef WRITE_BYTE_FIELD
7890 #undef NOBARRIER_READ_BYTE_FIELD
7891 #undef NOBARRIER_WRITE_BYTE_FIELD
7893 } } // namespace v8::internal
7895 #endif // V8_OBJECTS_INL_H_