1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts.h"
18 #include "src/conversions-inl.h"
19 #include "src/factory.h"
20 #include "src/field-index-inl.h"
21 #include "src/heap/heap-inl.h"
22 #include "src/heap/heap.h"
23 #include "src/isolate.h"
24 #include "src/layout-descriptor-inl.h"
25 #include "src/lookup.h"
26 #include "src/objects.h"
27 #include "src/property.h"
28 #include "src/prototype.h"
29 #include "src/transitions-inl.h"
30 #include "src/type-feedback-vector-inl.h"
31 #include "src/types-inl.h"
32 #include "src/v8memory.h"
37 PropertyDetails::PropertyDetails(Smi* smi) {
38 value_ = smi->value();
42 Smi* PropertyDetails::AsSmi() const {
43 // Ensure the upper 2 bits have the same value by sign extending it. This is
44 // necessary to be able to use the 31st bit of the property details.
45 int value = value_ << 1;
46 return Smi::FromInt(value >> 1);
50 int PropertyDetails::field_width_in_words() const {
51 DCHECK(location() == kField);
52 if (!FLAG_unbox_double_fields) return 1;
53 if (kDoubleSize == kPointerSize) return 1;
54 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
58 #define TYPE_CHECKER(type, instancetype) \
59 bool Object::Is##type() const { \
60 return Object::IsHeapObject() && \
61 HeapObject::cast(this)->map()->instance_type() == instancetype; \
65 #define CAST_ACCESSOR(type) \
66 type* type::cast(Object* object) { \
67 SLOW_DCHECK(object->Is##type()); \
68 return reinterpret_cast<type*>(object); \
70 const type* type::cast(const Object* object) { \
71 SLOW_DCHECK(object->Is##type()); \
72 return reinterpret_cast<const type*>(object); \
76 #define INT_ACCESSORS(holder, name, offset) \
77 int holder::name() const { return READ_INT_FIELD(this, offset); } \
78 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
81 #define ACCESSORS(holder, name, type, offset) \
82 type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
83 void holder::set_##name(type* value, WriteBarrierMode mode) { \
84 WRITE_FIELD(this, offset, value); \
85 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
89 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
90 #define ACCESSORS_TO_SMI(holder, name, offset) \
91 Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
92 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
93 WRITE_FIELD(this, offset, value); \
97 // Getter that returns a Smi as an int and writes an int as a Smi.
98 #define SMI_ACCESSORS(holder, name, offset) \
99 int holder::name() const { \
100 Object* value = READ_FIELD(this, offset); \
101 return Smi::cast(value)->value(); \
103 void holder::set_##name(int value) { \
104 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
107 #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \
108 int holder::synchronized_##name() const { \
109 Object* value = ACQUIRE_READ_FIELD(this, offset); \
110 return Smi::cast(value)->value(); \
112 void holder::synchronized_set_##name(int value) { \
113 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \
117 int holder::nobarrier_##name() const { \
118 Object* value = NOBARRIER_READ_FIELD(this, offset); \
119 return Smi::cast(value)->value(); \
121 void holder::nobarrier_set_##name(int value) { \
122 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
125 #define BOOL_GETTER(holder, field, name, offset) \
126 bool holder::name() const { \
127 return BooleanBit::get(field(), offset); \
131 #define BOOL_ACCESSORS(holder, field, name, offset) \
132 bool holder::name() const { \
133 return BooleanBit::get(field(), offset); \
135 void holder::set_##name(bool value) { \
136 set_##field(BooleanBit::set(field(), offset, value)); \
140 bool Object::IsFixedArrayBase() const {
141 return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
145 // External objects are not extensible, so the map check is enough.
146 bool Object::IsExternal() const {
147 return Object::IsHeapObject() &&
148 HeapObject::cast(this)->map() ==
149 HeapObject::cast(this)->GetHeap()->external_map();
153 bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }
156 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
157 TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
158 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
159 TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)
162 #define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
163 bool Object::Is##Type() const { \
164 return Object::IsHeapObject() && \
165 HeapObject::cast(this)->map() == \
166 HeapObject::cast(this)->GetHeap()->type##_map(); \
168 SIMD128_TYPES(SIMD128_TYPE_CHECKER)
169 #undef SIMD128_TYPE_CHECKER
172 bool Object::IsString() const {
173 return Object::IsHeapObject()
174 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
178 bool Object::IsName() const {
179 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
180 return Object::IsHeapObject() &&
181 HeapObject::cast(this)->map()->instance_type() <= LAST_NAME_TYPE;
185 bool Object::IsUniqueName() const {
186 return IsInternalizedString() || IsSymbol();
190 bool Object::IsSpecObject() const {
191 return Object::IsHeapObject()
192 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
196 bool Object::IsSpecFunction() const {
197 if (!Object::IsHeapObject()) return false;
198 InstanceType type = HeapObject::cast(this)->map()->instance_type();
199 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
203 bool Object::IsTemplateInfo() const {
204 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
208 bool Object::IsInternalizedString() const {
209 if (!this->IsHeapObject()) return false;
210 uint32_t type = HeapObject::cast(this)->map()->instance_type();
211 STATIC_ASSERT(kNotInternalizedTag != 0);
212 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
213 (kStringTag | kInternalizedTag);
217 bool Object::IsConsString() const {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();
223 bool Object::IsSlicedString() const {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();
229 bool Object::IsSeqString() const {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();
235 bool Object::IsSeqOneByteString() const {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsOneByteRepresentation();
242 bool Object::IsSeqTwoByteString() const {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();
249 bool Object::IsExternalString() const {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();
255 bool Object::IsExternalOneByteString() const {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsOneByteRepresentation();
262 bool Object::IsExternalTwoByteString() const {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();
269 bool Object::HasValidElements() {
270 // Dictionary is covered under FixedArray.
271 return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
275 bool Object::KeyEquals(Object* second) {
276 Object* first = this;
277 if (second->IsNumber()) {
278 if (first->IsNumber()) return first->Number() == second->Number();
279 Object* temp = first;
283 if (first->IsNumber()) {
284 DCHECK_LE(0, first->Number());
285 uint32_t expected = static_cast<uint32_t>(first->Number());
287 return Name::cast(second)->AsArrayIndex(&index) && index == expected;
289 return Name::cast(first)->Equals(Name::cast(second));
293 Handle<Object> Object::NewStorageFor(Isolate* isolate,
294 Handle<Object> object,
295 Representation representation) {
296 if (representation.IsSmi() && object->IsUninitialized()) {
297 return handle(Smi::FromInt(0), isolate);
299 if (!representation.IsDouble()) return object;
301 if (object->IsUninitialized()) {
303 } else if (object->IsMutableHeapNumber()) {
304 value = HeapNumber::cast(*object)->value();
306 value = object->Number();
308 return isolate->factory()->NewHeapNumber(value, MUTABLE);
312 Handle<Object> Object::WrapForRead(Isolate* isolate,
313 Handle<Object> object,
314 Representation representation) {
315 DCHECK(!object->IsUninitialized());
316 if (!representation.IsDouble()) {
317 DCHECK(object->FitsRepresentation(representation));
320 return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
324 StringShape::StringShape(const String* str)
325 : type_(str->map()->instance_type()) {
327 DCHECK((type_ & kIsNotStringMask) == kStringTag);
331 StringShape::StringShape(Map* map)
332 : type_(map->instance_type()) {
334 DCHECK((type_ & kIsNotStringMask) == kStringTag);
338 StringShape::StringShape(InstanceType t)
339 : type_(static_cast<uint32_t>(t)) {
341 DCHECK((type_ & kIsNotStringMask) == kStringTag);
345 bool StringShape::IsInternalized() {
347 STATIC_ASSERT(kNotInternalizedTag != 0);
348 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
349 (kStringTag | kInternalizedTag);
353 bool String::IsOneByteRepresentation() const {
354 uint32_t type = map()->instance_type();
355 return (type & kStringEncodingMask) == kOneByteStringTag;
359 bool String::IsTwoByteRepresentation() const {
360 uint32_t type = map()->instance_type();
361 return (type & kStringEncodingMask) == kTwoByteStringTag;
365 bool String::IsOneByteRepresentationUnderneath() {
366 uint32_t type = map()->instance_type();
367 STATIC_ASSERT(kIsIndirectStringTag != 0);
368 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
370 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
371 case kOneByteStringTag:
373 case kTwoByteStringTag:
375 default: // Cons or sliced string. Need to go deeper.
376 return GetUnderlying()->IsOneByteRepresentation();
381 bool String::IsTwoByteRepresentationUnderneath() {
382 uint32_t type = map()->instance_type();
383 STATIC_ASSERT(kIsIndirectStringTag != 0);
384 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
386 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
387 case kOneByteStringTag:
389 case kTwoByteStringTag:
391 default: // Cons or sliced string. Need to go deeper.
392 return GetUnderlying()->IsTwoByteRepresentation();
397 bool String::HasOnlyOneByteChars() {
398 uint32_t type = map()->instance_type();
399 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
400 IsOneByteRepresentation();
404 bool StringShape::IsCons() {
405 return (type_ & kStringRepresentationMask) == kConsStringTag;
409 bool StringShape::IsSliced() {
410 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
414 bool StringShape::IsIndirect() {
415 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
419 bool StringShape::IsExternal() {
420 return (type_ & kStringRepresentationMask) == kExternalStringTag;
424 bool StringShape::IsSequential() {
425 return (type_ & kStringRepresentationMask) == kSeqStringTag;
429 StringRepresentationTag StringShape::representation_tag() {
430 uint32_t tag = (type_ & kStringRepresentationMask);
431 return static_cast<StringRepresentationTag>(tag);
435 uint32_t StringShape::encoding_tag() {
436 return type_ & kStringEncodingMask;
440 uint32_t StringShape::full_representation_tag() {
441 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
445 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
446 Internals::kFullStringRepresentationMask);
448 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
449 Internals::kStringEncodingMask);
452 bool StringShape::IsSequentialOneByte() {
453 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
457 bool StringShape::IsSequentialTwoByte() {
458 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
462 bool StringShape::IsExternalOneByte() {
463 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
467 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
468 Internals::kExternalOneByteRepresentationTag);
470 STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);
473 bool StringShape::IsExternalTwoByte() {
474 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
478 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
479 Internals::kExternalTwoByteRepresentationTag);
481 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
484 uc32 FlatStringReader::Get(int index) {
486 return Get<uint8_t>(index);
488 return Get<uc16>(index);
493 template <typename Char>
494 Char FlatStringReader::Get(int index) {
495 DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
496 DCHECK(0 <= index && index <= length_);
497 if (sizeof(Char) == 1) {
498 return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
500 return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
505 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
506 return key->AsHandle(isolate);
510 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
512 return key->AsHandle(isolate);
516 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
518 return key->AsHandle(isolate);
521 template <typename Char>
522 class SequentialStringKey : public HashTableKey {
524 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
525 : string_(string), hash_field_(0), seed_(seed) { }
527 uint32_t Hash() override {
528 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
532 uint32_t result = hash_field_ >> String::kHashShift;
533 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
538 uint32_t HashForObject(Object* other) override {
539 return String::cast(other)->Hash();
542 Vector<const Char> string_;
543 uint32_t hash_field_;
548 class OneByteStringKey : public SequentialStringKey<uint8_t> {
550 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
551 : SequentialStringKey<uint8_t>(str, seed) { }
553 bool IsMatch(Object* string) override {
554 return String::cast(string)->IsOneByteEqualTo(string_);
557 Handle<Object> AsHandle(Isolate* isolate) override;
561 class SeqOneByteSubStringKey : public HashTableKey {
563 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
564 : string_(string), from_(from), length_(length) {
565 DCHECK(string_->IsSeqOneByteString());
568 uint32_t Hash() override {
569 DCHECK(length_ >= 0);
570 DCHECK(from_ + length_ <= string_->length());
571 const uint8_t* chars = string_->GetChars() + from_;
572 hash_field_ = StringHasher::HashSequentialString(
573 chars, length_, string_->GetHeap()->HashSeed());
574 uint32_t result = hash_field_ >> String::kHashShift;
575 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
579 uint32_t HashForObject(Object* other) override {
580 return String::cast(other)->Hash();
583 bool IsMatch(Object* string) override;
584 Handle<Object> AsHandle(Isolate* isolate) override;
587 Handle<SeqOneByteString> string_;
590 uint32_t hash_field_;
594 class TwoByteStringKey : public SequentialStringKey<uc16> {
596 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
597 : SequentialStringKey<uc16>(str, seed) { }
599 bool IsMatch(Object* string) override {
600 return String::cast(string)->IsTwoByteEqualTo(string_);
603 Handle<Object> AsHandle(Isolate* isolate) override;
607 // Utf8StringKey carries a vector of chars as key.
608 class Utf8StringKey : public HashTableKey {
610 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
611 : string_(string), hash_field_(0), seed_(seed) { }
613 bool IsMatch(Object* string) override {
614 return String::cast(string)->IsUtf8EqualTo(string_);
617 uint32_t Hash() override {
618 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
619 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
620 uint32_t result = hash_field_ >> String::kHashShift;
621 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
625 uint32_t HashForObject(Object* other) override {
626 return String::cast(other)->Hash();
629 Handle<Object> AsHandle(Isolate* isolate) override {
630 if (hash_field_ == 0) Hash();
631 return isolate->factory()->NewInternalizedStringFromUtf8(
632 string_, chars_, hash_field_);
635 Vector<const char> string_;
636 uint32_t hash_field_;
637 int chars_; // Caches the number of characters when computing the hash code.
642 bool Object::IsNumber() const {
643 return IsSmi() || IsHeapNumber();
647 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
648 TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
649 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
652 bool Object::IsFiller() const {
653 if (!Object::IsHeapObject()) return false;
654 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
655 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
660 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
661 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
663 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
664 #undef TYPED_ARRAY_TYPE_CHECKER
667 bool Object::IsFixedTypedArrayBase() const {
668 if (!Object::IsHeapObject()) return false;
670 InstanceType instance_type =
671 HeapObject::cast(this)->map()->instance_type();
672 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
673 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
677 bool Object::IsJSReceiver() const {
678 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
679 return IsHeapObject() &&
680 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
684 bool Object::IsJSObject() const {
685 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
686 return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
690 bool Object::IsJSProxy() const {
691 if (!Object::IsHeapObject()) return false;
692 return HeapObject::cast(this)->map()->IsJSProxyMap();
696 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
697 TYPE_CHECKER(JSSet, JS_SET_TYPE)
698 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
699 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
700 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
701 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
702 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
703 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
704 TYPE_CHECKER(Map, MAP_TYPE)
705 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
706 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
707 TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
710 bool Object::IsJSWeakCollection() const {
711 return IsJSWeakMap() || IsJSWeakSet();
715 bool Object::IsDescriptorArray() const {
716 return IsFixedArray();
720 bool Object::IsArrayList() const { return IsFixedArray(); }
723 bool Object::IsLayoutDescriptor() const {
724 return IsSmi() || IsFixedTypedArrayBase();
728 bool Object::IsTransitionArray() const {
729 return IsFixedArray();
733 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
736 bool Object::IsDeoptimizationInputData() const {
737 // Must be a fixed array.
738 if (!IsFixedArray()) return false;
740 // There's no sure way to detect the difference between a fixed array and
741 // a deoptimization data array. Since this is used for asserts we can
742 // check that the length is zero or else the fixed size plus a multiple of
744 int length = FixedArray::cast(this)->length();
745 if (length == 0) return true;
747 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
748 return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
752 bool Object::IsDeoptimizationOutputData() const {
753 if (!IsFixedArray()) return false;
754 // There's actually no way to see the difference between a fixed array and
755 // a deoptimization data array. Since this is used for asserts we can check
756 // that the length is plausible though.
757 if (FixedArray::cast(this)->length() % 2 != 0) return false;
762 bool Object::IsHandlerTable() const {
763 if (!IsFixedArray()) return false;
764 // There's actually no way to see the difference between a fixed array and
765 // a handler table array.
770 bool Object::IsDependentCode() const {
771 if (!IsFixedArray()) return false;
772 // There's actually no way to see the difference between a fixed array and
773 // a dependent codes array.
778 bool Object::IsContext() const {
779 if (!Object::IsHeapObject()) return false;
780 Map* map = HeapObject::cast(this)->map();
781 Heap* heap = map->GetHeap();
782 return (map == heap->function_context_map() ||
783 map == heap->catch_context_map() ||
784 map == heap->with_context_map() ||
785 map == heap->native_context_map() ||
786 map == heap->block_context_map() ||
787 map == heap->module_context_map() ||
788 map == heap->script_context_map());
792 bool Object::IsNativeContext() const {
793 return Object::IsHeapObject() &&
794 HeapObject::cast(this)->map() ==
795 HeapObject::cast(this)->GetHeap()->native_context_map();
799 bool Object::IsScriptContextTable() const {
800 if (!Object::IsHeapObject()) return false;
801 Map* map = HeapObject::cast(this)->map();
802 Heap* heap = map->GetHeap();
803 return map == heap->script_context_table_map();
807 bool Object::IsScopeInfo() const {
808 return Object::IsHeapObject() &&
809 HeapObject::cast(this)->map() ==
810 HeapObject::cast(this)->GetHeap()->scope_info_map();
814 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
817 template <> inline bool Is<JSFunction>(Object* obj) {
818 return obj->IsJSFunction();
822 TYPE_CHECKER(Code, CODE_TYPE)
823 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
824 TYPE_CHECKER(Cell, CELL_TYPE)
825 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
826 TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
827 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
828 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
829 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
830 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
831 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
832 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
835 bool Object::IsStringWrapper() const {
836 return IsJSValue() && JSValue::cast(this)->value()->IsString();
840 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
843 bool Object::IsBoolean() const {
844 return IsOddball() &&
845 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
849 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
850 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
851 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
852 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
855 bool Object::IsJSArrayBufferView() const {
856 return IsJSDataView() || IsJSTypedArray();
860 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
863 template <> inline bool Is<JSArray>(Object* obj) {
864 return obj->IsJSArray();
868 bool Object::IsHashTable() const {
869 return Object::IsHeapObject() &&
870 HeapObject::cast(this)->map() ==
871 HeapObject::cast(this)->GetHeap()->hash_table_map();
875 bool Object::IsWeakHashTable() const {
876 return IsHashTable();
880 bool Object::IsDictionary() const {
881 return IsHashTable() &&
882 this != HeapObject::cast(this)->GetHeap()->string_table();
886 bool Object::IsNameDictionary() const {
887 return IsDictionary();
891 bool Object::IsGlobalDictionary() const { return IsDictionary(); }
894 bool Object::IsSeededNumberDictionary() const {
895 return IsDictionary();
899 bool Object::IsUnseededNumberDictionary() const {
900 return IsDictionary();
904 bool Object::IsStringTable() const {
905 return IsHashTable();
909 bool Object::IsNormalizedMapCache() const {
910 return NormalizedMapCache::IsNormalizedMapCache(this);
914 int NormalizedMapCache::GetIndex(Handle<Map> map) {
915 return map->Hash() % NormalizedMapCache::kEntries;
919 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
920 if (!obj->IsFixedArray()) return false;
921 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
925 if (FLAG_verify_heap) {
926 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
927 NormalizedMapCacheVerify();
934 bool Object::IsCompilationCacheTable() const {
935 return IsHashTable();
939 bool Object::IsCodeCacheHashTable() const {
940 return IsHashTable();
944 bool Object::IsPolymorphicCodeCacheHashTable() const {
945 return IsHashTable();
949 bool Object::IsMapCache() const {
950 return IsHashTable();
954 bool Object::IsObjectHashTable() const {
955 return IsHashTable();
959 bool Object::IsOrderedHashTable() const {
960 return IsHeapObject() &&
961 HeapObject::cast(this)->map() ==
962 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
966 bool Object::IsOrderedHashSet() const {
967 return IsOrderedHashTable();
971 bool Object::IsOrderedHashMap() const {
972 return IsOrderedHashTable();
976 bool Object::IsPrimitive() const {
977 return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
981 bool Object::IsJSGlobalProxy() const {
982 bool result = IsHeapObject() &&
983 (HeapObject::cast(this)->map()->instance_type() ==
984 JS_GLOBAL_PROXY_TYPE);
986 HeapObject::cast(this)->map()->is_access_check_needed());
991 bool Object::IsGlobalObject() const {
992 if (!IsHeapObject()) return false;
993 return HeapObject::cast(this)->map()->IsGlobalObjectMap();
997 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
998 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
1001 bool Object::IsUndetectableObject() const {
1002 return IsHeapObject()
1003 && HeapObject::cast(this)->map()->is_undetectable();
1007 bool Object::IsAccessCheckNeeded() const {
1008 if (!IsHeapObject()) return false;
1009 if (IsJSGlobalProxy()) {
1010 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
1011 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
1012 return proxy->IsDetachedFrom(global);
1014 return HeapObject::cast(this)->map()->is_access_check_needed();
1018 bool Object::IsStruct() const {
1019 if (!IsHeapObject()) return false;
1020 switch (HeapObject::cast(this)->map()->instance_type()) {
1021 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1022 STRUCT_LIST(MAKE_STRUCT_CASE)
1023 #undef MAKE_STRUCT_CASE
1024 default: return false;
1029 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1030 bool Object::Is##Name() const { \
1031 return Object::IsHeapObject() \
1032 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1034 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1035 #undef MAKE_STRUCT_PREDICATE
1038 bool Object::IsUndefined() const {
1039 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1043 bool Object::IsNull() const {
1044 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1048 bool Object::IsTheHole() const {
1049 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1053 bool Object::IsException() const {
1054 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1058 bool Object::IsUninitialized() const {
1059 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1063 bool Object::IsTrue() const {
1064 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1068 bool Object::IsFalse() const {
1069 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1073 bool Object::IsArgumentsMarker() const {
1074 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1078 double Object::Number() {
1081 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1082 : reinterpret_cast<HeapNumber*>(this)->value();
1086 bool Object::IsNaN() const {
1087 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1091 bool Object::IsMinusZero() const {
1092 return this->IsHeapNumber() &&
1093 i::IsMinusZero(HeapNumber::cast(this)->value());
1097 Representation Object::OptimalRepresentation() {
1098 if (!FLAG_track_fields) return Representation::Tagged();
1100 return Representation::Smi();
1101 } else if (FLAG_track_double_fields && IsHeapNumber()) {
1102 return Representation::Double();
1103 } else if (FLAG_track_computed_fields && IsUninitialized()) {
1104 return Representation::None();
1105 } else if (FLAG_track_heap_object_fields) {
1106 DCHECK(IsHeapObject());
1107 return Representation::HeapObject();
1109 return Representation::Tagged();
1114 ElementsKind Object::OptimalElementsKind() {
1115 if (IsSmi()) return FAST_SMI_ELEMENTS;
1116 if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1117 return FAST_ELEMENTS;
1121 bool Object::FitsRepresentation(Representation representation) {
1122 if (FLAG_track_fields && representation.IsNone()) {
1124 } else if (FLAG_track_fields && representation.IsSmi()) {
1126 } else if (FLAG_track_double_fields && representation.IsDouble()) {
1127 return IsMutableHeapNumber() || IsNumber();
1128 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
1129 return IsHeapObject();
1135 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1136 Handle<Object> object) {
1138 isolate, object, handle(isolate->context()->native_context(), isolate));
1142 bool Object::HasSpecificClassOf(String* name) {
1143 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1147 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1149 LanguageMode language_mode) {
1150 LookupIterator it(object, name);
1151 return GetProperty(&it, language_mode);
1155 MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
1157 LanguageMode language_mode) {
1158 LookupIterator it(isolate, object, index);
1159 return GetProperty(&it, language_mode);
1163 MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
1164 uint32_t index, Handle<Object> value,
1165 LanguageMode language_mode) {
1166 LookupIterator it(isolate, object, index);
1167 return SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED);
1171 Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
1172 Isolate* isolate, Handle<Object> receiver) {
1173 PrototypeIterator iter(isolate, receiver);
1174 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
1175 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
1176 return PrototypeIterator::GetCurrent(iter);
1180 return PrototypeIterator::GetCurrent(iter);
1184 MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
1186 LanguageMode language_mode) {
1187 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1188 return GetProperty(object, str, language_mode);
1192 #define FIELD_ADDR(p, offset) \
1193 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1195 #define FIELD_ADDR_CONST(p, offset) \
1196 (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
1198 #define READ_FIELD(p, offset) \
1199 (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
1201 #define ACQUIRE_READ_FIELD(p, offset) \
1202 reinterpret_cast<Object*>(base::Acquire_Load( \
1203 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1205 #define NOBARRIER_READ_FIELD(p, offset) \
1206 reinterpret_cast<Object*>(base::NoBarrier_Load( \
1207 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1209 #define WRITE_FIELD(p, offset, value) \
1210 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1212 #define RELEASE_WRITE_FIELD(p, offset, value) \
1213 base::Release_Store( \
1214 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1215 reinterpret_cast<base::AtomicWord>(value));
1217 #define NOBARRIER_WRITE_FIELD(p, offset, value) \
1218 base::NoBarrier_Store( \
1219 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1220 reinterpret_cast<base::AtomicWord>(value));
1222 #define WRITE_BARRIER(heap, object, offset, value) \
1223 heap->incremental_marking()->RecordWrite( \
1224 object, HeapObject::RawField(object, offset), value); \
1225 if (heap->InNewSpace(value)) { \
1226 heap->RecordWrite(object->address(), offset); \
1229 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1230 if (mode != SKIP_WRITE_BARRIER) { \
1231 if (mode == UPDATE_WRITE_BARRIER) { \
1232 heap->incremental_marking()->RecordWrite( \
1233 object, HeapObject::RawField(object, offset), value); \
1235 if (heap->InNewSpace(value)) { \
1236 heap->RecordWrite(object->address(), offset); \
1240 #define READ_DOUBLE_FIELD(p, offset) \
1241 ReadDoubleValue(FIELD_ADDR_CONST(p, offset))
1243 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1244 WriteDoubleValue(FIELD_ADDR(p, offset), value)
1246 #define READ_INT_FIELD(p, offset) \
1247 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1249 #define WRITE_INT_FIELD(p, offset, value) \
1250 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1252 #define READ_INTPTR_FIELD(p, offset) \
1253 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1255 #define WRITE_INTPTR_FIELD(p, offset, value) \
1256 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1258 #define READ_UINT8_FIELD(p, offset) \
1259 (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))
1261 #define WRITE_UINT8_FIELD(p, offset, value) \
1262 (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)
1264 #define READ_INT8_FIELD(p, offset) \
1265 (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))
1267 #define WRITE_INT8_FIELD(p, offset, value) \
1268 (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)
1270 #define READ_UINT16_FIELD(p, offset) \
1271 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1273 #define WRITE_UINT16_FIELD(p, offset, value) \
1274 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1276 #define READ_INT16_FIELD(p, offset) \
1277 (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))
1279 #define WRITE_INT16_FIELD(p, offset, value) \
1280 (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)
1282 #define READ_UINT32_FIELD(p, offset) \
1283 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1285 #define WRITE_UINT32_FIELD(p, offset, value) \
1286 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1288 #define READ_INT32_FIELD(p, offset) \
1289 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1291 #define WRITE_INT32_FIELD(p, offset, value) \
1292 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1294 #define READ_FLOAT_FIELD(p, offset) \
1295 (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))
1297 #define WRITE_FLOAT_FIELD(p, offset, value) \
1298 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
1300 #define READ_UINT64_FIELD(p, offset) \
1301 (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
1303 #define WRITE_UINT64_FIELD(p, offset, value) \
1304 (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
1306 #define READ_INT64_FIELD(p, offset) \
1307 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1309 #define WRITE_INT64_FIELD(p, offset, value) \
1310 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1312 #define READ_BYTE_FIELD(p, offset) \
1313 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1315 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1316 static_cast<byte>(base::NoBarrier_Load( \
1317 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1319 #define WRITE_BYTE_FIELD(p, offset, value) \
1320 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1322 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1323 base::NoBarrier_Store( \
1324 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1325 static_cast<base::Atomic8>(value));
1327 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1328 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1332 MapWord MapWord::FromMap(const Map* map) {
1333 return MapWord(reinterpret_cast<uintptr_t>(map));
1337 Map* MapWord::ToMap() {
1338 return reinterpret_cast<Map*>(value_);
1342 bool MapWord::IsForwardingAddress() {
1343 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1347 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1348 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1349 return MapWord(reinterpret_cast<uintptr_t>(raw));
1353 HeapObject* MapWord::ToForwardingAddress() {
1354 DCHECK(IsForwardingAddress());
1355 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1360 void HeapObject::VerifyObjectField(int offset) {
1361 VerifyPointer(READ_FIELD(this, offset));
1364 void HeapObject::VerifySmiField(int offset) {
1365 CHECK(READ_FIELD(this, offset)->IsSmi());
1370 Heap* HeapObject::GetHeap() const {
1372 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1373 SLOW_DCHECK(heap != NULL);
1378 Isolate* HeapObject::GetIsolate() const {
1379 return GetHeap()->isolate();
1383 Map* HeapObject::map() const {
1385 // Clear mark potentially added by PathTracer.
1386 uintptr_t raw_value =
1387 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1388 return MapWord::FromRawValue(raw_value).ToMap();
1390 return map_word().ToMap();
1395 void HeapObject::set_map(Map* value) {
1396 set_map_word(MapWord::FromMap(value));
1397 if (value != NULL) {
1398 // TODO(1600) We are passing NULL as a slot because maps can never be on
1399 // evacuation candidate.
1400 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1405 Map* HeapObject::synchronized_map() {
1406 return synchronized_map_word().ToMap();
1410 void HeapObject::synchronized_set_map(Map* value) {
1411 synchronized_set_map_word(MapWord::FromMap(value));
1412 if (value != NULL) {
1413 // TODO(1600) We are passing NULL as a slot because maps can never be on
1414 // evacuation candidate.
1415 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1420 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1421 synchronized_set_map_word(MapWord::FromMap(value));
1425 // Unsafe accessor omitting write barrier.
1426 void HeapObject::set_map_no_write_barrier(Map* value) {
1427 set_map_word(MapWord::FromMap(value));
1431 MapWord HeapObject::map_word() const {
1433 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1437 void HeapObject::set_map_word(MapWord map_word) {
1438 NOBARRIER_WRITE_FIELD(
1439 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1443 MapWord HeapObject::synchronized_map_word() const {
1445 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1449 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1450 RELEASE_WRITE_FIELD(
1451 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1455 int HeapObject::Size() {
1456 return SizeFromMap(map());
1460 HeapObjectContents HeapObject::ContentType() {
1461 InstanceType type = map()->instance_type();
1462 if (type <= LAST_NAME_TYPE) {
1463 if (type == SYMBOL_TYPE) {
1464 return HeapObjectContents::kTaggedValues;
1466 DCHECK(type < FIRST_NONSTRING_TYPE);
1467 // There are four string representations: sequential strings, external
1468 // strings, cons strings, and sliced strings.
1469 // Only the former two contain raw values and no heap pointers (besides the
1471 if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
1472 return HeapObjectContents::kRawValues;
1474 return HeapObjectContents::kTaggedValues;
1476 // TODO(jochen): Enable eventually.
1477 } else if (type == JS_FUNCTION_TYPE) {
1478 return HeapObjectContents::kMixedValues;
1480 } else if (type == BYTECODE_ARRAY_TYPE) {
1481 return HeapObjectContents::kMixedValues;
1482 } else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
1483 type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
1484 return HeapObjectContents::kMixedValues;
1485 } else if (type <= LAST_DATA_TYPE) {
1486 // TODO(jochen): Why do we claim that Code and Map contain only raw values?
1487 return HeapObjectContents::kRawValues;
1489 if (FLAG_unbox_double_fields) {
1490 LayoutDescriptorHelper helper(map());
1491 if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
1493 return HeapObjectContents::kTaggedValues;
1498 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1499 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1500 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1504 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1505 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1509 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1510 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1514 double HeapNumber::value() const {
1515 return READ_DOUBLE_FIELD(this, kValueOffset);
1519 void HeapNumber::set_value(double value) {
1520 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1524 int HeapNumber::get_exponent() {
1525 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1526 kExponentShift) - kExponentBias;
1530 int HeapNumber::get_sign() {
1531 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1535 bool Simd128Value::Equals(Simd128Value* that) {
1536 #define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
1537 if (this->Is##Type()) { \
1538 if (!that->Is##Type()) return false; \
1539 return Type::cast(this)->Equals(Type::cast(that)); \
1541 SIMD128_TYPES(SIMD128_VALUE)
1542 #undef SIMD128_VALUE
1547 #define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
1548 bool Type::Equals(Type* that) { \
1549 for (int lane = 0; lane < lane_count; ++lane) { \
1550 if (this->get_lane(lane) != that->get_lane(lane)) return false; \
1554 SIMD128_TYPES(SIMD128_VALUE_EQUALS)
1555 #undef SIMD128_VALUE_EQUALS
1558 #if defined(V8_TARGET_LITTLE_ENDIAN)
1559 #define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1561 READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
1562 #elif defined(V8_TARGET_BIG_ENDIAN)
1563 #define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1564 lane_type value = READ_##field_type##_FIELD( \
1565 this, kValueOffset + (lane_count - lane - 1) * field_size);
1567 #error Unknown byte ordering
1570 #if defined(V8_TARGET_LITTLE_ENDIAN)
1571 #define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
1572 WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
1573 #elif defined(V8_TARGET_BIG_ENDIAN)
1574 #define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
1575 WRITE_##field_type##_FIELD( \
1576 this, kValueOffset + (lane_count - lane - 1) * field_size, value);
1578 #error Unknown byte ordering
1581 #define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
1583 lane_type type::get_lane(int lane) const { \
1584 DCHECK(lane < lane_count && lane >= 0); \
1585 SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1589 void type::set_lane(int lane, lane_type value) { \
1590 DCHECK(lane < lane_count && lane >= 0); \
1591 SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
1594 SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
1595 SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
1596 SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
1597 SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
1598 SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
1599 SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
1600 SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
1601 #undef SIMD128_NUMERIC_LANE_FNS
1604 #define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
1606 bool type::get_lane(int lane) const { \
1607 DCHECK(lane < lane_count && lane >= 0); \
1608 SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
1609 DCHECK(value == 0 || value == -1); \
1610 return value != 0; \
1613 void type::set_lane(int lane, bool value) { \
1614 DCHECK(lane < lane_count && lane >= 0); \
1615 int32_t int_val = value ? -1 : 0; \
1616 SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val) \
1619 SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
1620 SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
1621 SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
1622 #undef SIMD128_BOOLEAN_LANE_FNS
1624 #undef SIMD128_READ_LANE
1625 #undef SIMD128_WRITE_LANE
1628 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1631 Object** FixedArray::GetFirstElementAddress() {
1632 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1636 bool FixedArray::ContainsOnlySmisOrHoles() {
1637 Object* the_hole = GetHeap()->the_hole_value();
1638 Object** current = GetFirstElementAddress();
1639 for (int i = 0; i < length(); ++i) {
1640 Object* candidate = *current++;
1641 if (!candidate->IsSmi() && candidate != the_hole) return false;
1647 FixedArrayBase* JSObject::elements() const {
1648 Object* array = READ_FIELD(this, kElementsOffset);
1649 return static_cast<FixedArrayBase*>(array);
1653 void AllocationSite::Initialize() {
1654 set_transition_info(Smi::FromInt(0));
1655 SetElementsKind(GetInitialFastElementsKind());
1656 set_nested_site(Smi::FromInt(0));
1657 set_pretenure_data(Smi::FromInt(0));
1658 set_pretenure_create_count(Smi::FromInt(0));
1659 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1660 SKIP_WRITE_BARRIER);
1664 bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }
1667 bool AllocationSite::IsMaybeTenure() {
1668 return pretenure_decision() == kMaybeTenure;
1672 bool AllocationSite::PretenuringDecisionMade() {
1673 return pretenure_decision() != kUndecided;
1677 void AllocationSite::MarkZombie() {
1678 DCHECK(!IsZombie());
1680 set_pretenure_decision(kZombie);
1684 ElementsKind AllocationSite::GetElementsKind() {
1685 DCHECK(!SitePointsToLiteral());
1686 int value = Smi::cast(transition_info())->value();
1687 return ElementsKindBits::decode(value);
1691 void AllocationSite::SetElementsKind(ElementsKind kind) {
1692 int value = Smi::cast(transition_info())->value();
1693 set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
1694 SKIP_WRITE_BARRIER);
1698 bool AllocationSite::CanInlineCall() {
1699 int value = Smi::cast(transition_info())->value();
1700 return DoNotInlineBit::decode(value) == 0;
1704 void AllocationSite::SetDoNotInlineCall() {
1705 int value = Smi::cast(transition_info())->value();
1706 set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
1707 SKIP_WRITE_BARRIER);
1711 bool AllocationSite::SitePointsToLiteral() {
1712 // If transition_info is a smi, then it represents an ElementsKind
1713 // for a constructed array. Otherwise, it must be a boilerplate
1714 // for an object or array literal.
1715 return transition_info()->IsJSArray() || transition_info()->IsJSObject();
1719 // Heuristic: We only need to create allocation site info if the boilerplate
1720 // elements kind is the initial elements kind.
1721 AllocationSiteMode AllocationSite::GetMode(
1722 ElementsKind boilerplate_elements_kind) {
1723 if (FLAG_pretenuring_call_new ||
1724 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1725 return TRACK_ALLOCATION_SITE;
1728 return DONT_TRACK_ALLOCATION_SITE;
1732 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1734 if (FLAG_pretenuring_call_new ||
1735 (IsFastSmiElementsKind(from) &&
1736 IsMoreGeneralElementsKindTransition(from, to))) {
1737 return TRACK_ALLOCATION_SITE;
1740 return DONT_TRACK_ALLOCATION_SITE;
1744 inline bool AllocationSite::CanTrack(InstanceType type) {
1745 if (FLAG_allocation_site_pretenuring) {
1746 return type == JS_ARRAY_TYPE ||
1747 type == JS_OBJECT_TYPE ||
1748 type < FIRST_NONSTRING_TYPE;
1750 return type == JS_ARRAY_TYPE;
1754 AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
1755 int value = pretenure_data()->value();
1756 return PretenureDecisionBits::decode(value);
1760 void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
1761 int value = pretenure_data()->value();
1763 Smi::FromInt(PretenureDecisionBits::update(value, decision)),
1764 SKIP_WRITE_BARRIER);
1768 bool AllocationSite::deopt_dependent_code() {
1769 int value = pretenure_data()->value();
1770 return DeoptDependentCodeBit::decode(value);
1774 void AllocationSite::set_deopt_dependent_code(bool deopt) {
1775 int value = pretenure_data()->value();
1776 set_pretenure_data(Smi::FromInt(DeoptDependentCodeBit::update(value, deopt)),
1777 SKIP_WRITE_BARRIER);
1781 int AllocationSite::memento_found_count() {
1782 int value = pretenure_data()->value();
1783 return MementoFoundCountBits::decode(value);
1787 inline void AllocationSite::set_memento_found_count(int count) {
1788 int value = pretenure_data()->value();
1789 // Verify that we can count more mementos than we can possibly find in one
1790 // new space collection.
1791 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1792 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1793 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1794 DCHECK(count < MementoFoundCountBits::kMax);
1796 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1797 SKIP_WRITE_BARRIER);
1801 int AllocationSite::memento_create_count() {
1802 return pretenure_create_count()->value();
1806 void AllocationSite::set_memento_create_count(int count) {
1807 set_pretenure_create_count(Smi::FromInt(count), SKIP_WRITE_BARRIER);
1811 inline bool AllocationSite::IncrementMementoFoundCount() {
1812 if (IsZombie()) return false;
1814 int value = memento_found_count();
1815 set_memento_found_count(value + 1);
1816 return memento_found_count() == kPretenureMinimumCreated;
1820 inline void AllocationSite::IncrementMementoCreateCount() {
1821 DCHECK(FLAG_allocation_site_pretenuring);
1822 int value = memento_create_count();
1823 set_memento_create_count(value + 1);
1827 inline bool AllocationSite::MakePretenureDecision(
1828 PretenureDecision current_decision,
1830 bool maximum_size_scavenge) {
1831 // Here we just allow state transitions from undecided or maybe tenure
1832 // to don't tenure, maybe tenure, or tenure.
1833 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1834 if (ratio >= kPretenureRatio) {
1835 // We just transition into tenure state when the semi-space was at
1836 // maximum capacity.
1837 if (maximum_size_scavenge) {
1838 set_deopt_dependent_code(true);
1839 set_pretenure_decision(kTenure);
1840 // Currently we just need to deopt when we make a state transition to
1844 set_pretenure_decision(kMaybeTenure);
1846 set_pretenure_decision(kDontTenure);
1853 inline bool AllocationSite::DigestPretenuringFeedback(
1854 bool maximum_size_scavenge) {
1856 int create_count = memento_create_count();
1857 int found_count = memento_found_count();
1858 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1860 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1861 static_cast<double>(found_count) / create_count : 0.0;
1862 PretenureDecision current_decision = pretenure_decision();
1864 if (minimum_mementos_created) {
1865 deopt = MakePretenureDecision(
1866 current_decision, ratio, maximum_size_scavenge);
1869 if (FLAG_trace_pretenuring_statistics) {
1871 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1872 static_cast<void*>(this), create_count, found_count, ratio,
1873 PretenureDecisionName(current_decision),
1874 PretenureDecisionName(pretenure_decision()));
1877 // Clear feedback calculation fields until the next gc.
1878 set_memento_found_count(0);
1879 set_memento_create_count(0);
1884 bool AllocationMemento::IsValid() {
1885 return allocation_site()->IsAllocationSite() &&
1886 !AllocationSite::cast(allocation_site())->IsZombie();
1890 AllocationSite* AllocationMemento::GetAllocationSite() {
1892 return AllocationSite::cast(allocation_site());
1896 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1897 JSObject::ValidateElements(object);
1898 ElementsKind elements_kind = object->map()->elements_kind();
1899 if (!IsFastObjectElementsKind(elements_kind)) {
1900 if (IsFastHoleyElementsKind(elements_kind)) {
1901 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1903 TransitionElementsKind(object, FAST_ELEMENTS);
1909 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1912 EnsureElementsMode mode) {
1913 ElementsKind current_kind = object->map()->elements_kind();
1914 ElementsKind target_kind = current_kind;
1916 DisallowHeapAllocation no_allocation;
1917 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1918 bool is_holey = IsFastHoleyElementsKind(current_kind);
1919 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1920 Heap* heap = object->GetHeap();
1921 Object* the_hole = heap->the_hole_value();
1922 for (uint32_t i = 0; i < count; ++i) {
1923 Object* current = *objects++;
1924 if (current == the_hole) {
1926 target_kind = GetHoleyElementsKind(target_kind);
1927 } else if (!current->IsSmi()) {
1928 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1929 if (IsFastSmiElementsKind(target_kind)) {
1931 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1933 target_kind = FAST_DOUBLE_ELEMENTS;
1936 } else if (is_holey) {
1937 target_kind = FAST_HOLEY_ELEMENTS;
1940 target_kind = FAST_ELEMENTS;
1945 if (target_kind != current_kind) {
1946 TransitionElementsKind(object, target_kind);
1951 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1952 Handle<FixedArrayBase> elements,
1954 EnsureElementsMode mode) {
1955 Heap* heap = object->GetHeap();
1956 if (elements->map() != heap->fixed_double_array_map()) {
1957 DCHECK(elements->map() == heap->fixed_array_map() ||
1958 elements->map() == heap->fixed_cow_array_map());
1959 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1960 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1963 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1964 EnsureCanContainElements(object, objects, length, mode);
1968 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1969 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1970 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1971 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1972 Handle<FixedDoubleArray> double_array =
1973 Handle<FixedDoubleArray>::cast(elements);
1974 for (uint32_t i = 0; i < length; ++i) {
1975 if (double_array->is_the_hole(i)) {
1976 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1980 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1985 void JSObject::SetMapAndElements(Handle<JSObject> object,
1986 Handle<Map> new_map,
1987 Handle<FixedArrayBase> value) {
1988 JSObject::MigrateToMap(object, new_map);
1989 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1990 (*value == object->GetHeap()->empty_fixed_array())) ==
1991 (value->map() == object->GetHeap()->fixed_array_map() ||
1992 value->map() == object->GetHeap()->fixed_cow_array_map()));
1993 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1994 (object->map()->has_fast_double_elements() ==
1995 value->IsFixedDoubleArray()));
1996 object->set_elements(*value);
2000 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
2001 WRITE_FIELD(this, kElementsOffset, value);
2002 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
2006 void JSObject::initialize_properties() {
2007 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2008 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
2012 void JSObject::initialize_elements() {
2013 FixedArrayBase* elements = map()->GetInitialElements();
2014 WRITE_FIELD(this, kElementsOffset, elements);
2018 ACCESSORS(Oddball, to_string, String, kToStringOffset)
2019 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
2020 ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
2023 byte Oddball::kind() const {
2024 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
2028 void Oddball::set_kind(byte value) {
2029 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
2033 ACCESSORS(Cell, value, Object, kValueOffset)
2034 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
2035 ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
2036 ACCESSORS(PropertyCell, value, Object, kValueOffset)
2039 PropertyDetails PropertyCell::property_details() {
2040 return PropertyDetails(Smi::cast(property_details_raw()));
2044 void PropertyCell::set_property_details(PropertyDetails details) {
2045 set_property_details_raw(details.AsSmi());
2049 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
2052 void WeakCell::clear() {
2053 DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
2054 WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
2058 void WeakCell::initialize(HeapObject* val) {
2059 WRITE_FIELD(this, kValueOffset, val);
2060 Heap* heap = GetHeap();
2061 // We just have to execute the generational barrier here because we never
2062 // mark through a weak cell and collect evacuation candidates when we process
2064 if (heap->InNewSpace(val)) {
2065 heap->RecordWrite(address(), kValueOffset);
2070 bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
2073 Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
2076 void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
2077 WRITE_FIELD(this, kNextOffset, val);
2078 if (mode == UPDATE_WRITE_BARRIER) {
2079 WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
2084 void WeakCell::clear_next(Heap* heap) {
2085 set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
2089 bool WeakCell::next_cleared() { return next()->IsTheHole(); }
2092 int JSObject::GetHeaderSize() {
2093 InstanceType type = map()->instance_type();
2094 // Check for the most common kind of JavaScript object before
2095 // falling into the generic switch. This speeds up the internal
2096 // field operations considerably on average.
2097 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2099 case JS_GENERATOR_OBJECT_TYPE:
2100 return JSGeneratorObject::kSize;
2101 case JS_MODULE_TYPE:
2102 return JSModule::kSize;
2103 case JS_GLOBAL_PROXY_TYPE:
2104 return JSGlobalProxy::kSize;
2105 case JS_GLOBAL_OBJECT_TYPE:
2106 return JSGlobalObject::kSize;
2107 case JS_BUILTINS_OBJECT_TYPE:
2108 return JSBuiltinsObject::kSize;
2109 case JS_FUNCTION_TYPE:
2110 return JSFunction::kSize;
2112 return JSValue::kSize;
2114 return JSDate::kSize;
2116 return JSArray::kSize;
2117 case JS_ARRAY_BUFFER_TYPE:
2118 return JSArrayBuffer::kSize;
2119 case JS_TYPED_ARRAY_TYPE:
2120 return JSTypedArray::kSize;
2121 case JS_DATA_VIEW_TYPE:
2122 return JSDataView::kSize;
2124 return JSSet::kSize;
2126 return JSMap::kSize;
2127 case JS_SET_ITERATOR_TYPE:
2128 return JSSetIterator::kSize;
2129 case JS_MAP_ITERATOR_TYPE:
2130 return JSMapIterator::kSize;
2131 case JS_WEAK_MAP_TYPE:
2132 return JSWeakMap::kSize;
2133 case JS_WEAK_SET_TYPE:
2134 return JSWeakSet::kSize;
2135 case JS_REGEXP_TYPE:
2136 return JSRegExp::kSize;
2137 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2138 return JSObject::kHeaderSize;
2139 case JS_MESSAGE_OBJECT_TYPE:
2140 return JSMessageObject::kSize;
2148 int JSObject::GetInternalFieldCount() {
2149 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2150 // Make sure to adjust for the number of in-object properties. These
2151 // properties do contribute to the size, but are not internal fields.
2152 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2153 map()->GetInObjectProperties();
2157 int JSObject::GetInternalFieldOffset(int index) {
2158 DCHECK(index < GetInternalFieldCount() && index >= 0);
2159 return GetHeaderSize() + (kPointerSize * index);
2163 Object* JSObject::GetInternalField(int index) {
2164 DCHECK(index < GetInternalFieldCount() && index >= 0);
2165 // Internal objects do follow immediately after the header, whereas in-object
2166 // properties are at the end of the object. Therefore there is no need
2167 // to adjust the index here.
2168 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2172 void JSObject::SetInternalField(int index, Object* value) {
2173 DCHECK(index < GetInternalFieldCount() && index >= 0);
2174 // Internal objects do follow immediately after the header, whereas in-object
2175 // properties are at the end of the object. Therefore there is no need
2176 // to adjust the index here.
2177 int offset = GetHeaderSize() + (kPointerSize * index);
2178 WRITE_FIELD(this, offset, value);
2179 WRITE_BARRIER(GetHeap(), this, offset, value);
2183 void JSObject::SetInternalField(int index, Smi* value) {
2184 DCHECK(index < GetInternalFieldCount() && index >= 0);
2185 // Internal objects do follow immediately after the header, whereas in-object
2186 // properties are at the end of the object. Therefore there is no need
2187 // to adjust the index here.
2188 int offset = GetHeaderSize() + (kPointerSize * index);
2189 WRITE_FIELD(this, offset, value);
2193 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2194 if (!FLAG_unbox_double_fields) return false;
2195 return map()->IsUnboxedDoubleField(index);
2199 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2200 if (!FLAG_unbox_double_fields) return false;
2201 if (index.is_hidden_field() || !index.is_inobject()) return false;
2202 return !layout_descriptor()->IsTagged(index.property_index());
2206 // Access fast-case object properties at index. The use of these routines
2207 // is needed to correctly distinguish between properties stored in-object and
2208 // properties stored in the properties array.
2209 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2210 DCHECK(!IsUnboxedDoubleField(index));
2211 if (index.is_inobject()) {
2212 return READ_FIELD(this, index.offset());
2214 return properties()->get(index.outobject_array_index());
2219 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2220 DCHECK(IsUnboxedDoubleField(index));
2221 return READ_DOUBLE_FIELD(this, index.offset());
2225 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2226 if (index.is_inobject()) {
2227 int offset = index.offset();
2228 WRITE_FIELD(this, offset, value);
2229 WRITE_BARRIER(GetHeap(), this, offset, value);
2231 properties()->set(index.outobject_array_index(), value);
2236 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
2237 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2241 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2242 if (IsUnboxedDoubleField(index)) {
2243 DCHECK(value->IsMutableHeapNumber());
2244 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
2246 RawFastPropertyAtPut(index, value);
2251 void JSObject::WriteToField(int descriptor, Object* value) {
2252 DisallowHeapAllocation no_gc;
2254 DescriptorArray* desc = map()->instance_descriptors();
2255 PropertyDetails details = desc->GetDetails(descriptor);
2257 DCHECK(details.type() == DATA);
2259 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
2260 if (details.representation().IsDouble()) {
2261 // Nothing more to be done.
2262 if (value->IsUninitialized()) return;
2263 if (IsUnboxedDoubleField(index)) {
2264 RawFastDoublePropertyAtPut(index, value->Number());
2266 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
2267 DCHECK(box->IsMutableHeapNumber());
2268 box->set_value(value->Number());
2271 RawFastPropertyAtPut(index, value);
2276 int JSObject::GetInObjectPropertyOffset(int index) {
2277 return map()->GetInObjectPropertyOffset(index);
2281 Object* JSObject::InObjectPropertyAt(int index) {
2282 int offset = GetInObjectPropertyOffset(index);
2283 return READ_FIELD(this, offset);
2287 Object* JSObject::InObjectPropertyAtPut(int index,
2289 WriteBarrierMode mode) {
2290 // Adjust for the number of properties stored in the object.
2291 int offset = GetInObjectPropertyOffset(index);
2292 WRITE_FIELD(this, offset, value);
2293 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2299 void JSObject::InitializeBody(Map* map,
2300 Object* pre_allocated_value,
2301 Object* filler_value) {
2302 DCHECK(!filler_value->IsHeapObject() ||
2303 !GetHeap()->InNewSpace(filler_value));
2304 DCHECK(!pre_allocated_value->IsHeapObject() ||
2305 !GetHeap()->InNewSpace(pre_allocated_value));
2306 int size = map->instance_size();
2307 int offset = kHeaderSize;
2308 if (filler_value != pre_allocated_value) {
2310 map->GetInObjectProperties() - map->unused_property_fields();
2311 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2312 for (int i = 0; i < pre_allocated; i++) {
2313 WRITE_FIELD(this, offset, pre_allocated_value);
2314 offset += kPointerSize;
2317 while (offset < size) {
2318 WRITE_FIELD(this, offset, filler_value);
2319 offset += kPointerSize;
2324 bool JSObject::HasFastProperties() {
2325 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2326 return !properties()->IsDictionary();
2330 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2331 if (unused_property_fields() != 0) return false;
2332 if (is_prototype_map()) return false;
2333 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2334 int limit = Max(minimum, GetInObjectProperties());
2335 int external = NumberOfFields() - GetInObjectProperties();
2336 return external > limit;
2340 void Struct::InitializeBody(int object_size) {
2341 Object* value = GetHeap()->undefined_value();
2342 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2343 WRITE_FIELD(this, offset, value);
2348 bool Object::ToArrayLength(uint32_t* index) {
2350 int value = Smi::cast(this)->value();
2351 if (value < 0) return false;
2355 if (IsHeapNumber()) {
2356 double value = HeapNumber::cast(this)->value();
2357 uint32_t uint_value = static_cast<uint32_t>(value);
2358 if (value == static_cast<double>(uint_value)) {
2359 *index = uint_value;
2367 bool Object::ToArrayIndex(uint32_t* index) {
2368 return ToArrayLength(index) && *index != kMaxUInt32;
2372 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2373 if (!this->IsJSValue()) return false;
2375 JSValue* js_value = JSValue::cast(this);
2376 if (!js_value->value()->IsString()) return false;
2378 String* str = String::cast(js_value->value());
2379 if (index >= static_cast<uint32_t>(str->length())) return false;
2385 void Object::VerifyApiCallResultType() {
2387 if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
2388 IsHeapNumber() || IsSimd128Value() || IsUndefined() || IsTrue() ||
2389 IsFalse() || IsNull())) {
2390 FATAL("API call returned invalid object");
2396 Object* FixedArray::get(int index) const {
2397 SLOW_DCHECK(index >= 0 && index < this->length());
2398 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2402 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2403 return handle(array->get(index), array->GetIsolate());
2407 bool FixedArray::is_the_hole(int index) {
2408 return get(index) == GetHeap()->the_hole_value();
2412 void FixedArray::set(int index, Smi* value) {
2413 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2414 DCHECK(index >= 0 && index < this->length());
2415 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2416 int offset = kHeaderSize + index * kPointerSize;
2417 WRITE_FIELD(this, offset, value);
2421 void FixedArray::set(int index, Object* value) {
2422 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2423 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2424 DCHECK(index >= 0 && index < this->length());
2425 int offset = kHeaderSize + index * kPointerSize;
2426 WRITE_FIELD(this, offset, value);
2427 WRITE_BARRIER(GetHeap(), this, offset, value);
2431 double FixedDoubleArray::get_scalar(int index) {
2432 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2433 map() != GetHeap()->fixed_array_map());
2434 DCHECK(index >= 0 && index < this->length());
2435 DCHECK(!is_the_hole(index));
2436 return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2440 uint64_t FixedDoubleArray::get_representation(int index) {
2441 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2442 map() != GetHeap()->fixed_array_map());
2443 DCHECK(index >= 0 && index < this->length());
2444 int offset = kHeaderSize + index * kDoubleSize;
2445 return READ_UINT64_FIELD(this, offset);
2449 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2451 if (array->is_the_hole(index)) {
2452 return array->GetIsolate()->factory()->the_hole_value();
2454 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2459 void FixedDoubleArray::set(int index, double value) {
2460 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2461 map() != GetHeap()->fixed_array_map());
2462 int offset = kHeaderSize + index * kDoubleSize;
2463 if (std::isnan(value)) {
2464 WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
2466 WRITE_DOUBLE_FIELD(this, offset, value);
2468 DCHECK(!is_the_hole(index));
2472 void FixedDoubleArray::set_the_hole(int index) {
2473 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2474 map() != GetHeap()->fixed_array_map());
2475 int offset = kHeaderSize + index * kDoubleSize;
2476 WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
2480 bool FixedDoubleArray::is_the_hole(int index) {
2481 return get_representation(index) == kHoleNanInt64;
2485 double* FixedDoubleArray::data_start() {
2486 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2490 void FixedDoubleArray::FillWithHoles(int from, int to) {
2491 for (int i = from; i < to; i++) {
2497 Object* WeakFixedArray::Get(int index) const {
2498 Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
2499 if (raw->IsSmi()) return raw;
2500 DCHECK(raw->IsWeakCell());
2501 return WeakCell::cast(raw)->value();
2505 bool WeakFixedArray::IsEmptySlot(int index) const {
2506 DCHECK(index < Length());
2507 return Get(index)->IsSmi();
2511 void WeakFixedArray::Clear(int index) {
2512 FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
2516 int WeakFixedArray::Length() const {
2517 return FixedArray::cast(this)->length() - kFirstIndex;
2521 int WeakFixedArray::last_used_index() const {
2522 return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
2526 void WeakFixedArray::set_last_used_index(int index) {
2527 FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
2532 T* WeakFixedArray::Iterator::Next() {
2533 if (list_ != NULL) {
2534 // Assert that list did not change during iteration.
2535 DCHECK_EQ(last_used_index_, list_->last_used_index());
2536 while (index_ < list_->Length()) {
2537 Object* item = list_->Get(index_++);
2538 if (item != Empty()) return T::cast(item);
2546 int ArrayList::Length() {
2547 if (FixedArray::cast(this)->length() == 0) return 0;
2548 return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
2552 void ArrayList::SetLength(int length) {
2553 return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2557 Object* ArrayList::Get(int index) {
2558 return FixedArray::cast(this)->get(kFirstIndex + index);
2562 Object** ArrayList::Slot(int index) {
2563 return data_start() + kFirstIndex + index;
2567 void ArrayList::Set(int index, Object* obj) {
2568 FixedArray::cast(this)->set(kFirstIndex + index, obj);
2572 void ArrayList::Clear(int index, Object* undefined) {
2573 DCHECK(undefined->IsUndefined());
2574 FixedArray::cast(this)
2575 ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
2579 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2580 const DisallowHeapAllocation& promise) {
2581 Heap* heap = GetHeap();
2582 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2583 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2584 return UPDATE_WRITE_BARRIER;
2588 AllocationAlignment HeapObject::RequiredAlignment() {
2589 #ifdef V8_HOST_ARCH_32_BIT
2590 if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
2591 FixedArrayBase::cast(this)->length() != 0) {
2592 return kDoubleAligned;
2594 if (IsHeapNumber()) return kDoubleUnaligned;
2595 if (IsSimd128Value()) return kSimd128Unaligned;
2596 #endif // V8_HOST_ARCH_32_BIT
2597 return kWordAligned;
2601 void FixedArray::set(int index,
2603 WriteBarrierMode mode) {
2604 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2605 DCHECK(index >= 0 && index < this->length());
2606 int offset = kHeaderSize + index * kPointerSize;
2607 WRITE_FIELD(this, offset, value);
2608 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2612 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2615 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2616 DCHECK(index >= 0 && index < array->length());
2617 int offset = kHeaderSize + index * kPointerSize;
2618 WRITE_FIELD(array, offset, value);
2619 Heap* heap = array->GetHeap();
2620 if (heap->InNewSpace(value)) {
2621 heap->RecordWrite(array->address(), offset);
2626 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2629 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2630 DCHECK(index >= 0 && index < array->length());
2631 DCHECK(!array->GetHeap()->InNewSpace(value));
2632 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2636 void FixedArray::set_undefined(int index) {
2637 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2638 DCHECK(index >= 0 && index < this->length());
2639 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2641 kHeaderSize + index * kPointerSize,
2642 GetHeap()->undefined_value());
2646 void FixedArray::set_null(int index) {
2647 DCHECK(index >= 0 && index < this->length());
2648 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2650 kHeaderSize + index * kPointerSize,
2651 GetHeap()->null_value());
2655 void FixedArray::set_the_hole(int index) {
2656 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2657 DCHECK(index >= 0 && index < this->length());
2658 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2660 kHeaderSize + index * kPointerSize,
2661 GetHeap()->the_hole_value());
2665 void FixedArray::FillWithHoles(int from, int to) {
2666 for (int i = from; i < to; i++) {
2672 Object** FixedArray::data_start() {
2673 return HeapObject::RawField(this, kHeaderSize);
2677 Object** FixedArray::RawFieldOfElementAt(int index) {
2678 return HeapObject::RawField(this, OffsetOfElementAt(index));
2682 bool DescriptorArray::IsEmpty() {
2683 DCHECK(length() >= kFirstIndex ||
2684 this == GetHeap()->empty_descriptor_array());
2685 return length() < kFirstIndex;
2689 int DescriptorArray::number_of_descriptors() {
2690 DCHECK(length() >= kFirstIndex || IsEmpty());
2692 return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
2696 int DescriptorArray::number_of_descriptors_storage() {
2698 return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
2702 int DescriptorArray::NumberOfSlackDescriptors() {
2703 return number_of_descriptors_storage() - number_of_descriptors();
2707 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2709 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2713 inline int DescriptorArray::number_of_entries() {
2714 return number_of_descriptors();
2718 bool DescriptorArray::HasEnumCache() {
2719 return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
2723 void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
2724 set(kEnumCacheIndex, array->get(kEnumCacheIndex));
2728 FixedArray* DescriptorArray::GetEnumCache() {
2729 DCHECK(HasEnumCache());
2730 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2731 return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
2735 bool DescriptorArray::HasEnumIndicesCache() {
2736 if (IsEmpty()) return false;
2737 Object* object = get(kEnumCacheIndex);
2738 if (object->IsSmi()) return false;
2739 FixedArray* bridge = FixedArray::cast(object);
2740 return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2744 FixedArray* DescriptorArray::GetEnumIndicesCache() {
2745 DCHECK(HasEnumIndicesCache());
2746 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2747 return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
2751 Object** DescriptorArray::GetEnumCacheSlot() {
2752 DCHECK(HasEnumCache());
2753 return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2758 // Perform a binary search in a fixed array. Low and high are entry indices. If
2759 // there are three entries in this array it should be called with low=0 and
2761 template <SearchMode search_mode, typename T>
2762 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2763 int* out_insertion_index) {
2764 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2765 uint32_t hash = name->Hash();
2768 DCHECK(low <= high);
2770 while (low != high) {
2771 int mid = (low + high) / 2;
2772 Name* mid_name = array->GetSortedKey(mid);
2773 uint32_t mid_hash = mid_name->Hash();
2775 if (mid_hash >= hash) {
2782 for (; low <= limit; ++low) {
2783 int sort_index = array->GetSortedKeyIndex(low);
2784 Name* entry = array->GetKey(sort_index);
2785 uint32_t current_hash = entry->Hash();
2786 if (current_hash != hash) {
2787 if (out_insertion_index != NULL) {
2788 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2790 return T::kNotFound;
2792 if (entry->Equals(name)) {
2793 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2796 return T::kNotFound;
2800 if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
2801 return T::kNotFound;
2805 // Perform a linear search in this fixed array. len is the number of entry
2806 // indices that are valid.
2807 template <SearchMode search_mode, typename T>
2808 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2809 int* out_insertion_index) {
2810 uint32_t hash = name->Hash();
2811 if (search_mode == ALL_ENTRIES) {
2812 for (int number = 0; number < len; number++) {
2813 int sorted_index = array->GetSortedKeyIndex(number);
2814 Name* entry = array->GetKey(sorted_index);
2815 uint32_t current_hash = entry->Hash();
2816 if (current_hash > hash) {
2817 if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
2818 return T::kNotFound;
2820 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2822 if (out_insertion_index != NULL) *out_insertion_index = len;
2823 return T::kNotFound;
2825 DCHECK(len >= valid_entries);
2826 DCHECK_NULL(out_insertion_index); // Not supported here.
2827 for (int number = 0; number < valid_entries; number++) {
2828 Name* entry = array->GetKey(number);
2829 uint32_t current_hash = entry->Hash();
2830 if (current_hash == hash && entry->Equals(name)) return number;
2832 return T::kNotFound;
2837 template <SearchMode search_mode, typename T>
2838 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2839 if (search_mode == VALID_ENTRIES) {
2840 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2842 SLOW_DCHECK(array->IsSortedNoDuplicates());
2845 int nof = array->number_of_entries();
2847 if (out_insertion_index != NULL) *out_insertion_index = 0;
2848 return T::kNotFound;
2851 // Fast case: do linear search for small arrays.
2852 const int kMaxElementsForLinearSearch = 8;
2853 if ((search_mode == ALL_ENTRIES &&
2854 nof <= kMaxElementsForLinearSearch) ||
2855 (search_mode == VALID_ENTRIES &&
2856 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2857 return LinearSearch<search_mode>(array, name, nof, valid_entries,
2858 out_insertion_index);
2861 // Slow case: perform binary search.
2862 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
2863 out_insertion_index);
2867 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2868 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2872 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2873 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2874 if (number_of_own_descriptors == 0) return kNotFound;
2876 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2877 int number = cache->Lookup(map, name);
2879 if (number == DescriptorLookupCache::kAbsent) {
2880 number = Search(name, number_of_own_descriptors);
2881 cache->Update(map, name, number);
2888 PropertyDetails Map::GetLastDescriptorDetails() {
2889 return instance_descriptors()->GetDetails(LastAdded());
2893 int Map::LastAdded() {
2894 int number_of_own_descriptors = NumberOfOwnDescriptors();
2895 DCHECK(number_of_own_descriptors > 0);
2896 return number_of_own_descriptors - 1;
2900 int Map::NumberOfOwnDescriptors() {
2901 return NumberOfOwnDescriptorsBits::decode(bit_field3());
2905 void Map::SetNumberOfOwnDescriptors(int number) {
2906 DCHECK(number <= instance_descriptors()->number_of_descriptors());
2907 set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
2911 int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
2914 void Map::SetEnumLength(int length) {
2915 if (length != kInvalidEnumCacheSentinel) {
2916 DCHECK(length >= 0);
2917 DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
2918 DCHECK(length <= NumberOfOwnDescriptors());
2920 set_bit_field3(EnumLengthBits::update(bit_field3(), length));
2924 FixedArrayBase* Map::GetInitialElements() {
2925 if (has_fast_smi_or_object_elements() ||
2926 has_fast_double_elements()) {
2927 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2928 return GetHeap()->empty_fixed_array();
2929 } else if (has_fixed_typed_array_elements()) {
2930 FixedTypedArrayBase* empty_array =
2931 GetHeap()->EmptyFixedTypedArrayForMap(this);
2932 DCHECK(!GetHeap()->InNewSpace(empty_array));
2941 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2942 DCHECK(descriptor_number < number_of_descriptors());
2943 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2947 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2948 return GetKeySlot(descriptor_number);
2952 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2953 return GetValueSlot(descriptor_number - 1) + 1;
2957 Name* DescriptorArray::GetKey(int descriptor_number) {
2958 DCHECK(descriptor_number < number_of_descriptors());
2959 return Name::cast(get(ToKeyIndex(descriptor_number)));
2963 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2964 return GetDetails(descriptor_number).pointer();
2968 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2969 return GetKey(GetSortedKeyIndex(descriptor_number));
2973 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2974 PropertyDetails details = GetDetails(descriptor_index);
2975 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2979 void DescriptorArray::SetRepresentation(int descriptor_index,
2980 Representation representation) {
2981 DCHECK(!representation.IsNone());
2982 PropertyDetails details = GetDetails(descriptor_index);
2983 set(ToDetailsIndex(descriptor_index),
2984 details.CopyWithRepresentation(representation).AsSmi());
2988 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2989 DCHECK(descriptor_number < number_of_descriptors());
2990 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2994 int DescriptorArray::GetValueOffset(int descriptor_number) {
2995 return OffsetOfElementAt(ToValueIndex(descriptor_number));
2999 Object* DescriptorArray::GetValue(int descriptor_number) {
3000 DCHECK(descriptor_number < number_of_descriptors());
3001 return get(ToValueIndex(descriptor_number));
3005 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3006 set(ToValueIndex(descriptor_index), value);
3010 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3011 DCHECK(descriptor_number < number_of_descriptors());
3012 Object* details = get(ToDetailsIndex(descriptor_number));
3013 return PropertyDetails(Smi::cast(details));
3017 PropertyType DescriptorArray::GetType(int descriptor_number) {
3018 return GetDetails(descriptor_number).type();
3022 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3023 DCHECK(GetDetails(descriptor_number).location() == kField);
3024 return GetDetails(descriptor_number).field_index();
3028 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3029 DCHECK(GetDetails(descriptor_number).location() == kField);
3030 Object* value = GetValue(descriptor_number);
3031 if (value->IsWeakCell()) {
3032 if (WeakCell::cast(value)->cleared()) return HeapType::None();
3033 value = WeakCell::cast(value)->value();
3035 return HeapType::cast(value);
3039 Object* DescriptorArray::GetConstant(int descriptor_number) {
3040 return GetValue(descriptor_number);
3044 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3045 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3046 return GetValue(descriptor_number);
3050 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3051 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3052 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3053 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3057 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3058 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3059 handle(GetValue(descriptor_number), GetIsolate()),
3060 GetDetails(descriptor_number));
3064 void DescriptorArray::Set(int descriptor_number,
3066 const WhitenessWitness&) {
3068 DCHECK(descriptor_number < number_of_descriptors());
3070 NoIncrementalWriteBarrierSet(this,
3071 ToKeyIndex(descriptor_number),
3073 NoIncrementalWriteBarrierSet(this,
3074 ToValueIndex(descriptor_number),
3076 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
3077 desc->GetDetails().AsSmi());
3081 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3083 DCHECK(descriptor_number < number_of_descriptors());
3085 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3086 set(ToValueIndex(descriptor_number), *desc->GetValue());
3087 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3091 void DescriptorArray::Append(Descriptor* desc) {
3092 DisallowHeapAllocation no_gc;
3093 int descriptor_number = number_of_descriptors();
3094 SetNumberOfDescriptors(descriptor_number + 1);
3095 Set(descriptor_number, desc);
3097 uint32_t hash = desc->GetKey()->Hash();
3101 for (insertion = descriptor_number; insertion > 0; --insertion) {
3102 Name* key = GetSortedKey(insertion - 1);
3103 if (key->Hash() <= hash) break;
3104 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3107 SetSortedKey(insertion, descriptor_number);
3111 void DescriptorArray::SwapSortedKeys(int first, int second) {
3112 int first_key = GetSortedKeyIndex(first);
3113 SetSortedKey(first, GetSortedKeyIndex(second));
3114 SetSortedKey(second, first_key);
3118 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3119 : marking_(array->GetHeap()->incremental_marking()) {
3120 marking_->EnterNoMarkingScope();
3121 DCHECK(!marking_->IsMarking() ||
3122 Marking::Color(array) == Marking::WHITE_OBJECT);
3126 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3127 marking_->LeaveNoMarkingScope();
3131 PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }
3134 Object* DescriptorArray::Entry::GetCallbackObject() {
3135 return descs_->GetValue(index_);
3139 int HashTableBase::NumberOfElements() {
3140 return Smi::cast(get(kNumberOfElementsIndex))->value();
3144 int HashTableBase::NumberOfDeletedElements() {
3145 return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
3149 int HashTableBase::Capacity() {
3150 return Smi::cast(get(kCapacityIndex))->value();
3154 void HashTableBase::ElementAdded() {
3155 SetNumberOfElements(NumberOfElements() + 1);
3159 void HashTableBase::ElementRemoved() {
3160 SetNumberOfElements(NumberOfElements() - 1);
3161 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
3165 void HashTableBase::ElementsRemoved(int n) {
3166 SetNumberOfElements(NumberOfElements() - n);
3167 SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
3172 int HashTableBase::ComputeCapacity(int at_least_space_for) {
3173 const int kMinCapacity = 4;
3174 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3175 return Max(capacity, kMinCapacity);
3179 bool HashTableBase::IsKey(Object* k) {
3180 return !k->IsTheHole() && !k->IsUndefined();
3184 void HashTableBase::SetNumberOfElements(int nof) {
3185 set(kNumberOfElementsIndex, Smi::FromInt(nof));
3189 void HashTableBase::SetNumberOfDeletedElements(int nod) {
3190 set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
3194 template <typename Derived, typename Shape, typename Key>
3195 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3196 return FindEntry(GetIsolate(), key);
3200 template<typename Derived, typename Shape, typename Key>
3201 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3202 return FindEntry(isolate, key, HashTable::Hash(key));
3206 // Find entry for key otherwise return kNotFound.
3207 template <typename Derived, typename Shape, typename Key>
3208 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
3210 uint32_t capacity = Capacity();
3211 uint32_t entry = FirstProbe(hash, capacity);
3213 // EnsureCapacity will guarantee the hash table is never full.
3215 Object* element = KeyAt(entry);
3216 // Empty entry. Uses raw unchecked accessors because it is called by the
3217 // string table during bootstrapping.
3218 if (element == isolate->heap()->root(Heap::kUndefinedValueRootIndex)) break;
3219 if (element != isolate->heap()->root(Heap::kTheHoleValueRootIndex) &&
3220 Shape::IsMatch(key, element)) return entry;
3221 entry = NextProbe(entry, count++, capacity);
3227 bool SeededNumberDictionary::requires_slow_elements() {
3228 Object* max_index_object = get(kMaxNumberKeyIndex);
3229 if (!max_index_object->IsSmi()) return false;
3231 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3235 uint32_t SeededNumberDictionary::max_number_key() {
3236 DCHECK(!requires_slow_elements());
3237 Object* max_index_object = get(kMaxNumberKeyIndex);
3238 if (!max_index_object->IsSmi()) return 0;
3239 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3240 return value >> kRequiresSlowElementsTagSize;
3244 void SeededNumberDictionary::set_requires_slow_elements() {
3245 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3249 // ------------------------------------
3253 CAST_ACCESSOR(AccessorInfo)
3254 CAST_ACCESSOR(ArrayList)
3255 CAST_ACCESSOR(Bool16x8)
3256 CAST_ACCESSOR(Bool32x4)
3257 CAST_ACCESSOR(Bool8x16)
3258 CAST_ACCESSOR(ByteArray)
3259 CAST_ACCESSOR(BytecodeArray)
3262 CAST_ACCESSOR(CodeCacheHashTable)
3263 CAST_ACCESSOR(CompilationCacheTable)
3264 CAST_ACCESSOR(ConsString)
3265 CAST_ACCESSOR(DeoptimizationInputData)
3266 CAST_ACCESSOR(DeoptimizationOutputData)
3267 CAST_ACCESSOR(DependentCode)
3268 CAST_ACCESSOR(DescriptorArray)
3269 CAST_ACCESSOR(ExternalOneByteString)
3270 CAST_ACCESSOR(ExternalString)
3271 CAST_ACCESSOR(ExternalTwoByteString)
3272 CAST_ACCESSOR(FixedArray)
3273 CAST_ACCESSOR(FixedArrayBase)
3274 CAST_ACCESSOR(FixedDoubleArray)
3275 CAST_ACCESSOR(FixedTypedArrayBase)
3276 CAST_ACCESSOR(Float32x4)
3277 CAST_ACCESSOR(Foreign)
3278 CAST_ACCESSOR(GlobalDictionary)
3279 CAST_ACCESSOR(GlobalObject)
3280 CAST_ACCESSOR(HandlerTable)
3281 CAST_ACCESSOR(HeapObject)
3282 CAST_ACCESSOR(Int16x8)
3283 CAST_ACCESSOR(Int32x4)
3284 CAST_ACCESSOR(Int8x16)
3285 CAST_ACCESSOR(JSArray)
3286 CAST_ACCESSOR(JSArrayBuffer)
3287 CAST_ACCESSOR(JSArrayBufferView)
3288 CAST_ACCESSOR(JSBuiltinsObject)
3289 CAST_ACCESSOR(JSDataView)
3290 CAST_ACCESSOR(JSDate)
3291 CAST_ACCESSOR(JSFunction)
3292 CAST_ACCESSOR(JSFunctionProxy)
3293 CAST_ACCESSOR(JSGeneratorObject)
3294 CAST_ACCESSOR(JSGlobalObject)
3295 CAST_ACCESSOR(JSGlobalProxy)
3296 CAST_ACCESSOR(JSMap)
3297 CAST_ACCESSOR(JSMapIterator)
3298 CAST_ACCESSOR(JSMessageObject)
3299 CAST_ACCESSOR(JSModule)
3300 CAST_ACCESSOR(JSObject)
3301 CAST_ACCESSOR(JSProxy)
3302 CAST_ACCESSOR(JSReceiver)
3303 CAST_ACCESSOR(JSRegExp)
3304 CAST_ACCESSOR(JSSet)
3305 CAST_ACCESSOR(JSSetIterator)
3306 CAST_ACCESSOR(JSTypedArray)
3307 CAST_ACCESSOR(JSValue)
3308 CAST_ACCESSOR(JSWeakMap)
3309 CAST_ACCESSOR(JSWeakSet)
3310 CAST_ACCESSOR(LayoutDescriptor)
3313 CAST_ACCESSOR(NameDictionary)
3314 CAST_ACCESSOR(NormalizedMapCache)
3315 CAST_ACCESSOR(Object)
3316 CAST_ACCESSOR(ObjectHashTable)
3317 CAST_ACCESSOR(Oddball)
3318 CAST_ACCESSOR(OrderedHashMap)
3319 CAST_ACCESSOR(OrderedHashSet)
3320 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3321 CAST_ACCESSOR(PropertyCell)
3322 CAST_ACCESSOR(ScopeInfo)
3323 CAST_ACCESSOR(SeededNumberDictionary)
3324 CAST_ACCESSOR(SeqOneByteString)
3325 CAST_ACCESSOR(SeqString)
3326 CAST_ACCESSOR(SeqTwoByteString)
3327 CAST_ACCESSOR(SharedFunctionInfo)
3328 CAST_ACCESSOR(Simd128Value)
3329 CAST_ACCESSOR(SlicedString)
3331 CAST_ACCESSOR(String)
3332 CAST_ACCESSOR(StringTable)
3333 CAST_ACCESSOR(Struct)
3334 CAST_ACCESSOR(Symbol)
3335 CAST_ACCESSOR(Uint16x8)
3336 CAST_ACCESSOR(Uint32x4)
3337 CAST_ACCESSOR(Uint8x16)
3338 CAST_ACCESSOR(UnseededNumberDictionary)
3339 CAST_ACCESSOR(WeakCell)
3340 CAST_ACCESSOR(WeakFixedArray)
3341 CAST_ACCESSOR(WeakHashTable)
3345 template <class Traits>
3346 STATIC_CONST_MEMBER_DEFINITION const InstanceType
3347 FixedTypedArray<Traits>::kInstanceType;
3350 template <class Traits>
3351 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3352 SLOW_DCHECK(object->IsHeapObject() &&
3353 HeapObject::cast(object)->map()->instance_type() ==
3354 Traits::kInstanceType);
3355 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3359 template <class Traits>
3360 const FixedTypedArray<Traits>*
3361 FixedTypedArray<Traits>::cast(const Object* object) {
3362 SLOW_DCHECK(object->IsHeapObject() &&
3363 HeapObject::cast(object)->map()->instance_type() ==
3364 Traits::kInstanceType);
3365 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3369 #define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
3370 type* DeoptimizationInputData::name() { \
3371 return type::cast(get(k##name##Index)); \
3373 void DeoptimizationInputData::Set##name(type* value) { \
3374 set(k##name##Index, value); \
3377 DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
3378 DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
3379 DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
3380 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
3381 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
3382 DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
3383 DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
3384 DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
3386 #undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3389 #define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type) \
3390 type* DeoptimizationInputData::name(int i) { \
3391 return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
3393 void DeoptimizationInputData::Set##name(int i, type* value) { \
3394 set(IndexForEntry(i) + k##name##Offset, value); \
3397 DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
3398 DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
3399 DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
3400 DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
3402 #undef DEFINE_DEOPT_ENTRY_ACCESSORS
3405 BailoutId DeoptimizationInputData::AstId(int i) {
3406 return BailoutId(AstIdRaw(i)->value());
3410 void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
3411 SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
3415 int DeoptimizationInputData::DeoptCount() {
3416 return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
3420 int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
3423 BailoutId DeoptimizationOutputData::AstId(int index) {
3424 return BailoutId(Smi::cast(get(index * 2))->value());
3428 void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
3429 set(index * 2, Smi::FromInt(id.ToInt()));
3433 Smi* DeoptimizationOutputData::PcAndState(int index) {
3434 return Smi::cast(get(1 + index * 2));
3438 void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
3439 set(1 + index * 2, offset);
3443 void HandlerTable::SetRangeStart(int index, int value) {
3444 set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
3448 void HandlerTable::SetRangeEnd(int index, int value) {
3449 set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
3453 void HandlerTable::SetRangeHandler(int index, int offset,
3454 CatchPrediction prediction) {
3455 int value = HandlerOffsetField::encode(offset) |
3456 HandlerPredictionField::encode(prediction);
3457 set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
3461 void HandlerTable::SetRangeDepth(int index, int value) {
3462 set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
3466 void HandlerTable::SetReturnOffset(int index, int value) {
3467 set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
3471 void HandlerTable::SetReturnHandler(int index, int offset,
3472 CatchPrediction prediction) {
3473 int value = HandlerOffsetField::encode(offset) |
3474 HandlerPredictionField::encode(prediction);
3475 set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
3479 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3480 STRUCT_LIST(MAKE_STRUCT_CAST)
3481 #undef MAKE_STRUCT_CAST
3484 template <typename Derived, typename Shape, typename Key>
3485 HashTable<Derived, Shape, Key>*
3486 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3487 SLOW_DCHECK(obj->IsHashTable());
3488 return reinterpret_cast<HashTable*>(obj);
3492 template <typename Derived, typename Shape, typename Key>
3493 const HashTable<Derived, Shape, Key>*
3494 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3495 SLOW_DCHECK(obj->IsHashTable());
3496 return reinterpret_cast<const HashTable*>(obj);
3500 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3501 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3503 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3504 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3506 SMI_ACCESSORS(String, length, kLengthOffset)
3507 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3510 int FreeSpace::Size() { return size(); }
3513 FreeSpace* FreeSpace::next() {
3514 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3515 (!GetHeap()->deserialization_complete() && map() == NULL));
3516 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3517 return reinterpret_cast<FreeSpace*>(
3518 Memory::Address_at(address() + kNextOffset));
3522 FreeSpace** FreeSpace::next_address() {
3523 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3524 (!GetHeap()->deserialization_complete() && map() == NULL));
3525 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3526 return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
3530 void FreeSpace::set_next(FreeSpace* next) {
3531 DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
3532 (!GetHeap()->deserialization_complete() && map() == NULL));
3533 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3534 base::NoBarrier_Store(
3535 reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
3536 reinterpret_cast<base::AtomicWord>(next));
3540 FreeSpace* FreeSpace::cast(HeapObject* o) {
3541 SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
3542 return reinterpret_cast<FreeSpace*>(o);
3546 uint32_t Name::hash_field() {
3547 return READ_UINT32_FIELD(this, kHashFieldOffset);
3551 void Name::set_hash_field(uint32_t value) {
3552 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3553 #if V8_HOST_ARCH_64_BIT
3554 #if V8_TARGET_LITTLE_ENDIAN
3555 WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
3557 WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
3563 bool Name::Equals(Name* other) {
3564 if (other == this) return true;
3565 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3566 this->IsSymbol() || other->IsSymbol()) {
3569 return String::cast(this)->SlowEquals(String::cast(other));
3573 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3574 if (one.is_identical_to(two)) return true;
3575 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3576 one->IsSymbol() || two->IsSymbol()) {
3579 return String::SlowEquals(Handle<String>::cast(one),
3580 Handle<String>::cast(two));
3584 ACCESSORS(Symbol, name, Object, kNameOffset)
3585 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3586 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3589 bool String::Equals(String* other) {
3590 if (other == this) return true;
3591 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3594 return SlowEquals(other);
3598 bool String::Equals(Handle<String> one, Handle<String> two) {
3599 if (one.is_identical_to(two)) return true;
3600 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3603 return SlowEquals(one, two);
3607 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3608 if (!string->IsConsString()) return string;
3609 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3610 if (cons->IsFlat()) return handle(cons->first());
3611 return SlowFlatten(cons, pretenure);
3615 Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
3616 if (name->IsSymbol()) return name;
3617 return String::Flatten(Handle<String>::cast(name));
3621 uint16_t String::Get(int index) {
3622 DCHECK(index >= 0 && index < length());
3623 switch (StringShape(this).full_representation_tag()) {
3624 case kSeqStringTag | kOneByteStringTag:
3625 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3626 case kSeqStringTag | kTwoByteStringTag:
3627 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3628 case kConsStringTag | kOneByteStringTag:
3629 case kConsStringTag | kTwoByteStringTag:
3630 return ConsString::cast(this)->ConsStringGet(index);
3631 case kExternalStringTag | kOneByteStringTag:
3632 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3633 case kExternalStringTag | kTwoByteStringTag:
3634 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3635 case kSlicedStringTag | kOneByteStringTag:
3636 case kSlicedStringTag | kTwoByteStringTag:
3637 return SlicedString::cast(this)->SlicedStringGet(index);
3647 void String::Set(int index, uint16_t value) {
3648 DCHECK(index >= 0 && index < length());
3649 DCHECK(StringShape(this).IsSequential());
3651 return this->IsOneByteRepresentation()
3652 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3653 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3657 bool String::IsFlat() {
3658 if (!StringShape(this).IsCons()) return true;
3659 return ConsString::cast(this)->second()->length() == 0;
3663 String* String::GetUnderlying() {
3664 // Giving direct access to underlying string only makes sense if the
3665 // wrapping string is already flattened.
3666 DCHECK(this->IsFlat());
3667 DCHECK(StringShape(this).IsIndirect());
3668 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3669 const int kUnderlyingOffset = SlicedString::kParentOffset;
3670 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3674 template<class Visitor>
3675 ConsString* String::VisitFlat(Visitor* visitor,
3678 int slice_offset = offset;
3679 const int length = string->length();
3680 DCHECK(offset <= length);
3682 int32_t type = string->map()->instance_type();
3683 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3684 case kSeqStringTag | kOneByteStringTag:
3685 visitor->VisitOneByteString(
3686 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3690 case kSeqStringTag | kTwoByteStringTag:
3691 visitor->VisitTwoByteString(
3692 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3696 case kExternalStringTag | kOneByteStringTag:
3697 visitor->VisitOneByteString(
3698 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3702 case kExternalStringTag | kTwoByteStringTag:
3703 visitor->VisitTwoByteString(
3704 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3708 case kSlicedStringTag | kOneByteStringTag:
3709 case kSlicedStringTag | kTwoByteStringTag: {
3710 SlicedString* slicedString = SlicedString::cast(string);
3711 slice_offset += slicedString->offset();
3712 string = slicedString->parent();
3716 case kConsStringTag | kOneByteStringTag:
3717 case kConsStringTag | kTwoByteStringTag:
3718 return ConsString::cast(string);
3729 inline Vector<const uint8_t> String::GetCharVector() {
3730 String::FlatContent flat = GetFlatContent();
3731 DCHECK(flat.IsOneByte());
3732 return flat.ToOneByteVector();
3737 inline Vector<const uc16> String::GetCharVector() {
3738 String::FlatContent flat = GetFlatContent();
3739 DCHECK(flat.IsTwoByte());
3740 return flat.ToUC16Vector();
3744 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3745 DCHECK(index >= 0 && index < length());
3746 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3750 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3751 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3752 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3753 static_cast<byte>(value));
3757 Address SeqOneByteString::GetCharsAddress() {
3758 return FIELD_ADDR(this, kHeaderSize);
3762 uint8_t* SeqOneByteString::GetChars() {
3763 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3767 Address SeqTwoByteString::GetCharsAddress() {
3768 return FIELD_ADDR(this, kHeaderSize);
3772 uc16* SeqTwoByteString::GetChars() {
3773 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3777 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3778 DCHECK(index >= 0 && index < length());
3779 return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
3783 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3784 DCHECK(index >= 0 && index < length());
3785 WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
3789 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3790 return SizeFor(length());
3794 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3795 return SizeFor(length());
3799 String* SlicedString::parent() {
3800 return String::cast(READ_FIELD(this, kParentOffset));
3804 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3805 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3806 WRITE_FIELD(this, kParentOffset, parent);
3807 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3811 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3814 String* ConsString::first() {
3815 return String::cast(READ_FIELD(this, kFirstOffset));
3819 Object* ConsString::unchecked_first() {
3820 return READ_FIELD(this, kFirstOffset);
3824 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3825 WRITE_FIELD(this, kFirstOffset, value);
3826 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3830 String* ConsString::second() {
3831 return String::cast(READ_FIELD(this, kSecondOffset));
3835 Object* ConsString::unchecked_second() {
3836 return READ_FIELD(this, kSecondOffset);
3840 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3841 WRITE_FIELD(this, kSecondOffset, value);
3842 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3846 bool ExternalString::is_short() {
3847 InstanceType type = map()->instance_type();
3848 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3852 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3853 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3857 void ExternalOneByteString::update_data_cache() {
3858 if (is_short()) return;
3859 const char** data_field =
3860 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3861 *data_field = resource()->data();
3865 void ExternalOneByteString::set_resource(
3866 const ExternalOneByteString::Resource* resource) {
3867 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3868 *reinterpret_cast<const Resource**>(
3869 FIELD_ADDR(this, kResourceOffset)) = resource;
3870 if (resource != NULL) update_data_cache();
3874 const uint8_t* ExternalOneByteString::GetChars() {
3875 return reinterpret_cast<const uint8_t*>(resource()->data());
3879 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3880 DCHECK(index >= 0 && index < length());
3881 return GetChars()[index];
3885 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3886 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3890 void ExternalTwoByteString::update_data_cache() {
3891 if (is_short()) return;
3892 const uint16_t** data_field =
3893 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3894 *data_field = resource()->data();
3898 void ExternalTwoByteString::set_resource(
3899 const ExternalTwoByteString::Resource* resource) {
3900 *reinterpret_cast<const Resource**>(
3901 FIELD_ADDR(this, kResourceOffset)) = resource;
3902 if (resource != NULL) update_data_cache();
3906 const uint16_t* ExternalTwoByteString::GetChars() {
3907 return resource()->data();
3911 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3912 DCHECK(index >= 0 && index < length());
3913 return GetChars()[index];
3917 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3919 return GetChars() + start;
3923 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3926 void ConsStringIterator::PushLeft(ConsString* string) {
3927 frames_[depth_++ & kDepthMask] = string;
3931 void ConsStringIterator::PushRight(ConsString* string) {
3933 frames_[(depth_-1) & kDepthMask] = string;
3937 void ConsStringIterator::AdjustMaximumDepth() {
3938 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3942 void ConsStringIterator::Pop() {
3944 DCHECK(depth_ <= maximum_depth_);
3949 uint16_t StringCharacterStream::GetNext() {
3950 DCHECK(buffer8_ != NULL && end_ != NULL);
3951 // Advance cursor if needed.
3952 if (buffer8_ == end_) HasMore();
3953 DCHECK(buffer8_ < end_);
3954 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3958 StringCharacterStream::StringCharacterStream(String* string, int offset)
3959 : is_one_byte_(false) {
3960 Reset(string, offset);
3964 void StringCharacterStream::Reset(String* string, int offset) {
3967 ConsString* cons_string = String::VisitFlat(this, string, offset);
3968 iter_.Reset(cons_string, offset);
3969 if (cons_string != NULL) {
3970 string = iter_.Next(&offset);
3971 if (string != NULL) String::VisitFlat(this, string, offset);
3976 bool StringCharacterStream::HasMore() {
3977 if (buffer8_ != end_) return true;
3979 String* string = iter_.Next(&offset);
3980 DCHECK_EQ(offset, 0);
3981 if (string == NULL) return false;
3982 String::VisitFlat(this, string);
3983 DCHECK(buffer8_ != end_);
3988 void StringCharacterStream::VisitOneByteString(
3989 const uint8_t* chars, int length) {
3990 is_one_byte_ = true;
3992 end_ = chars + length;
3996 void StringCharacterStream::VisitTwoByteString(
3997 const uint16_t* chars, int length) {
3998 is_one_byte_ = false;
4000 end_ = reinterpret_cast<const uint8_t*>(chars + length);
4004 int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
4007 byte ByteArray::get(int index) {
4008 DCHECK(index >= 0 && index < this->length());
4009 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4013 void ByteArray::set(int index, byte value) {
4014 DCHECK(index >= 0 && index < this->length());
4015 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4019 int ByteArray::get_int(int index) {
4020 DCHECK(index >= 0 && (index * kIntSize) < this->length());
4021 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
4025 ByteArray* ByteArray::FromDataStartAddress(Address address) {
4026 DCHECK_TAG_ALIGNED(address);
4027 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
4031 int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
4034 Address ByteArray::GetDataStartAddress() {
4035 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4039 void BytecodeArray::BytecodeArrayIterateBody(ObjectVisitor* v) {
4040 IteratePointer(v, kConstantPoolOffset);
4044 byte BytecodeArray::get(int index) {
4045 DCHECK(index >= 0 && index < this->length());
4046 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4050 void BytecodeArray::set(int index, byte value) {
4051 DCHECK(index >= 0 && index < this->length());
4052 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4056 void BytecodeArray::set_frame_size(int frame_size) {
4057 DCHECK_GE(frame_size, 0);
4058 DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
4059 WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
4063 int BytecodeArray::frame_size() const {
4064 return READ_INT_FIELD(this, kFrameSizeOffset);
4068 void BytecodeArray::set_parameter_count(int number_of_parameters) {
4069 DCHECK_GE(number_of_parameters, 0);
4070 // Parameter count is stored as the size on stack of the parameters to allow
4071 // it to be used directly by generated code.
4072 WRITE_INT_FIELD(this, kParameterSizeOffset,
4073 (number_of_parameters << kPointerSizeLog2));
4077 int BytecodeArray::parameter_count() const {
4078 // Parameter count is stored as the size on stack of the parameters to allow
4079 // it to be used directly by generated code.
4080 return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
4084 ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
4087 Address BytecodeArray::GetFirstBytecodeAddress() {
4088 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4092 int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4095 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
4098 void* FixedTypedArrayBase::external_pointer() const {
4099 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
4100 return reinterpret_cast<void*>(ptr);
4104 void FixedTypedArrayBase::set_external_pointer(void* value,
4105 WriteBarrierMode mode) {
4106 intptr_t ptr = reinterpret_cast<intptr_t>(value);
4107 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
4111 void* FixedTypedArrayBase::DataPtr() {
4112 return reinterpret_cast<void*>(
4113 reinterpret_cast<intptr_t>(base_pointer()) +
4114 reinterpret_cast<intptr_t>(external_pointer()));
4118 int FixedTypedArrayBase::ElementSize(InstanceType type) {
4121 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4122 case FIXED_##TYPE##_ARRAY_TYPE: \
4123 element_size = size; \
4126 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4127 #undef TYPED_ARRAY_CASE
4132 return element_size;
4136 int FixedTypedArrayBase::DataSize(InstanceType type) {
4137 if (base_pointer() == Smi::FromInt(0)) return 0;
4138 return length() * ElementSize(type);
4142 int FixedTypedArrayBase::DataSize() {
4143 return DataSize(map()->instance_type());
4147 int FixedTypedArrayBase::size() {
4148 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4152 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4153 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4157 int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
4158 return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
4162 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4165 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4168 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4171 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4174 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4177 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4180 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4183 float Float32ArrayTraits::defaultValue() {
4184 return std::numeric_limits<float>::quiet_NaN();
4188 double Float64ArrayTraits::defaultValue() {
4189 return std::numeric_limits<double>::quiet_NaN();
4193 template <class Traits>
4194 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4195 DCHECK((index >= 0) && (index < this->length()));
4196 ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4201 template <class Traits>
4202 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4203 DCHECK((index >= 0) && (index < this->length()));
4204 ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
4209 template <class Traits>
4210 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4211 return static_cast<ElementType>(value);
4216 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4217 if (value < 0) return 0;
4218 if (value > 0xFF) return 0xFF;
4219 return static_cast<uint8_t>(value);
4223 template <class Traits>
4224 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4226 return static_cast<ElementType>(DoubleToInt32(value));
4231 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4232 // Handle NaNs and less than zero values which clamp to zero.
4233 if (!(value > 0)) return 0;
4234 if (value > 0xFF) return 0xFF;
4235 return static_cast<uint8_t>(lrint(value));
4240 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4241 return static_cast<float>(value);
4246 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4251 template <class Traits>
4252 Handle<Object> FixedTypedArray<Traits>::get(
4253 Handle<FixedTypedArray<Traits> > array,
4255 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4259 template <class Traits>
4260 void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
4261 ElementType cast_value = Traits::defaultValue();
4262 if (value->IsSmi()) {
4263 int int_value = Smi::cast(value)->value();
4264 cast_value = from_int(int_value);
4265 } else if (value->IsHeapNumber()) {
4266 double double_value = HeapNumber::cast(value)->value();
4267 cast_value = from_double(double_value);
4269 // Clamp undefined to the default value. All other types have been
4270 // converted to a number type further up in the call chain.
4271 DCHECK(value->IsUndefined());
4273 set(index, cast_value);
4277 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4278 return handle(Smi::FromInt(scalar), isolate);
4282 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4284 return handle(Smi::FromInt(scalar), isolate);
4288 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4289 return handle(Smi::FromInt(scalar), isolate);
4293 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4294 return handle(Smi::FromInt(scalar), isolate);
4298 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4299 return handle(Smi::FromInt(scalar), isolate);
4303 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4304 return isolate->factory()->NewNumberFromUint(scalar);
4308 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4309 return isolate->factory()->NewNumberFromInt(scalar);
4313 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4314 return isolate->factory()->NewNumber(scalar);
4318 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4319 return isolate->factory()->NewNumber(scalar);
4323 int Map::visitor_id() {
4324 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4328 void Map::set_visitor_id(int id) {
4329 DCHECK(0 <= id && id < 256);
4330 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4334 int Map::instance_size() {
4335 return NOBARRIER_READ_BYTE_FIELD(
4336 this, kInstanceSizeOffset) << kPointerSizeLog2;
4340 int Map::inobject_properties_or_constructor_function_index() {
4341 return READ_BYTE_FIELD(this,
4342 kInObjectPropertiesOrConstructorFunctionIndexOffset);
4346 void Map::set_inobject_properties_or_constructor_function_index(int value) {
4347 DCHECK(0 <= value && value < 256);
4348 WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
4349 static_cast<byte>(value));
4353 int Map::GetInObjectProperties() {
4354 DCHECK(IsJSObjectMap());
4355 return inobject_properties_or_constructor_function_index();
4359 void Map::SetInObjectProperties(int value) {
4360 DCHECK(IsJSObjectMap());
4361 set_inobject_properties_or_constructor_function_index(value);
4365 int Map::GetConstructorFunctionIndex() {
4366 DCHECK(IsPrimitiveMap());
4367 return inobject_properties_or_constructor_function_index();
4371 void Map::SetConstructorFunctionIndex(int value) {
4372 DCHECK(IsPrimitiveMap());
4373 set_inobject_properties_or_constructor_function_index(value);
4377 int Map::GetInObjectPropertyOffset(int index) {
4378 // Adjust for the number of properties stored in the object.
4379 index -= GetInObjectProperties();
4381 return instance_size() + (index * kPointerSize);
4385 Handle<Map> Map::CopyInstallDescriptorsForTesting(
4386 Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
4387 Handle<LayoutDescriptor> layout_descriptor) {
4388 return CopyInstallDescriptors(map, new_descriptor, descriptors,
4393 int HeapObject::SizeFromMap(Map* map) {
4394 int instance_size = map->instance_size();
4395 if (instance_size != kVariableSizeSentinel) return instance_size;
4396 // Only inline the most frequent cases.
4397 InstanceType instance_type = map->instance_type();
4398 if (instance_type == FIXED_ARRAY_TYPE) {
4399 return FixedArray::BodyDescriptor::SizeOf(map, this);
4401 if (instance_type == ONE_BYTE_STRING_TYPE ||
4402 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4403 // Strings may get concurrently truncated, hence we have to access its
4404 // length synchronized.
4405 return SeqOneByteString::SizeFor(
4406 reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
4408 if (instance_type == BYTE_ARRAY_TYPE) {
4409 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4411 if (instance_type == BYTECODE_ARRAY_TYPE) {
4412 return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
4414 if (instance_type == FREE_SPACE_TYPE) {
4415 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4417 if (instance_type == STRING_TYPE ||
4418 instance_type == INTERNALIZED_STRING_TYPE) {
4419 // Strings may get concurrently truncated, hence we have to access its
4420 // length synchronized.
4421 return SeqTwoByteString::SizeFor(
4422 reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
4424 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4425 return FixedDoubleArray::SizeFor(
4426 reinterpret_cast<FixedDoubleArray*>(this)->length());
4428 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4429 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4430 return reinterpret_cast<FixedTypedArrayBase*>(
4431 this)->TypedArraySize(instance_type);
4433 DCHECK(instance_type == CODE_TYPE);
4434 return reinterpret_cast<Code*>(this)->CodeSize();
4438 void Map::set_instance_size(int value) {
4439 DCHECK_EQ(0, value & (kPointerSize - 1));
4440 value >>= kPointerSizeLog2;
4441 DCHECK(0 <= value && value < 256);
4442 NOBARRIER_WRITE_BYTE_FIELD(
4443 this, kInstanceSizeOffset, static_cast<byte>(value));
4447 void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4450 InstanceType Map::instance_type() {
4451 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4455 void Map::set_instance_type(InstanceType value) {
4456 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4460 int Map::unused_property_fields() {
4461 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4465 void Map::set_unused_property_fields(int value) {
4466 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4470 byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
4473 void Map::set_bit_field(byte value) {
4474 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4478 byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
4481 void Map::set_bit_field2(byte value) {
4482 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4486 void Map::set_non_instance_prototype(bool value) {
4488 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4490 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4495 bool Map::has_non_instance_prototype() {
4496 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4500 void Map::set_function_with_prototype(bool value) {
4501 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4505 bool Map::function_with_prototype() {
4506 return FunctionWithPrototype::decode(bit_field());
4510 void Map::set_is_hidden_prototype() {
4511 set_bit_field(bit_field() | (1 << kIsHiddenPrototype));
4515 bool Map::is_hidden_prototype() {
4516 return ((1 << kIsHiddenPrototype) & bit_field()) != 0;
4520 void Map::set_has_indexed_interceptor() {
4521 set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
4525 bool Map::has_indexed_interceptor() {
4526 return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
4530 void Map::set_is_undetectable() {
4531 set_bit_field(bit_field() | (1 << kIsUndetectable));
4535 bool Map::is_undetectable() {
4536 return ((1 << kIsUndetectable) & bit_field()) != 0;
4540 void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }
4542 bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }
4545 void Map::set_has_named_interceptor() {
4546 set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
4550 bool Map::has_named_interceptor() {
4551 return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
4555 void Map::set_is_access_check_needed(bool access_check_needed) {
4556 if (access_check_needed) {
4557 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4559 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4564 bool Map::is_access_check_needed() {
4565 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4569 void Map::set_is_extensible(bool value) {
4571 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4573 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4577 bool Map::is_extensible() {
4578 return ((1 << kIsExtensible) & bit_field2()) != 0;
4582 void Map::set_is_prototype_map(bool value) {
4583 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4586 bool Map::is_prototype_map() const {
4587 return IsPrototypeMapBits::decode(bit_field2());
4591 void Map::set_elements_kind(ElementsKind elements_kind) {
4592 DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
4593 DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
4594 set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
4595 DCHECK(this->elements_kind() == elements_kind);
4599 ElementsKind Map::elements_kind() {
4600 return Map::ElementsKindBits::decode(bit_field2());
4604 bool Map::has_fast_smi_elements() {
4605 return IsFastSmiElementsKind(elements_kind());
4608 bool Map::has_fast_object_elements() {
4609 return IsFastObjectElementsKind(elements_kind());
4612 bool Map::has_fast_smi_or_object_elements() {
4613 return IsFastSmiOrObjectElementsKind(elements_kind());
4616 bool Map::has_fast_double_elements() {
4617 return IsFastDoubleElementsKind(elements_kind());
4620 bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
4622 bool Map::has_sloppy_arguments_elements() {
4623 return IsSloppyArgumentsElements(elements_kind());
4626 bool Map::has_fixed_typed_array_elements() {
4627 return IsFixedTypedArrayElementsKind(elements_kind());
4630 bool Map::has_dictionary_elements() {
4631 return IsDictionaryElementsKind(elements_kind());
4635 void Map::set_dictionary_map(bool value) {
4636 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4637 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4638 set_bit_field3(new_bit_field3);
4642 bool Map::is_dictionary_map() {
4643 return DictionaryMap::decode(bit_field3());
4647 Code::Flags Code::flags() {
4648 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4652 void Map::set_owns_descriptors(bool owns_descriptors) {
4653 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4657 bool Map::owns_descriptors() {
4658 return OwnsDescriptors::decode(bit_field3());
4662 void Map::set_has_instance_call_handler() {
4663 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4667 bool Map::has_instance_call_handler() {
4668 return HasInstanceCallHandler::decode(bit_field3());
4672 void Map::deprecate() {
4673 set_bit_field3(Deprecated::update(bit_field3(), true));
4677 bool Map::is_deprecated() {
4678 return Deprecated::decode(bit_field3());
4682 void Map::set_migration_target(bool value) {
4683 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4687 bool Map::is_migration_target() {
4688 return IsMigrationTarget::decode(bit_field3());
4692 void Map::set_is_strong() {
4693 set_bit_field3(IsStrong::update(bit_field3(), true));
4697 bool Map::is_strong() {
4698 return IsStrong::decode(bit_field3());
4702 void Map::set_counter(int value) {
4703 set_bit_field3(Counter::update(bit_field3(), value));
4707 int Map::counter() { return Counter::decode(bit_field3()); }
4710 void Map::mark_unstable() {
4711 set_bit_field3(IsUnstable::update(bit_field3(), true));
4715 bool Map::is_stable() {
4716 return !IsUnstable::decode(bit_field3());
4720 bool Map::has_code_cache() {
4721 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4725 bool Map::CanBeDeprecated() {
4726 int descriptor = LastAdded();
4727 for (int i = 0; i <= descriptor; i++) {
4728 PropertyDetails details = instance_descriptors()->GetDetails(i);
4729 if (details.representation().IsNone()) return true;
4730 if (details.representation().IsSmi()) return true;
4731 if (details.representation().IsDouble()) return true;
4732 if (details.representation().IsHeapObject()) return true;
4733 if (details.type() == DATA_CONSTANT) return true;
4739 void Map::NotifyLeafMapLayoutChange() {
4742 dependent_code()->DeoptimizeDependentCodeGroup(
4744 DependentCode::kPrototypeCheckGroup);
4749 bool Map::CanTransition() {
4750 // Only JSObject and subtypes have map transitions and back pointers.
4751 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
4752 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4756 bool Map::IsPrimitiveMap() {
4757 STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
4758 return instance_type() <= LAST_PRIMITIVE_TYPE;
4760 bool Map::IsJSObjectMap() {
4761 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
4762 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4764 bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
4765 bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
4766 bool Map::IsJSProxyMap() {
4767 InstanceType type = instance_type();
4768 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
4770 bool Map::IsJSGlobalProxyMap() {
4771 return instance_type() == JS_GLOBAL_PROXY_TYPE;
4773 bool Map::IsJSGlobalObjectMap() {
4774 return instance_type() == JS_GLOBAL_OBJECT_TYPE;
4776 bool Map::IsGlobalObjectMap() {
4777 const InstanceType type = instance_type();
4778 return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
4782 bool Map::CanOmitMapChecks() {
4783 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4787 int DependentCode::number_of_entries(DependencyGroup group) {
4788 if (length() == 0) return 0;
4789 return Smi::cast(get(group))->value();
4793 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4794 set(group, Smi::FromInt(value));
4798 void DependentCode::set_object_at(int i, Object* object) {
4799 set(kCodesStartIndex + i, object);
4803 Object* DependentCode::object_at(int i) {
4804 return get(kCodesStartIndex + i);
4808 void DependentCode::clear_at(int i) {
4809 set_undefined(kCodesStartIndex + i);
4813 void DependentCode::copy(int from, int to) {
4814 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4818 void DependentCode::ExtendGroup(DependencyGroup group) {
4819 GroupStartIndexes starts(this);
4820 for (int g = kGroupCount - 1; g > group; g--) {
4821 if (starts.at(g) < starts.at(g + 1)) {
4822 copy(starts.at(g), starts.at(g + 1));
4828 void Code::set_flags(Code::Flags flags) {
4829 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4830 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4834 Code::Kind Code::kind() {
4835 return ExtractKindFromFlags(flags());
4839 bool Code::IsCodeStubOrIC() {
4840 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4841 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4842 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4843 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4844 kind() == TO_BOOLEAN_IC;
4848 bool Code::IsJavaScriptCode() {
4849 if (kind() == FUNCTION || kind() == OPTIMIZED_FUNCTION) {
4852 Handle<Code> interpreter_entry =
4853 GetIsolate()->builtins()->InterpreterEntryTrampoline();
4854 return interpreter_entry.location() != nullptr && *interpreter_entry == this;
4858 InlineCacheState Code::ic_state() {
4859 InlineCacheState result = ExtractICStateFromFlags(flags());
4860 // Only allow uninitialized or debugger states for non-IC code
4861 // objects. This is used in the debugger to determine whether or not
4862 // a call to code object has been replaced with a debug break call.
4863 DCHECK(is_inline_cache_stub() ||
4864 result == UNINITIALIZED ||
4865 result == DEBUG_STUB);
4870 ExtraICState Code::extra_ic_state() {
4871 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4872 return ExtractExtraICStateFromFlags(flags());
4876 Code::StubType Code::type() {
4877 return ExtractTypeFromFlags(flags());
4881 // For initialization.
4882 void Code::set_raw_kind_specific_flags1(int value) {
4883 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4887 void Code::set_raw_kind_specific_flags2(int value) {
4888 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4892 inline bool Code::is_crankshafted() {
4893 return IsCrankshaftedField::decode(
4894 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4898 inline bool Code::is_hydrogen_stub() {
4899 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4903 inline void Code::set_is_crankshafted(bool value) {
4904 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4905 int updated = IsCrankshaftedField::update(previous, value);
4906 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4910 inline bool Code::is_turbofanned() {
4911 return IsTurbofannedField::decode(
4912 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4916 inline void Code::set_is_turbofanned(bool value) {
4917 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4918 int updated = IsTurbofannedField::update(previous, value);
4919 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4923 inline bool Code::can_have_weak_objects() {
4924 DCHECK(kind() == OPTIMIZED_FUNCTION);
4925 return CanHaveWeakObjectsField::decode(
4926 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4930 inline void Code::set_can_have_weak_objects(bool value) {
4931 DCHECK(kind() == OPTIMIZED_FUNCTION);
4932 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4933 int updated = CanHaveWeakObjectsField::update(previous, value);
4934 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4938 bool Code::has_deoptimization_support() {
4939 DCHECK_EQ(FUNCTION, kind());
4940 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4941 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4945 void Code::set_has_deoptimization_support(bool value) {
4946 DCHECK_EQ(FUNCTION, kind());
4947 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4948 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4949 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4953 bool Code::has_debug_break_slots() {
4954 DCHECK_EQ(FUNCTION, kind());
4955 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4956 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4960 void Code::set_has_debug_break_slots(bool value) {
4961 DCHECK_EQ(FUNCTION, kind());
4962 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4963 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4964 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4968 bool Code::has_reloc_info_for_serialization() {
4969 DCHECK_EQ(FUNCTION, kind());
4970 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4971 return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
4975 void Code::set_has_reloc_info_for_serialization(bool value) {
4976 DCHECK_EQ(FUNCTION, kind());
4977 unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
4978 flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
4979 WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
4983 int Code::allow_osr_at_loop_nesting_level() {
4984 DCHECK_EQ(FUNCTION, kind());
4985 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4986 return AllowOSRAtLoopNestingLevelField::decode(fields);
4990 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4991 DCHECK_EQ(FUNCTION, kind());
4992 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
4993 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4994 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
4995 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4999 int Code::profiler_ticks() {
5000 DCHECK_EQ(FUNCTION, kind());
5001 return ProfilerTicksField::decode(
5002 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5006 void Code::set_profiler_ticks(int ticks) {
5007 if (kind() == FUNCTION) {
5008 unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5009 unsigned updated = ProfilerTicksField::update(previous, ticks);
5010 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5015 int Code::builtin_index() {
5016 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
5020 void Code::set_builtin_index(int index) {
5021 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
5025 unsigned Code::stack_slots() {
5026 DCHECK(is_crankshafted());
5027 return StackSlotsField::decode(
5028 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5032 void Code::set_stack_slots(unsigned slots) {
5033 CHECK(slots <= (1 << kStackSlotsBitCount));
5034 DCHECK(is_crankshafted());
5035 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5036 int updated = StackSlotsField::update(previous, slots);
5037 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5041 unsigned Code::safepoint_table_offset() {
5042 DCHECK(is_crankshafted());
5043 return SafepointTableOffsetField::decode(
5044 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5048 void Code::set_safepoint_table_offset(unsigned offset) {
5049 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5050 DCHECK(is_crankshafted());
5051 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5052 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5053 int updated = SafepointTableOffsetField::update(previous, offset);
5054 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5058 unsigned Code::back_edge_table_offset() {
5059 DCHECK_EQ(FUNCTION, kind());
5060 return BackEdgeTableOffsetField::decode(
5061 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5065 void Code::set_back_edge_table_offset(unsigned offset) {
5066 DCHECK_EQ(FUNCTION, kind());
5067 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5068 offset = offset >> kPointerSizeLog2;
5069 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5070 int updated = BackEdgeTableOffsetField::update(previous, offset);
5071 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5075 bool Code::back_edges_patched_for_osr() {
5076 DCHECK_EQ(FUNCTION, kind());
5077 return allow_osr_at_loop_nesting_level() > 0;
5081 uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5084 bool Code::has_function_cache() {
5085 DCHECK(kind() == STUB);
5086 return HasFunctionCacheField::decode(
5087 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5091 void Code::set_has_function_cache(bool flag) {
5092 DCHECK(kind() == STUB);
5093 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5094 int updated = HasFunctionCacheField::update(previous, flag);
5095 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5099 bool Code::marked_for_deoptimization() {
5100 DCHECK(kind() == OPTIMIZED_FUNCTION);
5101 return MarkedForDeoptimizationField::decode(
5102 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5106 void Code::set_marked_for_deoptimization(bool flag) {
5107 DCHECK(kind() == OPTIMIZED_FUNCTION);
5108 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5109 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5110 int updated = MarkedForDeoptimizationField::update(previous, flag);
5111 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5115 bool Code::is_inline_cache_stub() {
5116 Kind kind = this->kind();
5118 #define CASE(name) case name: return true;
5121 default: return false;
5126 bool Code::is_keyed_stub() {
5127 return is_keyed_load_stub() || is_keyed_store_stub();
5131 bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
5132 bool Code::is_handler() { return kind() == HANDLER; }
5133 bool Code::is_load_stub() { return kind() == LOAD_IC; }
5134 bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
5135 bool Code::is_store_stub() { return kind() == STORE_IC; }
5136 bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
5137 bool Code::is_call_stub() { return kind() == CALL_IC; }
5138 bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
5139 bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
5140 bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
5141 bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
5142 bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
5145 bool Code::embeds_maps_weakly() {
5147 return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
5148 k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
5149 ic_state() == MONOMORPHIC;
5153 Address Code::constant_pool() {
5154 Address constant_pool = NULL;
5155 if (FLAG_enable_embedded_constant_pool) {
5156 int offset = constant_pool_offset();
5157 if (offset < instruction_size()) {
5158 constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5161 return constant_pool;
5165 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5166 ExtraICState extra_ic_state, StubType type,
5167 CacheHolderFlag holder) {
5168 // Compute the bit mask.
5169 unsigned int bits = KindField::encode(kind)
5170 | ICStateField::encode(ic_state)
5171 | TypeField::encode(type)
5172 | ExtraICStateField::encode(extra_ic_state)
5173 | CacheHolderField::encode(holder);
5174 return static_cast<Flags>(bits);
5178 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5179 ExtraICState extra_ic_state,
5180 CacheHolderFlag holder,
5182 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
5186 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5187 CacheHolderFlag holder) {
5188 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
5192 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5193 return KindField::decode(flags);
5197 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5198 return ICStateField::decode(flags);
5202 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5203 return ExtraICStateField::decode(flags);
5207 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5208 return TypeField::decode(flags);
5212 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5213 return CacheHolderField::decode(flags);
5217 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5218 int bits = flags & ~TypeField::kMask;
5219 return static_cast<Flags>(bits);
5223 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5224 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5225 return static_cast<Flags>(bits);
5229 Code* Code::GetCodeFromTargetAddress(Address address) {
5230 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5231 // GetCodeFromTargetAddress might be called when marking objects during mark
5232 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5233 // Code::cast. Code::cast does not work when the object's map is
5235 Code* result = reinterpret_cast<Code*>(code);
5240 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5242 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5246 bool Code::CanContainWeakObjects() {
5247 // is_turbofanned() implies !can_have_weak_objects().
5248 DCHECK(!is_optimized_code() || !is_turbofanned() || !can_have_weak_objects());
5249 return is_optimized_code() && can_have_weak_objects();
5253 bool Code::IsWeakObject(Object* object) {
5254 return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
5258 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5259 if (object->IsMap()) {
5260 return Map::cast(object)->CanTransition() &&
5261 FLAG_weak_embedded_maps_in_optimized_code;
5263 if (object->IsCell()) {
5264 object = Cell::cast(object)->value();
5265 } else if (object->IsPropertyCell()) {
5266 object = PropertyCell::cast(object)->value();
5268 if (object->IsJSObject() || object->IsJSProxy()) {
5269 // JSProxy is handled like JSObject because it can morph into one.
5270 return FLAG_weak_embedded_objects_in_optimized_code;
5272 if (object->IsFixedArray()) {
5273 // Contexts of inlined functions are embedded in optimized code.
5274 Map* map = HeapObject::cast(object)->map();
5275 Heap* heap = map->GetHeap();
5276 return FLAG_weak_embedded_objects_in_optimized_code &&
5277 map == heap->function_context_map();
5283 class Code::FindAndReplacePattern {
5285 FindAndReplacePattern() : count_(0) { }
5286 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5287 DCHECK(count_ < kMaxCount);
5288 find_[count_] = map_to_find;
5289 replace_[count_] = obj_to_replace;
5293 static const int kMaxCount = 4;
5295 Handle<Map> find_[kMaxCount];
5296 Handle<Object> replace_[kMaxCount];
5301 Object* Map::prototype() const {
5302 return READ_FIELD(this, kPrototypeOffset);
5306 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5307 DCHECK(value->IsNull() || value->IsJSReceiver());
5308 WRITE_FIELD(this, kPrototypeOffset, value);
5309 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5313 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5314 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5315 return LayoutDescriptor::cast_gc_safe(layout_desc);
5319 bool Map::HasFastPointerLayout() const {
5320 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5321 return LayoutDescriptor::IsFastPointerLayout(layout_desc);
5325 void Map::UpdateDescriptors(DescriptorArray* descriptors,
5326 LayoutDescriptor* layout_desc) {
5327 set_instance_descriptors(descriptors);
5328 if (FLAG_unbox_double_fields) {
5329 if (layout_descriptor()->IsSlowLayout()) {
5330 set_layout_descriptor(layout_desc);
5333 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5334 if (FLAG_verify_heap) {
5335 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5336 CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5339 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5340 DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5346 void Map::InitializeDescriptors(DescriptorArray* descriptors,
5347 LayoutDescriptor* layout_desc) {
5348 int len = descriptors->number_of_descriptors();
5349 set_instance_descriptors(descriptors);
5350 SetNumberOfOwnDescriptors(len);
5352 if (FLAG_unbox_double_fields) {
5353 set_layout_descriptor(layout_desc);
5355 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5356 if (FLAG_verify_heap) {
5357 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5360 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5362 set_visitor_id(StaticVisitorBase::GetVisitorId(this));
5367 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5368 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5371 void Map::set_bit_field3(uint32_t bits) {
5372 if (kInt32Size != kPointerSize) {
5373 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5375 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5379 uint32_t Map::bit_field3() const {
5380 return READ_UINT32_FIELD(this, kBitField3Offset);
5384 LayoutDescriptor* Map::GetLayoutDescriptor() {
5385 return FLAG_unbox_double_fields ? layout_descriptor()
5386 : LayoutDescriptor::FastPointerLayout();
5390 void Map::AppendDescriptor(Descriptor* desc) {
5391 DescriptorArray* descriptors = instance_descriptors();
5392 int number_of_own_descriptors = NumberOfOwnDescriptors();
5393 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5394 descriptors->Append(desc);
5395 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5397 // This function does not support appending double field descriptors and
5398 // it should never try to (otherwise, layout descriptor must be updated too).
5400 PropertyDetails details = desc->GetDetails();
5401 CHECK(details.type() != DATA || !details.representation().IsDouble());
5406 Object* Map::GetBackPointer() {
5407 Object* object = constructor_or_backpointer();
5408 if (object->IsMap()) {
5411 return GetIsolate()->heap()->undefined_value();
5415 Map* Map::ElementsTransitionMap() {
5416 return TransitionArray::SearchSpecial(
5417 this, GetHeap()->elements_transition_symbol());
5421 ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5424 Object* Map::prototype_info() const {
5425 DCHECK(is_prototype_map());
5426 return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
5430 void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
5431 DCHECK(is_prototype_map());
5432 WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
5433 CONDITIONAL_WRITE_BARRIER(
5434 GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
5438 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5439 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5440 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5441 (value->IsMap() && GetBackPointer()->IsUndefined()));
5442 DCHECK(!value->IsMap() ||
5443 Map::cast(value)->GetConstructor() == constructor_or_backpointer());
5444 set_constructor_or_backpointer(value, mode);
5448 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5449 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5450 ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
5451 ACCESSORS(Map, constructor_or_backpointer, Object,
5452 kConstructorOrBackPointerOffset)
5455 Object* Map::GetConstructor() const {
5456 Object* maybe_constructor = constructor_or_backpointer();
5457 // Follow any back pointers.
5458 while (maybe_constructor->IsMap()) {
5460 Map::cast(maybe_constructor)->constructor_or_backpointer();
5462 return maybe_constructor;
5466 void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5467 // Never overwrite a back pointer with a constructor.
5468 DCHECK(!constructor_or_backpointer()->IsMap());
5469 set_constructor_or_backpointer(constructor, mode);
5473 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5474 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5475 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5477 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5478 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5479 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5481 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5482 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5484 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5485 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5486 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5487 kExpectedReceiverTypeOffset)
5489 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5490 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5491 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5493 ACCESSORS(Box, value, Object, kValueOffset)
5495 ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
5496 SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
5497 ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
5498 ACCESSORS(PrototypeInfo, constructor_name, Object, kConstructorNameOffset)
5500 ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
5502 ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
5505 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5506 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5508 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5509 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5510 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5512 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5513 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5514 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5515 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5516 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5517 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5518 SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
5519 BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
5520 kCanInterceptSymbolsBit)
5521 BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
5522 BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
5524 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5525 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5527 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5528 SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
5529 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5530 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5532 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5533 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5534 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5535 kPrototypeTemplateOffset)
5536 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5537 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5538 kNamedPropertyHandlerOffset)
5539 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5540 kIndexedPropertyHandlerOffset)
5541 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5542 kInstanceTemplateOffset)
5543 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5544 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5545 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5546 kInstanceCallHandlerOffset)
5547 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5548 kAccessCheckInfoOffset)
5549 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5551 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5552 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5553 kInternalFieldCountOffset)
5555 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5557 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5558 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5559 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5560 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5561 kPretenureCreateCountOffset)
5562 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5563 kDependentCodeOffset)
5564 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5565 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5567 ACCESSORS(Script, source, Object, kSourceOffset)
5568 ACCESSORS(Script, name, Object, kNameOffset)
5569 ACCESSORS(Script, id, Smi, kIdOffset)
5570 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5571 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5572 ACCESSORS(Script, context_data, Object, kContextOffset)
5573 ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5574 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5575 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5576 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5577 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5578 kEvalFrominstructionsOffsetOffset)
5579 ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
5580 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5581 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5582 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5584 Script::CompilationType Script::compilation_type() {
5585 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5586 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5588 void Script::set_compilation_type(CompilationType type) {
5589 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5590 type == COMPILATION_TYPE_EVAL));
5592 bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
5593 void Script::set_hide_source(bool value) {
5594 set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
5596 Script::CompilationState Script::compilation_state() {
5597 return BooleanBit::get(flags(), kCompilationStateBit) ?
5598 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5600 void Script::set_compilation_state(CompilationState state) {
5601 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5602 state == COMPILATION_STATE_COMPILED));
5604 ScriptOriginOptions Script::origin_options() {
5605 return ScriptOriginOptions((flags()->value() & kOriginOptionsMask) >>
5606 kOriginOptionsShift);
5608 void Script::set_origin_options(ScriptOriginOptions origin_options) {
5609 DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
5610 set_flags(Smi::FromInt((flags()->value() & ~kOriginOptionsMask) |
5611 (origin_options.Flags() << kOriginOptionsShift)));
5615 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5616 ACCESSORS(DebugInfo, code, Code, kCodeIndex)
5617 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5619 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5620 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5621 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5622 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5624 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5625 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5626 kOptimizedCodeMapOffset)
5627 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5628 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5629 kFeedbackVectorOffset)
5631 SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
5633 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5634 kInstanceClassNameOffset)
5635 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5636 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5637 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5638 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5641 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5642 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5643 kHiddenPrototypeBit)
5644 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5645 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5646 kNeedsAccessCheckBit)
5647 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5648 kReadOnlyPrototypeBit)
5649 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5650 kRemovePrototypeBit)
5651 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5653 BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
5654 BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
5656 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5658 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5661 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
5662 kAllowLazyCompilation)
5663 BOOL_ACCESSORS(SharedFunctionInfo,
5665 allows_lazy_compilation_without_context,
5666 kAllowLazyCompilationWithoutContext)
5667 BOOL_ACCESSORS(SharedFunctionInfo,
5671 BOOL_ACCESSORS(SharedFunctionInfo,
5673 has_duplicate_parameters,
5674 kHasDuplicateParameters)
5675 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5676 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
5677 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
5681 #if V8_HOST_ARCH_32_BIT
5682 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5683 SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
5684 kFormalParameterCountOffset)
5685 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5686 kExpectedNofPropertiesOffset)
5687 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5688 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5689 kStartPositionAndTypeOffset)
5690 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5691 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5692 kFunctionTokenPositionOffset)
5693 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5694 kCompilerHintsOffset)
5695 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5696 kOptCountAndBailoutReasonOffset)
5697 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5698 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5699 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5703 #if V8_TARGET_LITTLE_ENDIAN
5704 #define PSEUDO_SMI_LO_ALIGN 0
5705 #define PSEUDO_SMI_HI_ALIGN kIntSize
5707 #define PSEUDO_SMI_LO_ALIGN kIntSize
5708 #define PSEUDO_SMI_HI_ALIGN 0
5711 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5712 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
5713 int holder::name() const { \
5714 int value = READ_INT_FIELD(this, offset); \
5715 DCHECK(kHeapObjectTag == 1); \
5716 DCHECK((value & kHeapObjectTag) == 0); \
5717 return value >> 1; \
5719 void holder::set_##name(int value) { \
5720 DCHECK(kHeapObjectTag == 1); \
5721 DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5722 WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag); \
5725 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5726 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5727 INT_ACCESSORS(holder, name, offset)
5730 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5731 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5732 kFormalParameterCountOffset)
5734 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5735 expected_nof_properties,
5736 kExpectedNofPropertiesOffset)
5737 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5739 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5740 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5741 start_position_and_type,
5742 kStartPositionAndTypeOffset)
5744 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5745 function_token_position,
5746 kFunctionTokenPositionOffset)
5747 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5749 kCompilerHintsOffset)
5751 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5752 opt_count_and_bailout_reason,
5753 kOptCountAndBailoutReasonOffset)
5754 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5756 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5758 kAstNodeCountOffset)
5759 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5761 kProfilerTicksOffset)
5766 BOOL_GETTER(SharedFunctionInfo,
5768 optimization_disabled,
5769 kOptimizationDisabled)
5772 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5773 set_compiler_hints(BooleanBit::set(compiler_hints(),
5774 kOptimizationDisabled,
5779 LanguageMode SharedFunctionInfo::language_mode() {
5780 STATIC_ASSERT(LANGUAGE_END == 3);
5781 return construct_language_mode(
5782 BooleanBit::get(compiler_hints(), kStrictModeFunction),
5783 BooleanBit::get(compiler_hints(), kStrongModeFunction));
5787 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5788 STATIC_ASSERT(LANGUAGE_END == 3);
5789 // We only allow language mode transitions that set the same language mode
5790 // again or go up in the chain:
5791 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
5792 int hints = compiler_hints();
5793 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
5794 hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
5795 set_compiler_hints(hints);
5799 FunctionKind SharedFunctionInfo::kind() {
5800 return FunctionKindBits::decode(compiler_hints());
5804 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5805 DCHECK(IsValidFunctionKind(kind));
5806 int hints = compiler_hints();
5807 hints = FunctionKindBits::update(hints, kind);
5808 set_compiler_hints(hints);
5812 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
5814 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5815 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
5816 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5817 name_should_print_as_anonymous,
5818 kNameShouldPrintAsAnonymous)
5819 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5820 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5821 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5822 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
5824 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5825 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5826 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5827 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
5829 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
5830 kIsAccessorFunction)
5831 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
5832 kIsDefaultConstructor)
5834 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5835 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5837 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5839 bool Script::HasValidSource() {
5840 Object* src = this->source();
5841 if (!src->IsString()) return true;
5842 String* src_str = String::cast(src);
5843 if (!StringShape(src_str).IsExternal()) return true;
5844 if (src_str->IsOneByteRepresentation()) {
5845 return ExternalOneByteString::cast(src)->resource() != NULL;
5846 } else if (src_str->IsTwoByteRepresentation()) {
5847 return ExternalTwoByteString::cast(src)->resource() != NULL;
5853 void SharedFunctionInfo::DontAdaptArguments() {
5854 DCHECK(code()->kind() == Code::BUILTIN);
5855 set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
5859 int SharedFunctionInfo::start_position() const {
5860 return start_position_and_type() >> kStartPositionShift;
5864 void SharedFunctionInfo::set_start_position(int start_position) {
5865 set_start_position_and_type((start_position << kStartPositionShift)
5866 | (start_position_and_type() & ~kStartPositionMask));
5870 Code* SharedFunctionInfo::code() const {
5871 return Code::cast(READ_FIELD(this, kCodeOffset));
5875 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5876 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5877 WRITE_FIELD(this, kCodeOffset, value);
5878 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5882 void SharedFunctionInfo::ReplaceCode(Code* value) {
5883 // If the GC metadata field is already used then the function was
5884 // enqueued as a code flushing candidate and we remove it now.
5885 if (code()->gc_metadata() != NULL) {
5886 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5887 flusher->EvictCandidate(this);
5890 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5892 Code::VerifyRecompiledCode(code(), value);
5897 if (is_compiled()) set_never_compiled(false);
5901 ScopeInfo* SharedFunctionInfo::scope_info() const {
5902 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5906 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5907 WriteBarrierMode mode) {
5908 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5909 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5912 reinterpret_cast<Object*>(value),
5917 bool SharedFunctionInfo::is_compiled() {
5918 Builtins* builtins = GetIsolate()->builtins();
5919 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
5920 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
5921 return code() != builtins->builtin(Builtins::kCompileLazy);
5925 bool SharedFunctionInfo::has_simple_parameters() {
5926 return scope_info()->HasSimpleParameters();
5930 bool SharedFunctionInfo::HasDebugInfo() {
5931 bool has_debug_info = debug_info()->IsStruct();
5932 DCHECK(!has_debug_info || HasDebugCode());
5933 return has_debug_info;
5937 DebugInfo* SharedFunctionInfo::GetDebugInfo() {
5938 DCHECK(HasDebugInfo());
5939 return DebugInfo::cast(debug_info());
5943 bool SharedFunctionInfo::HasDebugCode() {
5944 return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
5948 bool SharedFunctionInfo::IsApiFunction() {
5949 return function_data()->IsFunctionTemplateInfo();
5953 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5954 DCHECK(IsApiFunction());
5955 return FunctionTemplateInfo::cast(function_data());
5959 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5960 return function_data()->IsSmi();
5964 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5965 DCHECK(HasBuiltinFunctionId());
5966 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5970 bool SharedFunctionInfo::HasBytecodeArray() {
5971 return function_data()->IsBytecodeArray();
5975 BytecodeArray* SharedFunctionInfo::bytecode_array() {
5976 DCHECK(HasBytecodeArray());
5977 return BytecodeArray::cast(function_data());
5981 int SharedFunctionInfo::ic_age() {
5982 return ICAgeBits::decode(counters());
5986 void SharedFunctionInfo::set_ic_age(int ic_age) {
5987 set_counters(ICAgeBits::update(counters(), ic_age));
5991 int SharedFunctionInfo::deopt_count() {
5992 return DeoptCountBits::decode(counters());
5996 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5997 set_counters(DeoptCountBits::update(counters(), deopt_count));
6001 void SharedFunctionInfo::increment_deopt_count() {
6002 int value = counters();
6003 int deopt_count = DeoptCountBits::decode(value);
6004 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6005 set_counters(DeoptCountBits::update(value, deopt_count));
6009 int SharedFunctionInfo::opt_reenable_tries() {
6010 return OptReenableTriesBits::decode(counters());
6014 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6015 set_counters(OptReenableTriesBits::update(counters(), tries));
6019 int SharedFunctionInfo::opt_count() {
6020 return OptCountBits::decode(opt_count_and_bailout_reason());
6024 void SharedFunctionInfo::set_opt_count(int opt_count) {
6025 set_opt_count_and_bailout_reason(
6026 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6030 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
6031 return static_cast<BailoutReason>(
6032 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6036 bool SharedFunctionInfo::has_deoptimization_support() {
6037 Code* code = this->code();
6038 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6042 void SharedFunctionInfo::TryReenableOptimization() {
6043 int tries = opt_reenable_tries();
6044 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6045 // We reenable optimization whenever the number of tries is a large
6046 // enough power of 2.
6047 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6048 set_optimization_disabled(false);
6055 void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
6056 set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
6057 opt_count_and_bailout_reason(), reason));
6061 bool SharedFunctionInfo::IsBuiltin() {
6062 Object* script_obj = script();
6063 if (script_obj->IsUndefined()) return true;
6064 Script* script = Script::cast(script_obj);
6065 Script::Type type = static_cast<Script::Type>(script->type()->value());
6066 return type != Script::TYPE_NORMAL;
6070 bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }
6073 bool JSFunction::IsBuiltin() { return shared()->IsBuiltin(); }
6076 bool JSFunction::IsSubjectToDebugging() {
6077 return shared()->IsSubjectToDebugging();
6081 bool JSFunction::NeedsArgumentsAdaption() {
6082 return shared()->internal_formal_parameter_count() !=
6083 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
6087 bool JSFunction::IsOptimized() {
6088 return code()->kind() == Code::OPTIMIZED_FUNCTION;
6092 bool JSFunction::IsMarkedForOptimization() {
6093 return code() == GetIsolate()->builtins()->builtin(
6094 Builtins::kCompileOptimized);
6098 bool JSFunction::IsMarkedForConcurrentOptimization() {
6099 return code() == GetIsolate()->builtins()->builtin(
6100 Builtins::kCompileOptimizedConcurrent);
6104 bool JSFunction::IsInOptimizationQueue() {
6105 return code() == GetIsolate()->builtins()->builtin(
6106 Builtins::kInOptimizationQueue);
6110 bool JSFunction::IsInobjectSlackTrackingInProgress() {
6111 return has_initial_map() &&
6112 initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
6116 Code* JSFunction::code() {
6118 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6122 void JSFunction::set_code(Code* value) {
6123 DCHECK(!GetHeap()->InNewSpace(value));
6124 Address entry = value->entry();
6125 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6126 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6128 HeapObject::RawField(this, kCodeEntryOffset),
6133 void JSFunction::set_code_no_write_barrier(Code* value) {
6134 DCHECK(!GetHeap()->InNewSpace(value));
6135 Address entry = value->entry();
6136 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6140 void JSFunction::ReplaceCode(Code* code) {
6141 bool was_optimized = IsOptimized();
6142 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6144 if (was_optimized && is_optimized) {
6145 shared()->EvictFromOptimizedCodeMap(this->code(),
6146 "Replacing with another optimized code");
6151 // Add/remove the function from the list of optimized functions for this
6152 // context based on the state change.
6153 if (!was_optimized && is_optimized) {
6154 context()->native_context()->AddOptimizedFunction(this);
6156 if (was_optimized && !is_optimized) {
6157 // TODO(titzer): linear in the number of optimized functions; fix!
6158 context()->native_context()->RemoveOptimizedFunction(this);
6163 Context* JSFunction::context() {
6164 return Context::cast(READ_FIELD(this, kContextOffset));
6168 JSObject* JSFunction::global_proxy() {
6169 return context()->global_proxy();
6173 void JSFunction::set_context(Object* value) {
6174 DCHECK(value->IsUndefined() || value->IsContext());
6175 WRITE_FIELD(this, kContextOffset, value);
6176 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6179 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6180 kPrototypeOrInitialMapOffset)
6183 Map* JSFunction::initial_map() {
6184 return Map::cast(prototype_or_initial_map());
6188 bool JSFunction::has_initial_map() {
6189 return prototype_or_initial_map()->IsMap();
6193 bool JSFunction::has_instance_prototype() {
6194 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6198 bool JSFunction::has_prototype() {
6199 return map()->has_non_instance_prototype() || has_instance_prototype();
6203 Object* JSFunction::instance_prototype() {
6204 DCHECK(has_instance_prototype());
6205 if (has_initial_map()) return initial_map()->prototype();
6206 // When there is no initial map and the prototype is a JSObject, the
6207 // initial map field is used for the prototype field.
6208 return prototype_or_initial_map();
6212 Object* JSFunction::prototype() {
6213 DCHECK(has_prototype());
6214 // If the function's prototype property has been set to a non-JSObject
6215 // value, that value is stored in the constructor field of the map.
6216 if (map()->has_non_instance_prototype()) {
6217 Object* prototype = map()->GetConstructor();
6218 // The map must have a prototype in that field, not a back pointer.
6219 DCHECK(!prototype->IsMap());
6222 return instance_prototype();
6226 bool JSFunction::should_have_prototype() {
6227 return map()->function_with_prototype();
6231 bool JSFunction::is_compiled() {
6232 Builtins* builtins = GetIsolate()->builtins();
6233 return code() != builtins->builtin(Builtins::kCompileLazy) &&
6234 code() != builtins->builtin(Builtins::kCompileOptimized) &&
6235 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6239 bool JSFunction::has_simple_parameters() {
6240 return shared()->has_simple_parameters();
6244 FixedArray* JSFunction::literals() {
6245 DCHECK(!shared()->bound());
6246 return literals_or_bindings();
6250 void JSFunction::set_literals(FixedArray* literals) {
6251 DCHECK(!shared()->bound());
6252 set_literals_or_bindings(literals);
6256 FixedArray* JSFunction::function_bindings() {
6257 DCHECK(shared()->bound());
6258 return literals_or_bindings();
6262 void JSFunction::set_function_bindings(FixedArray* bindings) {
6263 DCHECK(shared()->bound());
6264 // Bound function literal may be initialized to the empty fixed array
6265 // before the bindings are set.
6266 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6267 bindings->map() == GetHeap()->fixed_array_map());
6268 set_literals_or_bindings(bindings);
6272 int JSFunction::NumberOfLiterals() {
6273 DCHECK(!shared()->bound());
6274 return literals()->length();
6278 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6279 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6280 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6281 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6284 void JSProxy::InitializeBody(int object_size, Object* value) {
6285 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6286 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6287 WRITE_FIELD(this, offset, value);
6292 ACCESSORS(JSCollection, table, Object, kTableOffset)
6295 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6296 template<class Derived, class TableType> \
6297 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6298 return type::cast(READ_FIELD(this, offset)); \
6300 template<class Derived, class TableType> \
6301 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6302 type* value, WriteBarrierMode mode) { \
6303 WRITE_FIELD(this, offset, value); \
6304 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6307 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6308 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6309 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6311 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6314 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6315 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6318 Address Foreign::foreign_address() {
6319 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6323 void Foreign::set_foreign_address(Address value) {
6324 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6328 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6329 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6330 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6331 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6332 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6334 bool JSGeneratorObject::is_suspended() {
6335 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6336 DCHECK_EQ(kGeneratorClosed, 0);
6337 return continuation() > 0;
6340 bool JSGeneratorObject::is_closed() {
6341 return continuation() == kGeneratorClosed;
6344 bool JSGeneratorObject::is_executing() {
6345 return continuation() == kGeneratorExecuting;
6348 ACCESSORS(JSModule, context, Object, kContextOffset)
6349 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6352 ACCESSORS(JSValue, value, Object, kValueOffset)
6355 HeapNumber* HeapNumber::cast(Object* object) {
6356 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6357 return reinterpret_cast<HeapNumber*>(object);
6361 const HeapNumber* HeapNumber::cast(const Object* object) {
6362 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6363 return reinterpret_cast<const HeapNumber*>(object);
6367 ACCESSORS(JSDate, value, Object, kValueOffset)
6368 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6369 ACCESSORS(JSDate, year, Object, kYearOffset)
6370 ACCESSORS(JSDate, month, Object, kMonthOffset)
6371 ACCESSORS(JSDate, day, Object, kDayOffset)
6372 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6373 ACCESSORS(JSDate, hour, Object, kHourOffset)
6374 ACCESSORS(JSDate, min, Object, kMinOffset)
6375 ACCESSORS(JSDate, sec, Object, kSecOffset)
6378 SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
6379 ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
6380 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6381 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6382 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6383 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6386 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6387 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6388 INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
6389 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6390 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6391 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6392 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6393 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6396 void Code::WipeOutHeader() {
6397 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6398 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6399 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6400 // Do not wipe out major/minor keys on a code stub or IC
6401 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6402 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6407 Object* Code::type_feedback_info() {
6408 DCHECK(kind() == FUNCTION);
6409 return raw_type_feedback_info();
6413 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6414 DCHECK(kind() == FUNCTION);
6415 set_raw_type_feedback_info(value, mode);
6416 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6421 uint32_t Code::stub_key() {
6422 DCHECK(IsCodeStubOrIC());
6423 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6424 return static_cast<uint32_t>(smi_key->value());
6428 void Code::set_stub_key(uint32_t key) {
6429 DCHECK(IsCodeStubOrIC());
6430 set_raw_type_feedback_info(Smi::FromInt(key));
6434 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6435 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6438 byte* Code::instruction_start() {
6439 return FIELD_ADDR(this, kHeaderSize);
6443 byte* Code::instruction_end() {
6444 return instruction_start() + instruction_size();
6448 int Code::body_size() {
6449 return RoundUp(instruction_size(), kObjectAlignment);
6453 ByteArray* Code::unchecked_relocation_info() {
6454 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6458 byte* Code::relocation_start() {
6459 return unchecked_relocation_info()->GetDataStartAddress();
6463 int Code::relocation_size() {
6464 return unchecked_relocation_info()->length();
6468 byte* Code::entry() {
6469 return instruction_start();
6473 bool Code::contains(byte* inner_pointer) {
6474 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6478 int Code::ExecutableSize() {
6479 // Check that the assumptions about the layout of the code object holds.
6480 DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6482 return instruction_size() + Code::kHeaderSize;
6486 int Code::CodeSize() { return SizeFor(body_size()); }
6489 ACCESSORS(JSArray, length, Object, kLengthOffset)
6492 void* JSArrayBuffer::backing_store() const {
6493 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6494 return reinterpret_cast<void*>(ptr);
6498 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6499 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6500 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6504 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6507 void JSArrayBuffer::set_bit_field(uint32_t bits) {
6508 if (kInt32Size != kPointerSize) {
6509 #if V8_TARGET_LITTLE_ENDIAN
6510 WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
6512 WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
6515 WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
6519 uint32_t JSArrayBuffer::bit_field() const {
6520 return READ_UINT32_FIELD(this, kBitFieldOffset);
6524 bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
6527 void JSArrayBuffer::set_is_external(bool value) {
6528 set_bit_field(IsExternal::update(bit_field(), value));
6532 bool JSArrayBuffer::is_neuterable() {
6533 return IsNeuterable::decode(bit_field());
6537 void JSArrayBuffer::set_is_neuterable(bool value) {
6538 set_bit_field(IsNeuterable::update(bit_field(), value));
6542 bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
6545 void JSArrayBuffer::set_was_neutered(bool value) {
6546 set_bit_field(WasNeutered::update(bit_field(), value));
6550 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
6553 void JSArrayBuffer::set_is_shared(bool value) {
6554 set_bit_field(IsShared::update(bit_field(), value));
6558 Object* JSArrayBufferView::byte_offset() const {
6559 if (WasNeutered()) return Smi::FromInt(0);
6560 return Object::cast(READ_FIELD(this, kByteOffsetOffset));
6564 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
6565 WRITE_FIELD(this, kByteOffsetOffset, value);
6566 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
6570 Object* JSArrayBufferView::byte_length() const {
6571 if (WasNeutered()) return Smi::FromInt(0);
6572 return Object::cast(READ_FIELD(this, kByteLengthOffset));
6576 void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
6577 WRITE_FIELD(this, kByteLengthOffset, value);
6578 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
6582 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6584 ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
6585 ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
6589 bool JSArrayBufferView::WasNeutered() const {
6590 return JSArrayBuffer::cast(buffer())->was_neutered();
6594 Object* JSTypedArray::length() const {
6595 if (WasNeutered()) return Smi::FromInt(0);
6596 return Object::cast(READ_FIELD(this, kLengthOffset));
6600 uint32_t JSTypedArray::length_value() const {
6601 if (WasNeutered()) return 0;
6603 CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
6608 void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
6609 WRITE_FIELD(this, kLengthOffset, value);
6610 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
6615 ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
6619 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6622 JSRegExp::Type JSRegExp::TypeTag() {
6623 Object* data = this->data();
6624 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6625 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6626 return static_cast<JSRegExp::Type>(smi->value());
6630 int JSRegExp::CaptureCount() {
6631 switch (TypeTag()) {
6635 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6643 JSRegExp::Flags JSRegExp::GetFlags() {
6644 DCHECK(this->data()->IsFixedArray());
6645 Object* data = this->data();
6646 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6647 return Flags(smi->value());
6651 String* JSRegExp::Pattern() {
6652 DCHECK(this->data()->IsFixedArray());
6653 Object* data = this->data();
6654 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6659 Object* JSRegExp::DataAt(int index) {
6660 DCHECK(TypeTag() != NOT_COMPILED);
6661 return FixedArray::cast(data())->get(index);
6665 void JSRegExp::SetDataAt(int index, Object* value) {
6666 DCHECK(TypeTag() != NOT_COMPILED);
6667 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6668 FixedArray::cast(data())->set(index, value);
6672 ElementsKind JSObject::GetElementsKind() {
6673 ElementsKind kind = map()->elements_kind();
6674 #if VERIFY_HEAP && DEBUG
6675 FixedArrayBase* fixed_array =
6676 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6678 // If a GC was caused while constructing this object, the elements
6679 // pointer may point to a one pointer filler map.
6680 if (ElementsAreSafeToExamine()) {
6681 Map* map = fixed_array->map();
6682 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6683 (map == GetHeap()->fixed_array_map() ||
6684 map == GetHeap()->fixed_cow_array_map())) ||
6685 (IsFastDoubleElementsKind(kind) &&
6686 (fixed_array->IsFixedDoubleArray() ||
6687 fixed_array == GetHeap()->empty_fixed_array())) ||
6688 (kind == DICTIONARY_ELEMENTS &&
6689 fixed_array->IsFixedArray() &&
6690 fixed_array->IsDictionary()) ||
6691 (kind > DICTIONARY_ELEMENTS));
6692 DCHECK(!IsSloppyArgumentsElements(kind) ||
6693 (elements()->IsFixedArray() && elements()->length() >= 2));
6700 bool JSObject::HasFastObjectElements() {
6701 return IsFastObjectElementsKind(GetElementsKind());
6705 bool JSObject::HasFastSmiElements() {
6706 return IsFastSmiElementsKind(GetElementsKind());
6710 bool JSObject::HasFastSmiOrObjectElements() {
6711 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6715 bool JSObject::HasFastDoubleElements() {
6716 return IsFastDoubleElementsKind(GetElementsKind());
6720 bool JSObject::HasFastHoleyElements() {
6721 return IsFastHoleyElementsKind(GetElementsKind());
6725 bool JSObject::HasFastElements() {
6726 return IsFastElementsKind(GetElementsKind());
6730 bool JSObject::HasDictionaryElements() {
6731 return GetElementsKind() == DICTIONARY_ELEMENTS;
6735 bool JSObject::HasFastArgumentsElements() {
6736 return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
6740 bool JSObject::HasSlowArgumentsElements() {
6741 return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
6745 bool JSObject::HasSloppyArgumentsElements() {
6746 return IsSloppyArgumentsElements(GetElementsKind());
6750 bool JSObject::HasFixedTypedArrayElements() {
6751 HeapObject* array = elements();
6752 DCHECK(array != NULL);
6753 return array->IsFixedTypedArrayBase();
6757 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6758 bool JSObject::HasFixed##Type##Elements() { \
6759 HeapObject* array = elements(); \
6760 DCHECK(array != NULL); \
6761 if (!array->IsHeapObject()) \
6763 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6766 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6768 #undef FIXED_TYPED_ELEMENTS_CHECK
6771 bool JSObject::HasNamedInterceptor() {
6772 return map()->has_named_interceptor();
6776 bool JSObject::HasIndexedInterceptor() {
6777 return map()->has_indexed_interceptor();
6781 NameDictionary* JSObject::property_dictionary() {
6782 DCHECK(!HasFastProperties());
6783 DCHECK(!IsGlobalObject());
6784 return NameDictionary::cast(properties());
6788 GlobalDictionary* JSObject::global_dictionary() {
6789 DCHECK(!HasFastProperties());
6790 DCHECK(IsGlobalObject());
6791 return GlobalDictionary::cast(properties());
6795 SeededNumberDictionary* JSObject::element_dictionary() {
6796 DCHECK(HasDictionaryElements());
6797 return SeededNumberDictionary::cast(elements());
6801 bool Name::IsHashFieldComputed(uint32_t field) {
6802 return (field & kHashNotComputedMask) == 0;
6806 bool Name::HasHashCode() {
6807 return IsHashFieldComputed(hash_field());
6811 uint32_t Name::Hash() {
6812 // Fast case: has hash code already been computed?
6813 uint32_t field = hash_field();
6814 if (IsHashFieldComputed(field)) return field >> kHashShift;
6815 // Slow case: compute hash code and set it. Has to be a string.
6816 return String::cast(this)->ComputeAndSetHash();
6820 bool Name::IsPrivate() {
6821 return this->IsSymbol() && Symbol::cast(this)->is_private();
6825 StringHasher::StringHasher(int length, uint32_t seed)
6827 raw_running_hash_(seed),
6829 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6830 is_first_char_(true) {
6831 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6835 bool StringHasher::has_trivial_hash() {
6836 return length_ > String::kMaxHashCalcLength;
6840 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6842 running_hash += (running_hash << 10);
6843 running_hash ^= (running_hash >> 6);
6844 return running_hash;
6848 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6849 running_hash += (running_hash << 3);
6850 running_hash ^= (running_hash >> 11);
6851 running_hash += (running_hash << 15);
6852 if ((running_hash & String::kHashBitMask) == 0) {
6855 return running_hash;
6859 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
6860 const uc16* chars, int length) {
6861 DCHECK_NOT_NULL(chars);
6862 DCHECK(length >= 0);
6863 for (int i = 0; i < length; ++i) {
6864 running_hash = AddCharacterCore(running_hash, *chars++);
6866 return running_hash;
6870 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
6873 DCHECK_NOT_NULL(chars);
6874 DCHECK(length >= 0);
6875 for (int i = 0; i < length; ++i) {
6876 uint16_t c = static_cast<uint16_t>(*chars++);
6877 running_hash = AddCharacterCore(running_hash, c);
6879 return running_hash;
6883 void StringHasher::AddCharacter(uint16_t c) {
6884 // Use the Jenkins one-at-a-time hash function to update the hash
6885 // for the given character.
6886 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6890 bool StringHasher::UpdateIndex(uint16_t c) {
6891 DCHECK(is_array_index_);
6892 if (c < '0' || c > '9') {
6893 is_array_index_ = false;
6897 if (is_first_char_) {
6898 is_first_char_ = false;
6899 if (c == '0' && length_ > 1) {
6900 is_array_index_ = false;
6904 if (array_index_ > 429496729U - ((d + 3) >> 3)) {
6905 is_array_index_ = false;
6908 array_index_ = array_index_ * 10 + d;
6913 template<typename Char>
6914 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6915 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6917 if (is_array_index_) {
6918 for (; i < length; i++) {
6919 AddCharacter(chars[i]);
6920 if (!UpdateIndex(chars[i])) {
6926 for (; i < length; i++) {
6927 DCHECK(!is_array_index_);
6928 AddCharacter(chars[i]);
6933 template <typename schar>
6934 uint32_t StringHasher::HashSequentialString(const schar* chars,
6937 StringHasher hasher(length, seed);
6938 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6939 return hasher.GetHashField();
6943 IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
6944 : StringHasher(len, seed) {}
6947 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6948 IteratingStringHasher hasher(string->length(), seed);
6950 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6951 ConsString* cons_string = String::VisitFlat(&hasher, string);
6952 if (cons_string == nullptr) return hasher.GetHashField();
6953 hasher.VisitConsString(cons_string);
6954 return hasher.GetHashField();
6958 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6960 AddCharacters(chars, length);
6964 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6966 AddCharacters(chars, length);
6970 bool Name::AsArrayIndex(uint32_t* index) {
6971 return IsString() && String::cast(this)->AsArrayIndex(index);
6975 bool String::AsArrayIndex(uint32_t* index) {
6976 uint32_t field = hash_field();
6977 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6980 return SlowAsArrayIndex(index);
6984 void String::SetForwardedInternalizedString(String* canonical) {
6985 DCHECK(IsInternalizedString());
6986 DCHECK(HasHashCode());
6987 if (canonical == this) return; // No need to forward.
6988 DCHECK(SlowEquals(canonical));
6989 DCHECK(canonical->IsInternalizedString());
6990 DCHECK(canonical->HasHashCode());
6991 WRITE_FIELD(this, kHashFieldSlot, canonical);
6992 // Setting the hash field to a tagged value sets the LSB, causing the hash
6993 // code to be interpreted as uninitialized. We use this fact to recognize
6994 // that we have a forwarded string.
6995 DCHECK(!HasHashCode());
6999 String* String::GetForwardedInternalizedString() {
7000 DCHECK(IsInternalizedString());
7001 if (HasHashCode()) return this;
7002 String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
7003 DCHECK(canonical->IsInternalizedString());
7004 DCHECK(SlowEquals(canonical));
7005 DCHECK(canonical->HasHashCode());
7010 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
7012 LanguageMode language_mode) {
7014 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7015 return GetProperty(&it, language_mode);
7019 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
7020 Handle<Name> name) {
7021 // Call the "has" trap on proxies.
7022 if (object->IsJSProxy()) {
7023 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7024 return JSProxy::HasPropertyWithHandler(proxy, name);
7027 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
7028 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7032 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7033 Handle<Name> name) {
7034 // Call the "has" trap on proxies.
7035 if (object->IsJSProxy()) {
7036 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7037 return JSProxy::HasPropertyWithHandler(proxy, name);
7040 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
7041 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7045 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
7046 Handle<JSReceiver> object, Handle<Name> name) {
7048 LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7049 return GetPropertyAttributes(&it);
7053 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7054 Handle<JSReceiver> object, Handle<Name> name) {
7055 LookupIterator it = LookupIterator::PropertyOrElement(
7056 name->GetIsolate(), object, name, LookupIterator::HIDDEN);
7057 return GetPropertyAttributes(&it);
7061 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7062 // Call the "has" trap on proxies.
7063 if (object->IsJSProxy()) {
7064 Isolate* isolate = object->GetIsolate();
7065 Handle<Name> name = isolate->factory()->Uint32ToString(index);
7066 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7067 return JSProxy::HasPropertyWithHandler(proxy, name);
7070 Maybe<PropertyAttributes> result = GetElementAttributes(object, index);
7071 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7075 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
7077 // Call the "has" trap on proxies.
7078 if (object->IsJSProxy()) {
7079 Isolate* isolate = object->GetIsolate();
7080 Handle<Name> name = isolate->factory()->Uint32ToString(index);
7081 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7082 return JSProxy::HasPropertyWithHandler(proxy, name);
7085 Maybe<PropertyAttributes> result = GetOwnElementAttributes(object, index);
7086 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
7090 Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
7091 Handle<JSReceiver> object, uint32_t index) {
7092 Isolate* isolate = object->GetIsolate();
7093 LookupIterator it(isolate, object, index);
7094 return GetPropertyAttributes(&it);
7098 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
7099 Handle<JSReceiver> object, uint32_t index) {
7100 Isolate* isolate = object->GetIsolate();
7101 LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
7102 return GetPropertyAttributes(&it);
7106 bool JSGlobalObject::IsDetached() {
7107 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7111 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
7112 const PrototypeIterator iter(this->GetIsolate(),
7113 const_cast<JSGlobalProxy*>(this));
7114 return iter.GetCurrent() != global;
7118 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
7119 return object->IsJSProxy()
7120 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
7121 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
7125 Object* JSReceiver::GetIdentityHash() {
7127 ? JSProxy::cast(this)->GetIdentityHash()
7128 : JSObject::cast(this)->GetIdentityHash();
7132 bool AccessorInfo::all_can_read() {
7133 return BooleanBit::get(flag(), kAllCanReadBit);
7137 void AccessorInfo::set_all_can_read(bool value) {
7138 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7142 bool AccessorInfo::all_can_write() {
7143 return BooleanBit::get(flag(), kAllCanWriteBit);
7147 void AccessorInfo::set_all_can_write(bool value) {
7148 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7152 bool AccessorInfo::is_special_data_property() {
7153 return BooleanBit::get(flag(), kSpecialDataProperty);
7157 void AccessorInfo::set_is_special_data_property(bool value) {
7158 set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
7162 PropertyAttributes AccessorInfo::property_attributes() {
7163 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
7167 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7168 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
7172 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7173 if (!HasExpectedReceiverType()) return true;
7174 if (!receiver->IsJSObject()) return false;
7175 return FunctionTemplateInfo::cast(expected_receiver_type())
7176 ->IsTemplateFor(JSObject::cast(receiver)->map());
7180 bool AccessorInfo::HasExpectedReceiverType() {
7181 return expected_receiver_type()->IsFunctionTemplateInfo();
7185 Object* AccessorPair::get(AccessorComponent component) {
7186 return component == ACCESSOR_GETTER ? getter() : setter();
7190 void AccessorPair::set(AccessorComponent component, Object* value) {
7191 if (component == ACCESSOR_GETTER) {
7199 void AccessorPair::SetComponents(Object* getter, Object* setter) {
7200 if (!getter->IsNull()) set_getter(getter);
7201 if (!setter->IsNull()) set_setter(setter);
7205 bool AccessorPair::Equals(AccessorPair* pair) {
7206 return (this == pair) || pair->Equals(getter(), setter());
7210 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7211 return (getter() == getter_value) && (setter() == setter_value);
7215 bool AccessorPair::ContainsAccessor() {
7216 return IsJSAccessor(getter()) || IsJSAccessor(setter());
7220 bool AccessorPair::IsJSAccessor(Object* obj) {
7221 return obj->IsSpecFunction() || obj->IsUndefined();
7225 template<typename Derived, typename Shape, typename Key>
7226 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7228 Handle<Object> value) {
7229 this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7233 template<typename Derived, typename Shape, typename Key>
7234 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7236 Handle<Object> value,
7237 PropertyDetails details) {
7238 Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
7242 template <typename Key>
7243 template <typename Dictionary>
7244 void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
7246 Handle<Object> value,
7247 PropertyDetails details) {
7248 STATIC_ASSERT(Dictionary::kEntrySize == 3);
7249 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7250 int index = dict->EntryToIndex(entry);
7251 DisallowHeapAllocation no_gc;
7252 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7253 dict->set(index, *key, mode);
7254 dict->set(index + 1, *value, mode);
7255 dict->set(index + 2, details.AsSmi());
7259 template <typename Dictionary>
7260 void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
7261 Handle<Object> key, Handle<Object> value,
7262 PropertyDetails details) {
7263 STATIC_ASSERT(Dictionary::kEntrySize == 2);
7264 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7265 DCHECK(value->IsPropertyCell());
7266 int index = dict->EntryToIndex(entry);
7267 DisallowHeapAllocation no_gc;
7268 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7269 dict->set(index, *key, mode);
7270 dict->set(index + 1, *value, mode);
7271 PropertyCell::cast(*value)->set_property_details(details);
7275 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7276 DCHECK(other->IsNumber());
7277 return key == static_cast<uint32_t>(other->Number());
7281 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7282 return ComputeIntegerHash(key, 0);
7286 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7288 DCHECK(other->IsNumber());
7289 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7293 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7294 return ComputeIntegerHash(key, seed);
7298 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7301 DCHECK(other->IsNumber());
7302 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7306 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7307 return isolate->factory()->NewNumberFromUint(key);
7311 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7312 // We know that all entries in a hash table had their hash keys created.
7313 // Use that knowledge to have fast failure.
7314 if (key->Hash() != Name::cast(other)->Hash()) return false;
7315 return key->Equals(Name::cast(other));
7319 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7324 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7325 return Name::cast(other)->Hash();
7329 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7331 DCHECK(key->IsUniqueName());
7336 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7337 Handle<NameDictionary> dictionary) {
7338 return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7342 template <typename Dictionary>
7343 PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7344 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7345 Object* raw_value = dict->ValueAt(entry);
7346 DCHECK(raw_value->IsPropertyCell());
7347 PropertyCell* cell = PropertyCell::cast(raw_value);
7348 return cell->property_details();
7352 template <typename Dictionary>
7353 void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7354 PropertyDetails value) {
7355 DCHECK(entry >= 0); // Not found is -1, which is not caught by get().
7356 Object* raw_value = dict->ValueAt(entry);
7357 DCHECK(raw_value->IsPropertyCell());
7358 PropertyCell* cell = PropertyCell::cast(raw_value);
7359 cell->set_property_details(value);
7363 template <typename Dictionary>
7364 bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7365 DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7366 return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
7370 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7371 return key->SameValue(other);
7375 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7376 return Smi::cast(key->GetHash())->value();
7380 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7382 return Smi::cast(other->GetHash())->value();
7386 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7387 Handle<Object> key) {
7392 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7393 Handle<ObjectHashTable> table, Handle<Object> key) {
7394 return DerivedHashTable::Shrink(table, key);
7398 Object* OrderedHashMap::ValueAt(int entry) {
7399 return get(EntryToIndex(entry) + kValueOffset);
7403 template <int entrysize>
7404 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7405 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7406 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7411 template <int entrysize>
7412 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7415 ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7416 : reinterpret_cast<intptr_t>(*key);
7417 return (uint32_t)(hash & 0xFFFFFFFF);
7421 template <int entrysize>
7422 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7424 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7425 intptr_t hash = reinterpret_cast<intptr_t>(other);
7426 return (uint32_t)(hash & 0xFFFFFFFF);
7430 template <int entrysize>
7431 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7432 Handle<Object> key) {
7437 bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
7440 bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
7443 bool ScopeInfo::HasSimpleParameters() {
7444 return HasSimpleParametersField::decode(Flags());
7448 #define SCOPE_INFO_FIELD_ACCESSORS(name) \
7449 void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
7450 int ScopeInfo::name() { \
7451 if (length() > 0) { \
7452 return Smi::cast(get(k##name))->value(); \
7457 FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
7458 #undef SCOPE_INFO_FIELD_ACCESSORS
7461 void Map::ClearCodeCache(Heap* heap) {
7462 // No write barrier is needed since empty_fixed_array is not in new space.
7463 // Please note this function is used during marking:
7464 // - MarkCompactCollector::MarkUnmarkedObject
7465 // - IncrementalMarking::Step
7466 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7467 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7471 int Map::SlackForArraySize(int old_size, int size_limit) {
7472 const int max_slack = size_limit - old_size;
7473 CHECK_LE(0, max_slack);
7475 DCHECK_LE(1, max_slack);
7478 return Min(max_slack, old_size / 4);
7482 void JSArray::set_length(Smi* length) {
7483 // Don't need a write barrier for a Smi.
7484 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7488 bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
7489 // If the new array won't fit in a some non-trivial fraction of the max old
7490 // space size, then force it to go dictionary mode.
7491 uint32_t max_fast_array_size =
7492 static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
7493 return new_length >= max_fast_array_size;
7497 bool JSArray::AllowsSetLength() {
7498 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7499 DCHECK(result == !HasFixedTypedArrayElements());
7504 void JSArray::SetContent(Handle<JSArray> array,
7505 Handle<FixedArrayBase> storage) {
7506 EnsureCanContainElements(array, storage, storage->length(),
7507 ALLOW_COPIED_DOUBLE_ELEMENTS);
7509 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7510 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7511 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7512 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7513 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7514 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7515 array->set_elements(*storage);
7516 array->set_length(Smi::FromInt(storage->length()));
7520 int TypeFeedbackInfo::ic_total_count() {
7521 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7522 return ICTotalCountField::decode(current);
7526 void TypeFeedbackInfo::set_ic_total_count(int count) {
7527 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7528 value = ICTotalCountField::update(value,
7529 ICTotalCountField::decode(count));
7530 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7534 int TypeFeedbackInfo::ic_with_type_info_count() {
7535 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7536 return ICsWithTypeInfoCountField::decode(current);
7540 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7541 if (delta == 0) return;
7542 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7543 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7544 // We can get negative count here when the type-feedback info is
7545 // shared between two code objects. The can only happen when
7546 // the debugger made a shallow copy of code object (see Heap::CopyCode).
7547 // Since we do not optimize when the debugger is active, we can skip
7548 // this counter update.
7549 if (new_count >= 0) {
7550 new_count &= ICsWithTypeInfoCountField::kMask;
7551 value = ICsWithTypeInfoCountField::update(value, new_count);
7552 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7557 int TypeFeedbackInfo::ic_generic_count() {
7558 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7562 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7563 if (delta == 0) return;
7564 int new_count = ic_generic_count() + delta;
7565 if (new_count >= 0) {
7566 new_count &= ~Smi::kMinValue;
7567 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7572 void TypeFeedbackInfo::initialize_storage() {
7573 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7574 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7575 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7579 void TypeFeedbackInfo::change_own_type_change_checksum() {
7580 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7581 int checksum = OwnTypeChangeChecksum::decode(value);
7582 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7583 value = OwnTypeChangeChecksum::update(value, checksum);
7584 // Ensure packed bit field is in Smi range.
7585 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7586 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7587 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7591 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7592 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7593 int mask = (1 << kTypeChangeChecksumBits) - 1;
7594 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7595 // Ensure packed bit field is in Smi range.
7596 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7597 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7598 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7602 int TypeFeedbackInfo::own_type_change_checksum() {
7603 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7604 return OwnTypeChangeChecksum::decode(value);
7608 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7609 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7610 int mask = (1 << kTypeChangeChecksumBits) - 1;
7611 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7615 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7618 Relocatable::Relocatable(Isolate* isolate) {
7620 prev_ = isolate->relocatable_top();
7621 isolate->set_relocatable_top(this);
7625 Relocatable::~Relocatable() {
7626 DCHECK_EQ(isolate_->relocatable_top(), this);
7627 isolate_->set_relocatable_top(prev_);
7632 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7633 return map->instance_size();
7638 int FixedArray::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7639 return SizeFor(reinterpret_cast<FixedArray*>(object)->synchronized_length());
7644 int StructBodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7645 return map->instance_size();
7649 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7650 v->VisitExternalReference(
7651 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7655 template<typename StaticVisitor>
7656 void Foreign::ForeignIterateBody() {
7657 StaticVisitor::VisitExternalReference(
7658 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7662 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) {
7664 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
7668 template <typename StaticVisitor>
7669 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() {
7670 StaticVisitor::VisitPointer(
7671 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset)));
7675 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7676 typedef v8::String::ExternalOneByteStringResource Resource;
7677 v->VisitExternalOneByteString(
7678 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7682 template <typename StaticVisitor>
7683 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7684 typedef v8::String::ExternalOneByteStringResource Resource;
7685 StaticVisitor::VisitExternalOneByteString(
7686 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7690 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7691 typedef v8::String::ExternalStringResource Resource;
7692 v->VisitExternalTwoByteString(
7693 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7697 template<typename StaticVisitor>
7698 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7699 typedef v8::String::ExternalStringResource Resource;
7700 StaticVisitor::VisitExternalTwoByteString(
7701 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7705 static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
7709 DCHECK(FLAG_unbox_double_fields);
7710 DCHECK(IsAligned(start_offset, kPointerSize) &&
7711 IsAligned(end_offset, kPointerSize));
7713 LayoutDescriptorHelper helper(object->map());
7714 DCHECK(!helper.all_fields_tagged());
7716 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
7717 // Visit all tagged fields.
7718 if (helper.IsTagged(offset)) {
7719 v->VisitPointer(HeapObject::RawField(object, offset));
7725 template<int start_offset, int end_offset, int size>
7726 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7729 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7730 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7731 HeapObject::RawField(obj, end_offset));
7733 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
7738 template<int start_offset>
7739 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7742 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7743 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7744 HeapObject::RawField(obj, object_size));
7746 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
7751 template<class Derived, class TableType>
7752 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7753 TableType* table(TableType::cast(this->table()));
7754 int index = Smi::cast(this->index())->value();
7755 Object* key = table->KeyAt(index);
7756 DCHECK(!key->IsTheHole());
7761 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7762 array->set(0, CurrentKey());
7766 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7767 array->set(0, CurrentKey());
7768 array->set(1, CurrentValue());
7772 Object* JSMapIterator::CurrentValue() {
7773 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7774 int index = Smi::cast(this->index())->value();
7775 Object* value = table->ValueAt(index);
7776 DCHECK(!value->IsTheHole());
7781 String::SubStringRange::SubStringRange(String* string, int first, int length)
7784 length_(length == -1 ? string->length() : length) {}
7787 class String::SubStringRange::iterator final {
7789 typedef std::forward_iterator_tag iterator_category;
7790 typedef int difference_type;
7791 typedef uc16 value_type;
7792 typedef uc16* pointer;
7793 typedef uc16& reference;
7795 iterator(const iterator& other)
7796 : content_(other.content_), offset_(other.offset_) {}
7798 uc16 operator*() { return content_.Get(offset_); }
7799 bool operator==(const iterator& other) const {
7800 return content_.UsesSameString(other.content_) && offset_ == other.offset_;
7802 bool operator!=(const iterator& other) const {
7803 return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
7805 iterator& operator++() {
7809 iterator operator++(int);
7812 friend class String;
7813 iterator(String* from, int offset)
7814 : content_(from->GetFlatContent()), offset_(offset) {}
7815 String::FlatContent content_;
7820 String::SubStringRange::iterator String::SubStringRange::begin() {
7821 return String::SubStringRange::iterator(string_, first_);
7825 String::SubStringRange::iterator String::SubStringRange::end() {
7826 return String::SubStringRange::iterator(string_, first_ + length_);
7831 #undef CAST_ACCESSOR
7832 #undef INT_ACCESSORS
7834 #undef ACCESSORS_TO_SMI
7835 #undef SMI_ACCESSORS
7836 #undef SYNCHRONIZED_SMI_ACCESSORS
7837 #undef NOBARRIER_SMI_ACCESSORS
7839 #undef BOOL_ACCESSORS
7841 #undef FIELD_ADDR_CONST
7843 #undef NOBARRIER_READ_FIELD
7845 #undef NOBARRIER_WRITE_FIELD
7846 #undef WRITE_BARRIER
7847 #undef CONDITIONAL_WRITE_BARRIER
7848 #undef READ_DOUBLE_FIELD
7849 #undef WRITE_DOUBLE_FIELD
7850 #undef READ_INT_FIELD
7851 #undef WRITE_INT_FIELD
7852 #undef READ_INTPTR_FIELD
7853 #undef WRITE_INTPTR_FIELD
7854 #undef READ_UINT8_FIELD
7855 #undef WRITE_UINT8_FIELD
7856 #undef READ_INT8_FIELD
7857 #undef WRITE_INT8_FIELD
7858 #undef READ_UINT16_FIELD
7859 #undef WRITE_UINT16_FIELD
7860 #undef READ_INT16_FIELD
7861 #undef WRITE_INT16_FIELD
7862 #undef READ_UINT32_FIELD
7863 #undef WRITE_UINT32_FIELD
7864 #undef READ_INT32_FIELD
7865 #undef WRITE_INT32_FIELD
7866 #undef READ_FLOAT_FIELD
7867 #undef WRITE_FLOAT_FIELD
7868 #undef READ_UINT64_FIELD
7869 #undef WRITE_UINT64_FIELD
7870 #undef READ_INT64_FIELD
7871 #undef WRITE_INT64_FIELD
7872 #undef READ_BYTE_FIELD
7873 #undef WRITE_BYTE_FIELD
7874 #undef NOBARRIER_READ_BYTE_FIELD
7875 #undef NOBARRIER_WRITE_BYTE_FIELD
7877 } } // namespace v8::internal
7879 #endif // V8_OBJECTS_INL_H_