// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//
#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "conversions-inl.h"
#include "store-buffer.h"
#include "incremental-marking.h"
#include "transitions-inl.h"
#include "objects-visiting.h"

namespace v8 {
namespace internal {
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}

Smi* PropertyDetails::AsSmi() {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
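// A worked illustration of the shift trick above (assuming a 32-bit int and
// an arithmetic right shift of signed values, which is what gcc emits): with
// bit 30 set, value_ = 0x40000000 shifts to 0x80000000 and back to
// 0xC0000000, so bits 31 and 30 now agree and Smi::FromInt can tag the value
// without losing the 31st bit of property details.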

PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}

#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }
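// For instance, TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) below expands
// (modulo whitespace) to:
//
//   bool Object::IsHeapNumber() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
//   }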

#define CAST_ACCESSOR(type)                   \
  type* type::cast(Object* object) {          \
    SLOW_ASSERT(object->Is##type());          \
    return reinterpret_cast<type*>(object);   \
  }

#define FIXED_TYPED_ARRAY_CAST_ACCESSOR(type)   \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }

#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }

#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }

// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }

// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)           \
  int holder::name() {                                \
    Object* value = READ_FIELD(this, offset);         \
    return Smi::cast(value)->value();                 \
  }                                                   \
  void holder::set_##name(int value) {                \
    WRITE_FIELD(this, offset, Smi::FromInt(value));   \
  }
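// Note that the two Smi macros use a plain WRITE_FIELD with no write
// barrier, unlike the pointer-valued ACCESSORS above: a Smi is encoded
// directly in the tagged word and never points into the heap, so the GC has
// nothing to track for these stores.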

#define BOOL_GETTER(holder, field, name, offset)    \
  bool holder::name() {                             \
    return BooleanBit::get(field(), offset);        \
  }

#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }

bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();
}

// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}

bool Object::IsAccessorInfo() {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}

bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}

bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}

TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)

bool Object::IsString() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}

bool Object::IsName() {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() {
  return IsInternalizedString() || IsSymbol();
}

bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}

bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}

bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
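// Both properties are tested with a single mask-and-compare: the instance
// type must carry kStringTag (the "is a string" half of the mask) and
// kInternalizedTag (the "is internalized" half) at the same time.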

bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}

MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
                                           Representation representation) {
  if (FLAG_track_fields && representation.IsSmi() && IsUninitialized()) {
    return Smi::FromInt(0);
  }
  if (!FLAG_track_double_fields) return this;
  if (!representation.IsDouble()) return this;
  if (IsUninitialized()) {
    return heap->AllocateHeapNumber(0);
  }
  return heap->AllocateHeapNumber(Number());
}

StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}

bool StringShape::IsInternalized() {
  ASSERT(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}

bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}

bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}

bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}

bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}

STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);

bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}

STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
             Internals::kExternalAsciiRepresentationTag);

STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);

bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}

STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);

uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}

template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};

class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  virtual bool IsMatch(Object* string) {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  virtual MaybeObject* AsObject(Heap* heap);
};

class SubStringOneByteStringKey : public HashTableKey {
 public:
  explicit SubStringOneByteStringKey(Handle<SeqOneByteString> string,
                                     int from,
                                     int length)
      : string_(string), from_(from), length_(length) { }

  virtual uint32_t Hash() {
    ASSERT(length_ >= 0);
    ASSERT(from_ + length_ <= string_->length());
    uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  virtual bool IsMatch(Object* string) {
    Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
    return String::cast(string)->IsOneByteEqualTo(chars);
  }

  virtual MaybeObject* AsObject(Heap* heap);

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};

class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  virtual bool IsMatch(Object* string) {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  virtual MaybeObject* AsObject(Heap* heap);
};

// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  virtual uint32_t Hash() {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  virtual MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    return heap->AllocateInternalizedStringFromUtf8(string_,
                                                    chars_,
                                                    hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
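// All of the key classes above implement the same HashTableKey protocol:
// while probing, a table lookup calls Hash() and then IsMatch() against
// candidate entries, and only on a miss does it call AsObject(heap) to
// allocate the internalized string to insert. Utf8StringKey additionally
// memoizes the hash in hash_field_, so repeated Hash() calls are free.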

bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}

TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)

bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}

bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}

#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)         \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE)     \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER

bool Object::IsFixedTypedArrayBase() {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}

bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


bool MaybeObject::IsUninitialized() {
  return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
}

Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}

bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}

TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)

bool Object::IsJSWeakCollection() {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}

bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}

bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}

bool Object::IsTypeFeedbackCells() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a cache cells array.  Since this is used for asserts we can check that
  // the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool Object::IsContext() {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());
}

bool Object::IsNativeContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}

TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}

TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)

bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)

bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}

TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

bool Object::IsJSArrayBufferView() {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}

bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsStringTable() {
  return IsHashTable() &&
      this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
}

bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}

bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}

bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsObjectHashTable() {
  return IsHashTable();
}

bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}

bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}

bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}

TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)

bool Object::IsUndetectableObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_access_check_needed();
}

bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}

#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() {                                               \
    return Object::IsHeapObject()                                         \
        && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsUninitialized() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}

double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}

bool Object::IsNaN() {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}

MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}

bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}

MaybeObject* Object::GetElement(Isolate* isolate, uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(AllowHeapAllocation::IsAllowed());
  return GetElementWithReceiver(isolate, this, index);
}

Object* Object::GetElementNoExceptionThrown(Isolate* isolate, uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(isolate, this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}

MaybeObject* Object::GetProperty(Name* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}

#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }
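// With mode == SKIP_WRITE_BARRIER the conditional expansion above is a
// no-op; callers reserve that mode for values the GC never needs to track
// through this slot, e.g. Smis or immortal roots such as the empty fixed
// array (see AllocationSite::Initialize below for an example).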

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}

Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
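// Example (assuming a 32-bit build, where kSmiTagSize == 1, kSmiShiftSize ==
// 0 and kSmiTag == 0): FromIntptr(5) produces the word 0xA, i.e. the value
// shifted left once with a 0 tag bit, which is exactly the condition that
// HAS_SMI_TAG tests for.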

Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}

Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException(intptr_t value) {
  return Construct(OUT_OF_MEMORY_EXCEPTION, value);
}


intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}

Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  // Fill the unused bits with a pattern that's easy to recognize in crash
  // dumps.
  static const int kFailureMagicPattern = 0x0BAD0000;
  return reinterpret_cast<Failure*>(
      (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
}

bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
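// On 32-bit targets the Smi payload is 31 bits, so kMinValue/kMaxValue are
// -2^30 and 2^30 - 1; 64-bit builds widen the payload to a full 32 bits.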

MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}

MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
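// During evacuation the map slot of a moved object is reused to store the
// forwarding pointer. Because FromForwardingAddress strips kHeapObjectTag,
// the stored word looks like a Smi, which is precisely the condition that
// IsForwardingAddress checks with HAS_SMI_TAG above.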

#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif

Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  SLOW_ASSERT(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}

Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}

// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}

HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}

void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}

ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}

bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}

FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}

void JSObject::ValidateElements() {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = GetElementsAccessor();
    accessor->Validate(this);
  }
#endif
}

void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}

void AllocationSite::MarkZombie() {
  ASSERT(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}

// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (IsFastSmiElementsKind(from) &&
      IsMoreGeneralElementsKindTransition(from, to)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}

inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}

inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
    Reason reason) {
  switch (reason) {
    case TENURING:
      return DependentCode::kAllocationSiteTenuringChangedGroup;
    case TRANSITIONS:
      return DependentCode::kAllocationSiteTransitionChangedGroup;
  }
  UNREACHABLE();
  return DependentCode::kAllocationSiteTransitionChangedGroup;
}

inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  ASSERT((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  ASSERT(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}

inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return value == 0;
}

inline void AllocationSite::IncrementMementoCreateCount() {
  ASSERT(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}

inline bool AllocationSite::DigestPretenuringFeedback() {
  bool decision_changed = false;
  int create_count = memento_create_count();
  if (create_count >= kPretenureMinimumCreated) {
    int found_count = memento_found_count();
    double ratio = static_cast<double>(found_count) / create_count;
    if (FLAG_trace_track_allocation_sites) {
      PrintF("AllocationSite: %p (created, found, ratio) (%d, %d, %f)\n",
             static_cast<void*>(this), create_count, found_count, ratio);
    }
    int current_mode = GetPretenureMode();
    PretenureDecision result = ratio >= kPretenureRatio
        ? kTenure
        : kDontTenure;
    set_pretenure_decision(result);
    if (current_mode != GetPretenureMode()) {
      decision_changed = true;
      dependent_code()->MarkCodeForDeoptimization(
          GetIsolate(),
          DependentCode::kAllocationSiteTenuringChangedGroup);
    }
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return decision_changed;
}
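// For illustration, with hypothetical numbers: if a site created 100
// mementos and 90 of them were found in new space during the last GC, the
// ratio is 0.9; when that meets kPretenureRatio the decision flips to
// tenured allocation, and optimized code that baked in the old decision is
// deoptimized via the dependency group recorded above.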

void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  object->ValidateElements();
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}

MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count,
                                                EnsureElementsMode mode) {
  ElementsKind current_kind = map()->elements_kind();
  ElementsKind target_kind = current_kind;
  ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
  bool is_holey = IsFastHoleyElementsKind(current_kind);
  if (current_kind == FAST_HOLEY_ELEMENTS) return this;
  Heap* heap = GetHeap();
  Object* the_hole = heap->the_hole_value();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (current == the_hole) {
      is_holey = true;
      target_kind = GetHoleyElementsKind(target_kind);
    } else if (!current->IsSmi()) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
        if (IsFastSmiElementsKind(target_kind)) {
          if (is_holey) {
            target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
          } else {
            target_kind = FAST_DOUBLE_ELEMENTS;
          }
        }
      } else if (is_holey) {
        target_kind = FAST_HOLEY_ELEMENTS;
        break;
      } else {
        target_kind = FAST_ELEMENTS;
      }
    }
  }

  if (target_kind != current_kind) {
    return TransitionElementsKind(target_kind);
  }
  return this;
}

MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
                                                uint32_t length,
                                                EnsureElementsMode mode) {
  if (elements->map() != GetHeap()->fixed_double_array_map()) {
    ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
           elements->map() == GetHeap()->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
    return EnsureCanContainElements(objects, length, mode);
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
    FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
      }
    }
    return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
  }

  return this;
}

MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
                                                ElementsKind to_kind) {
  Map* current_map = map();
  ElementsKind from_kind = current_map->elements_kind();
  if (from_kind == to_kind) return current_map;

  Context* native_context = isolate->context()->native_context();
  Object* maybe_array_maps = native_context->js_array_maps();
  if (maybe_array_maps->IsFixedArray()) {
    FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
    if (array_maps->get(from_kind) == current_map) {
      Object* maybe_transitioned_map = array_maps->get(to_kind);
      if (maybe_transitioned_map->IsMap()) {
        return Map::cast(maybe_transitioned_map);
      }
    }
  }

  return GetElementsTransitionMapSlow(to_kind);
}

void JSObject::set_map_and_elements(Map* new_map,
                                    FixedArrayBase* value,
                                    WriteBarrierMode mode) {
  ASSERT(value->HasValidElements());
  if (new_map != NULL) {
    if (mode == UPDATE_WRITE_BARRIER) {
      set_map(new_map);
    } else {
      ASSERT(mode == SKIP_WRITE_BARRIER);
      set_map_no_write_barrier(new_map);
    }
  }
  ASSERT((map()->has_fast_smi_or_object_elements() ||
          (value == GetHeap()->empty_fixed_array())) ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT((value == GetHeap()->empty_fixed_array()) ||
         (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}

void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  set_map_and_elements(NULL, value, mode);
}


void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}

void JSObject::initialize_elements() {
  if (map()->has_fast_smi_or_object_elements() ||
      map()->has_fast_double_elements()) {
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
  } else if (map()->has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
    ASSERT(!GetHeap()->InNewSpace(empty_array));
    WRITE_FIELD(this, kElementsOffset, empty_array);
  } else {
    UNREACHABLE();
  }
}

MaybeObject* JSObject::ResetElements() {
  if (map()->is_observed()) {
    // Maintain invariant that observed elements are always in dictionary mode.
    SeededNumberDictionary* dictionary;
    MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
    if (!maybe->To(&dictionary)) return maybe;
    if (map() == GetHeap()->non_strict_arguments_elements_map()) {
      FixedArray::cast(elements())->set(1, dictionary);
    } else {
      set_elements(dictionary);
    }
    return this;
  }

  ElementsKind elements_kind = GetInitialFastElementsKind();
  if (!FLAG_smi_only_arrays) {
    elements_kind = FastSmiToObjectElementsKind(elements_kind);
  }
  MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
  Map* map;
  if (!maybe->To(&map)) return maybe;
  set_map(map);
  initialize_elements();

  return this;
}

Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}

Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
  ASSERT(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}

Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}

ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}

Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}

int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}

int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}

Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}

void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}

MaybeObject* JSObject::FastPropertyAt(Representation representation,
                                      int index) {
  Object* raw_value = RawFastPropertyAt(index);
  return raw_value->AllocateNewStorageFor(GetHeap(), representation);
}

// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}

void JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
}

int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}

Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}

Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}

void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}

bool JSObject::HasFastProperties() {
  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}

bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
  // Allow extra fast properties if the object has more than
  // kFastPropertiesSoftLimit in-object properties. When this is the case, it
  // is very unlikely that the object is being used as a dictionary and there
  // is a good chance that allowing more map transitions will be worth it.
  Map* map = this->map();
  if (map->unused_property_fields() != 0) return false;

  int inobject = map->inobject_properties();

  int limit;
  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
    limit = Max(inobject, kMaxFastProperties);
  } else {
    limit = Max(inobject, kFastPropertiesSoftLimit);
  }
  return properties()->length() > limit;
}

void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}

bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}
2083 void Object::VerifyApiCallResultType() {
2084 #if ENABLE_EXTRA_CHECKS
2085 if (!(IsSmi() ||
2086 IsString() ||
2087 IsSpecObject() ||
2088 IsHeapNumber() ||
2089 IsUndefined() ||
2090 IsTrue() ||
2091 IsFalse() ||
2092 IsNull())) {
2093 FATAL("API call returned invalid object");
2094 }
2095 #endif  // ENABLE_EXTRA_CHECKS
2096 }
2099 FixedArrayBase* FixedArrayBase::cast(Object* object) {
2100 ASSERT(object->IsFixedArrayBase());
2101 return reinterpret_cast<FixedArrayBase*>(object);
2105 Object* FixedArray::get(int index) {
2106 SLOW_ASSERT(index >= 0 && index < this->length());
2107 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2111 bool FixedArray::is_the_hole(int index) {
2112 return get(index) == GetHeap()->the_hole_value();
2116 void FixedArray::set(int index, Smi* value) {
2117 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2118 ASSERT(index >= 0 && index < this->length());
2119 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
2120 int offset = kHeaderSize + index * kPointerSize;
2121 WRITE_FIELD(this, offset, value);
2125 void FixedArray::set(int index, Object* value) {
2126 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2127 ASSERT(index >= 0 && index < this->length());
2128 int offset = kHeaderSize + index * kPointerSize;
2129 WRITE_FIELD(this, offset, value);
2130 WRITE_BARRIER(GetHeap(), this, offset, value);
2134 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2135 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
2139 inline double FixedDoubleArray::hole_nan_as_double() {
2140 return BitCast<double, uint64_t>(kHoleNanInt64);
2144 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2145 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
2146 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
2147 return OS::nan_value();
2148 }
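// The hole test above is a bit-pattern comparison, not an isnan() check. A
// standalone sketch of the same idea (assuming <cstring> and a kHoleNanInt64
// constant):
//
//   uint64_t bits;
//   memcpy(&bits, &value, sizeof(bits));   // BitCast<uint64_t>(value)
//   bool is_hole = (bits == kHoleNanInt64);
//
// This is why set() below canonicalizes every incoming NaN: an arbitrary NaN
// produced by arithmetic must never alias the hole's exact bit pattern.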
2151 double FixedDoubleArray::get_scalar(int index) {
2152 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2153 map() != GetHeap()->fixed_array_map());
2154 ASSERT(index >= 0 && index < this->length());
2155 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2156 ASSERT(!is_the_hole_nan(result));
2157 return result;
2158 }
2160 int64_t FixedDoubleArray::get_representation(int index) {
2161 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2162 map() != GetHeap()->fixed_array_map());
2163 ASSERT(index >= 0 && index < this->length());
2164 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2167 MaybeObject* FixedDoubleArray::get(int index) {
2168 if (is_the_hole(index)) {
2169 return GetHeap()->the_hole_value();
2170 } else {
2171 return GetHeap()->NumberFromDouble(get_scalar(index));
2172 }
2173 }
2176 void FixedDoubleArray::set(int index, double value) {
2177 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2178 map() != GetHeap()->fixed_array_map());
2179 int offset = kHeaderSize + index * kDoubleSize;
2180 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2181 WRITE_DOUBLE_FIELD(this, offset, value);
2185 void FixedDoubleArray::set_the_hole(int index) {
2186 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2187 map() != GetHeap()->fixed_array_map());
2188 int offset = kHeaderSize + index * kDoubleSize;
2189 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2193 bool FixedDoubleArray::is_the_hole(int index) {
2194 int offset = kHeaderSize + index * kDoubleSize;
2195 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2199 SMI_ACCESSORS(ConstantPoolArray, first_ptr_index, kFirstPointerIndexOffset)
2200 SMI_ACCESSORS(ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset)
2203 int ConstantPoolArray::first_int64_index() {
2204 return 0;
2205 }
2208 int ConstantPoolArray::count_of_int64_entries() {
2209 return first_ptr_index();
2213 int ConstantPoolArray::count_of_ptr_entries() {
2214 return first_int32_index() - first_ptr_index();
2218 int ConstantPoolArray::count_of_int32_entries() {
2219 return length() - first_int32_index();
2223 void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries,
2224 int number_of_ptr_entries,
2225 int number_of_int32_entries) {
2226 set_first_ptr_index(number_of_int64_entries);
2227 set_first_int32_index(number_of_int64_entries + number_of_ptr_entries);
2228 set_length(number_of_int64_entries + number_of_ptr_entries +
2229 number_of_int32_entries);
2230 }
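// Layout illustration with hypothetical counts: SetEntryCounts(2, 3, 4)
// stores first_ptr_index = 2, first_int32_index = 2 + 3 = 5 and length = 9,
// giving the layout
//
//   entries [0, 2)  int64 values
//   entries [2, 5)  tagged pointers
//   entries [5, 9)  int32 values
//
// which the count_of_*_entries() accessors above recover by subtraction.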
2233 int64_t ConstantPoolArray::get_int64_entry(int index) {
2234 ASSERT(map() == GetHeap()->constant_pool_array_map());
2235 ASSERT(index >= 0 && index < first_ptr_index());
2236 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2239 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2240 STATIC_ASSERT(kDoubleSize == kInt64Size);
2241 ASSERT(map() == GetHeap()->constant_pool_array_map());
2242 ASSERT(index >= 0 && index < first_ptr_index());
2243 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2247 Object* ConstantPoolArray::get_ptr_entry(int index) {
2248 ASSERT(map() == GetHeap()->constant_pool_array_map());
2249 ASSERT(index >= first_ptr_index() && index < first_int32_index());
2250 return READ_FIELD(this, OffsetOfElementAt(index));
2254 int32_t ConstantPoolArray::get_int32_entry(int index) {
2255 ASSERT(map() == GetHeap()->constant_pool_array_map());
2256 ASSERT(index >= first_int32_index() && index < length());
2257 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2261 void ConstantPoolArray::set(int index, Object* value) {
2262 ASSERT(map() == GetHeap()->constant_pool_array_map());
2263 ASSERT(index >= first_ptr_index() && index < first_int32_index());
2264 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2265 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2269 void ConstantPoolArray::set(int index, int64_t value) {
2270 ASSERT(map() == GetHeap()->constant_pool_array_map());
2271 ASSERT(index >= first_int64_index() && index < first_ptr_index());
2272 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2276 void ConstantPoolArray::set(int index, double value) {
2277 STATIC_ASSERT(kDoubleSize == kInt64Size);
2278 ASSERT(map() == GetHeap()->constant_pool_array_map());
2279 ASSERT(index >= first_int64_index() && index < first_ptr_index());
2280 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2284 void ConstantPoolArray::set(int index, int32_t value) {
2285 ASSERT(map() == GetHeap()->constant_pool_array_map());
2286 ASSERT(index >= this->first_int32_index() && index < length());
2287 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2291 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2292 const DisallowHeapAllocation& promise) {
2293 Heap* heap = GetHeap();
2294 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2295 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2296 return UPDATE_WRITE_BARRIER;
2300 void FixedArray::set(int index,
2301 Object* value,
2302 WriteBarrierMode mode) {
2303 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2304 ASSERT(index >= 0 && index < this->length());
2305 int offset = kHeaderSize + index * kPointerSize;
2306 WRITE_FIELD(this, offset, value);
2307 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2311 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2312 int index,
2313 Object* value) {
2314 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2315 ASSERT(index >= 0 && index < array->length());
2316 int offset = kHeaderSize + index * kPointerSize;
2317 WRITE_FIELD(array, offset, value);
2318 Heap* heap = array->GetHeap();
2319 if (heap->InNewSpace(value)) {
2320 heap->RecordWrite(array->address(), offset);
2321 }
2322 }
2325 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2326 int index,
2327 Object* value) {
2328 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2329 ASSERT(index >= 0 && index < array->length());
2330 ASSERT(!array->GetHeap()->InNewSpace(value));
2331 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2335 void FixedArray::set_undefined(int index) {
2336 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2337 ASSERT(index >= 0 && index < this->length());
2338 ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2339 WRITE_FIELD(this,
2340 kHeaderSize + index * kPointerSize,
2341 GetHeap()->undefined_value());
2342 }
2345 void FixedArray::set_null(int index) {
2346 ASSERT(index >= 0 && index < this->length());
2347 ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2348 WRITE_FIELD(this,
2349 kHeaderSize + index * kPointerSize,
2350 GetHeap()->null_value());
2351 }
2354 void FixedArray::set_the_hole(int index) {
2355 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2356 ASSERT(index >= 0 && index < this->length());
2357 ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2358 WRITE_FIELD(this,
2359 kHeaderSize + index * kPointerSize,
2360 GetHeap()->the_hole_value());
2361 }
2364 double* FixedDoubleArray::data_start() {
2365 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2369 Object** FixedArray::data_start() {
2370 return HeapObject::RawField(this, kHeaderSize);
2374 bool DescriptorArray::IsEmpty() {
2375 ASSERT(length() >= kFirstIndex ||
2376 this == GetHeap()->empty_descriptor_array());
2377 return length() < kFirstIndex;
2381 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2382 WRITE_FIELD(
2383 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2384 }
2387 // Perform a binary search in a fixed array. Low and high are entry indices. If
2388 // there are three entries in this array it should be called with low=0 and
2389 // high=2.
2390 template<SearchMode search_mode, typename T>
2391 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2392 uint32_t hash = name->Hash();
2393 int limit = high;
2395 ASSERT(low <= high);
2397 while (low != high) {
2398 int mid = (low + high) / 2;
2399 Name* mid_name = array->GetSortedKey(mid);
2400 uint32_t mid_hash = mid_name->Hash();
2402 if (mid_hash >= hash) {
2403 high = mid;
2404 } else {
2405 low = mid + 1;
2406 }
2407 }
2409 for (; low <= limit; ++low) {
2410 int sort_index = array->GetSortedKeyIndex(low);
2411 Name* entry = array->GetKey(sort_index);
2412 if (entry->Hash() != hash) break;
2413 if (entry->Equals(name)) {
2414 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2415 return sort_index;
2416 }
2417 return T::kNotFound;
2418 }
2419 }
2421 return T::kNotFound;
2422 }
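// Note on the linear pass after the binary search: keys are sorted by hash,
// and distinct names can collide on the same hash, so after narrowing to the
// first entry with a matching hash the code walks forward comparing actual
// names. A standalone analogue of the idea (a sketch, assuming <algorithm>,
// <string> and <vector>; not V8 code):
//
//   int FindByHash(const std::vector<std::pair<uint32_t, std::string> >& v,
//                  uint32_t hash, const std::string& name) {
//     // v is sorted by hash; find the first entry with this hash.
//     size_t i = std::lower_bound(v.begin(), v.end(),
//                                 std::make_pair(hash, std::string())) -
//                v.begin();
//     for (; i < v.size() && v[i].first == hash; ++i) {
//       if (v[i].second == name) return static_cast<int>(i);
//     }
//     return -1;
//   }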
2425 // Perform a linear search in this fixed array. len is the number of entry
2426 // indices that are valid.
2427 template<SearchMode search_mode, typename T>
2428 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2429 uint32_t hash = name->Hash();
2430 if (search_mode == ALL_ENTRIES) {
2431 for (int number = 0; number < len; number++) {
2432 int sorted_index = array->GetSortedKeyIndex(number);
2433 Name* entry = array->GetKey(sorted_index);
2434 uint32_t current_hash = entry->Hash();
2435 if (current_hash > hash) break;
2436 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2437 }
2438 } else {
2439 ASSERT(len >= valid_entries);
2440 for (int number = 0; number < valid_entries; number++) {
2441 Name* entry = array->GetKey(number);
2442 uint32_t current_hash = entry->Hash();
2443 if (current_hash == hash && entry->Equals(name)) return number;
2444 }
2445 }
2446 return T::kNotFound;
2447 }
2450 template<SearchMode search_mode, typename T>
2451 int Search(T* array, Name* name, int valid_entries) {
2452 if (search_mode == VALID_ENTRIES) {
2453 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2454 } else {
2455 SLOW_ASSERT(array->IsSortedNoDuplicates());
2456 }
2458 int nof = array->number_of_entries();
2459 if (nof == 0) return T::kNotFound;
2461 // Fast case: do linear search for small arrays.
2462 const int kMaxElementsForLinearSearch = 8;
2463 if ((search_mode == ALL_ENTRIES &&
2464 nof <= kMaxElementsForLinearSearch) ||
2465 (search_mode == VALID_ENTRIES &&
2466 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2467 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2468 }
2470 // Slow case: perform binary search.
2471 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2472 }
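// The 8-entry cutoff is a size/speed judgment call: for small sorted arrays a
// sequential scan is cache-friendly and branch-predictable, so it tends to
// beat binary search. The VALID_ENTRIES mode gets a 3x larger budget,
// presumably because its linear path reads keys directly without the
// sorted-index indirection used by the ALL_ENTRIES scan.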
2475 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2476 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
2480 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2481 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2482 if (number_of_own_descriptors == 0) return kNotFound;
2484 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2485 int number = cache->Lookup(map, name);
2487 if (number == DescriptorLookupCache::kAbsent) {
2488 number = Search(name, number_of_own_descriptors);
2489 cache->Update(map, name, number);
2490 }
2492 return number;
2493 }
2496 void Map::LookupDescriptor(JSObject* holder,
2497 Name* name,
2498 LookupResult* result) {
2499 DescriptorArray* descriptors = this->instance_descriptors();
2500 int number = descriptors->SearchWithCache(name, this);
2501 if (number == DescriptorArray::kNotFound) return result->NotFound();
2502 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2506 void Map::LookupTransition(JSObject* holder,
2507 Name* name,
2508 LookupResult* result) {
2509 if (HasTransitionArray()) {
2510 TransitionArray* transition_array = transitions();
2511 int number = transition_array->Search(name);
2512 if (number != TransitionArray::kNotFound) {
2513 return result->TransitionResult(holder, number);
2514 }
2515 }
2516 result->NotFound();
2517 }
2520 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2521 ASSERT(descriptor_number < number_of_descriptors());
2522 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2526 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2527 return GetKeySlot(descriptor_number);
2531 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2532 return GetValueSlot(descriptor_number - 1) + 1;
2536 Name* DescriptorArray::GetKey(int descriptor_number) {
2537 ASSERT(descriptor_number < number_of_descriptors());
2538 return Name::cast(get(ToKeyIndex(descriptor_number)));
2542 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2543 return GetDetails(descriptor_number).pointer();
2547 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2548 return GetKey(GetSortedKeyIndex(descriptor_number));
2552 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2553 PropertyDetails details = GetDetails(descriptor_index);
2554 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2558 void DescriptorArray::SetRepresentation(int descriptor_index,
2559 Representation representation) {
2560 ASSERT(!representation.IsNone());
2561 PropertyDetails details = GetDetails(descriptor_index);
2562 set(ToDetailsIndex(descriptor_index),
2563 details.CopyWithRepresentation(representation).AsSmi());
2567 void DescriptorArray::InitializeRepresentations(Representation representation) {
2568 int length = number_of_descriptors();
2569 for (int i = 0; i < length; i++) {
2570 SetRepresentation(i, representation);
2575 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2576 ASSERT(descriptor_number < number_of_descriptors());
2577 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2581 Object* DescriptorArray::GetValue(int descriptor_number) {
2582 ASSERT(descriptor_number < number_of_descriptors());
2583 return get(ToValueIndex(descriptor_number));
2587 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2588 ASSERT(descriptor_number < number_of_descriptors());
2589 Object* details = get(ToDetailsIndex(descriptor_number));
2590 return PropertyDetails(Smi::cast(details));
2594 PropertyType DescriptorArray::GetType(int descriptor_number) {
2595 return GetDetails(descriptor_number).type();
2599 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2600 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2601 return GetDetails(descriptor_number).field_index();
2605 Object* DescriptorArray::GetConstant(int descriptor_number) {
2606 return GetValue(descriptor_number);
2610 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2611 ASSERT(GetType(descriptor_number) == CALLBACKS);
2612 return GetValue(descriptor_number);
2616 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
2617 ASSERT(GetType(descriptor_number) == CALLBACKS);
2618 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2619 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2623 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2624 desc->Init(GetKey(descriptor_number),
2625 GetValue(descriptor_number),
2626 GetDetails(descriptor_number));
2630 void DescriptorArray::Set(int descriptor_number,
2631 Descriptor* desc,
2632 const WhitenessWitness&) {
2633 // Range check.
2634 ASSERT(descriptor_number < number_of_descriptors());
2636 NoIncrementalWriteBarrierSet(this,
2637 ToKeyIndex(descriptor_number),
2638 desc->GetKey());
2639 NoIncrementalWriteBarrierSet(this,
2640 ToValueIndex(descriptor_number),
2641 desc->GetValue());
2642 NoIncrementalWriteBarrierSet(this,
2643 ToDetailsIndex(descriptor_number),
2644 desc->GetDetails().AsSmi());
2645 }
2648 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2649 // Range check.
2650 ASSERT(descriptor_number < number_of_descriptors());
2652 set(ToKeyIndex(descriptor_number), desc->GetKey());
2653 set(ToValueIndex(descriptor_number), desc->GetValue());
2654 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2655 }
2658 void DescriptorArray::Append(Descriptor* desc,
2659 const WhitenessWitness& witness) {
2660 int descriptor_number = number_of_descriptors();
2661 SetNumberOfDescriptors(descriptor_number + 1);
2662 Set(descriptor_number, desc, witness);
2664 uint32_t hash = desc->GetKey()->Hash();
2666 int insertion;
2668 for (insertion = descriptor_number; insertion > 0; --insertion) {
2669 Name* key = GetSortedKey(insertion - 1);
2670 if (key->Hash() <= hash) break;
2671 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2672 }
2674 SetSortedKey(insertion, descriptor_number);
2675 }
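// The loop above is one pass of insertion sort over the hash-ordered index.
// With hypothetical sorted hashes [3, 9, 14] and a new key of hash 7, the
// entries with hashes 14 and 9 shift one slot right and the order becomes
// [3, 7, 9, 14]; only the small sorted-key index moves, the descriptors
// themselves stay where they were written.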
2678 void DescriptorArray::Append(Descriptor* desc) {
2679 int descriptor_number = number_of_descriptors();
2680 SetNumberOfDescriptors(descriptor_number + 1);
2681 Set(descriptor_number, desc);
2683 uint32_t hash = desc->GetKey()->Hash();
2685 int insertion;
2687 for (insertion = descriptor_number; insertion > 0; --insertion) {
2688 Name* key = GetSortedKey(insertion - 1);
2689 if (key->Hash() <= hash) break;
2690 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2691 }
2693 SetSortedKey(insertion, descriptor_number);
2694 }
2697 void DescriptorArray::SwapSortedKeys(int first, int second) {
2698 int first_key = GetSortedKeyIndex(first);
2699 SetSortedKey(first, GetSortedKeyIndex(second));
2700 SetSortedKey(second, first_key);
2704 DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
2705 : marking_(array->GetHeap()->incremental_marking()) {
2706 marking_->EnterNoMarkingScope();
2707 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2711 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2712 marking_->LeaveNoMarkingScope();
2716 template<typename Shape, typename Key>
2717 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2718 const int kMinCapacity = 32;
2719 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2720 if (capacity < kMinCapacity) {
2721 capacity = kMinCapacity;  // Guarantee min capacity.
2722 }
2723 return capacity;
2724 }
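// Worked example: at_least_space_for = 100 doubles to 200 and rounds up to
// the next power of two, so the table is sized at 256 slots (and never below
// 32). Power-of-two capacities let the probing code reduce hashes with a
// mask instead of a modulo.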
2727 template<typename Shape, typename Key>
2728 int HashTable<Shape, Key>::FindEntry(Key key) {
2729 return FindEntry(GetIsolate(), key);
2733 // Find entry for key otherwise return kNotFound.
2734 template<typename Shape, typename Key>
2735 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2736 uint32_t capacity = Capacity();
2737 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2738 uint32_t count = 1;
2739 // EnsureCapacity will guarantee the hash table is never full.
2740 while (true) {
2741 Object* element = KeyAt(entry);
2742 // Empty entry. Uses raw unchecked accessors because it is called by the
2743 // string table during bootstrapping.
2744 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2745 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2746 Shape::IsMatch(key, element)) return entry;
2747 entry = NextProbe(entry, count++, capacity);
2748 }
2749 return kNotFound;
2750 }
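// The probe walk above is classic open addressing. In spirit the two helpers
// behave like this for a power-of-two capacity (a sketch of the idea, not
// necessarily the exact V8 definitions):
//
//   uint32_t FirstProbe(uint32_t hash, uint32_t size) {
//     return hash & (size - 1);
//   }
//   uint32_t NextProbe(uint32_t last, uint32_t number, uint32_t size) {
//     return (last + number) & (size - 1);  // step grows each iteration
//   }
//
// Deleted entries are written as the-hole rather than undefined so that an
// existing probe chain is never cut short.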
2753 bool SeededNumberDictionary::requires_slow_elements() {
2754 Object* max_index_object = get(kMaxNumberKeyIndex);
2755 if (!max_index_object->IsSmi()) return false;
2756 return 0 !=
2757 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2758 }
2760 uint32_t SeededNumberDictionary::max_number_key() {
2761 ASSERT(!requires_slow_elements());
2762 Object* max_index_object = get(kMaxNumberKeyIndex);
2763 if (!max_index_object->IsSmi()) return 0;
2764 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2765 return value >> kRequiresSlowElementsTagSize;
2766 }
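// Encoding sketch: the Smi at kMaxNumberKeyIndex packs the largest number
// key shifted left by kRequiresSlowElementsTagSize, with the low bit(s)
// doubling as the requires-slow-elements flag; e.g. a max key of 10 with
// fast elements is stored as 10 << kRequiresSlowElementsTagSize.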
2768 void SeededNumberDictionary::set_requires_slow_elements() {
2769 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2773 // ------------------------------------
2777 CAST_ACCESSOR(FixedArray)
2778 CAST_ACCESSOR(FixedDoubleArray)
2779 CAST_ACCESSOR(FixedTypedArrayBase)
2780 CAST_ACCESSOR(ConstantPoolArray)
2781 CAST_ACCESSOR(DescriptorArray)
2782 CAST_ACCESSOR(DeoptimizationInputData)
2783 CAST_ACCESSOR(DeoptimizationOutputData)
2784 CAST_ACCESSOR(DependentCode)
2785 CAST_ACCESSOR(TypeFeedbackCells)
2786 CAST_ACCESSOR(StringTable)
2787 CAST_ACCESSOR(JSFunctionResultCache)
2788 CAST_ACCESSOR(NormalizedMapCache)
2789 CAST_ACCESSOR(ScopeInfo)
2790 CAST_ACCESSOR(CompilationCacheTable)
2791 CAST_ACCESSOR(CodeCacheHashTable)
2792 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2793 CAST_ACCESSOR(MapCache)
2794 CAST_ACCESSOR(String)
2795 CAST_ACCESSOR(SeqString)
2796 CAST_ACCESSOR(SeqOneByteString)
2797 CAST_ACCESSOR(SeqTwoByteString)
2798 CAST_ACCESSOR(SlicedString)
2799 CAST_ACCESSOR(ConsString)
2800 CAST_ACCESSOR(ExternalString)
2801 CAST_ACCESSOR(ExternalAsciiString)
2802 CAST_ACCESSOR(ExternalTwoByteString)
2803 CAST_ACCESSOR(Symbol)
2805 CAST_ACCESSOR(JSReceiver)
2806 CAST_ACCESSOR(JSObject)
2808 CAST_ACCESSOR(HeapObject)
2809 CAST_ACCESSOR(HeapNumber)
2810 CAST_ACCESSOR(Oddball)
2812 CAST_ACCESSOR(PropertyCell)
2813 CAST_ACCESSOR(SharedFunctionInfo)
2815 CAST_ACCESSOR(JSFunction)
2816 CAST_ACCESSOR(GlobalObject)
2817 CAST_ACCESSOR(JSGlobalProxy)
2818 CAST_ACCESSOR(JSGlobalObject)
2819 CAST_ACCESSOR(JSBuiltinsObject)
2821 CAST_ACCESSOR(JSArray)
2822 CAST_ACCESSOR(JSArrayBuffer)
2823 CAST_ACCESSOR(JSArrayBufferView)
2824 CAST_ACCESSOR(JSTypedArray)
2825 CAST_ACCESSOR(JSDataView)
2826 CAST_ACCESSOR(JSRegExp)
2827 CAST_ACCESSOR(JSProxy)
2828 CAST_ACCESSOR(JSFunctionProxy)
2829 CAST_ACCESSOR(JSSet)
2830 CAST_ACCESSOR(JSMap)
2831 CAST_ACCESSOR(JSWeakMap)
2832 CAST_ACCESSOR(JSWeakSet)
2833 CAST_ACCESSOR(Foreign)
2834 CAST_ACCESSOR(ByteArray)
2835 CAST_ACCESSOR(FreeSpace)
2836 CAST_ACCESSOR(ExternalArray)
2837 CAST_ACCESSOR(ExternalInt8Array)
2838 CAST_ACCESSOR(ExternalUint8Array)
2839 CAST_ACCESSOR(ExternalInt16Array)
2840 CAST_ACCESSOR(ExternalUint16Array)
2841 CAST_ACCESSOR(ExternalInt32Array)
2842 CAST_ACCESSOR(ExternalUint32Array)
2843 CAST_ACCESSOR(ExternalFloat32Array)
2844 CAST_ACCESSOR(ExternalFloat64Array)
2845 CAST_ACCESSOR(ExternalUint8ClampedArray)
2846 CAST_ACCESSOR(Struct)
2847 CAST_ACCESSOR(AccessorInfo)
2849 template <class Traits>
2850 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
2851 SLOW_ASSERT(object->IsHeapObject() &&
2852 HeapObject::cast(object)->map()->instance_type() ==
2853 Traits::kInstanceType);
2854 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
2858 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2859 STRUCT_LIST(MAKE_STRUCT_CAST)
2860 #undef MAKE_STRUCT_CAST
2863 template <typename Shape, typename Key>
2864 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2865 ASSERT(obj->IsHashTable());
2866 return reinterpret_cast<HashTable*>(obj);
2870 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2871 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2873 SMI_ACCESSORS(String, length, kLengthOffset)
2876 uint32_t Name::hash_field() {
2877 return READ_UINT32_FIELD(this, kHashFieldOffset);
2881 void Name::set_hash_field(uint32_t value) {
2882 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2883 #if V8_HOST_ARCH_64_BIT
2884 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2885 #endif
2886 }
2889 bool Name::Equals(Name* other) {
2890 if (other == this) return true;
2891 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
2892 this->IsSymbol() || other->IsSymbol()) {
2893 return false;
2894 }
2895 return String::cast(this)->SlowEquals(String::cast(other));
2896 }
2899 ACCESSORS(Symbol, name, Object, kNameOffset)
2900 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
2901 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
2904 bool String::Equals(String* other) {
2905 if (other == this) return true;
2906 if (this->IsInternalizedString() && other->IsInternalizedString()) {
2907 return false;
2908 }
2909 return SlowEquals(other);
2910 }
2913 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2914 if (!StringShape(this).IsCons()) return this;
2915 ConsString* cons = ConsString::cast(this);
2916 if (cons->IsFlat()) return cons->first();
2917 return SlowTryFlatten(pretenure);
2921 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2922 MaybeObject* flat = TryFlatten(pretenure);
2923 Object* successfully_flattened;
2924 if (!flat->ToObject(&successfully_flattened)) return this;
2925 return String::cast(successfully_flattened);
2926 }
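// Typical use (a sketch): callers that merely prefer a flat string call
// TryFlattenGetString, which degrades gracefully when allocation fails:
//
//   String* flat = str->TryFlattenGetString();
//   // flat is either a flattened version of str or str itself; both are
//   // valid strings, the flat one is just cheaper to index repeatedly.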
2929 uint16_t String::Get(int index) {
2930 ASSERT(index >= 0 && index < length());
2931 switch (StringShape(this).full_representation_tag()) {
2932 case kSeqStringTag | kOneByteStringTag:
2933 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
2934 case kSeqStringTag | kTwoByteStringTag:
2935 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2936 case kConsStringTag | kOneByteStringTag:
2937 case kConsStringTag | kTwoByteStringTag:
2938 return ConsString::cast(this)->ConsStringGet(index);
2939 case kExternalStringTag | kOneByteStringTag:
2940 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2941 case kExternalStringTag | kTwoByteStringTag:
2942 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2943 case kSlicedStringTag | kOneByteStringTag:
2944 case kSlicedStringTag | kTwoByteStringTag:
2945 return SlicedString::cast(this)->SlicedStringGet(index);
2946 default:
2947 break;
2948 }
2950 UNREACHABLE();
2951 return 0;
2952 }
2955 void String::Set(int index, uint16_t value) {
2956 ASSERT(index >= 0 && index < length());
2957 ASSERT(StringShape(this).IsSequential());
2959 return this->IsOneByteRepresentation()
2960 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
2961 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2965 bool String::IsFlat() {
2966 if (!StringShape(this).IsCons()) return true;
2967 return ConsString::cast(this)->second()->length() == 0;
2971 String* String::GetUnderlying() {
2972 // Giving direct access to underlying string only makes sense if the
2973 // wrapping string is already flattened.
2974 ASSERT(this->IsFlat());
2975 ASSERT(StringShape(this).IsIndirect());
2976 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2977 const int kUnderlyingOffset = SlicedString::kParentOffset;
2978 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2982 template<class Visitor, class ConsOp>
2983 void String::Visit(
2984 String* string,
2985 unsigned offset,
2986 Visitor& visitor,
2987 ConsOp& cons_op,
2988 int32_t type,
2989 unsigned length) {
2990 ASSERT(length == static_cast<unsigned>(string->length()));
2991 ASSERT(offset <= length);
2992 unsigned slice_offset = offset;
2993 while (true) {
2994 ASSERT(type == string->map()->instance_type());
2996 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
2997 case kSeqStringTag | kOneByteStringTag:
2998 visitor.VisitOneByteString(
2999 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3000 length - offset);
3001 return;
3003 case kSeqStringTag | kTwoByteStringTag:
3004 visitor.VisitTwoByteString(
3005 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3006 length - offset);
3007 return;
3009 case kExternalStringTag | kOneByteStringTag:
3010 visitor.VisitOneByteString(
3011 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3012 length - offset);
3013 return;
3015 case kExternalStringTag | kTwoByteStringTag:
3016 visitor.VisitTwoByteString(
3017 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3018 length - offset);
3019 return;
3021 case kSlicedStringTag | kOneByteStringTag:
3022 case kSlicedStringTag | kTwoByteStringTag: {
3023 SlicedString* slicedString = SlicedString::cast(string);
3024 slice_offset += slicedString->offset();
3025 string = slicedString->parent();
3026 type = string->map()->instance_type();
3027 continue;
3028 }
3030 case kConsStringTag | kOneByteStringTag:
3031 case kConsStringTag | kTwoByteStringTag:
3032 string = cons_op.Operate(string, &offset, &type, &length);
3033 if (string == NULL) return;
3034 slice_offset = offset;
3035 ASSERT(length == static_cast<unsigned>(string->length()));
3036 continue;
3038 default:
3039 UNREACHABLE();
3040 return;
3041 }
3042 }
3043 }
3046 // TODO(dcarney): Remove this class after conversion to VisitFlat.
3047 class ConsStringCaptureOp {
3048 public:
3049 inline ConsStringCaptureOp() : cons_string_(NULL) {}
3050 inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
3051 cons_string_ = ConsString::cast(string);
3052 return NULL;
3053 }
3054 ConsString* cons_string_;
3055 };
3058 template<class Visitor>
3059 ConsString* String::VisitFlat(Visitor* visitor,
3060 String* string,
3061 const int offset,
3062 const int length,
3063 const int32_t type) {
3064 ASSERT(length >= 0 && length == string->length());
3065 ASSERT(offset >= 0 && offset <= length);
3066 ConsStringCaptureOp op;
3067 Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
3068 return op.cons_string_;
3069 }
3072 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3073 ASSERT(index >= 0 && index < length());
3074 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3078 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3079 ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3080 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3081 static_cast<byte>(value));
3085 Address SeqOneByteString::GetCharsAddress() {
3086 return FIELD_ADDR(this, kHeaderSize);
3090 uint8_t* SeqOneByteString::GetChars() {
3091 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3095 Address SeqTwoByteString::GetCharsAddress() {
3096 return FIELD_ADDR(this, kHeaderSize);
3100 uc16* SeqTwoByteString::GetChars() {
3101 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3105 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3106 ASSERT(index >= 0 && index < length());
3107 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3111 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3112 ASSERT(index >= 0 && index < length());
3113 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3117 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3118 return SizeFor(length());
3122 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3123 return SizeFor(length());
3127 String* SlicedString::parent() {
3128 return String::cast(READ_FIELD(this, kParentOffset));
3132 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3133 ASSERT(parent->IsSeqString() || parent->IsExternalString());
3134 WRITE_FIELD(this, kParentOffset, parent);
3135 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3139 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3142 String* ConsString::first() {
3143 return String::cast(READ_FIELD(this, kFirstOffset));
3147 Object* ConsString::unchecked_first() {
3148 return READ_FIELD(this, kFirstOffset);
3152 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3153 WRITE_FIELD(this, kFirstOffset, value);
3154 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3158 String* ConsString::second() {
3159 return String::cast(READ_FIELD(this, kSecondOffset));
3163 Object* ConsString::unchecked_second() {
3164 return READ_FIELD(this, kSecondOffset);
3168 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3169 WRITE_FIELD(this, kSecondOffset, value);
3170 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3174 bool ExternalString::is_short() {
3175 InstanceType type = map()->instance_type();
3176 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3180 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3181 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3185 void ExternalAsciiString::update_data_cache() {
3186 if (is_short()) return;
3187 const char** data_field =
3188 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3189 *data_field = resource()->data();
3193 void ExternalAsciiString::set_resource(
3194 const ExternalAsciiString::Resource* resource) {
3195 *reinterpret_cast<const Resource**>(
3196 FIELD_ADDR(this, kResourceOffset)) = resource;
3197 if (resource != NULL) update_data_cache();
3201 const uint8_t* ExternalAsciiString::GetChars() {
3202 return reinterpret_cast<const uint8_t*>(resource()->data());
3206 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3207 ASSERT(index >= 0 && index < length());
3208 return GetChars()[index];
3212 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3213 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3217 void ExternalTwoByteString::update_data_cache() {
3218 if (is_short()) return;
3219 const uint16_t** data_field =
3220 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3221 *data_field = resource()->data();
3225 void ExternalTwoByteString::set_resource(
3226 const ExternalTwoByteString::Resource* resource) {
3227 *reinterpret_cast<const Resource**>(
3228 FIELD_ADDR(this, kResourceOffset)) = resource;
3229 if (resource != NULL) update_data_cache();
3233 const uint16_t* ExternalTwoByteString::GetChars() {
3234 return resource()->data();
3238 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3239 ASSERT(index >= 0 && index < length());
3240 return GetChars()[index];
3244 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3245 unsigned start) {
3246 return GetChars() + start;
3247 }
3250 String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {
3251 return NULL;
3252 }
3255 unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
3256 return depth & kDepthMask;
3260 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3261 frames_[depth_++ & kDepthMask] = string;
3265 void ConsStringIteratorOp::PushRight(ConsString* string) {
3267 frames_[(depth_-1) & kDepthMask] = string;
3271 void ConsStringIteratorOp::AdjustMaximumDepth() {
3272 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3276 void ConsStringIteratorOp::Pop() {
3277 ASSERT(depth_ > 0);
3278 ASSERT(depth_ <= maximum_depth_);
3279 depth_--;
3280 }
3283 bool ConsStringIteratorOp::HasMore() {
3284 return depth_ != 0;
3285 }
3288 void ConsStringIteratorOp::Reset() {
3289 depth_ = 0;
3290 }
3293 String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out,
3294 unsigned* length_out) {
3295 bool blew_stack = false;
3296 String* string = NextLeaf(&blew_stack, type_out, length_out);
3297 // String found.
3298 if (string != NULL) {
3299 // Verify output.
3300 ASSERT(*length_out == static_cast<unsigned>(string->length()));
3301 ASSERT(*type_out == string->map()->instance_type());
3302 return string;
3303 }
3304 // Traversal complete.
3305 if (!blew_stack) return NULL;
3306 // Restart search from root.
3307 unsigned offset_out;
3308 string = Search(&offset_out, type_out, length_out);
3309 // Verify output.
3310 ASSERT(string == NULL || offset_out == 0);
3311 ASSERT(string == NULL ||
3312 *length_out == static_cast<unsigned>(string->length()));
3313 ASSERT(string == NULL || *type_out == string->map()->instance_type());
3314 return string;
3315 }
3318 uint16_t StringCharacterStream::GetNext() {
3319 ASSERT(buffer8_ != NULL && end_ != NULL);
3320 // Advance cursor if needed.
3321 // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this.
3322 if (buffer8_ == end_) HasMore();
3323 ASSERT(buffer8_ < end_);
3324 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3328 StringCharacterStream::StringCharacterStream(String* string,
3329 ConsStringIteratorOp* op,
3330 unsigned offset)
3331 : is_one_byte_(false),
3332 op_(op) {
3333 Reset(string, offset);
3334 }
3337 void StringCharacterStream::Reset(String* string, unsigned offset) {
3338 op_->Reset();
3339 buffer8_ = NULL;
3340 end_ = NULL;
3341 int32_t type = string->map()->instance_type();
3342 unsigned length = string->length();
3343 String::Visit(string, offset, *this, *op_, type, length);
3344 }
3347 bool StringCharacterStream::HasMore() {
3348 if (buffer8_ != end_) return true;
3349 if (!op_->HasMore()) return false;
3350 unsigned length;
3351 int32_t type;
3352 String* string = op_->ContinueOperation(&type, &length);
3353 if (string == NULL) return false;
3354 ASSERT(!string->IsConsString());
3355 ASSERT(string->length() != 0);
3356 ConsStringNullOp null_op;
3357 String::Visit(string, 0, *this, null_op, type, length);
3358 ASSERT(buffer8_ != end_);
3359 return true;
3360 }
3363 void StringCharacterStream::VisitOneByteString(
3364 const uint8_t* chars, unsigned length) {
3365 is_one_byte_ = true;
3366 buffer8_ = chars;
3367 end_ = chars + length;
3368 }
3371 void StringCharacterStream::VisitTwoByteString(
3372 const uint16_t* chars, unsigned length) {
3373 is_one_byte_ = false;
3374 buffer16_ = chars;
3375 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3376 }
3379 void JSFunctionResultCache::MakeZeroSize() {
3380 set_finger_index(kEntriesIndex);
3381 set_size(kEntriesIndex);
3385 void JSFunctionResultCache::Clear() {
3386 int cache_size = size();
3387 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3388 MemsetPointer(entries_start,
3389 GetHeap()->the_hole_value(),
3390 cache_size - kEntriesIndex);
3391 MakeZeroSize();
3392 }
3395 int JSFunctionResultCache::size() {
3396 return Smi::cast(get(kCacheSizeIndex))->value();
3400 void JSFunctionResultCache::set_size(int size) {
3401 set(kCacheSizeIndex, Smi::FromInt(size));
3405 int JSFunctionResultCache::finger_index() {
3406 return Smi::cast(get(kFingerIndex))->value();
3410 void JSFunctionResultCache::set_finger_index(int finger_index) {
3411 set(kFingerIndex, Smi::FromInt(finger_index));
3415 byte ByteArray::get(int index) {
3416 ASSERT(index >= 0 && index < this->length());
3417 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3421 void ByteArray::set(int index, byte value) {
3422 ASSERT(index >= 0 && index < this->length());
3423 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3427 int ByteArray::get_int(int index) {
3428 ASSERT(index >= 0 && (index * kIntSize) < this->length());
3429 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3433 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3434 ASSERT_TAG_ALIGNED(address);
3435 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3436 }
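// Round-trip illustration: GetDataStartAddress() drops the kHeapObjectTag
// bit and skips the header, FromDataStartAddress() re-applies both, so for
// any byte array b:
//
//   ByteArray::FromDataStartAddress(b->GetDataStartAddress()) == b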
3439 Address ByteArray::GetDataStartAddress() {
3440 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3444 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3445 return reinterpret_cast<uint8_t*>(external_pointer());
3449 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3450 ASSERT((index >= 0) && (index < this->length()));
3451 uint8_t* ptr = external_uint8_clamped_pointer();
3452 return ptr[index];
3453 }
3456 MaybeObject* ExternalUint8ClampedArray::get(int index) {
3457 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3461 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3462 ASSERT((index >= 0) && (index < this->length()));
3463 uint8_t* ptr = external_uint8_clamped_pointer();
3464 ptr[index] = value;
3465 }
3468 void* ExternalArray::external_pointer() {
3469 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3470 return reinterpret_cast<void*>(ptr);
3474 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3475 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3476 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3480 int8_t ExternalInt8Array::get_scalar(int index) {
3481 ASSERT((index >= 0) && (index < this->length()));
3482 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3483 return ptr[index];
3484 }
3487 MaybeObject* ExternalInt8Array::get(int index) {
3488 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3492 void ExternalInt8Array::set(int index, int8_t value) {
3493 ASSERT((index >= 0) && (index < this->length()));
3494 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3495 ptr[index] = value;
3496 }
3499 uint8_t ExternalUint8Array::get_scalar(int index) {
3500 ASSERT((index >= 0) && (index < this->length()));
3501 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3502 return ptr[index];
3503 }
3506 MaybeObject* ExternalUint8Array::get(int index) {
3507 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3511 void ExternalUint8Array::set(int index, uint8_t value) {
3512 ASSERT((index >= 0) && (index < this->length()));
3513 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3514 ptr[index] = value;
3515 }
3518 int16_t ExternalInt16Array::get_scalar(int index) {
3519 ASSERT((index >= 0) && (index < this->length()));
3520 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3521 return ptr[index];
3522 }
3525 MaybeObject* ExternalInt16Array::get(int index) {
3526 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3530 void ExternalInt16Array::set(int index, int16_t value) {
3531 ASSERT((index >= 0) && (index < this->length()));
3532 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3533 ptr[index] = value;
3534 }
3537 uint16_t ExternalUint16Array::get_scalar(int index) {
3538 ASSERT((index >= 0) && (index < this->length()));
3539 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3540 return ptr[index];
3541 }
3544 MaybeObject* ExternalUint16Array::get(int index) {
3545 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3549 void ExternalUint16Array::set(int index, uint16_t value) {
3550 ASSERT((index >= 0) && (index < this->length()));
3551 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3552 ptr[index] = value;
3553 }
3556 int32_t ExternalInt32Array::get_scalar(int index) {
3557 ASSERT((index >= 0) && (index < this->length()));
3558 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3559 return ptr[index];
3560 }
3563 MaybeObject* ExternalInt32Array::get(int index) {
3564 return GetHeap()->NumberFromInt32(get_scalar(index));
3568 void ExternalInt32Array::set(int index, int32_t value) {
3569 ASSERT((index >= 0) && (index < this->length()));
3570 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3571 ptr[index] = value;
3572 }
3575 uint32_t ExternalUint32Array::get_scalar(int index) {
3576 ASSERT((index >= 0) && (index < this->length()));
3577 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3578 return ptr[index];
3579 }
3582 MaybeObject* ExternalUint32Array::get(int index) {
3583 return GetHeap()->NumberFromUint32(get_scalar(index));
3587 void ExternalUint32Array::set(int index, uint32_t value) {
3588 ASSERT((index >= 0) && (index < this->length()));
3589 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3590 ptr[index] = value;
3591 }
3594 float ExternalFloat32Array::get_scalar(int index) {
3595 ASSERT((index >= 0) && (index < this->length()));
3596 float* ptr = static_cast<float*>(external_pointer());
3597 return ptr[index];
3598 }
3601 MaybeObject* ExternalFloat32Array::get(int index) {
3602 return GetHeap()->NumberFromDouble(get_scalar(index));
3606 void ExternalFloat32Array::set(int index, float value) {
3607 ASSERT((index >= 0) && (index < this->length()));
3608 float* ptr = static_cast<float*>(external_pointer());
3609 ptr[index] = value;
3610 }
3613 double ExternalFloat64Array::get_scalar(int index) {
3614 ASSERT((index >= 0) && (index < this->length()));
3615 double* ptr = static_cast<double*>(external_pointer());
3616 return ptr[index];
3617 }
3620 MaybeObject* ExternalFloat64Array::get(int index) {
3621 return GetHeap()->NumberFromDouble(get_scalar(index));
3625 void ExternalFloat64Array::set(int index, double value) {
3626 ASSERT((index >= 0) && (index < this->length()));
3627 double* ptr = static_cast<double*>(external_pointer());
3628 ptr[index] = value;
3629 }
3632 int FixedTypedArrayBase::size() {
3633 InstanceType instance_type = map()->instance_type();
3634 int element_size;
3635 switch (instance_type) {
3636 case FIXED_UINT8_ARRAY_TYPE:
3637 case FIXED_INT8_ARRAY_TYPE:
3638 case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
3639 element_size = 1;
3640 break;
3641 case FIXED_UINT16_ARRAY_TYPE:
3642 case FIXED_INT16_ARRAY_TYPE:
3643 element_size = 2;
3644 break;
3645 case FIXED_UINT32_ARRAY_TYPE:
3646 case FIXED_INT32_ARRAY_TYPE:
3647 case FIXED_FLOAT32_ARRAY_TYPE:
3648 element_size = 4;
3649 break;
3650 case FIXED_FLOAT64_ARRAY_TYPE:
3651 element_size = 8;
3652 break;
3653 default:
3654 UNREACHABLE();
3655 return 0;
3656 }
3657 return OBJECT_POINTER_ALIGN(kDataOffset + length() * element_size);
3658 }
3661 template <class Traits>
3662 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3663 ASSERT((index >= 0) && (index < this->length()));
3664 ElementType* ptr = reinterpret_cast<ElementType*>(
3665 FIELD_ADDR(this, kDataOffset));
3666 return ptr[index];
3667 }
3669 template <class Traits>
3670 void FixedTypedArray<Traits>::set(int index, ElementType value) {
3671 ASSERT((index >= 0) && (index < this->length()));
3672 ElementType* ptr = reinterpret_cast<ElementType*>(
3673 FIELD_ADDR(this, kDataOffset));
3674 ptr[index] = value;
3675 }
3678 template <class Traits>
3679 MaybeObject* FixedTypedArray<Traits>::get(int index) {
3680 return Traits::ToObject(GetHeap(), get_scalar(index));
3683 template <class Traits>
3684 MaybeObject* FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
3685 ElementType cast_value = Traits::defaultValue();
3686 if (index < static_cast<uint32_t>(length())) {
3687 if (value->IsSmi()) {
3688 int int_value = Smi::cast(value)->value();
3689 cast_value = static_cast<ElementType>(int_value);
3690 } else if (value->IsHeapNumber()) {
3691 double double_value = HeapNumber::cast(value)->value();
3692 cast_value = static_cast<ElementType>(DoubleToInt32(double_value));
3693 } else {
3694 // Clamp undefined to the default value. All other types have been
3695 // converted to a number type further up in the call chain.
3696 ASSERT(value->IsUndefined());
3697 }
3698 set(index, cast_value);
3699 }
3700 return Traits::ToObject(GetHeap(), cast_value);
3701 }
3703 template <class Traits>
3704 Handle<Object> FixedTypedArray<Traits>::SetValue(
3705 Handle<FixedTypedArray<Traits> > array,
3706 uint32_t index,
3707 Handle<Object> value) {
3708 CALL_HEAP_FUNCTION(array->GetIsolate(),
3709 array->SetValue(index, *value),
3710 Object);
3711 }
3714 MaybeObject* Uint8ArrayTraits::ToObject(Heap*, uint8_t scalar) {
3715 return Smi::FromInt(scalar);
3719 MaybeObject* Uint8ClampedArrayTraits::ToObject(Heap*, uint8_t scalar) {
3720 return Smi::FromInt(scalar);
3724 MaybeObject* Int8ArrayTraits::ToObject(Heap*, int8_t scalar) {
3725 return Smi::FromInt(scalar);
3729 MaybeObject* Uint16ArrayTraits::ToObject(Heap*, uint16_t scalar) {
3730 return Smi::FromInt(scalar);
3734 MaybeObject* Int16ArrayTraits::ToObject(Heap*, int16_t scalar) {
3735 return Smi::FromInt(scalar);
3739 MaybeObject* Uint32ArrayTraits::ToObject(Heap* heap, uint32_t scalar) {
3740 return heap->NumberFromUint32(scalar);
3744 MaybeObject* Int32ArrayTraits::ToObject(Heap* heap, int32_t scalar) {
3745 return heap->NumberFromInt32(scalar);
3749 MaybeObject* Float32ArrayTraits::ToObject(Heap* heap, float scalar) {
3750 return heap->NumberFromDouble(scalar);
3754 MaybeObject* Float64ArrayTraits::ToObject(Heap* heap, double scalar) {
3755 return heap->NumberFromDouble(scalar);
3759 int Map::visitor_id() {
3760 return READ_BYTE_FIELD(this, kVisitorIdOffset);
3764 void Map::set_visitor_id(int id) {
3765 ASSERT(0 <= id && id < 256);
3766 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
3770 int Map::instance_size() {
3771 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
3775 int Map::inobject_properties() {
3776 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
3780 int Map::pre_allocated_property_fields() {
3781 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
3782 }
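// These layout fields are deliberately byte-sized: instance_size() is stored
// divided by kPointerSize (see set_instance_size() below), so one byte spans
// maps of up to 255 words; e.g. a stored byte of 16 on a 64-bit build decodes
// to 16 << kPointerSizeLog2 = 128 bytes.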
3785 int HeapObject::SizeFromMap(Map* map) {
3786 int instance_size = map->instance_size();
3787 if (instance_size != kVariableSizeSentinel) return instance_size;
3788 // Only inline the most frequent cases.
3789 int instance_type = static_cast<int>(map->instance_type());
3790 if (instance_type == FIXED_ARRAY_TYPE) {
3791 return FixedArray::BodyDescriptor::SizeOf(map, this);
3792 }
3793 if (instance_type == ASCII_STRING_TYPE ||
3794 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
3795 return SeqOneByteString::SizeFor(
3796 reinterpret_cast<SeqOneByteString*>(this)->length());
3797 }
3798 if (instance_type == BYTE_ARRAY_TYPE) {
3799 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
3800 }
3801 if (instance_type == FREE_SPACE_TYPE) {
3802 return reinterpret_cast<FreeSpace*>(this)->size();
3803 }
3804 if (instance_type == STRING_TYPE ||
3805 instance_type == INTERNALIZED_STRING_TYPE) {
3806 return SeqTwoByteString::SizeFor(
3807 reinterpret_cast<SeqTwoByteString*>(this)->length());
3808 }
3809 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
3810 return FixedDoubleArray::SizeFor(
3811 reinterpret_cast<FixedDoubleArray*>(this)->length());
3812 }
3813 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
3814 return ConstantPoolArray::SizeFor(
3815 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
3816 reinterpret_cast<ConstantPoolArray*>(this)->count_of_ptr_entries(),
3817 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
3818 }
3819 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
3820 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
3821 return reinterpret_cast<FixedTypedArrayBase*>(this)->size();
3822 }
3823 ASSERT(instance_type == CODE_TYPE);
3824 return reinterpret_cast<Code*>(this)->CodeSize();
3825 }
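// Example of the variable-size path (hypothetical object): a five-character
// ASCII string's map reports kVariableSizeSentinel, so its size comes from
// SeqOneByteString::SizeFor(5), i.e. the fixed header plus five one-byte
// characters, rounded up to pointer alignment.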
3828 void Map::set_instance_size(int value) {
3829 ASSERT_EQ(0, value & (kPointerSize - 1));
3830 value >>= kPointerSizeLog2;
3831 ASSERT(0 <= value && value < 256);
3832 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
3836 void Map::set_inobject_properties(int value) {
3837 ASSERT(0 <= value && value < 256);
3838 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
3842 void Map::set_pre_allocated_property_fields(int value) {
3843 ASSERT(0 <= value && value < 256);
3844 WRITE_BYTE_FIELD(this,
3845 kPreAllocatedPropertyFieldsOffset,
3846 static_cast<byte>(value));
3850 InstanceType Map::instance_type() {
3851 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
3855 void Map::set_instance_type(InstanceType value) {
3856 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
3860 int Map::unused_property_fields() {
3861 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
3865 void Map::set_unused_property_fields(int value) {
3866 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
3870 byte Map::bit_field() {
3871 return READ_BYTE_FIELD(this, kBitFieldOffset);
3875 void Map::set_bit_field(byte value) {
3876 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
3880 byte Map::bit_field2() {
3881 return READ_BYTE_FIELD(this, kBitField2Offset);
3885 void Map::set_bit_field2(byte value) {
3886 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
3890 void Map::set_non_instance_prototype(bool value) {
3891 if (value) {
3892 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
3893 } else {
3894 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
3895 }
3896 }
3899 bool Map::has_non_instance_prototype() {
3900 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
3904 void Map::set_function_with_prototype(bool value) {
3905 set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
3909 bool Map::function_with_prototype() {
3910 return FunctionWithPrototype::decode(bit_field3());
3914 void Map::set_is_access_check_needed(bool access_check_needed) {
3915 if (access_check_needed) {
3916 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
3917 } else {
3918 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
3919 }
3920 }
3923 bool Map::is_access_check_needed() {
3924 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
3928 void Map::set_is_extensible(bool value) {
3929 if (value) {
3930 set_bit_field2(bit_field2() | (1 << kIsExtensible));
3931 } else {
3932 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
3933 }
3934 }
3936 bool Map::is_extensible() {
3937 return ((1 << kIsExtensible) & bit_field2()) != 0;
3941 void Map::set_attached_to_shared_function_info(bool value) {
3942 if (value) {
3943 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
3944 } else {
3945 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
3946 }
3947 }
3949 bool Map::attached_to_shared_function_info() {
3950 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
3954 void Map::set_is_shared(bool value) {
3955 set_bit_field3(IsShared::update(bit_field3(), value));
3959 bool Map::is_shared() {
3960 return IsShared::decode(bit_field3());
3964 void Map::set_dictionary_map(bool value) {
3965 if (value) mark_unstable();
3966 set_bit_field3(DictionaryMap::update(bit_field3(), value));
3970 bool Map::is_dictionary_map() {
3971 return DictionaryMap::decode(bit_field3());
3975 Code::Flags Code::flags() {
3976 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
3980 void Map::set_owns_descriptors(bool is_shared) {
3981 set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
3985 bool Map::owns_descriptors() {
3986 return OwnsDescriptors::decode(bit_field3());
3990 void Map::set_has_instance_call_handler() {
3991 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
3995 bool Map::has_instance_call_handler() {
3996 return HasInstanceCallHandler::decode(bit_field3());
4000 void Map::deprecate() {
4001 set_bit_field3(Deprecated::update(bit_field3(), true));
4005 bool Map::is_deprecated() {
4006 if (!FLAG_track_fields) return false;
4007 return Deprecated::decode(bit_field3());
4011 void Map::set_migration_target(bool value) {
4012 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4016 bool Map::is_migration_target() {
4017 if (!FLAG_track_fields) return false;
4018 return IsMigrationTarget::decode(bit_field3());
4022 void Map::freeze() {
4023 set_bit_field3(IsFrozen::update(bit_field3(), true));
4027 bool Map::is_frozen() {
4028 return IsFrozen::decode(bit_field3());
4032 void Map::mark_unstable() {
4033 set_bit_field3(IsUnstable::update(bit_field3(), true));
4037 bool Map::is_stable() {
4038 return !IsUnstable::decode(bit_field3());
4042 bool Map::has_code_cache() {
4043 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4047 bool Map::CanBeDeprecated() {
4048 int descriptor = LastAdded();
4049 for (int i = 0; i <= descriptor; i++) {
4050 PropertyDetails details = instance_descriptors()->GetDetails(i);
4051 if (FLAG_track_fields && details.representation().IsNone()) {
4052 return true;
4053 }
4054 if (FLAG_track_fields && details.representation().IsSmi()) {
4055 return true;
4056 }
4057 if (FLAG_track_double_fields && details.representation().IsDouble()) {
4058 return true;
4059 }
4060 if (FLAG_track_heap_object_fields &&
4061 details.representation().IsHeapObject()) {
4062 return true;
4063 }
4064 if (FLAG_track_fields && details.type() == CONSTANT) {
4065 return true;
4066 }
4067 }
4068 return false;
4069 }
4072 void Map::NotifyLeafMapLayoutChange() {
4073 if (is_stable()) {
4074 mark_unstable();
4075 dependent_code()->DeoptimizeDependentCodeGroup(
4076 GetIsolate(),
4077 DependentCode::kPrototypeCheckGroup);
4078 }
4079 }
4082 bool Map::CanOmitMapChecks() {
4083 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4087 int DependentCode::number_of_entries(DependencyGroup group) {
4088 if (length() == 0) return 0;
4089 return Smi::cast(get(group))->value();
4093 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4094 set(group, Smi::FromInt(value));
4098 bool DependentCode::is_code_at(int i) {
4099 return get(kCodesStartIndex + i)->IsCode();
4102 Code* DependentCode::code_at(int i) {
4103 return Code::cast(get(kCodesStartIndex + i));
4107 CompilationInfo* DependentCode::compilation_info_at(int i) {
4108 return reinterpret_cast<CompilationInfo*>(
4109 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4113 void DependentCode::set_object_at(int i, Object* object) {
4114 set(kCodesStartIndex + i, object);
4118 Object* DependentCode::object_at(int i) {
4119 return get(kCodesStartIndex + i);
4123 Object** DependentCode::slot_at(int i) {
4124 return RawFieldOfElementAt(kCodesStartIndex + i);
4128 void DependentCode::clear_at(int i) {
4129 set_undefined(kCodesStartIndex + i);
4133 void DependentCode::copy(int from, int to) {
4134 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4138 void DependentCode::ExtendGroup(DependencyGroup group) {
4139 GroupStartIndexes starts(this);
4140 for (int g = kGroupCount - 1; g > group; g--) {
4141 if (starts.at(g) < starts.at(g + 1)) {
4142 copy(starts.at(g), starts.at(g + 1));
4143 }
4144 }
4145 }
4148 void Code::set_flags(Code::Flags flags) {
4149 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4150 // Make sure that all call stubs have an arguments count.
4151 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
4152 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
4153 ExtractArgumentsCountFromFlags(flags) >= 0);
4154 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4158 Code::Kind Code::kind() {
4159 return ExtractKindFromFlags(flags());
4163 InlineCacheState Code::ic_state() {
4164 InlineCacheState result = ExtractICStateFromFlags(flags());
4165 // Only allow uninitialized or debugger states for non-IC code
4166 // objects. This is used in the debugger to determine whether or not
4167 // a call to a code object has been replaced with a debug break call.
4168 ASSERT(is_inline_cache_stub() ||
4169 result == UNINITIALIZED ||
4170 result == DEBUG_STUB);
4171 return result;
4172 }
4175 ExtraICState Code::extra_ic_state() {
4176 ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind()))
4177 || ic_state() == DEBUG_STUB);
4178 return ExtractExtraICStateFromFlags(flags());
4182 ExtraICState Code::extended_extra_ic_state() {
4183 ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4184 ASSERT(needs_extended_extra_ic_state(kind()));
4185 return ExtractExtendedExtraICStateFromFlags(flags());
4189 Code::StubType Code::type() {
4190 return ExtractTypeFromFlags(flags());
4194 int Code::arguments_count() {
4195 ASSERT(is_call_stub() || is_keyed_call_stub() ||
4196 kind() == STUB || is_handler());
4197 return ExtractArgumentsCountFromFlags(flags());
4201 // For initialization.
4202 void Code::set_raw_kind_specific_flags1(int value) {
4203 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4207 void Code::set_raw_kind_specific_flags2(int value) {
4208 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4212 inline bool Code::is_crankshafted() {
4213 return IsCrankshaftedField::decode(
4214 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4218 inline void Code::set_is_crankshafted(bool value) {
4219 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4220 int updated = IsCrankshaftedField::update(previous, value);
4221 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4225 int Code::major_key() {
4226 ASSERT(has_major_key());
4227 return StubMajorKeyField::decode(
4228 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4232 void Code::set_major_key(int major) {
4233 ASSERT(has_major_key());
4234 ASSERT(0 <= major && major < 256);
4235 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4236 int updated = StubMajorKeyField::update(previous, major);
4237 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4241 bool Code::has_major_key() {
4242 return kind() == STUB ||
4243 kind() == HANDLER ||
4244 kind() == BINARY_OP_IC ||
4245 kind() == COMPARE_IC ||
4246 kind() == COMPARE_NIL_IC ||
4247 kind() == LOAD_IC ||
4248 kind() == KEYED_LOAD_IC ||
4249 kind() == STORE_IC ||
4250 kind() == KEYED_STORE_IC ||
4251 kind() == KEYED_CALL_IC ||
4252 kind() == TO_BOOLEAN_IC;
4256 bool Code::optimizable() {
4257 ASSERT_EQ(FUNCTION, kind());
4258 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4262 void Code::set_optimizable(bool value) {
4263 ASSERT_EQ(FUNCTION, kind());
4264 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4268 bool Code::has_deoptimization_support() {
4269 ASSERT_EQ(FUNCTION, kind());
4270 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4271 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4275 void Code::set_has_deoptimization_support(bool value) {
4276 ASSERT_EQ(FUNCTION, kind());
4277 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4278 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4279 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4283 bool Code::has_debug_break_slots() {
4284 ASSERT_EQ(FUNCTION, kind());
4285 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4286 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4290 void Code::set_has_debug_break_slots(bool value) {
4291 ASSERT_EQ(FUNCTION, kind());
4292 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4293 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4294 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4298 bool Code::is_compiled_optimizable() {
4299 ASSERT_EQ(FUNCTION, kind());
4300 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4301 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4305 void Code::set_compiled_optimizable(bool value) {
4306 ASSERT_EQ(FUNCTION, kind());
4307 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4308 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4309 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4313 int Code::allow_osr_at_loop_nesting_level() {
4314 ASSERT_EQ(FUNCTION, kind());
4315 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
4319 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4320 ASSERT_EQ(FUNCTION, kind());
4321 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
4322 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
4326 int Code::profiler_ticks() {
4327 ASSERT_EQ(FUNCTION, kind());
4328 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4332 void Code::set_profiler_ticks(int ticks) {
4333 ASSERT_EQ(FUNCTION, kind());
4334 ASSERT(ticks < 256);
4335 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4339 unsigned Code::stack_slots() {
4340 ASSERT(is_crankshafted());
4341 return StackSlotsField::decode(
4342 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4346 void Code::set_stack_slots(unsigned slots) {
4347 CHECK(slots <= (1 << kStackSlotsBitCount));
4348 ASSERT(is_crankshafted());
4349 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4350 int updated = StackSlotsField::update(previous, slots);
4351 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4355 unsigned Code::safepoint_table_offset() {
4356 ASSERT(is_crankshafted());
4357 return SafepointTableOffsetField::decode(
4358 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4362 void Code::set_safepoint_table_offset(unsigned offset) {
4363 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4364 ASSERT(is_crankshafted());
4365 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4366 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4367 int updated = SafepointTableOffsetField::update(previous, offset);
4368 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4372 unsigned Code::back_edge_table_offset() {
4373 ASSERT_EQ(FUNCTION, kind());
4374 return BackEdgeTableOffsetField::decode(
4375 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4379 void Code::set_back_edge_table_offset(unsigned offset) {
4380 ASSERT_EQ(FUNCTION, kind());
4381 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4382 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4383 int updated = BackEdgeTableOffsetField::update(previous, offset);
4384 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4388 bool Code::back_edges_patched_for_osr() {
4389 ASSERT_EQ(FUNCTION, kind());
4390 return BackEdgesPatchedForOSRField::decode(
4391 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4395 void Code::set_back_edges_patched_for_osr(bool value) {
4396 ASSERT_EQ(FUNCTION, kind());
4397 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4398 int updated = BackEdgesPatchedForOSRField::update(previous, value);
4399 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4404 CheckType Code::check_type() {
4405 ASSERT(is_call_stub() || is_keyed_call_stub());
4406 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
4407 return static_cast<CheckType>(type);
4411 void Code::set_check_type(CheckType value) {
4412 ASSERT(is_call_stub() || is_keyed_call_stub());
4413 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
4417 byte Code::to_boolean_state() {
4418 return extended_extra_ic_state();
4422 bool Code::has_function_cache() {
4423 ASSERT(kind() == STUB);
4424 return HasFunctionCacheField::decode(
4425 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4429 void Code::set_has_function_cache(bool flag) {
4430 ASSERT(kind() == STUB);
4431 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4432 int updated = HasFunctionCacheField::update(previous, flag);
4433 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4437 bool Code::marked_for_deoptimization() {
4438 ASSERT(kind() == OPTIMIZED_FUNCTION);
4439 return MarkedForDeoptimizationField::decode(
4440 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4444 void Code::set_marked_for_deoptimization(bool flag) {
4445 ASSERT(kind() == OPTIMIZED_FUNCTION);
4446 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4447 int updated = MarkedForDeoptimizationField::update(previous, flag);
4448 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4452 bool Code::is_inline_cache_stub() {
4453 Kind kind = this->kind();
4454 switch (kind) {
4455 #define CASE(name) case name: return true;
4456 IC_KIND_LIST(CASE)
4457 #undef CASE
4458 default: return false;
4459 }
4460 }
4463 bool Code::is_keyed_stub() {
4464 return is_keyed_load_stub() || is_keyed_store_stub() || is_keyed_call_stub();
4468 bool Code::is_debug_stub() {
4469 return ic_state() == DEBUG_STUB;
4473 ConstantPoolArray* Code::constant_pool() {
4474 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
4478 void Code::set_constant_pool(Object* value) {
4479 ASSERT(value->IsConstantPoolArray());
4480 WRITE_FIELD(this, kConstantPoolOffset, value);
4481 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
4485 Code::Flags Code::ComputeFlags(Kind kind,
4486 InlineCacheState ic_state,
4487 ExtraICState extra_ic_state,
4488 StubType type,
4489 int argc,
4490 InlineCacheHolderFlag holder) {
4491 ASSERT(argc <= Code::kMaxArguments);
4492 // Since the extended extra ic state overlaps with the argument count
4493 // for CALL_ICs, do some checks to make sure that they don't interfere.
4494 ASSERT((kind != Code::CALL_IC &&
4495 kind != Code::KEYED_CALL_IC) ||
4496 (ExtraICStateField::encode(extra_ic_state) | true));
4497 // Compute the bit mask.
4498 unsigned int bits = KindField::encode(kind)
4499 | ICStateField::encode(ic_state)
4500 | TypeField::encode(type)
4501 | ExtendedExtraICStateField::encode(extra_ic_state)
4502 | CacheHolderField::encode(holder);
4503 if (!Code::needs_extended_extra_ic_state(kind)) {
4504 bits |= (argc << kArgumentsCountShift);
4505 }
4506 return static_cast<Flags>(bits);
4507 }
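// An illustrative round trip through the encoders/decoders, assuming the
// default arguments that objects.h declares for ComputeFlags:
//
//   Code::Flags f = Code::ComputeFlags(Code::STUB, UNINITIALIZED);
//   ASSERT(Code::ExtractKindFromFlags(f) == Code::STUB);
//   ASSERT(Code::ExtractICStateFromFlags(f) == UNINITIALIZED);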
4510 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
4511 ExtraICState extra_ic_state,
4512 InlineCacheHolderFlag holder,
4513 StubType type,
4514 int argc) {
4515 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
4516 }
4519 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4520 return KindField::decode(flags);
4524 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4525 return ICStateField::decode(flags);
4529 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4530 return ExtraICStateField::decode(flags);
4534 ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
4535 Flags flags) {
4536 return ExtendedExtraICStateField::decode(flags);
4537 }
4540 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4541 return TypeField::decode(flags);
4545 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
4546 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
4550 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
4551 return CacheHolderField::decode(flags);
4555 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
4556 int bits = flags & ~TypeField::kMask;
4557 return static_cast<Flags>(bits);
4561 Code* Code::GetCodeFromTargetAddress(Address address) {
4562 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
4563 // GetCodeFromTargetAddress might be called when marking objects during mark
4564 // sweep. reinterpret_cast is therefore used instead of the more appropriate
4565 // Code::cast. Code::cast does not work when the object's map is
4566 // marked.
4567 Code* result = reinterpret_cast<Code*>(code);
4568 return result;
4569 }
4572 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
4573 return HeapObject::
4574 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
4575 }
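// Both helpers rely on the same layout fact: the first instruction starts
// kHeaderSize bytes past the Code object's address (see instruction_start()
// further below), so subtracting kHeaderSize from an entry address recovers
// the HeapObject. Illustrative round trip:
//
//   Code* code = ...;
//   ASSERT(Code::GetCodeFromTargetAddress(code->instruction_start()) == code);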
4578 Object* Map::prototype() {
4579 return READ_FIELD(this, kPrototypeOffset);
4583 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
4584 ASSERT(value->IsNull() || value->IsJSReceiver());
4585 WRITE_FIELD(this, kPrototypeOffset, value);
4586 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
4590 // If the descriptor is using the empty transition array, install a new empty
4591 // transition array that will have place for an element transition.
4592 static MaybeObject* EnsureHasTransitionArray(Map* map) {
4593 TransitionArray* transitions;
4594 MaybeObject* maybe_transitions;
4595 if (!map->HasTransitionArray()) {
4596 maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
4597 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4598 transitions->set_back_pointer_storage(map->GetBackPointer());
4599 } else if (!map->transitions()->IsFullTransitionArray()) {
4600 maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
4601 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4602 } else {
4603 return map;
4604 }
4605 map->set_transitions(transitions);
4606 return transitions;
4607 }
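// Hedged usage pattern for the helper above, mirroring how the transition
// setters in this file consume MaybeObject results:
//
//   MaybeObject* maybe = EnsureHasTransitionArray(map);
//   if (maybe->IsFailure()) return maybe;  // allocation failed; propagate
//   // map->transitions() is now safe to use.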
4610 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
4611 int len = descriptors->number_of_descriptors();
4612 set_instance_descriptors(descriptors);
4613 SetNumberOfOwnDescriptors(len);
4617 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
4620 void Map::set_bit_field3(uint32_t bits) {
4621 // Ensure the upper 2 bits have the same value by sign extending it. This is
4622 // necessary to be able to use the 31st bit.
4623 int value = bits << 1;
4624 WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
4628 uint32_t Map::bit_field3() {
4629 Object* value = READ_FIELD(this, kBitField3Offset);
4630 return Smi::cast(value)->value();
4631 }
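// Why the shift pair in set_bit_field3() works, as a standalone sketch: a
// Smi payload is 31 bits, so bit 31 of bit_field3 must mirror bit 30 when
// stored. Shifting left one bit and then arithmetically right one bit
// sign-extends bit 30 into bit 31 (assumes arithmetic right shift of
// negative ints, as V8 does throughout):
//
//   static inline int SignExtend31(uint32_t bits) {
//     int value = static_cast<int>(bits << 1);  // drop the top bit
//     return value >> 1;                        // copy bit 30 into bit 31
//   }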
4634 void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
4635 Object* back_pointer = GetBackPointer();
4636 #ifdef DEBUG
4637 if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
4638 ZapTransitions();
4639 }
4640 #endif
4641 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
4642 CONDITIONAL_WRITE_BARRIER(
4643 heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
4644 }
4647 void Map::AppendDescriptor(Descriptor* desc,
4648 const DescriptorArray::WhitenessWitness& witness) {
4649 DescriptorArray* descriptors = instance_descriptors();
4650 int number_of_own_descriptors = NumberOfOwnDescriptors();
4651 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
4652 descriptors->Append(desc, witness);
4653 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
4657 Object* Map::GetBackPointer() {
4658 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4659 if (object->IsDescriptorArray()) {
4660 return TransitionArray::cast(object)->back_pointer_storage();
4661 } else {
4662 ASSERT(object->IsMap() || object->IsUndefined());
4663 return object;
4664 }
4665 }
4668 bool Map::HasElementsTransition() {
4669 return HasTransitionArray() && transitions()->HasElementsTransition();
4673 bool Map::HasTransitionArray() {
4674 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4675 return object->IsTransitionArray();
4679 Map* Map::elements_transition_map() {
4680 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
4681 return transitions()->GetTarget(index);
4685 bool Map::CanHaveMoreTransitions() {
4686 if (!HasTransitionArray()) return true;
4687 return FixedArray::SizeFor(transitions()->length() +
4688 TransitionArray::kTransitionSize)
4689 <= Page::kMaxRegularHeapObjectSize;
4693 MaybeObject* Map::AddTransition(Name* key,
4694 Map* target,
4695 SimpleTransitionFlag flag) {
4696 if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
4697 return TransitionArray::NewWith(flag, key, target, GetBackPointer());
4698 }
4701 void Map::SetTransition(int transition_index, Map* target) {
4702 transitions()->SetTarget(transition_index, target);
4706 Map* Map::GetTransition(int transition_index) {
4707 return transitions()->GetTarget(transition_index);
4711 MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
4712 TransitionArray* transitions;
4713 MaybeObject* maybe_transitions = AddTransition(
4714 GetHeap()->elements_transition_symbol(),
4715 transitioned_map,
4716 FULL_TRANSITION);
4717 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4718 set_transitions(transitions);
4719 return this;
4720 }
4723 FixedArray* Map::GetPrototypeTransitions() {
4724 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
4725 if (!transitions()->HasPrototypeTransitions()) {
4726 return GetHeap()->empty_fixed_array();
4728 return transitions()->GetPrototypeTransitions();
4732 MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
4733 MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
4734 if (allow_prototype->IsFailure()) return allow_prototype;
4735 int old_number_of_transitions = NumberOfProtoTransitions();
4736 #ifdef DEBUG
4737 if (HasPrototypeTransitions()) {
4738 ASSERT(GetPrototypeTransitions() != proto_transitions);
4739 ZapPrototypeTransitions();
4740 }
4741 #endif
4742 transitions()->SetPrototypeTransitions(proto_transitions);
4743 SetNumberOfProtoTransitions(old_number_of_transitions);
4744 return this;
4745 }
4748 bool Map::HasPrototypeTransitions() {
4749 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
4753 TransitionArray* Map::transitions() {
4754 ASSERT(HasTransitionArray());
4755 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4756 return TransitionArray::cast(object);
4760 void Map::set_transitions(TransitionArray* transition_array,
4761 WriteBarrierMode mode) {
4762 // Transition arrays are not shared. When one is replaced, it should not
4763 // keep referenced objects alive, so we zap it.
4764 // When there is another reference to the array somewhere (e.g. a handle),
4765 // not zapping it turns from a waste of memory into a source of crashes.
4766 if (HasTransitionArray()) {
4767 #ifdef DEBUG
4768 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
4769 Map* target = transitions()->GetTarget(i);
4770 if (target->instance_descriptors() == instance_descriptors()) {
4771 Name* key = transitions()->GetKey(i);
4772 int new_target_index = transition_array->Search(key);
4773 ASSERT(new_target_index != TransitionArray::kNotFound);
4774 ASSERT(transition_array->GetTarget(new_target_index) == target);
4775 }
4776 }
4777 #endif
4778 ASSERT(transitions() != transition_array);
4779 ZapTransitions();
4780 }
4782 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
4783 CONDITIONAL_WRITE_BARRIER(
4784 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
4785 }
4788 void Map::init_back_pointer(Object* undefined) {
4789 ASSERT(undefined->IsUndefined());
4790 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
4794 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
4795 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
4796 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
4797 (value->IsMap() && GetBackPointer()->IsUndefined()));
4798 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4799 if (object->IsTransitionArray()) {
4800 TransitionArray::cast(object)->set_back_pointer_storage(value);
4801 } else {
4802 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
4803 CONDITIONAL_WRITE_BARRIER(
4804 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
4805 }
4806 }
4809 // Can either be Smi (no transitions), normal transition array, or a transition
4810 // array with the header overwritten as a Smi (thus iterating).
4811 TransitionArray* Map::unchecked_transition_array() {
4812 Object* object = *HeapObject::RawField(this,
4813 Map::kTransitionsOrBackPointerOffset);
4814 TransitionArray* transition_array = static_cast<TransitionArray*>(object);
4815 return transition_array;
4819 HeapObject* Map::UncheckedPrototypeTransitions() {
4820 ASSERT(HasTransitionArray());
4821 ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
4822 return unchecked_transition_array()->UncheckedPrototypeTransitions();
4826 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
4827 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
4828 ACCESSORS(Map, constructor, Object, kConstructorOffset)
4830 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
4831 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
4832 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
4834 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
4835 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
4836 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
4837 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
4839 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
4841 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
4842 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
4843 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
4844 kExpectedReceiverTypeOffset)
4846 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
4847 kSerializedDataOffset)
4849 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
4852 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
4853 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
4854 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
4856 ACCESSORS(Box, value, Object, kValueOffset)
4858 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
4859 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
4860 ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)
4862 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
4863 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
4864 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
4866 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
4867 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
4868 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
4869 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
4870 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
4871 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
4873 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
4874 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
4876 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
4877 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
4878 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
4880 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
4881 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
4882 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
4883 kPrototypeTemplateOffset)
4884 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
4885 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
4886 kNamedPropertyHandlerOffset)
4887 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
4888 kIndexedPropertyHandlerOffset)
4889 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
4890 kInstanceTemplateOffset)
4891 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
4892 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
4893 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
4894 kInstanceCallHandlerOffset)
4895 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
4896 kAccessCheckInfoOffset)
4897 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
4899 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
4900 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
4901 kInternalFieldCountOffset)
4903 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
4904 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
4906 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
4908 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
4909 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
4910 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
4911 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
4912 kPretenureCreateCountOffset)
4913 ACCESSORS(AllocationSite, dependent_code, DependentCode,
4914 kDependentCodeOffset)
4915 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
4916 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
4918 ACCESSORS(Script, source, Object, kSourceOffset)
4919 ACCESSORS(Script, name, Object, kNameOffset)
4920 ACCESSORS(Script, id, Smi, kIdOffset)
4921 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
4922 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
4923 ACCESSORS(Script, data, Object, kDataOffset)
4924 ACCESSORS(Script, context_data, Object, kContextOffset)
4925 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
4926 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
4927 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
4928 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
4929 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
4930 kEvalFrominstructionsOffsetOffset)
4931 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
4932 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
4934 Script::CompilationType Script::compilation_type() {
4935 return BooleanBit::get(flags(), kCompilationTypeBit) ?
4936 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
4937 }
4938 void Script::set_compilation_type(CompilationType type) {
4939 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
4940 type == COMPILATION_TYPE_EVAL));
4941 }
4942 Script::CompilationState Script::compilation_state() {
4943 return BooleanBit::get(flags(), kCompilationStateBit) ?
4944 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
4945 }
4946 void Script::set_compilation_state(CompilationState state) {
4947 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
4948 state == COMPILATION_STATE_COMPILED));
4949 }
4952 #ifdef ENABLE_DEBUGGER_SUPPORT
4953 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
4954 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
4955 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
4956 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
4958 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
4959 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
4960 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
4961 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
4962 #endif
4964 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
4965 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
4966 kOptimizedCodeMapOffset)
4967 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
4968 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
4969 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
4970 kInstanceClassNameOffset)
4971 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
4972 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
4973 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
4974 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
4975 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
4978 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
4979 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
4980 kHiddenPrototypeBit)
4981 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
4982 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
4983 kNeedsAccessCheckBit)
4984 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
4985 kReadOnlyPrototypeBit)
4986 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
4987 kRemovePrototypeBit)
4988 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
4989 kDoNotCacheBit)
4990 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
4991 kIsExpressionBit)
4992 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
4993 kIsTopLevelBit)
4995 BOOL_ACCESSORS(SharedFunctionInfo,
4996 compiler_hints,
4997 allows_lazy_compilation,
4998 kAllowLazyCompilation)
4999 BOOL_ACCESSORS(SharedFunctionInfo,
5000 compiler_hints,
5001 allows_lazy_compilation_without_context,
5002 kAllowLazyCompilationWithoutContext)
5003 BOOL_ACCESSORS(SharedFunctionInfo,
5004 compiler_hints,
5005 uses_arguments,
5006 kUsesArguments)
5007 BOOL_ACCESSORS(SharedFunctionInfo,
5008 compiler_hints,
5009 has_duplicate_parameters,
5010 kHasDuplicateParameters)
5013 #if V8_HOST_ARCH_32_BIT
5014 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5015 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5016 kFormalParameterCountOffset)
5017 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5018 kExpectedNofPropertiesOffset)
5019 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5020 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5021 kStartPositionAndTypeOffset)
5022 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5023 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5024 kFunctionTokenPositionOffset)
5025 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5026 kCompilerHintsOffset)
5027 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5028 kOptCountAndBailoutReasonOffset)
5029 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5033 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5034 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5035 int holder::name() { \
5036 int value = READ_INT_FIELD(this, offset); \
5037 ASSERT(kHeapObjectTag == 1); \
5038 ASSERT((value & kHeapObjectTag) == 0); \
5039 return value >> 1; \
5040 } \
5041 void holder::set_##name(int value) { \
5042 ASSERT(kHeapObjectTag == 1); \
5043 ASSERT((value & 0xC0000000) == 0xC0000000 || \
5044 (value & 0xC0000000) == 0x00000000); \
5045 WRITE_INT_FIELD(this, \
5046 offset, \
5047 (value << 1) & ~kHeapObjectTag); \
5048 }
5050 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5051 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5052 INT_ACCESSORS(holder, name, offset)
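// A hedged sketch of the pseudo-Smi encoding above: on 32-bit targets each
// pointer-size slot must look like a Smi to the GC, so the LO accessor
// stores `value << 1` with the low (kHeapObjectTag) bit clear and decodes
// with an arithmetic shift; the HI int occupies the other half of the
// 64-bit pair and needs no tagging:
//
//   static inline int EncodePseudoSmiLo(int value) {
//     return (value << 1) & ~1;  // tag bit clear == looks like a Smi
//   }
//   static inline int DecodePseudoSmiLo(int stored) { return stored >> 1; }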
5055 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5056 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5057 formal_parameter_count,
5058 kFormalParameterCountOffset)
5060 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5061 expected_nof_properties,
5062 kExpectedNofPropertiesOffset)
5063 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5065 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5066 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5067 start_position_and_type,
5068 kStartPositionAndTypeOffset)
5070 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5071 function_token_position,
5072 kFunctionTokenPositionOffset)
5073 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5074 compiler_hints,
5075 kCompilerHintsOffset)
5077 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5078 opt_count_and_bailout_reason,
5079 kOptCountAndBailoutReasonOffset)
5081 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5086 int SharedFunctionInfo::construction_count() {
5087 return READ_BYTE_FIELD(this, kConstructionCountOffset);
5091 void SharedFunctionInfo::set_construction_count(int value) {
5092 ASSERT(0 <= value && value < 256);
5093 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
5097 BOOL_ACCESSORS(SharedFunctionInfo,
5098 compiler_hints,
5099 live_objects_may_exist,
5100 kLiveObjectsMayExist)
5103 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
5104 return initial_map() != GetHeap()->undefined_value();
5108 BOOL_GETTER(SharedFunctionInfo,
5109 compiler_hints,
5110 optimization_disabled,
5111 kOptimizationDisabled)
5114 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5115 set_compiler_hints(BooleanBit::set(compiler_hints(),
5116 kOptimizationDisabled,
5117 disable));
5118 // If disabling optimizations we reflect that in the code object so
5119 // it will not be counted as optimizable code.
5120 if ((code()->kind() == Code::FUNCTION) && disable) {
5121 code()->set_optimizable(false);
5122 }
5123 }
5126 int SharedFunctionInfo::profiler_ticks() {
5127 if (code()->kind() != Code::FUNCTION) return 0;
5128 return code()->profiler_ticks();
5132 LanguageMode SharedFunctionInfo::language_mode() {
5133 int hints = compiler_hints();
5134 if (BooleanBit::get(hints, kExtendedModeFunction)) {
5135 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
5136 return EXTENDED_MODE;
5138 return BooleanBit::get(hints, kStrictModeFunction)
5139 ? STRICT_MODE : CLASSIC_MODE;
5143 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5144 // We only allow language mode transitions that set the same language mode
5145 // again or go up in the chain:
5146 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
5147 ASSERT(this->language_mode() == CLASSIC_MODE ||
5148 this->language_mode() == language_mode ||
5149 language_mode == EXTENDED_MODE);
5150 int hints = compiler_hints();
5151 hints = BooleanBit::set(
5152 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
5153 hints = BooleanBit::set(
5154 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
5155 set_compiler_hints(hints);
5156 }
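// Allowed transitions per the ASSERT above, e.g. (illustrative):
//
//   shared->set_language_mode(STRICT_MODE);    // CLASSIC -> STRICT: ok
//   shared->set_language_mode(EXTENDED_MODE);  // STRICT -> EXTENDED: ok
//   // Lowering (e.g. STRICT -> CLASSIC) would trip the ASSERT.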
5159 bool SharedFunctionInfo::is_classic_mode() {
5160 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
5163 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
5164 kExtendedModeFunction)
5165 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5166 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5167 kInlineBuiltin)
5168 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5169 name_should_print_as_anonymous,
5170 kNameShouldPrintAsAnonymous)
5171 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5172 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5173 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5174 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
5175 kDontOptimize)
5176 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
5177 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5178 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5179 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5181 void SharedFunctionInfo::BeforeVisitingPointers() {
5182 if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
5186 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5187 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5189 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5191 bool Script::HasValidSource() {
5192 Object* src = this->source();
5193 if (!src->IsString()) return true;
5194 String* src_str = String::cast(src);
5195 if (!StringShape(src_str).IsExternal()) return true;
5196 if (src_str->IsOneByteRepresentation()) {
5197 return ExternalAsciiString::cast(src)->resource() != NULL;
5198 } else if (src_str->IsTwoByteRepresentation()) {
5199 return ExternalTwoByteString::cast(src)->resource() != NULL;
5200 }
5201 return true;
5202 }
5205 void SharedFunctionInfo::DontAdaptArguments() {
5206 ASSERT(code()->kind() == Code::BUILTIN);
5207 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
5211 int SharedFunctionInfo::start_position() {
5212 return start_position_and_type() >> kStartPositionShift;
5216 void SharedFunctionInfo::set_start_position(int start_position) {
5217 set_start_position_and_type((start_position << kStartPositionShift)
5218 | (start_position_and_type() & ~kStartPositionMask));
5222 Code* SharedFunctionInfo::code() {
5223 return Code::cast(READ_FIELD(this, kCodeOffset));
5227 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5228 ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
5229 WRITE_FIELD(this, kCodeOffset, value);
5230 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5234 void SharedFunctionInfo::ReplaceCode(Code* value) {
5235 // If the GC metadata field is already used then the function was
5236 // enqueued as a code flushing candidate and we remove it now.
5237 if (code()->gc_metadata() != NULL) {
5238 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5239 flusher->EvictCandidate(this);
5240 }
5242 ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5243 set_code(value);
5244 }
5247 ScopeInfo* SharedFunctionInfo::scope_info() {
5248 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5252 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5253 WriteBarrierMode mode) {
5254 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5255 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5256 this,
5257 kScopeInfoOffset,
5258 reinterpret_cast<Object*>(value),
5259 mode);
5260 }
5263 bool SharedFunctionInfo::is_compiled() {
5264 return code() !=
5265 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5266 }
5269 bool SharedFunctionInfo::IsApiFunction() {
5270 return function_data()->IsFunctionTemplateInfo();
5274 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5275 ASSERT(IsApiFunction());
5276 return FunctionTemplateInfo::cast(function_data());
5280 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5281 return function_data()->IsSmi();
5285 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5286 ASSERT(HasBuiltinFunctionId());
5287 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5291 int SharedFunctionInfo::ic_age() {
5292 return ICAgeBits::decode(counters());
5296 void SharedFunctionInfo::set_ic_age(int ic_age) {
5297 set_counters(ICAgeBits::update(counters(), ic_age));
5301 int SharedFunctionInfo::deopt_count() {
5302 return DeoptCountBits::decode(counters());
5306 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5307 set_counters(DeoptCountBits::update(counters(), deopt_count));
5311 void SharedFunctionInfo::increment_deopt_count() {
5312 int value = counters();
5313 int deopt_count = DeoptCountBits::decode(value);
5314 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5315 set_counters(DeoptCountBits::update(value, deopt_count));
5319 int SharedFunctionInfo::opt_reenable_tries() {
5320 return OptReenableTriesBits::decode(counters());
5324 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5325 set_counters(OptReenableTriesBits::update(counters(), tries));
5329 int SharedFunctionInfo::opt_count() {
5330 return OptCountBits::decode(opt_count_and_bailout_reason());
5334 void SharedFunctionInfo::set_opt_count(int opt_count) {
5335 set_opt_count_and_bailout_reason(
5336 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
5340 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
5341 BailoutReason reason = static_cast<BailoutReason>(
5342 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5343 return reason;
5344 }
5347 bool SharedFunctionInfo::has_deoptimization_support() {
5348 Code* code = this->code();
5349 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
5353 void SharedFunctionInfo::TryReenableOptimization() {
5354 int tries = opt_reenable_tries();
5355 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
5356 // We reenable optimization whenever the number of tries is a large
5357 // enough power of 2.
5358 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5359 set_optimization_disabled(false);
5360 set_opt_count(0);
5361 set_deopt_count(0);
5362 code()->set_optimizable(true);
5363 }
5364 }
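// The reenable test fires exactly on power-of-two try counts >= 16: such a
// value has a single set bit, so `(tries - 1) & tries` is zero. Hedged
// standalone equivalent:
//
//   static inline bool ShouldReenable(int tries) {
//     return tries >= 16 && ((tries - 1) & tries) == 0;  // 16, 32, 64, ...
//   }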
5367 bool JSFunction::IsBuiltin() {
5368 return context()->global_object()->IsJSBuiltinsObject();
5372 bool JSFunction::NeedsArgumentsAdaption() {
5373 return shared()->formal_parameter_count() !=
5374 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5378 bool JSFunction::IsOptimized() {
5379 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5383 bool JSFunction::IsOptimizable() {
5384 return code()->kind() == Code::FUNCTION && code()->optimizable();
5388 bool JSFunction::IsMarkedForOptimization() {
5389 return code() == GetIsolate()->builtins()->builtin(
5390 Builtins::kCompileOptimized);
5394 bool JSFunction::IsMarkedForConcurrentOptimization() {
5395 return code() == GetIsolate()->builtins()->builtin(
5396 Builtins::kCompileOptimizedConcurrent);
5400 bool JSFunction::IsInOptimizationQueue() {
5401 return code() == GetIsolate()->builtins()->builtin(
5402 Builtins::kInOptimizationQueue);
5406 Code* JSFunction::code() {
5407 return Code::cast(
5408 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
5409 }
5412 void JSFunction::set_code(Code* value) {
5413 ASSERT(!GetHeap()->InNewSpace(value));
5414 Address entry = value->entry();
5415 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5416 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5417 this,
5418 HeapObject::RawField(this, kCodeEntryOffset),
5419 value);
5420 }
5423 void JSFunction::set_code_no_write_barrier(Code* value) {
5424 ASSERT(!GetHeap()->InNewSpace(value));
5425 Address entry = value->entry();
5426 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5430 void JSFunction::ReplaceCode(Code* code) {
5431 bool was_optimized = IsOptimized();
5432 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5434 if (was_optimized && is_optimized) {
5435 shared()->EvictFromOptimizedCodeMap(
5436 this->code(), "Replacing with another optimized code");
5437 }
5439 set_code(code);
5441 // Add/remove the function from the list of optimized functions for this
5442 // context based on the state change.
5443 if (!was_optimized && is_optimized) {
5444 context()->native_context()->AddOptimizedFunction(this);
5445 }
5446 if (was_optimized && !is_optimized) {
5447 // TODO(titzer): linear in the number of optimized functions; fix!
5448 context()->native_context()->RemoveOptimizedFunction(this);
5449 }
5450 }
5453 Context* JSFunction::context() {
5454 return Context::cast(READ_FIELD(this, kContextOffset));
5458 void JSFunction::set_context(Object* value) {
5459 ASSERT(value->IsUndefined() || value->IsContext());
5460 WRITE_FIELD(this, kContextOffset, value);
5461 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5464 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5465 kPrototypeOrInitialMapOffset)
5468 Map* JSFunction::initial_map() {
5469 return Map::cast(prototype_or_initial_map());
5473 void JSFunction::set_initial_map(Map* value) {
5474 set_prototype_or_initial_map(value);
5478 bool JSFunction::has_initial_map() {
5479 return prototype_or_initial_map()->IsMap();
5483 bool JSFunction::has_instance_prototype() {
5484 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5488 bool JSFunction::has_prototype() {
5489 return map()->has_non_instance_prototype() || has_instance_prototype();
5493 Object* JSFunction::instance_prototype() {
5494 ASSERT(has_instance_prototype());
5495 if (has_initial_map()) return initial_map()->prototype();
5496 // When there is no initial map and the prototype is a JSObject, the
5497 // initial map field is used for the prototype field.
5498 return prototype_or_initial_map();
5502 Object* JSFunction::prototype() {
5503 ASSERT(has_prototype());
5504 // If the function's prototype property has been set to a non-JSObject
5505 // value, that value is stored in the constructor field of the map.
5506 if (map()->has_non_instance_prototype()) return map()->constructor();
5507 return instance_prototype();
5511 bool JSFunction::should_have_prototype() {
5512 return map()->function_with_prototype();
5516 bool JSFunction::is_compiled() {
5517 return code() !=
5518 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5519 }
5522 FixedArray* JSFunction::literals() {
5523 ASSERT(!shared()->bound());
5524 return literals_or_bindings();
5528 void JSFunction::set_literals(FixedArray* literals) {
5529 ASSERT(!shared()->bound());
5530 set_literals_or_bindings(literals);
5534 FixedArray* JSFunction::function_bindings() {
5535 ASSERT(shared()->bound());
5536 return literals_or_bindings();
5540 void JSFunction::set_function_bindings(FixedArray* bindings) {
5541 ASSERT(shared()->bound());
5542 // Bound function literal may be initialized to the empty fixed array
5543 // before the bindings are set.
5544 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
5545 bindings->map() == GetHeap()->fixed_cow_array_map());
5546 set_literals_or_bindings(bindings);
5550 int JSFunction::NumberOfLiterals() {
5551 ASSERT(!shared()->bound());
5552 return literals()->length();
5556 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
5557 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5558 return READ_FIELD(this, OffsetOfFunctionWithId(id));
5562 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
5563 Object* value) {
5564 ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
5565 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
5566 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
5567 }
5570 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
5571 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5572 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
5576 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
5577 Code* value) {
5578 ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
5579 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
5580 ASSERT(!GetHeap()->InNewSpace(value));
5581 }
5584 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
5585 ACCESSORS(JSProxy, hash, Object, kHashOffset)
5586 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
5587 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
5590 void JSProxy::InitializeBody(int object_size, Object* value) {
5591 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5592 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
5593 WRITE_FIELD(this, offset, value);
5598 ACCESSORS(JSSet, table, Object, kTableOffset)
5599 ACCESSORS(JSMap, table, Object, kTableOffset)
5600 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
5601 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
5604 Address Foreign::foreign_address() {
5605 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
5609 void Foreign::set_foreign_address(Address value) {
5610 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
5614 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
5615 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
5616 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
5617 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
5618 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
5619 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
5622 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
5623 ASSERT(obj->IsJSGeneratorObject());
5624 ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
5625 return reinterpret_cast<JSGeneratorObject*>(obj);
5629 ACCESSORS(JSModule, context, Object, kContextOffset)
5630 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
5633 JSModule* JSModule::cast(Object* obj) {
5634 ASSERT(obj->IsJSModule());
5635 ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
5636 return reinterpret_cast<JSModule*>(obj);
5640 ACCESSORS(JSValue, value, Object, kValueOffset)
5643 JSValue* JSValue::cast(Object* obj) {
5644 ASSERT(obj->IsJSValue());
5645 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
5646 return reinterpret_cast<JSValue*>(obj);
5650 ACCESSORS(JSDate, value, Object, kValueOffset)
5651 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
5652 ACCESSORS(JSDate, year, Object, kYearOffset)
5653 ACCESSORS(JSDate, month, Object, kMonthOffset)
5654 ACCESSORS(JSDate, day, Object, kDayOffset)
5655 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
5656 ACCESSORS(JSDate, hour, Object, kHourOffset)
5657 ACCESSORS(JSDate, min, Object, kMinOffset)
5658 ACCESSORS(JSDate, sec, Object, kSecOffset)
5661 JSDate* JSDate::cast(Object* obj) {
5662 ASSERT(obj->IsJSDate());
5663 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
5664 return reinterpret_cast<JSDate*>(obj);
5668 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
5669 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
5670 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
5671 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
5672 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
5673 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
5674 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
5677 JSMessageObject* JSMessageObject::cast(Object* obj) {
5678 ASSERT(obj->IsJSMessageObject());
5679 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
5680 return reinterpret_cast<JSMessageObject*>(obj);
5684 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
5685 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
5686 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
5687 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
5688 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
5689 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
5692 void Code::WipeOutHeader() {
5693 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
5694 WRITE_FIELD(this, kHandlerTableOffset, NULL);
5695 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
5696 // Do not wipe out e.g. a minor key.
5697 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
5698 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
5703 Object* Code::type_feedback_info() {
5704 ASSERT(kind() == FUNCTION);
5705 return raw_type_feedback_info();
5709 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
5710 ASSERT(kind() == FUNCTION);
5711 set_raw_type_feedback_info(value, mode);
5712 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5713 value, mode);
5714 }
5717 Object* Code::next_code_link() {
5718 CHECK(kind() == OPTIMIZED_FUNCTION);
5719 return raw_type_feedback_info();
5723 void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
5724 CHECK(kind() == OPTIMIZED_FUNCTION);
5725 set_raw_type_feedback_info(value);
5726 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5727 value, mode);
5728 }
5731 int Code::stub_info() {
5732 ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
5733 kind() == BINARY_OP_IC || kind() == LOAD_IC);
5734 return Smi::cast(raw_type_feedback_info())->value();
5738 void Code::set_stub_info(int value) {
5739 ASSERT(kind() == COMPARE_IC ||
5740 kind() == COMPARE_NIL_IC ||
5741 kind() == BINARY_OP_IC ||
5742 kind() == STUB ||
5743 kind() == LOAD_IC ||
5744 kind() == KEYED_LOAD_IC ||
5745 kind() == STORE_IC ||
5746 kind() == KEYED_STORE_IC);
5747 set_raw_type_feedback_info(Smi::FromInt(value));
5748 }
5751 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
5752 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
5755 byte* Code::instruction_start() {
5756 return FIELD_ADDR(this, kHeaderSize);
5760 byte* Code::instruction_end() {
5761 return instruction_start() + instruction_size();
5765 int Code::body_size() {
5766 return RoundUp(instruction_size(), kObjectAlignment);
5770 ByteArray* Code::unchecked_relocation_info() {
5771 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
5775 byte* Code::relocation_start() {
5776 return unchecked_relocation_info()->GetDataStartAddress();
5780 int Code::relocation_size() {
5781 return unchecked_relocation_info()->length();
5785 byte* Code::entry() {
5786 return instruction_start();
5790 bool Code::contains(byte* inner_pointer) {
5791 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
5795 ACCESSORS(JSArray, length, Object, kLengthOffset)
5798 void* JSArrayBuffer::backing_store() {
5799 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
5800 return reinterpret_cast<void*>(ptr);
5804 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
5805 intptr_t ptr = reinterpret_cast<intptr_t>(value);
5806 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
5810 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
5811 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
5814 bool JSArrayBuffer::is_external() {
5815 return BooleanBit::get(flag(), kIsExternalBit);
5819 void JSArrayBuffer::set_is_external(bool value) {
5820 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
5824 bool JSArrayBuffer::should_be_freed() {
5825 return BooleanBit::get(flag(), kShouldBeFreed);
5829 void JSArrayBuffer::set_should_be_freed(bool value) {
5830 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
5834 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
5835 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
5838 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
5839 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
5840 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
5841 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
5842 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
5844 ACCESSORS(JSRegExp, data, Object, kDataOffset)
5847 JSRegExp::Type JSRegExp::TypeTag() {
5848 Object* data = this->data();
5849 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
5850 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
5851 return static_cast<JSRegExp::Type>(smi->value());
5855 int JSRegExp::CaptureCount() {
5856 switch (TypeTag()) {
5857 case ATOM:
5858 return 0;
5859 case IRREGEXP:
5860 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
5861 default:
5862 UNREACHABLE();
5863 return -1;
5864 }
5865 }
5868 JSRegExp::Flags JSRegExp::GetFlags() {
5869 ASSERT(this->data()->IsFixedArray());
5870 Object* data = this->data();
5871 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
5872 return Flags(smi->value());
5876 String* JSRegExp::Pattern() {
5877 ASSERT(this->data()->IsFixedArray());
5878 Object* data = this->data();
5879 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
5880 return pattern;
5881 }
5884 Object* JSRegExp::DataAt(int index) {
5885 ASSERT(TypeTag() != NOT_COMPILED);
5886 return FixedArray::cast(data())->get(index);
5890 void JSRegExp::SetDataAt(int index, Object* value) {
5891 ASSERT(TypeTag() != NOT_COMPILED);
5892 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
5893 FixedArray::cast(data())->set(index, value);
5897 ElementsKind JSObject::GetElementsKind() {
5898 ElementsKind kind = map()->elements_kind();
5899 #if DEBUG
5900 FixedArrayBase* fixed_array =
5901 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
5903 // If a GC was caused while constructing this object, the elements
5904 // pointer may point to a one pointer filler map.
5905 if (ElementsAreSafeToExamine()) {
5906 Map* map = fixed_array->map();
5907 ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
5908 (map == GetHeap()->fixed_array_map() ||
5909 map == GetHeap()->fixed_cow_array_map())) ||
5910 (IsFastDoubleElementsKind(kind) &&
5911 (fixed_array->IsFixedDoubleArray() ||
5912 fixed_array == GetHeap()->empty_fixed_array())) ||
5913 (kind == DICTIONARY_ELEMENTS &&
5914 fixed_array->IsFixedArray() &&
5915 fixed_array->IsDictionary()) ||
5916 (kind > DICTIONARY_ELEMENTS));
5917 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
5918 (elements()->IsFixedArray() && elements()->length() >= 2));
5919 }
5920 #endif
5921 return kind;
5922 }
5925 ElementsAccessor* JSObject::GetElementsAccessor() {
5926 return ElementsAccessor::ForKind(GetElementsKind());
5930 bool JSObject::HasFastObjectElements() {
5931 return IsFastObjectElementsKind(GetElementsKind());
5935 bool JSObject::HasFastSmiElements() {
5936 return IsFastSmiElementsKind(GetElementsKind());
5940 bool JSObject::HasFastSmiOrObjectElements() {
5941 return IsFastSmiOrObjectElementsKind(GetElementsKind());
5945 bool JSObject::HasFastDoubleElements() {
5946 return IsFastDoubleElementsKind(GetElementsKind());
5950 bool JSObject::HasFastHoleyElements() {
5951 return IsFastHoleyElementsKind(GetElementsKind());
5955 bool JSObject::HasFastElements() {
5956 return IsFastElementsKind(GetElementsKind());
5960 bool JSObject::HasDictionaryElements() {
5961 return GetElementsKind() == DICTIONARY_ELEMENTS;
5965 bool JSObject::HasNonStrictArgumentsElements() {
5966 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
5970 bool JSObject::HasExternalArrayElements() {
5971 HeapObject* array = elements();
5972 ASSERT(array != NULL);
5973 return array->IsExternalArray();
5977 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
5978 bool JSObject::HasExternal##Type##Elements() { \
5979 HeapObject* array = elements(); \
5980 ASSERT(array != NULL); \
5981 if (!array->IsHeapObject()) \
5982 return false; \
5983 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
5984 }
5986 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
5988 #undef EXTERNAL_ELEMENTS_CHECK
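// For reference, a hedged expansion of the macro above for one entry of
// TYPED_ARRAYS (the Uint8 instantiation; the exact list lives in objects.h):
//
//   bool JSObject::HasExternalUint8Elements() {
//     HeapObject* array = elements();
//     ASSERT(array != NULL);
//     if (!array->IsHeapObject()) return false;
//     return array->map()->instance_type() == EXTERNAL_UINT8_ARRAY_TYPE;
//   }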
5991 bool JSObject::HasFixedTypedArrayElements() {
5992 HeapObject* array = elements();
5993 ASSERT(array != NULL);
5994 return array->IsFixedTypedArrayBase();
5998 bool JSObject::HasNamedInterceptor() {
5999 return map()->has_named_interceptor();
6003 bool JSObject::HasIndexedInterceptor() {
6004 return map()->has_indexed_interceptor();
6008 MaybeObject* JSObject::EnsureWritableFastElements() {
6009 ASSERT(HasFastSmiOrObjectElements());
6010 FixedArray* elems = FixedArray::cast(elements());
6011 Isolate* isolate = GetIsolate();
6012 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
6013 Object* writable_elems;
6014 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
6015 elems, isolate->heap()->fixed_array_map());
6016 if (!maybe_writable_elems->ToObject(&writable_elems)) {
6017 return maybe_writable_elems;
6020 set_elements(FixedArray::cast(writable_elems));
6021 isolate->counters()->cow_arrays_converted()->Increment();
6022 return writable_elems;
6026 NameDictionary* JSObject::property_dictionary() {
6027 ASSERT(!HasFastProperties());
6028 return NameDictionary::cast(properties());
6032 SeededNumberDictionary* JSObject::element_dictionary() {
6033 ASSERT(HasDictionaryElements());
6034 return SeededNumberDictionary::cast(elements());
6038 bool Name::IsHashFieldComputed(uint32_t field) {
6039 return (field & kHashNotComputedMask) == 0;
6043 bool Name::HasHashCode() {
6044 return IsHashFieldComputed(hash_field());
6048 uint32_t Name::Hash() {
6049 // Fast case: has hash code already been computed?
6050 uint32_t field = hash_field();
6051 if (IsHashFieldComputed(field)) return field >> kHashShift;
6052 // Slow case: compute hash code and set it. Has to be a string.
6053 return String::cast(this)->ComputeAndSetHash();
6057 StringHasher::StringHasher(int length, uint32_t seed)
6058 : length_(length),
6059 raw_running_hash_(seed),
6060 array_index_(0),
6061 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6062 is_first_char_(true) {
6063 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
6064 }
6067 bool StringHasher::has_trivial_hash() {
6068 return length_ > String::kMaxHashCalcLength;


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
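

// AddCharacterCore and GetHashCore together implement the classic Jenkins
// one-at-a-time hash: per-character mixing followed by a finalization
// avalanche. A self-contained sketch of the same pipeline, minus the
// V8-specific substitution for an all-zero hash:
//
//   uint32_t OneAtATime(const uint8_t* data, size_t n, uint32_t seed) {
//     uint32_t h = seed;
//     for (size_t i = 0; i < n; i++) {
//       h += data[i];
//       h += h << 10;
//       h ^= h >> 6;
//     }
//     h += h << 3;
//     h ^= h >> 11;
//     h += h << 15;
//     return h;
//   }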


bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
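

// The guard above keeps array_index_ * 10 + d from wrapping around uint32:
// 0xFFFFFFFF / 10 == 429496729, and ((d + 2) >> 3) is 0 for digits 0..5 and
// 1 for digits 6..9, tightening the bound by one exactly when the new digit
// would push the result past 0xFFFFFFFF:
//
//   429496729 * 10 + 5 == 4294967295 == 0xFFFFFFFF  (still representable)
//   429496729 * 10 + 6 == 4294967296                (would wrap)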


template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
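

// Illustrative use of the sequential hasher (the seed normally comes from
// the heap's hash seed; the constant 0 here is only for the sketch):
//
//   static const uint8_t kDigits[] = { '1', '2', '3' };
//   uint32_t field = StringHasher::HashSequentialString(kDigits, 3, 0);
//
// Every character of "123" passes UpdateIndex, so the returned hash field
// encodes the array index 123 rather than a computed hash value.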


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}


Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}


bool JSReceiver::HasProperty(Handle<JSReceiver> object,
                             Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetPropertyAttribute(*name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
                                  Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetLocalPropertyAttribute(*name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
  uint32_t index;
  if (IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(index);
  }
  return GetPropertyAttributeWithReceiver(this, key);
}


PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, true);
}


bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
  return GetPrototype() != global;
}


Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}


bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, true) != ABSENT;
}


bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, false) != ABSENT;
}


PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, false);
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
}


void AccessorPair::set_access_flags(v8::AccessControl access_control) {
  int current = access_flags()->value();
  current = BooleanBit::set(current,
                            kProhibitsOverwritingBit,
                            access_control & PROHIBITS_OVERWRITING);
  current = BooleanBit::set(current,
                            kAllCanReadBit,
                            access_control & ALL_CAN_READ);
  current = BooleanBit::set(current,
                            kAllCanWriteBit,
                            access_control & ALL_CAN_WRITE);
  set_access_flags(Smi::FromInt(current));
}


bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}


bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}


bool AccessorPair::prohibits_overwriting() {
  return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index + 1, value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}
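

// Layout note for the three stores above: a dictionary entry occupies three
// consecutive FixedArray slots starting at EntryToIndex(entry), holding the
// key, the value, and the packed PropertyDetails Smi. Schematically (the
// elements start offset is defined by HashTable):
//
//   index = EntryToIndex(entry);   // == elements start + entry * 3
//   get(index)       // key
//   get(index + 1)   // value
//   get(index + 2)   // details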


bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
  return heap->NumberFromUint32(key);
}


bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to fail fast on a hash mismatch.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Name* key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
  return Name::cast(other)->Hash();
}


MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
  ASSERT(key->IsUniqueName());
  return key;
}


template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  return Smi::cast(key->GetHash())->value();
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  return Smi::cast(other->GetHash())->value();
}


template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
                                                       Object* key) {
  return key;
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
  intptr_t hash = reinterpret_cast<intptr_t>(key);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}


template <int entrysize>
MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap,
                                                     Object* key) {
  return key;
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
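

// Growth arithmetic for the first branch above: the new capacity is the
// requested size plus one eighth (required_size + (required_size >> 3)),
// i.e. roughly 12.5% headroom rather than doubling. For example:
//
//   required_size == 64    ->  Expand(64 + 8)      // 72 slots
//   required_size == 1000  ->  Expand(1000 + 125)  // 1125 slots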


void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(GetElementsKind())) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(GetElementsKind()) ||
           (IsFastSmiElementsKind(GetElementsKind()) &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}


MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}


MaybeObject* ConstantPoolArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyConstantPoolArray(this);
}


void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
  set(1 + index * 2, Smi::FromInt(id.ToInt()));
}


TypeFeedbackId TypeFeedbackCells::AstId(int index) {
  return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}


void TypeFeedbackCells::SetCell(int index, Cell* cell) {
  set(index * 2, cell);
}


Cell* TypeFeedbackCells::GetCell(int index) {
  return Cell::cast(get(index * 2));
}
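

// The four accessors above imply an interleaved layout for this array:
// the cell for feedback slot i lives at array index 2 * i and its AST id
// at 2 * i + 1:
//
//   get(0): Cell #0    get(1): AstId #0
//   get(2): Cell #1    get(3): AstId #1
//   ...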


Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}


Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}


Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(
    Isolate* isolate, ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}


Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->the_hole_value();
}


int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // The count can go negative here when the type feedback info is shared
  // between two code objects. That only happens when the debugger has made
  // a shallow copy of a code object (see Heap::CopyCode). Since we do not
  // optimize while the debugger is active, we can simply skip this counter
  // update in that case.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
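

// The two fix-ups above sign-extend bit 30 into bit 31: an int holds a valid
// 31-bit Smi payload exactly when its sign bit agrees with bit 30, and
// OR-ing in kMinValue (or masking it off) restores that agreement without
// touching bits 0..30, so every decoded bit field keeps its value. E.g. with
// a 31-bit Smi (kMaxValue == 0x3FFFFFFF, kMinValue == -0x40000000):
//
//   value == 0x40000000 > Smi::kMaxValue  // bit 30 set, sign bit clear
//   value |= Smi::kMinValue;              // -> 0xC0000000, a valid Smi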


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
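

// Relocatable instances form an intrusive, strictly LIFO per-isolate stack:
// the constructor pushes, the destructor pops and asserts the ordering. The
// same shape in a stripped-down sketch (illustrative types, not V8's):
//
//   struct Frame {
//     explicit Frame(Frame** top) : top_(top), prev_(*top) { *top = this; }
//     ~Frame() { ASSERT(*top_ == this); *top_ = prev_; }
//     Frame** top_;
//     Frame* prev_;
//   };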


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj, ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}


#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_