// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
47 #include "store-buffer.h"
50 #include "incremental-marking.h"
51 #include "transitions-inl.h"
52 #include "objects-visiting.h"
57 PropertyDetails::PropertyDetails(Smi* smi) {
58 value_ = smi->value();
62 Smi* PropertyDetails::AsSmi() {
63 // Ensure the upper 2 bits have the same value by sign extending it. This is
64 // necessary to be able to use the 31st bit of the property details.
65 int value = value_ << 1;
66 return Smi::FromInt(value >> 1);
70 PropertyDetails PropertyDetails::AsDeleted() {
71 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
72 return PropertyDetails(smi);
// Generates Object::Is##type() by comparing the instance type of the map
// against a single expected instance type.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


// Checked downcast; the check only runs in slow (debug) mode.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }


#define FIXED_TYPED_ARRAY_CAST_ACCESSOR(type)   \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }


// Raw (untagged) int field getter/setter pair.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// Tagged-pointer field getter/setter pair; the setter emits a conditional
// write barrier for the stored value.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// Smis never need a write barrier.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


// Reads a single boolean bit out of another accessor's value.
#define BOOL_GETTER(holder, field, name, offset)        \
  bool holder::name() {                                 \
    return BooleanBit::get(field(), offset);            \
  }


// Read/write a single boolean bit stored in another accessor's value.
#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
144 bool Object::IsFixedArrayBase() {
145 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
146 IsFixedTypedArrayBase() || IsExternalArray();
150 // External objects are not extensible, so the map check is enough.
151 bool Object::IsExternal() {
152 return Object::IsHeapObject() &&
153 HeapObject::cast(this)->map() ==
154 HeapObject::cast(this)->GetHeap()->external_map();
158 bool Object::IsAccessorInfo() {
159 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
163 bool Object::IsSmi() {
164 return HAS_SMI_TAG(this);
168 bool Object::IsHeapObject() {
169 return Internals::HasHeapObjectTag(this);
173 bool Object::NonFailureIsHeapObject() {
174 ASSERT(!this->IsFailure());
175 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
179 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
180 TYPE_CHECKER(Float32x4, FLOAT32x4_TYPE)
181 TYPE_CHECKER(Int32x4, INT32x4_TYPE)
182 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
185 bool Object::IsString() {
186 return Object::IsHeapObject()
187 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
191 bool Object::IsName() {
192 return IsString() || IsSymbol();
196 bool Object::IsUniqueName() {
197 return IsInternalizedString() || IsSymbol();
201 bool Object::IsSpecObject() {
202 return Object::IsHeapObject()
203 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
207 bool Object::IsSpecFunction() {
208 if (!Object::IsHeapObject()) return false;
209 InstanceType type = HeapObject::cast(this)->map()->instance_type();
210 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
214 bool Object::IsInternalizedString() {
215 if (!this->IsHeapObject()) return false;
216 uint32_t type = HeapObject::cast(this)->map()->instance_type();
217 STATIC_ASSERT(kNotInternalizedTag != 0);
218 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
219 (kStringTag | kInternalizedTag);
223 bool Object::IsConsString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsCons();
229 bool Object::IsSlicedString() {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSliced();
235 bool Object::IsSeqString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential();
241 bool Object::IsSeqOneByteString() {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsSequential() &&
244 String::cast(this)->IsOneByteRepresentation();
248 bool Object::IsSeqTwoByteString() {
249 if (!IsString()) return false;
250 return StringShape(String::cast(this)).IsSequential() &&
251 String::cast(this)->IsTwoByteRepresentation();
255 bool Object::IsExternalString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal();
261 bool Object::IsExternalAsciiString() {
262 if (!IsString()) return false;
263 return StringShape(String::cast(this)).IsExternal() &&
264 String::cast(this)->IsOneByteRepresentation();
268 bool Object::IsExternalTwoByteString() {
269 if (!IsString()) return false;
270 return StringShape(String::cast(this)).IsExternal() &&
271 String::cast(this)->IsTwoByteRepresentation();
274 bool Object::HasValidElements() {
275 // Dictionary is covered under FixedArray.
276 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
277 IsFixedTypedArrayBase();
281 MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
282 Representation representation) {
283 if (FLAG_track_fields && representation.IsSmi() && IsUninitialized()) {
284 return Smi::FromInt(0);
286 if (!FLAG_track_double_fields) return this;
287 if (!representation.IsDouble()) return this;
288 if (IsUninitialized()) {
289 return heap->AllocateHeapNumber(0);
291 return heap->AllocateHeapNumber(Number());
295 StringShape::StringShape(String* str)
296 : type_(str->map()->instance_type()) {
298 ASSERT((type_ & kIsNotStringMask) == kStringTag);
302 StringShape::StringShape(Map* map)
303 : type_(map->instance_type()) {
305 ASSERT((type_ & kIsNotStringMask) == kStringTag);
309 StringShape::StringShape(InstanceType t)
310 : type_(static_cast<uint32_t>(t)) {
312 ASSERT((type_ & kIsNotStringMask) == kStringTag);
316 bool StringShape::IsInternalized() {
318 STATIC_ASSERT(kNotInternalizedTag != 0);
319 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
320 (kStringTag | kInternalizedTag);
324 bool String::IsOneByteRepresentation() {
325 uint32_t type = map()->instance_type();
326 return (type & kStringEncodingMask) == kOneByteStringTag;
330 bool String::IsTwoByteRepresentation() {
331 uint32_t type = map()->instance_type();
332 return (type & kStringEncodingMask) == kTwoByteStringTag;
336 bool String::IsOneByteRepresentationUnderneath() {
337 uint32_t type = map()->instance_type();
338 STATIC_ASSERT(kIsIndirectStringTag != 0);
339 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
341 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
342 case kOneByteStringTag:
344 case kTwoByteStringTag:
346 default: // Cons or sliced string. Need to go deeper.
347 return GetUnderlying()->IsOneByteRepresentation();
352 bool String::IsTwoByteRepresentationUnderneath() {
353 uint32_t type = map()->instance_type();
354 STATIC_ASSERT(kIsIndirectStringTag != 0);
355 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
357 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
358 case kOneByteStringTag:
360 case kTwoByteStringTag:
362 default: // Cons or sliced string. Need to go deeper.
363 return GetUnderlying()->IsTwoByteRepresentation();
368 bool String::HasOnlyOneByteChars() {
369 uint32_t type = map()->instance_type();
370 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
371 IsOneByteRepresentation();
375 bool StringShape::IsCons() {
376 return (type_ & kStringRepresentationMask) == kConsStringTag;
380 bool StringShape::IsSliced() {
381 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
385 bool StringShape::IsIndirect() {
386 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
390 bool StringShape::IsExternal() {
391 return (type_ & kStringRepresentationMask) == kExternalStringTag;
395 bool StringShape::IsSequential() {
396 return (type_ & kStringRepresentationMask) == kSeqStringTag;
400 StringRepresentationTag StringShape::representation_tag() {
401 uint32_t tag = (type_ & kStringRepresentationMask);
402 return static_cast<StringRepresentationTag>(tag);
406 uint32_t StringShape::encoding_tag() {
407 return type_ & kStringEncodingMask;
411 uint32_t StringShape::full_representation_tag() {
412 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
416 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
417 Internals::kFullStringRepresentationMask);
419 STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
420 Internals::kStringEncodingMask);
423 bool StringShape::IsSequentialAscii() {
424 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
428 bool StringShape::IsSequentialTwoByte() {
429 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
433 bool StringShape::IsExternalAscii() {
434 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
438 STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
439 Internals::kExternalAsciiRepresentationTag);
441 STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);
444 bool StringShape::IsExternalTwoByte() {
445 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
449 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
450 Internals::kExternalTwoByteRepresentationTag);
452 STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
454 uc32 FlatStringReader::Get(int index) {
455 ASSERT(0 <= index && index <= length_);
457 return static_cast<const byte*>(start_)[index];
459 return static_cast<const uc16*>(start_)[index];
464 template <typename Char>
465 class SequentialStringKey : public HashTableKey {
467 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
468 : string_(string), hash_field_(0), seed_(seed) { }
470 virtual uint32_t Hash() {
471 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
475 uint32_t result = hash_field_ >> String::kHashShift;
476 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
481 virtual uint32_t HashForObject(Object* other) {
482 return String::cast(other)->Hash();
485 Vector<const Char> string_;
486 uint32_t hash_field_;
491 class OneByteStringKey : public SequentialStringKey<uint8_t> {
493 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
494 : SequentialStringKey<uint8_t>(str, seed) { }
496 virtual bool IsMatch(Object* string) {
497 return String::cast(string)->IsOneByteEqualTo(string_);
500 virtual MaybeObject* AsObject(Heap* heap);
505 class SubStringKey : public HashTableKey {
507 SubStringKey(Handle<String> string, int from, int length)
508 : string_(string), from_(from), length_(length) {
509 if (string_->IsSlicedString()) {
510 string_ = Handle<String>(Unslice(*string_, &from_));
512 ASSERT(string_->IsSeqString() || string->IsExternalString());
515 virtual uint32_t Hash() {
516 ASSERT(length_ >= 0);
517 ASSERT(from_ + length_ <= string_->length());
518 const Char* chars = GetChars() + from_;
519 hash_field_ = StringHasher::HashSequentialString(
520 chars, length_, string_->GetHeap()->HashSeed());
521 uint32_t result = hash_field_ >> String::kHashShift;
522 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
526 virtual uint32_t HashForObject(Object* other) {
527 return String::cast(other)->Hash();
530 virtual bool IsMatch(Object* string);
531 virtual MaybeObject* AsObject(Heap* heap);
534 const Char* GetChars();
535 String* Unslice(String* string, int* offset) {
536 while (string->IsSlicedString()) {
537 SlicedString* sliced = SlicedString::cast(string);
538 *offset += sliced->offset();
539 string = sliced->parent();
544 Handle<String> string_;
547 uint32_t hash_field_;
551 class TwoByteStringKey : public SequentialStringKey<uc16> {
553 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
554 : SequentialStringKey<uc16>(str, seed) { }
556 virtual bool IsMatch(Object* string) {
557 return String::cast(string)->IsTwoByteEqualTo(string_);
560 virtual MaybeObject* AsObject(Heap* heap);
564 // Utf8StringKey carries a vector of chars as key.
565 class Utf8StringKey : public HashTableKey {
567 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
568 : string_(string), hash_field_(0), seed_(seed) { }
570 virtual bool IsMatch(Object* string) {
571 return String::cast(string)->IsUtf8EqualTo(string_);
574 virtual uint32_t Hash() {
575 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
576 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
577 uint32_t result = hash_field_ >> String::kHashShift;
578 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
582 virtual uint32_t HashForObject(Object* other) {
583 return String::cast(other)->Hash();
586 virtual MaybeObject* AsObject(Heap* heap) {
587 if (hash_field_ == 0) Hash();
588 return heap->AllocateInternalizedStringFromUtf8(string_,
593 Vector<const char> string_;
594 uint32_t hash_field_;
595 int chars_; // Caches the number of characters when computing the hash code.
600 bool Object::IsNumber() {
601 return IsSmi() || IsHeapNumber();
605 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
606 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
609 bool Object::IsFiller() {
610 if (!Object::IsHeapObject()) return false;
611 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
612 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
616 bool Object::IsExternalArray() {
617 if (!Object::IsHeapObject())
619 InstanceType instance_type =
620 HeapObject::cast(this)->map()->instance_type();
621 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
622 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
626 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
627 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
628 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
630 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
631 #undef TYPED_ARRAY_TYPE_CHECKER
634 bool Object::IsFixedTypedArrayBase() {
635 if (!Object::IsHeapObject()) return false;
637 InstanceType instance_type =
638 HeapObject::cast(this)->map()->instance_type();
639 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
640 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
644 bool MaybeObject::IsFailure() {
645 return HAS_FAILURE_TAG(this);
649 bool MaybeObject::IsRetryAfterGC() {
650 return HAS_FAILURE_TAG(this)
651 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
655 bool MaybeObject::IsOutOfMemory() {
656 return HAS_FAILURE_TAG(this)
657 && Failure::cast(this)->IsOutOfMemoryException();
661 bool MaybeObject::IsException() {
662 return this == Failure::Exception();
666 bool MaybeObject::IsTheHole() {
667 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
671 bool MaybeObject::IsUninitialized() {
672 return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
676 Failure* Failure::cast(MaybeObject* obj) {
677 ASSERT(HAS_FAILURE_TAG(obj));
678 return reinterpret_cast<Failure*>(obj);
682 bool Object::IsJSReceiver() {
683 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
684 return IsHeapObject() &&
685 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
689 bool Object::IsJSObject() {
690 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
691 return IsHeapObject() &&
692 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
696 bool Object::IsJSProxy() {
697 if (!Object::IsHeapObject()) return false;
698 InstanceType type = HeapObject::cast(this)->map()->instance_type();
699 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
703 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
704 TYPE_CHECKER(JSSet, JS_SET_TYPE)
705 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
706 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
707 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
708 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
709 TYPE_CHECKER(Map, MAP_TYPE)
710 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
711 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
712 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
715 bool Object::IsJSWeakCollection() {
716 return IsJSWeakMap() || IsJSWeakSet();
720 bool Object::IsDescriptorArray() {
721 return IsFixedArray();
725 bool Object::IsTransitionArray() {
726 return IsFixedArray();
730 bool Object::IsDeoptimizationInputData() {
731 // Must be a fixed array.
732 if (!IsFixedArray()) return false;
734 // There's no sure way to detect the difference between a fixed array and
735 // a deoptimization data array. Since this is used for asserts we can
736 // check that the length is zero or else the fixed size plus a multiple of
738 int length = FixedArray::cast(this)->length();
739 if (length == 0) return true;
741 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
742 return length >= 0 &&
743 length % DeoptimizationInputData::kDeoptEntrySize == 0;
747 bool Object::IsDeoptimizationOutputData() {
748 if (!IsFixedArray()) return false;
749 // There's actually no way to see the difference between a fixed array and
750 // a deoptimization data array. Since this is used for asserts we can check
751 // that the length is plausible though.
752 if (FixedArray::cast(this)->length() % 2 != 0) return false;
757 bool Object::IsDependentCode() {
758 if (!IsFixedArray()) return false;
759 // There's actually no way to see the difference between a fixed array and
760 // a dependent codes array.
765 bool Object::IsTypeFeedbackCells() {
766 if (!IsFixedArray()) return false;
767 // There's actually no way to see the difference between a fixed array and
768 // a cache cells array. Since this is used for asserts we can check that
769 // the length is plausible though.
770 if (FixedArray::cast(this)->length() % 2 != 0) return false;
775 bool Object::IsContext() {
776 if (!Object::IsHeapObject()) return false;
777 Map* map = HeapObject::cast(this)->map();
778 Heap* heap = map->GetHeap();
779 return (map == heap->function_context_map() ||
780 map == heap->catch_context_map() ||
781 map == heap->with_context_map() ||
782 map == heap->native_context_map() ||
783 map == heap->block_context_map() ||
784 map == heap->module_context_map() ||
785 map == heap->global_context_map());
789 bool Object::IsNativeContext() {
790 return Object::IsHeapObject() &&
791 HeapObject::cast(this)->map() ==
792 HeapObject::cast(this)->GetHeap()->native_context_map();
796 bool Object::IsScopeInfo() {
797 return Object::IsHeapObject() &&
798 HeapObject::cast(this)->map() ==
799 HeapObject::cast(this)->GetHeap()->scope_info_map();
803 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
806 template <> inline bool Is<JSFunction>(Object* obj) {
807 return obj->IsJSFunction();
811 TYPE_CHECKER(Code, CODE_TYPE)
812 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
813 TYPE_CHECKER(Cell, CELL_TYPE)
814 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
815 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
816 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
817 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
818 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
819 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
820 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
823 bool Object::IsStringWrapper() {
824 return IsJSValue() && JSValue::cast(this)->value()->IsString();
828 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
831 bool Object::IsBoolean() {
832 return IsOddball() &&
833 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
837 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
838 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
839 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
840 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
843 bool Object::IsJSArrayBufferView() {
844 return IsJSDataView() || IsJSTypedArray();
848 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
851 template <> inline bool Is<JSArray>(Object* obj) {
852 return obj->IsJSArray();
856 bool Object::IsHashTable() {
857 return Object::IsHeapObject() &&
858 HeapObject::cast(this)->map() ==
859 HeapObject::cast(this)->GetHeap()->hash_table_map();
863 bool Object::IsDictionary() {
864 return IsHashTable() &&
865 this != HeapObject::cast(this)->GetHeap()->string_table();
869 bool Object::IsStringTable() {
870 return IsHashTable() &&
871 this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
875 bool Object::IsJSFunctionResultCache() {
876 if (!IsFixedArray()) return false;
877 FixedArray* self = FixedArray::cast(this);
878 int length = self->length();
879 if (length < JSFunctionResultCache::kEntriesIndex) return false;
880 if ((length - JSFunctionResultCache::kEntriesIndex)
881 % JSFunctionResultCache::kEntrySize != 0) {
885 if (FLAG_verify_heap) {
886 reinterpret_cast<JSFunctionResultCache*>(this)->
887 JSFunctionResultCacheVerify();
894 bool Object::IsNormalizedMapCache() {
895 if (!IsFixedArray()) return false;
896 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
900 if (FLAG_verify_heap) {
901 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
908 bool Object::IsCompilationCacheTable() {
909 return IsHashTable();
913 bool Object::IsCodeCacheHashTable() {
914 return IsHashTable();
918 bool Object::IsPolymorphicCodeCacheHashTable() {
919 return IsHashTable();
923 bool Object::IsMapCache() {
924 return IsHashTable();
928 bool Object::IsObjectHashTable() {
929 return IsHashTable();
933 bool Object::IsPrimitive() {
934 return IsOddball() || IsNumber() || IsString();
938 bool Object::IsJSGlobalProxy() {
939 bool result = IsHeapObject() &&
940 (HeapObject::cast(this)->map()->instance_type() ==
941 JS_GLOBAL_PROXY_TYPE);
942 ASSERT(!result || IsAccessCheckNeeded());
947 bool Object::IsGlobalObject() {
948 if (!IsHeapObject()) return false;
950 InstanceType type = HeapObject::cast(this)->map()->instance_type();
951 return type == JS_GLOBAL_OBJECT_TYPE ||
952 type == JS_BUILTINS_OBJECT_TYPE;
956 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
957 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
960 bool Object::IsUndetectableObject() {
961 return IsHeapObject()
962 && HeapObject::cast(this)->map()->is_undetectable();
966 bool Object::IsAccessCheckNeeded() {
967 return IsHeapObject()
968 && HeapObject::cast(this)->map()->is_access_check_needed();
972 bool Object::IsStruct() {
973 if (!IsHeapObject()) return false;
974 switch (HeapObject::cast(this)->map()->instance_type()) {
975 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
976 STRUCT_LIST(MAKE_STRUCT_CASE)
977 #undef MAKE_STRUCT_CASE
978 default: return false;
983 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
984 bool Object::Is##Name() { \
985 return Object::IsHeapObject() \
986 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
988 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
989 #undef MAKE_STRUCT_PREDICATE
992 bool Object::IsUndefined() {
993 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
997 bool Object::IsNull() {
998 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1002 bool Object::IsTheHole() {
1003 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1007 bool Object::IsUninitialized() {
1008 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1012 bool Object::IsTrue() {
1013 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1017 bool Object::IsFalse() {
1018 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1022 bool Object::IsArgumentsMarker() {
1023 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1027 double Object::Number() {
1030 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1031 : reinterpret_cast<HeapNumber*>(this)->value();
1035 bool Object::IsNaN() {
1036 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1040 MaybeObject* Object::ToSmi() {
1041 if (IsSmi()) return this;
1042 if (IsHeapNumber()) {
1043 double value = HeapNumber::cast(this)->value();
1044 int int_value = FastD2I(value);
1045 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1046 return Smi::FromInt(int_value);
1049 return Failure::Exception();
1053 bool Object::HasSpecificClassOf(String* name) {
1054 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1058 MaybeObject* Object::GetElement(Isolate* isolate, uint32_t index) {
1059 // GetElement can trigger a getter which can cause allocation.
1060 // This was not always the case. This ASSERT is here to catch
1061 // leftover incorrect uses.
1062 ASSERT(AllowHeapAllocation::IsAllowed());
1063 return GetElementWithReceiver(isolate, this, index);
1067 Object* Object::GetElementNoExceptionThrown(Isolate* isolate, uint32_t index) {
1068 MaybeObject* maybe = GetElementWithReceiver(isolate, this, index);
1069 ASSERT(!maybe->IsFailure());
1070 Object* result = NULL; // Initialization to please compiler.
1071 maybe->ToObject(&result);
1076 MaybeObject* Object::GetProperty(Name* key) {
1077 PropertyAttributes attributes;
1078 return GetPropertyWithReceiver(this, key, &attributes);
1082 MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
1083 return GetPropertyWithReceiver(this, key, attributes);
// Address of a field at the given byte offset inside a tagged heap object.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Unconditional write barrier: notify incremental marking, and record
// old-to-new pointers in the store buffer.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

// Write barrier that is skipped when mode != UPDATE_WRITE_BARRIER.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  // NOTE(review): the union body was lost in this copy; reconstructed as a
  // double/uint32_t[2] pun — confirm against upstream.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS

#define READ_FLOAT32x4_FIELD(p, offset) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32x4_FIELD(p, offset) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1211 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1212 return &READ_FIELD(obj, byte_offset);
1217 return Internals::SmiValue(this);
1221 Smi* Smi::FromInt(int value) {
1222 ASSERT(Smi::IsValid(value));
1223 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1227 Smi* Smi::FromIntptr(intptr_t value) {
1228 ASSERT(Smi::IsValid(value));
1229 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1230 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1234 Failure::Type Failure::type() const {
1235 return static_cast<Type>(value() & kFailureTypeTagMask);
1239 bool Failure::IsInternalError() const {
1240 return type() == INTERNAL_ERROR;
1244 bool Failure::IsOutOfMemoryException() const {
1245 return type() == OUT_OF_MEMORY_EXCEPTION;
1249 AllocationSpace Failure::allocation_space() const {
1250 ASSERT_EQ(RETRY_AFTER_GC, type());
1251 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1256 Failure* Failure::InternalError() {
1257 return Construct(INTERNAL_ERROR);
1261 Failure* Failure::Exception() {
1262 return Construct(EXCEPTION);
1266 Failure* Failure::OutOfMemoryException(intptr_t value) {
1267 return Construct(OUT_OF_MEMORY_EXCEPTION, value);
1271 intptr_t Failure::value() const {
1272 return static_cast<intptr_t>(
1273 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1277 Failure* Failure::RetryAfterGC() {
1278 return RetryAfterGC(NEW_SPACE);
1282 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1283 ASSERT((space & ~kSpaceTagMask) == 0);
1284 return Construct(RETRY_AFTER_GC, space);
1288 Failure* Failure::Construct(Type type, intptr_t value) {
1290 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1291 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1292 // Fill the unused bits with a pattern that's easy to recognize in crash
1294 static const int kFailureMagicPattern = 0x0BAD0000;
1295 return reinterpret_cast<Failure*>(
1296 (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
1300 bool Smi::IsValid(intptr_t value) {
1301 bool result = Internals::IsValidSmi(value);
1302 ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
1307 MapWord MapWord::FromMap(Map* map) {
1308 return MapWord(reinterpret_cast<uintptr_t>(map));
1312 Map* MapWord::ToMap() {
1313 return reinterpret_cast<Map*>(value_);
1317 bool MapWord::IsForwardingAddress() {
1318 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1322 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1323 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1324 return MapWord(reinterpret_cast<uintptr_t>(raw));
1328 HeapObject* MapWord::ToForwardingAddress() {
1329 ASSERT(IsForwardingAddress());
1330 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1335 void HeapObject::VerifyObjectField(int offset) {
1336 VerifyPointer(READ_FIELD(this, offset));
1339 void HeapObject::VerifySmiField(int offset) {
1340 CHECK(READ_FIELD(this, offset)->IsSmi());
1345 Heap* HeapObject::GetHeap() {
1347 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1348 SLOW_ASSERT(heap != NULL);
1353 Isolate* HeapObject::GetIsolate() {
1354 return GetHeap()->isolate();
1358 Map* HeapObject::map() {
1359 return map_word().ToMap();
1363 void HeapObject::set_map(Map* value) {
1364 set_map_word(MapWord::FromMap(value));
1365 if (value != NULL) {
1366 // TODO(1600) We are passing NULL as a slot because maps can never be on
1367 // evacuation candidate.
1368 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1373 // Unsafe accessor omitting write barrier.
1374 void HeapObject::set_map_no_write_barrier(Map* value) {
1375 set_map_word(MapWord::FromMap(value));
1379 MapWord HeapObject::map_word() {
1380 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1384 void HeapObject::set_map_word(MapWord map_word) {
1385 // WRITE_FIELD does not invoke write barrier, but there is no need
1387 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1391 HeapObject* HeapObject::FromAddress(Address address) {
1392 ASSERT_TAG_ALIGNED(address);
1393 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1397 Address HeapObject::address() {
1398 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1402 int HeapObject::Size() {
1403 return SizeFromMap(map());
1407 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1408 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1409 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1413 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1414 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1418 double HeapNumber::value() {
1419 return READ_DOUBLE_FIELD(this, kValueOffset);
1423 void HeapNumber::set_value(double value) {
1424 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1428 int HeapNumber::get_exponent() {
1429 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1430 kExponentShift) - kExponentBias;
1434 int HeapNumber::get_sign() {
1435 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1439 const char* Float32x4::Name() {
1444 int Float32x4::kRuntimeAllocatorId() {
1445 return Runtime::kAllocateFloat32x4;
1449 int Float32x4::kMapRootIndex() {
1450 return Heap::kFloat32x4MapRootIndex;
1454 float32x4_value_t Float32x4::value() {
1455 return READ_FLOAT32x4_FIELD(this, kValueOffset);
1459 void Float32x4::set_value(float32x4_value_t value) {
1460 WRITE_FLOAT32x4_FIELD(this, kValueOffset, value);
1464 float Float32x4::getAt(int index) {
1465 ASSERT(index >= 0 && index < kLanes);
1466 return READ_FLOAT_FIELD(this, kValueOffset + index * kFloatSize);
1470 const char* Int32x4::Name() {
1475 int Int32x4::kRuntimeAllocatorId() {
1476 return Runtime::kAllocateInt32x4;
1480 int Int32x4::kMapRootIndex() {
1481 return Heap::kInt32x4MapRootIndex;
1485 int32x4_value_t Int32x4::value() {
1486 return READ_INT32x4_FIELD(this, kValueOffset);
1490 void Int32x4::set_value(int32x4_value_t value) {
1491 WRITE_INT32x4_FIELD(this, kValueOffset, value);
1495 int32_t Int32x4::getAt(int index) {
1496 ASSERT(index >= 0 && index < kLanes);
1497 return READ_INT32_FIELD(this, kValueOffset + index * kInt32Size);
1501 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1504 Object** FixedArray::GetFirstElementAddress() {
1505 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1509 bool FixedArray::ContainsOnlySmisOrHoles() {
1510 Object* the_hole = GetHeap()->the_hole_value();
1511 Object** current = GetFirstElementAddress();
1512 for (int i = 0; i < length(); ++i) {
1513 Object* candidate = *current++;
1514 if (!candidate->IsSmi() && candidate != the_hole) return false;
1520 FixedArrayBase* JSObject::elements() {
1521 Object* array = READ_FIELD(this, kElementsOffset);
1522 return static_cast<FixedArrayBase*>(array);
1526 void JSObject::ValidateElements() {
1527 #ifdef ENABLE_SLOW_ASSERTS
1528 if (FLAG_enable_slow_asserts) {
1529 ElementsAccessor* accessor = GetElementsAccessor();
1530 accessor->Validate(this);
1536 void AllocationSite::Initialize() {
1537 set_transition_info(Smi::FromInt(0));
1538 SetElementsKind(GetInitialFastElementsKind());
1539 set_nested_site(Smi::FromInt(0));
1540 set_pretenure_data(Smi::FromInt(0));
1541 set_pretenure_create_count(Smi::FromInt(0));
1542 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1543 SKIP_WRITE_BARRIER);
1547 void AllocationSite::MarkZombie() {
1548 ASSERT(!IsZombie());
1550 set_pretenure_decision(kZombie);
1554 // Heuristic: We only need to create allocation site info if the boilerplate
1555 // elements kind is the initial elements kind.
1556 AllocationSiteMode AllocationSite::GetMode(
1557 ElementsKind boilerplate_elements_kind) {
1558 if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
1559 return TRACK_ALLOCATION_SITE;
1562 return DONT_TRACK_ALLOCATION_SITE;
1566 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1568 if (IsFastSmiElementsKind(from) &&
1569 IsMoreGeneralElementsKindTransition(from, to)) {
1570 return TRACK_ALLOCATION_SITE;
1573 return DONT_TRACK_ALLOCATION_SITE;
1577 inline bool AllocationSite::CanTrack(InstanceType type) {
1578 if (FLAG_allocation_site_pretenuring) {
1579 return type == JS_ARRAY_TYPE ||
1580 type == JS_OBJECT_TYPE ||
1581 type < FIRST_NONSTRING_TYPE;
1583 return type == JS_ARRAY_TYPE;
1587 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1591 return DependentCode::kAllocationSiteTenuringChangedGroup;
1594 return DependentCode::kAllocationSiteTransitionChangedGroup;
1598 return DependentCode::kAllocationSiteTransitionChangedGroup;
1602 inline void AllocationSite::set_memento_found_count(int count) {
1603 int value = pretenure_data()->value();
1604 // Verify that we can count more mementos than we can possibly find in one
1605 // new space collection.
1606 ASSERT((GetHeap()->MaxSemiSpaceSize() /
1607 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1608 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1609 ASSERT(count < MementoFoundCountBits::kMax);
1611 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1612 SKIP_WRITE_BARRIER);
1615 inline bool AllocationSite::IncrementMementoFoundCount() {
1616 if (IsZombie()) return false;
1618 int value = memento_found_count();
1619 set_memento_found_count(value + 1);
1624 inline void AllocationSite::IncrementMementoCreateCount() {
1625 ASSERT(FLAG_allocation_site_pretenuring);
1626 int value = memento_create_count();
1627 set_memento_create_count(value + 1);
1631 inline bool AllocationSite::DigestPretenuringFeedback() {
1632 bool decision_changed = false;
1633 int create_count = memento_create_count();
1634 int found_count = memento_found_count();
1635 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1637 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1638 static_cast<double>(found_count) / create_count : 0.0;
1639 PretenureFlag current_mode = GetPretenureMode();
1641 if (minimum_mementos_created) {
1642 PretenureDecision result = ratio >= kPretenureRatio
1645 set_pretenure_decision(result);
1646 if (current_mode != GetPretenureMode()) {
1647 decision_changed = true;
1648 dependent_code()->MarkCodeForDeoptimization(
1650 DependentCode::kAllocationSiteTenuringChangedGroup);
1654 if (FLAG_trace_pretenuring_statistics) {
1656 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1657 static_cast<void*>(this), create_count, found_count, ratio,
1658 current_mode == TENURED ? "tenured" : "not tenured",
1659 GetPretenureMode() == TENURED ? "tenured" : "not tenured");
1662 // Clear feedback calculation fields until the next gc.
1663 set_memento_found_count(0);
1664 set_memento_create_count(0);
1665 return decision_changed;
1669 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1670 object->ValidateElements();
1671 ElementsKind elements_kind = object->map()->elements_kind();
1672 if (!IsFastObjectElementsKind(elements_kind)) {
1673 if (IsFastHoleyElementsKind(elements_kind)) {
1674 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1676 TransitionElementsKind(object, FAST_ELEMENTS);
1682 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1684 EnsureElementsMode mode) {
1685 ElementsKind current_kind = map()->elements_kind();
1686 ElementsKind target_kind = current_kind;
1687 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1688 bool is_holey = IsFastHoleyElementsKind(current_kind);
1689 if (current_kind == FAST_HOLEY_ELEMENTS) return this;
1690 Heap* heap = GetHeap();
1691 Object* the_hole = heap->the_hole_value();
1692 for (uint32_t i = 0; i < count; ++i) {
1693 Object* current = *objects++;
1694 if (current == the_hole) {
1696 target_kind = GetHoleyElementsKind(target_kind);
1697 } else if (!current->IsSmi()) {
1698 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1699 if (IsFastSmiElementsKind(target_kind)) {
1701 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1703 target_kind = FAST_DOUBLE_ELEMENTS;
1706 } else if (is_holey) {
1707 target_kind = FAST_HOLEY_ELEMENTS;
1710 target_kind = FAST_ELEMENTS;
1715 if (target_kind != current_kind) {
1716 return TransitionElementsKind(target_kind);
1722 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1724 EnsureElementsMode mode) {
1725 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1726 ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1727 elements->map() == GetHeap()->fixed_cow_array_map());
1728 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1729 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1731 Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1732 return EnsureCanContainElements(objects, length, mode);
1735 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1736 if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1737 return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1738 } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
1739 FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
1740 for (uint32_t i = 0; i < length; ++i) {
1741 if (double_array->is_the_hole(i)) {
1742 return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1745 return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
1752 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1753 ElementsKind to_kind) {
1754 Map* current_map = map();
1755 ElementsKind from_kind = current_map->elements_kind();
1756 if (from_kind == to_kind) return current_map;
1758 Context* native_context = isolate->context()->native_context();
1759 Object* maybe_array_maps = native_context->js_array_maps();
1760 if (maybe_array_maps->IsFixedArray()) {
1761 FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
1762 if (array_maps->get(from_kind) == current_map) {
1763 Object* maybe_transitioned_map = array_maps->get(to_kind);
1764 if (maybe_transitioned_map->IsMap()) {
1765 return Map::cast(maybe_transitioned_map);
1770 return GetElementsTransitionMapSlow(to_kind);
1774 void JSObject::set_map_and_elements(Map* new_map,
1775 FixedArrayBase* value,
1776 WriteBarrierMode mode) {
1777 ASSERT(value->HasValidElements());
1778 if (new_map != NULL) {
1779 if (mode == UPDATE_WRITE_BARRIER) {
1782 ASSERT(mode == SKIP_WRITE_BARRIER);
1783 set_map_no_write_barrier(new_map);
1786 ASSERT((map()->has_fast_smi_or_object_elements() ||
1787 (value == GetHeap()->empty_fixed_array())) ==
1788 (value->map() == GetHeap()->fixed_array_map() ||
1789 value->map() == GetHeap()->fixed_cow_array_map()));
1790 ASSERT((value == GetHeap()->empty_fixed_array()) ||
1791 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1792 WRITE_FIELD(this, kElementsOffset, value);
1793 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1797 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1798 set_map_and_elements(NULL, value, mode);
1802 void JSObject::initialize_properties() {
1803 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1804 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1808 void JSObject::initialize_elements() {
1809 if (map()->has_fast_smi_or_object_elements() ||
1810 map()->has_fast_double_elements()) {
1811 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1812 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1813 } else if (map()->has_external_array_elements()) {
1814 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
1815 ASSERT(!GetHeap()->InNewSpace(empty_array));
1816 WRITE_FIELD(this, kElementsOffset, empty_array);
1823 MaybeObject* JSObject::ResetElements() {
1824 if (map()->is_observed()) {
1825 // Maintain invariant that observed elements are always in dictionary mode.
1826 SeededNumberDictionary* dictionary;
1827 MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
1828 if (!maybe->To(&dictionary)) return maybe;
1829 if (map() == GetHeap()->non_strict_arguments_elements_map()) {
1830 FixedArray::cast(elements())->set(1, dictionary);
1832 set_elements(dictionary);
1837 ElementsKind elements_kind = GetInitialFastElementsKind();
1838 if (!FLAG_smi_only_arrays) {
1839 elements_kind = FastSmiToObjectElementsKind(elements_kind);
1841 MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
1843 if (!maybe->To(&map)) return maybe;
1845 initialize_elements();
1851 Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
1852 DisallowHeapAllocation no_gc;
1853 if (!map->HasTransitionArray()) return Handle<String>::null();
1854 TransitionArray* transitions = map->transitions();
1855 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1856 int transition = TransitionArray::kSimpleTransitionIndex;
1857 PropertyDetails details = transitions->GetTargetDetails(transition);
1858 Name* name = transitions->GetKey(transition);
1859 if (details.type() != FIELD) return Handle<String>::null();
1860 if (details.attributes() != NONE) return Handle<String>::null();
1861 if (!name->IsString()) return Handle<String>::null();
1862 return Handle<String>(String::cast(name));
1866 Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
1867 ASSERT(!ExpectedTransitionKey(map).is_null());
1868 return Handle<Map>(map->transitions()->GetTarget(
1869 TransitionArray::kSimpleTransitionIndex));
1873 Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1874 DisallowHeapAllocation no_allocation;
1875 if (!map->HasTransitionArray()) return Handle<Map>::null();
1876 TransitionArray* transitions = map->transitions();
1877 int transition = transitions->Search(*key);
1878 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1879 PropertyDetails target_details = transitions->GetTargetDetails(transition);
1880 if (target_details.type() != FIELD) return Handle<Map>::null();
1881 if (target_details.attributes() != NONE) return Handle<Map>::null();
1882 return Handle<Map>(transitions->GetTarget(transition));
1886 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1887 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1890 byte Oddball::kind() {
1891 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1895 void Oddball::set_kind(byte value) {
1896 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1900 Object* Cell::value() {
1901 return READ_FIELD(this, kValueOffset);
1905 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
1906 // The write barrier is not used for global property cells.
1907 ASSERT(!val->IsPropertyCell() && !val->IsCell());
1908 WRITE_FIELD(this, kValueOffset, val);
1911 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1913 Object* PropertyCell::type_raw() {
1914 return READ_FIELD(this, kTypeOffset);
1918 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
1919 WRITE_FIELD(this, kTypeOffset, val);
1923 int JSObject::GetHeaderSize() {
1924 InstanceType type = map()->instance_type();
1925 // Check for the most common kind of JavaScript object before
1926 // falling into the generic switch. This speeds up the internal
1927 // field operations considerably on average.
1928 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1930 case JS_GENERATOR_OBJECT_TYPE:
1931 return JSGeneratorObject::kSize;
1932 case JS_MODULE_TYPE:
1933 return JSModule::kSize;
1934 case JS_GLOBAL_PROXY_TYPE:
1935 return JSGlobalProxy::kSize;
1936 case JS_GLOBAL_OBJECT_TYPE:
1937 return JSGlobalObject::kSize;
1938 case JS_BUILTINS_OBJECT_TYPE:
1939 return JSBuiltinsObject::kSize;
1940 case JS_FUNCTION_TYPE:
1941 return JSFunction::kSize;
1943 return JSValue::kSize;
1945 return JSDate::kSize;
1947 return JSArray::kSize;
1948 case JS_ARRAY_BUFFER_TYPE:
1949 return JSArrayBuffer::kSize;
1950 case JS_TYPED_ARRAY_TYPE:
1951 return JSTypedArray::kSize;
1952 case JS_DATA_VIEW_TYPE:
1953 return JSDataView::kSize;
1955 return JSSet::kSize;
1957 return JSMap::kSize;
1958 case JS_WEAK_MAP_TYPE:
1959 return JSWeakMap::kSize;
1960 case JS_WEAK_SET_TYPE:
1961 return JSWeakSet::kSize;
1962 case JS_REGEXP_TYPE:
1963 return JSRegExp::kSize;
1964 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1965 return JSObject::kHeaderSize;
1966 case JS_MESSAGE_OBJECT_TYPE:
1967 return JSMessageObject::kSize;
1969 // TODO(jkummerow): Re-enable this. Blink currently hits this
1970 // from its CustomElementConstructorBuilder.
1977 int JSObject::GetInternalFieldCount() {
1978 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1979 // Make sure to adjust for the number of in-object properties. These
1980 // properties do contribute to the size, but are not internal fields.
1981 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1982 map()->inobject_properties();
1986 int JSObject::GetInternalFieldOffset(int index) {
1987 ASSERT(index < GetInternalFieldCount() && index >= 0);
1988 return GetHeaderSize() + (kPointerSize * index);
1992 Object* JSObject::GetInternalField(int index) {
1993 ASSERT(index < GetInternalFieldCount() && index >= 0);
1994 // Internal objects do follow immediately after the header, whereas in-object
1995 // properties are at the end of the object. Therefore there is no need
1996 // to adjust the index here.
1997 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2001 void JSObject::SetInternalField(int index, Object* value) {
2002 ASSERT(index < GetInternalFieldCount() && index >= 0);
2003 // Internal objects do follow immediately after the header, whereas in-object
2004 // properties are at the end of the object. Therefore there is no need
2005 // to adjust the index here.
2006 int offset = GetHeaderSize() + (kPointerSize * index);
2007 WRITE_FIELD(this, offset, value);
2008 WRITE_BARRIER(GetHeap(), this, offset, value);
2012 void JSObject::SetInternalField(int index, Smi* value) {
2013 ASSERT(index < GetInternalFieldCount() && index >= 0);
2014 // Internal objects do follow immediately after the header, whereas in-object
2015 // properties are at the end of the object. Therefore there is no need
2016 // to adjust the index here.
2017 int offset = GetHeaderSize() + (kPointerSize * index);
2018 WRITE_FIELD(this, offset, value);
2022 MaybeObject* JSObject::FastPropertyAt(Representation representation,
2024 Object* raw_value = RawFastPropertyAt(index);
2025 return raw_value->AllocateNewStorageFor(GetHeap(), representation);
2029 // Access fast-case object properties at index. The use of these routines
2030 // is needed to correctly distinguish between properties stored in-object and
2031 // properties stored in the properties array.
2032 Object* JSObject::RawFastPropertyAt(int index) {
2033 // Adjust for the number of properties stored in the object.
2034 index -= map()->inobject_properties();
2036 int offset = map()->instance_size() + (index * kPointerSize);
2037 return READ_FIELD(this, offset);
2039 ASSERT(index < properties()->length());
2040 return properties()->get(index);
2045 void JSObject::FastPropertyAtPut(int index, Object* value) {
2046 // Adjust for the number of properties stored in the object.
2047 index -= map()->inobject_properties();
2049 int offset = map()->instance_size() + (index * kPointerSize);
2050 WRITE_FIELD(this, offset, value);
2051 WRITE_BARRIER(GetHeap(), this, offset, value);
2053 ASSERT(index < properties()->length());
2054 properties()->set(index, value);
2059 int JSObject::GetInObjectPropertyOffset(int index) {
2060 return map()->GetInObjectPropertyOffset(index);
2064 Object* JSObject::InObjectPropertyAt(int index) {
2065 int offset = GetInObjectPropertyOffset(index);
2066 return READ_FIELD(this, offset);
2070 Object* JSObject::InObjectPropertyAtPut(int index,
2072 WriteBarrierMode mode) {
2073 // Adjust for the number of properties stored in the object.
2074 int offset = GetInObjectPropertyOffset(index);
2075 WRITE_FIELD(this, offset, value);
2076 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2082 void JSObject::InitializeBody(Map* map,
2083 Object* pre_allocated_value,
2084 Object* filler_value) {
2085 ASSERT(!filler_value->IsHeapObject() ||
2086 !GetHeap()->InNewSpace(filler_value));
2087 ASSERT(!pre_allocated_value->IsHeapObject() ||
2088 !GetHeap()->InNewSpace(pre_allocated_value));
2089 int size = map->instance_size();
2090 int offset = kHeaderSize;
2091 if (filler_value != pre_allocated_value) {
2092 int pre_allocated = map->pre_allocated_property_fields();
2093 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
2094 for (int i = 0; i < pre_allocated; i++) {
2095 WRITE_FIELD(this, offset, pre_allocated_value);
2096 offset += kPointerSize;
2099 while (offset < size) {
2100 WRITE_FIELD(this, offset, filler_value);
2101 offset += kPointerSize;
2106 bool JSObject::HasFastProperties() {
2107 ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
2108 return !properties()->IsDictionary();
2112 bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
2113 // Allow extra fast properties if the object has more than
2114 // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
2115 // very unlikely that the object is being used as a dictionary and there is a
2116 // good chance that allowing more map transitions will be worth it.
2117 Map* map = this->map();
2118 if (map->unused_property_fields() != 0) return false;
2120 int inobject = map->inobject_properties();
2123 if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
2124 limit = Max(inobject, kMaxFastProperties);
2126 limit = Max(inobject, kFastPropertiesSoftLimit);
2128 return properties()->length() > limit;
2132 void Struct::InitializeBody(int object_size) {
2133 Object* value = GetHeap()->undefined_value();
2134 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2135 WRITE_FIELD(this, offset, value);
2140 bool Object::ToArrayIndex(uint32_t* index) {
2142 int value = Smi::cast(this)->value();
2143 if (value < 0) return false;
2147 if (IsHeapNumber()) {
2148 double value = HeapNumber::cast(this)->value();
2149 uint32_t uint_value = static_cast<uint32_t>(value);
2150 if (value == static_cast<double>(uint_value)) {
2151 *index = uint_value;
2159 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2160 if (!this->IsJSValue()) return false;
2162 JSValue* js_value = JSValue::cast(this);
2163 if (!js_value->value()->IsString()) return false;
2165 String* str = String::cast(js_value->value());
2166 if (index >= static_cast<uint32_t>(str->length())) return false;
2173 void Object::VerifyApiCallResultType() {
2174 #if ENABLE_EXTRA_CHECKS
2183 FATAL("API call returned invalid object");
2185 #endif // ENABLE_EXTRA_CHECKS
2189 FixedArrayBase* FixedArrayBase::cast(Object* object) {
2190 ASSERT(object->IsFixedArrayBase());
2191 return reinterpret_cast<FixedArrayBase*>(object);
2195 Object* FixedArray::get(int index) {
2196 SLOW_ASSERT(index >= 0 && index < this->length());
2197 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2201 bool FixedArray::is_the_hole(int index) {
2202 return get(index) == GetHeap()->the_hole_value();
2206 void FixedArray::set(int index, Smi* value) {
2207 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2208 ASSERT(index >= 0 && index < this->length());
2209 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
2210 int offset = kHeaderSize + index * kPointerSize;
2211 WRITE_FIELD(this, offset, value);
2215 void FixedArray::set(int index, Object* value) {
2216 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2217 ASSERT(index >= 0 && index < this->length());
2218 int offset = kHeaderSize + index * kPointerSize;
2219 WRITE_FIELD(this, offset, value);
2220 WRITE_BARRIER(GetHeap(), this, offset, value);
2224 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2225 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
2229 inline double FixedDoubleArray::hole_nan_as_double() {
2230 return BitCast<double, uint64_t>(kHoleNanInt64);
2234 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2235 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
2236 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
2237 return OS::nan_value();
2241 double FixedDoubleArray::get_scalar(int index) {
2242 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2243 map() != GetHeap()->fixed_array_map());
2244 ASSERT(index >= 0 && index < this->length());
2245 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2246 ASSERT(!is_the_hole_nan(result));
2250 int64_t FixedDoubleArray::get_representation(int index) {
2251 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2252 map() != GetHeap()->fixed_array_map());
2253 ASSERT(index >= 0 && index < this->length());
2254 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2257 MaybeObject* FixedDoubleArray::get(int index) {
2258 if (is_the_hole(index)) {
2259 return GetHeap()->the_hole_value();
2261 return GetHeap()->NumberFromDouble(get_scalar(index));
2266 void FixedDoubleArray::set(int index, double value) {
2267 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2268 map() != GetHeap()->fixed_array_map());
2269 int offset = kHeaderSize + index * kDoubleSize;
2270 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2271 WRITE_DOUBLE_FIELD(this, offset, value);
2275 void FixedDoubleArray::set_the_hole(int index) {
2276 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2277 map() != GetHeap()->fixed_array_map());
2278 int offset = kHeaderSize + index * kDoubleSize;
2279 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2283 bool FixedDoubleArray::is_the_hole(int index) {
2284 int offset = kHeaderSize + index * kDoubleSize;
2285 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2289 SMI_ACCESSORS(ConstantPoolArray, first_ptr_index, kFirstPointerIndexOffset)
2290 SMI_ACCESSORS(ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset)
2293 int ConstantPoolArray::first_int64_index() {
2298 int ConstantPoolArray::count_of_int64_entries() {
2299 return first_ptr_index();
2303 int ConstantPoolArray::count_of_ptr_entries() {
2304 return first_int32_index() - first_ptr_index();
2308 int ConstantPoolArray::count_of_int32_entries() {
2309 return length() - first_int32_index();
2313 void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries,
2314 int number_of_ptr_entries,
2315 int number_of_int32_entries) {
2316 set_first_ptr_index(number_of_int64_entries);
2317 set_first_int32_index(number_of_int64_entries + number_of_ptr_entries);
2318 set_length(number_of_int64_entries + number_of_ptr_entries +
2319 number_of_int32_entries);
// Typed element accessors. Each asserts the receiver really is a
// ConstantPoolArray and that |index| falls inside the matching section.
2323 int64_t ConstantPoolArray::get_int64_entry(int index) {
2324 ASSERT(map() == GetHeap()->constant_pool_array_map());
2325 ASSERT(index >= 0 && index < first_ptr_index());
2326 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
// Reinterprets an int64 slot as a double; valid because the two types
// share a size (checked by the STATIC_ASSERT).
2329 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2330 STATIC_ASSERT(kDoubleSize == kInt64Size);
2331 ASSERT(map() == GetHeap()->constant_pool_array_map());
2332 ASSERT(index >= 0 && index < first_ptr_index());
2333 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2337 Object* ConstantPoolArray::get_ptr_entry(int index) {
2338 ASSERT(map() == GetHeap()->constant_pool_array_map());
2339 ASSERT(index >= first_ptr_index() && index < first_int32_index());
2340 return READ_FIELD(this, OffsetOfElementAt(index));
2344 int32_t ConstantPoolArray::get_int32_entry(int index) {
2345 ASSERT(map() == GetHeap()->constant_pool_array_map());
2346 ASSERT(index >= first_int32_index() && index < length());
2347 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
// Pointer-entry store: the only variant that needs a write barrier,
// since it may store a heap reference.
2351 void ConstantPoolArray::set(int index, Object* value) {
2352 ASSERT(map() == GetHeap()->constant_pool_array_map());
2353 ASSERT(index >= first_ptr_index() && index < first_int32_index());
2354 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2355 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2359 void ConstantPoolArray::set(int index, int64_t value) {
2360 ASSERT(map() == GetHeap()->constant_pool_array_map());
2361 ASSERT(index >= first_int64_index() && index < first_ptr_index());
2362 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
// Stores a double into the int64 section (same size, see STATIC_ASSERT).
2366 void ConstantPoolArray::set(int index, double value) {
2367 STATIC_ASSERT(kDoubleSize == kInt64Size);
2368 ASSERT(map() == GetHeap()->constant_pool_array_map());
2369 ASSERT(index >= first_int64_index() && index < first_ptr_index());
2370 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2374 void ConstantPoolArray::set(int index, int32_t value) {
2375 ASSERT(map() == GetHeap()->constant_pool_array_map());
2376 ASSERT(index >= this->first_int32_index() && index < length());
2377 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
// Decides whether stores into this object can skip the write barrier.
// Skipping is only safe for new-space objects while incremental marking
// is NOT running; the DisallowHeapAllocation witness guarantees the
// object cannot be moved between this check and the store.
2381 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2382 const DisallowHeapAllocation& promise) {
2383 Heap* heap = GetHeap();
2384 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2385 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2386 return UPDATE_WRITE_BARRIER;
// Stores |value| at |index| with a caller-chosen write-barrier mode.
// Copy-on-write arrays must never be mutated, hence the map assert.
2390 void FixedArray::set(int index,
2392 WriteBarrierMode mode) {
2393 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2394 ASSERT(index >= 0 && index < this->length());
2395 int offset = kHeaderSize + index * kPointerSize;
2396 WRITE_FIELD(this, offset, value);
2397 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Store that bypasses the incremental-marking barrier but still records
// old-to-new references in the store buffer (RecordWrite). Only safe
// when the marker cannot observe the store (e.g. under a
// WhitenessWitness).
2401 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2404 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2405 ASSERT(index >= 0 && index < array->length());
2406 int offset = kHeaderSize + index * kPointerSize;
2407 WRITE_FIELD(array, offset, value);
2408 Heap* heap = array->GetHeap();
2409 if (heap->InNewSpace(value)) {
2410 heap->RecordWrite(array->address(), offset);
// Barrier-free store; asserts the value is not in new space so no
// old-to-new reference can be created.
2415 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2418 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2419 ASSERT(index >= 0 && index < array->length());
2420 ASSERT(!array->GetHeap()->InNewSpace(value));
2421 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
// The three helpers below store immortal immovable singletons
// (undefined / null / the-hole), which live outside new space, so no
// write barrier is needed (each asserts that invariant).
2425 void FixedArray::set_undefined(int index) {
2426 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2427 ASSERT(index >= 0 && index < this->length());
2428 ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2430 kHeaderSize + index * kPointerSize,
2431 GetHeap()->undefined_value());
2435 void FixedArray::set_null(int index) {
2436 ASSERT(index >= 0 && index < this->length());
2437 ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2439 kHeaderSize + index * kPointerSize,
2440 GetHeap()->null_value());
2444 void FixedArray::set_the_hole(int index) {
2445 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2446 ASSERT(index >= 0 && index < this->length());
2447 ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2449 kHeaderSize + index * kPointerSize,
2450 GetHeap()->the_hole_value());
// Raw pointers to the first element's storage (just past the header).
2454 double* FixedDoubleArray::data_start() {
2455 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2459 Object** FixedArray::data_start() {
2460 return HeapObject::RawField(this, kHeaderSize);
// A DescriptorArray shorter than kFirstIndex holds no descriptors; only
// the canonical empty_descriptor_array is allowed to be that short.
2464 bool DescriptorArray::IsEmpty() {
2465 ASSERT(length() >= kFirstIndex ||
2466 this == GetHeap()->empty_descriptor_array());
2467 return length() < kFirstIndex;
// Writes the descriptor count slot directly as a Smi (no barrier needed
// for Smis).
2471 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2473 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2477 // Perform a binary search in a fixed array. Low and high are entry indices. If
2478 // there are three entries in this array it should be called with low=0 and
// Binary search over the hash-sorted key order, then a linear scan over
// the run of equal hashes to find an exact name match. With
// search_mode == VALID_ENTRIES, matches whose sorted index lies beyond
// |valid_entries| are rejected.
2480 template<SearchMode search_mode, typename T>
2481 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2482 uint32_t hash = name->Hash();
2485 ASSERT(low <= high);
2487 while (low != high) {
2488 int mid = (low + high) / 2;
2489 Name* mid_name = array->GetSortedKey(mid);
2490 uint32_t mid_hash = mid_name->Hash();
// Bisect: keep the lower half (inclusive of mid) when mid's hash is
// not below the target hash.
2492 if (mid_hash >= hash) {
// Scan forward through entries sharing the target hash, comparing
// names for exact equality.
2499 for (; low <= limit; ++low) {
2500 int sort_index = array->GetSortedKeyIndex(low);
2501 Name* entry = array->GetKey(sort_index);
2502 if (entry->Hash() != hash) break;
2503 if (entry->Equals(name)) {
2504 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2507 return T::kNotFound;
2511 return T::kNotFound;
2515 // Perform a linear search in this fixed array. len is the number of entry
2516 // indices that are valid.
// ALL_ENTRIES walks keys in hash-sorted order and can stop early once
// hashes exceed the target; VALID_ENTRIES walks the first
// |valid_entries| keys in storage order.
2517 template<SearchMode search_mode, typename T>
2518 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2519 uint32_t hash = name->Hash();
2520 if (search_mode == ALL_ENTRIES) {
2521 for (int number = 0; number < len; number++) {
2522 int sorted_index = array->GetSortedKeyIndex(number);
2523 Name* entry = array->GetKey(sorted_index);
2524 uint32_t current_hash = entry->Hash();
2525 if (current_hash > hash) break;
2526 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2529 ASSERT(len >= valid_entries);
2530 for (int number = 0; number < valid_entries; number++) {
2531 Name* entry = array->GetKey(number);
2532 uint32_t current_hash = entry->Hash();
2533 if (current_hash == hash && entry->Equals(name)) return number;
2536 return T::kNotFound;
// Dispatcher: linear search for small arrays (cheaper than binary
// search), binary search otherwise.
2540 template<SearchMode search_mode, typename T>
2541 int Search(T* array, Name* name, int valid_entries) {
2542 if (search_mode == VALID_ENTRIES) {
2543 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2545 SLOW_ASSERT(array->IsSortedNoDuplicates());
2548 int nof = array->number_of_entries();
2549 if (nof == 0) return T::kNotFound;
2551 // Fast case: do linear search for small arrays.
2552 const int kMaxElementsForLinearSearch = 8;
2553 if ((search_mode == ALL_ENTRIES &&
2554 nof <= kMaxElementsForLinearSearch) ||
2555 (search_mode == VALID_ENTRIES &&
2556 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2557 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2560 // Slow case: perform binary search.
2561 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
// Looks up |name| among the first |valid_descriptors| descriptors.
2565 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2566 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
// Cached variant: consults the per-isolate DescriptorLookupCache keyed
// on (map, name) before falling back to a real search, and stores the
// result (including misses) back into the cache.
2570 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2571 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2572 if (number_of_own_descriptors == 0) return kNotFound;
2574 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2575 int number = cache->Lookup(map, name);
2577 if (number == DescriptorLookupCache::kAbsent) {
2578 number = Search(name, number_of_own_descriptors);
2579 cache->Update(map, name, number);
// Details of the most recently added descriptor of this map.
2586 PropertyDetails Map::GetLastDescriptorDetails() {
2587 return instance_descriptors()->GetDetails(LastAdded());
// Resolves |name| against this map's own descriptors and fills
// |result| with either a descriptor hit or NotFound.
2591 void Map::LookupDescriptor(JSObject* holder,
2593 LookupResult* result) {
2594 DescriptorArray* descriptors = this->instance_descriptors();
2595 int number = descriptors->SearchWithCache(name, this);
2596 if (number == DescriptorArray::kNotFound) return result->NotFound();
2597 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
// Checks this map's transition array (if any) for a transition keyed by
// |name| and records the target map in |result| on a hit.
2601 void Map::LookupTransition(JSObject* holder,
2603 LookupResult* result) {
2604 if (HasTransitionArray()) {
2605 TransitionArray* transition_array = transitions();
2606 int number = transition_array->Search(name);
2607 if (number != TransitionArray::kNotFound) {
2608 return result->TransitionResult(
2609 holder, transition_array->GetTarget(number));
// Each descriptor occupies a (key, value, details) triple; the To*Index
// helpers map a descriptor number to the underlying FixedArray slot.
2616 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2617 ASSERT(descriptor_number < number_of_descriptors());
2618 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2622 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2623 return GetKeySlot(descriptor_number);
// One-past-the-end slot of the previous descriptor == end of entry
// |descriptor_number - 1|.
2627 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2628 return GetValueSlot(descriptor_number - 1) + 1;
2632 Name* DescriptorArray::GetKey(int descriptor_number) {
2633 ASSERT(descriptor_number < number_of_descriptors());
2634 return Name::cast(get(ToKeyIndex(descriptor_number)));
// The hash-sorted ordering is stored as a pointer field inside each
// descriptor's PropertyDetails rather than by physically sorting.
2638 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2639 return GetDetails(descriptor_number).pointer();
2643 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2644 return GetKey(GetSortedKeyIndex(descriptor_number));
2648 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2649 PropertyDetails details = GetDetails(descriptor_index);
2650 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
// Rewrites only the representation bits of the details word.
2654 void DescriptorArray::SetRepresentation(int descriptor_index,
2655 Representation representation) {
2656 ASSERT(!representation.IsNone());
2657 PropertyDetails details = GetDetails(descriptor_index);
2658 set(ToDetailsIndex(descriptor_index),
2659 details.CopyWithRepresentation(representation).AsSmi());
2663 void DescriptorArray::InitializeRepresentations(Representation representation) {
2664 int length = number_of_descriptors();
2665 for (int i = 0; i < length; i++) {
2666 SetRepresentation(i, representation);
2671 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2672 ASSERT(descriptor_number < number_of_descriptors());
2673 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2677 Object* DescriptorArray::GetValue(int descriptor_number) {
2678 ASSERT(descriptor_number < number_of_descriptors());
2679 return get(ToValueIndex(descriptor_number));
// Details are stored as a Smi-encoded PropertyDetails.
2683 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2684 ASSERT(descriptor_number < number_of_descriptors());
2685 Object* details = get(ToDetailsIndex(descriptor_number));
2686 return PropertyDetails(Smi::cast(details));
2690 PropertyType DescriptorArray::GetType(int descriptor_number) {
2691 return GetDetails(descriptor_number).type();
2695 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2696 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2697 return GetDetails(descriptor_number).field_index();
2701 Object* DescriptorArray::GetConstant(int descriptor_number) {
2702 return GetValue(descriptor_number);
2706 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2707 ASSERT(GetType(descriptor_number) == CALLBACKS);
2708 return GetValue(descriptor_number);
// CALLBACKS descriptors store an AccessorDescriptor* boxed in a Foreign.
2712 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
2713 ASSERT(GetType(descriptor_number) == CALLBACKS);
2714 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2715 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
// Copies descriptor |descriptor_number| out into |desc|.
2719 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2720 desc->Init(GetKey(descriptor_number),
2721 GetValue(descriptor_number),
2722 GetDetails(descriptor_number));
// Writes a descriptor triple using barrier-free stores; the
// WhitenessWitness parameter attests the array is still white (unmarked)
// so the incremental-marking barrier can be skipped safely.
2726 void DescriptorArray::Set(int descriptor_number,
2728 const WhitenessWitness&) {
2730 ASSERT(descriptor_number < number_of_descriptors());
2732 NoIncrementalWriteBarrierSet(this,
2733 ToKeyIndex(descriptor_number),
2735 NoIncrementalWriteBarrierSet(this,
2736 ToValueIndex(descriptor_number),
2738 NoIncrementalWriteBarrierSet(this,
2739 ToDetailsIndex(descriptor_number),
2740 desc->GetDetails().AsSmi());
// Fully write-barriered variant of the above.
2744 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2746 ASSERT(descriptor_number < number_of_descriptors());
2748 set(ToKeyIndex(descriptor_number), desc->GetKey());
2749 set(ToValueIndex(descriptor_number), desc->GetValue());
2750 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
// Appends |desc| at the end, then insertion-sorts its index into the
// hash-ordered pointer chain (shifting larger-hash entries up).
2754 void DescriptorArray::Append(Descriptor* desc,
2755 const WhitenessWitness& witness) {
2756 int descriptor_number = number_of_descriptors();
2757 SetNumberOfDescriptors(descriptor_number + 1);
2758 Set(descriptor_number, desc, witness);
2760 uint32_t hash = desc->GetKey()->Hash();
2764 for (insertion = descriptor_number; insertion > 0; --insertion) {
2765 Name* key = GetSortedKey(insertion - 1);
2766 if (key->Hash() <= hash) break;
2767 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2770 SetSortedKey(insertion, descriptor_number);
// Same append + insertion-sort, using the write-barriered Set.
2774 void DescriptorArray::Append(Descriptor* desc) {
2775 int descriptor_number = number_of_descriptors();
2776 SetNumberOfDescriptors(descriptor_number + 1);
2777 Set(descriptor_number, desc);
2779 uint32_t hash = desc->GetKey()->Hash();
2783 for (insertion = descriptor_number; insertion > 0; --insertion) {
2784 Name* key = GetSortedKey(insertion - 1);
2785 if (key->Hash() <= hash) break;
2786 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2789 SetSortedKey(insertion, descriptor_number);
// Swaps two positions in the sorted-order chain (keys themselves stay
// in place; only the pointer fields move).
2793 void DescriptorArray::SwapSortedKeys(int first, int second) {
2794 int first_key = GetSortedKeyIndex(first);
2795 SetSortedKey(first, GetSortedKeyIndex(second));
2796 SetSortedKey(second, first_key);
// RAII scope proving |array| is white to the incremental marker: entry
// suppresses marking, the destructor re-enables it. While held, stores
// into the array may skip the incremental-marking write barrier.
2800 DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
2801 : marking_(array->GetHeap()->incremental_marking()) {
2802 marking_->EnterNoMarkingScope();
2803 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2807 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2808 marking_->LeaveNoMarkingScope();
// Capacity policy: twice the requested element count, rounded up to a
// power of two, with a floor of 32. Doubling keeps the max load factor
// at 50% so probe sequences stay short.
2812 template<typename Shape, typename Key>
2813 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2814 const int kMinCapacity = 32;
2815 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2816 if (capacity < kMinCapacity) {
2817 capacity = kMinCapacity; // Guarantee min capacity.
// Convenience overload using the table's own isolate.
2823 template<typename Shape, typename Key>
2824 int HashTable<Shape, Key>::FindEntry(Key key) {
2825 return FindEntry(GetIsolate(), key);
2829 // Find entry for key otherwise return kNotFound.
// Open-addressing probe loop: undefined marks a never-used slot (stop),
// the-hole marks a deleted slot (keep probing).
2830 template<typename Shape, typename Key>
2831 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2832 uint32_t capacity = Capacity();
2833 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2835 // EnsureCapacity will guarantee the hash table is never full.
2837 Object* element = KeyAt(entry);
2838 // Empty entry. Uses raw unchecked accessors because it is called by the
2839 // string table during bootstrapping.
2840 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2841 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2842 Shape::IsMatch(key, element)) return entry;
2843 entry = NextProbe(entry, count++, capacity);
// The kMaxNumberKeyIndex slot multiplexes two facts in one Smi: the
// low tag bit (kRequiresSlowElementsMask) and, above it, the largest
// number key seen so far.
2849 bool SeededNumberDictionary::requires_slow_elements() {
2850 Object* max_index_object = get(kMaxNumberKeyIndex);
2851 if (!max_index_object->IsSmi()) return false;
2853 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
// Largest number key recorded; only meaningful while fast elements are
// still allowed (asserted).
2856 uint32_t SeededNumberDictionary::max_number_key() {
2857 ASSERT(!requires_slow_elements());
2858 Object* max_index_object = get(kMaxNumberKeyIndex);
2859 if (!max_index_object->IsSmi()) return 0;
2860 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2861 return value >> kRequiresSlowElementsTagSize;
// Setting the flag overwrites (discards) the stored max key.
2864 void SeededNumberDictionary::set_requires_slow_elements() {
2865 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2869 // ------------------------------------
// Checked downcast helpers: CAST_ACCESSOR expands to the static
// Type::cast(Object*) used throughout V8.
2873 CAST_ACCESSOR(FixedArray)
2874 CAST_ACCESSOR(FixedDoubleArray)
2875 CAST_ACCESSOR(FixedTypedArrayBase)
2876 CAST_ACCESSOR(ConstantPoolArray)
2877 CAST_ACCESSOR(DescriptorArray)
2878 CAST_ACCESSOR(DeoptimizationInputData)
2879 CAST_ACCESSOR(DeoptimizationOutputData)
2880 CAST_ACCESSOR(DependentCode)
2881 CAST_ACCESSOR(TypeFeedbackCells)
2882 CAST_ACCESSOR(StringTable)
2883 CAST_ACCESSOR(JSFunctionResultCache)
2884 CAST_ACCESSOR(NormalizedMapCache)
2885 CAST_ACCESSOR(ScopeInfo)
2886 CAST_ACCESSOR(CompilationCacheTable)
2887 CAST_ACCESSOR(CodeCacheHashTable)
2888 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2889 CAST_ACCESSOR(MapCache)
2890 CAST_ACCESSOR(String)
2891 CAST_ACCESSOR(SeqString)
2892 CAST_ACCESSOR(SeqOneByteString)
2893 CAST_ACCESSOR(SeqTwoByteString)
2894 CAST_ACCESSOR(SlicedString)
2895 CAST_ACCESSOR(ConsString)
2896 CAST_ACCESSOR(ExternalString)
2897 CAST_ACCESSOR(ExternalAsciiString)
2898 CAST_ACCESSOR(ExternalTwoByteString)
2899 CAST_ACCESSOR(Symbol)
2901 CAST_ACCESSOR(JSReceiver)
2902 CAST_ACCESSOR(JSObject)
2904 CAST_ACCESSOR(HeapObject)
2905 CAST_ACCESSOR(HeapNumber)
2906 CAST_ACCESSOR(Float32x4)
2907 CAST_ACCESSOR(Int32x4)
2908 CAST_ACCESSOR(Oddball)
2910 CAST_ACCESSOR(PropertyCell)
2911 CAST_ACCESSOR(SharedFunctionInfo)
2913 CAST_ACCESSOR(JSFunction)
2914 CAST_ACCESSOR(GlobalObject)
2915 CAST_ACCESSOR(JSGlobalProxy)
2916 CAST_ACCESSOR(JSGlobalObject)
2917 CAST_ACCESSOR(JSBuiltinsObject)
2919 CAST_ACCESSOR(JSArray)
2920 CAST_ACCESSOR(JSArrayBuffer)
2921 CAST_ACCESSOR(JSArrayBufferView)
2922 CAST_ACCESSOR(JSTypedArray)
2923 CAST_ACCESSOR(JSDataView)
2924 CAST_ACCESSOR(JSRegExp)
2925 CAST_ACCESSOR(JSProxy)
2926 CAST_ACCESSOR(JSFunctionProxy)
2927 CAST_ACCESSOR(JSSet)
2928 CAST_ACCESSOR(JSMap)
2929 CAST_ACCESSOR(JSWeakMap)
2930 CAST_ACCESSOR(JSWeakSet)
2931 CAST_ACCESSOR(Foreign)
2932 CAST_ACCESSOR(ByteArray)
2933 CAST_ACCESSOR(FreeSpace)
2934 CAST_ACCESSOR(ExternalArray)
2935 CAST_ACCESSOR(ExternalInt8Array)
2936 CAST_ACCESSOR(ExternalUint8Array)
2937 CAST_ACCESSOR(ExternalInt16Array)
2938 CAST_ACCESSOR(ExternalUint16Array)
2939 CAST_ACCESSOR(ExternalInt32Array)
2940 CAST_ACCESSOR(ExternalInt32x4Array)
2941 CAST_ACCESSOR(ExternalUint32Array)
2942 CAST_ACCESSOR(ExternalFloat32Array)
2943 CAST_ACCESSOR(ExternalFloat32x4Array)
2944 CAST_ACCESSOR(ExternalFloat64Array)
2945 CAST_ACCESSOR(ExternalUint8ClampedArray)
2946 CAST_ACCESSOR(Struct)
2947 CAST_ACCESSOR(AccessorInfo)
// FixedTypedArray is templated on its element traits, so its cast is
// spelled out manually: validate the instance type, then reinterpret.
2949 template <class Traits>
2950 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
2951 SLOW_ASSERT(object->IsHeapObject() &&
2952 HeapObject::cast(object)->map()->instance_type() ==
2953 Traits::kInstanceType);
2954 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
// Stamp out cast() for every Struct subtype in STRUCT_LIST.
2958 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2959 STRUCT_LIST(MAKE_STRUCT_CAST)
2960 #undef MAKE_STRUCT_CAST
// Checked downcast for the templated HashTable.
2963 template <typename Shape, typename Key>
2964 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2965 ASSERT(obj->IsHashTable());
2966 return reinterpret_cast<HashTable*>(obj);
// Smi-encoded length/size fields.
2970 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2971 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2973 SMI_ACCESSORS(String, length, kLengthOffset)
// Raw hash field accessor (contains hash plus flag bits).
2976 uint32_t Name::hash_field() {
2977 return READ_UINT32_FIELD(this, kHashFieldOffset);
2981 void Name::set_hash_field(uint32_t value) {
2982 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
// On 64-bit hosts also clear the adjacent padding word so the whole
// pointer-sized slot is deterministic.
2983 #if V8_HOST_ARCH_64_BIT
2984 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
// Identity comparison suffices for internalized strings and symbols
// (both are canonicalized); everything else needs a content compare.
2989 bool Name::Equals(Name* other) {
2990 if (other == this) return true;
2991 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
2992 this->IsSymbol() || other->IsSymbol()) {
2995 return String::cast(this)->SlowEquals(String::cast(other));
2999 ACCESSORS(Symbol, name, Object, kNameOffset)
3000 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3001 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
// Fast paths: pointer identity, and internalized strings (unique per
// content) before the full content comparison.
3004 bool String::Equals(String* other) {
3005 if (other == this) return true;
3006 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3009 return SlowEquals(other);
// Flattens a cons string; an already-flat cons returns its first part,
// non-cons strings return themselves. May allocate (MaybeObject).
3013 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
3014 if (!StringShape(this).IsCons()) return this;
3015 ConsString* cons = ConsString::cast(this);
3016 if (cons->IsFlat()) return cons->first();
3017 return SlowTryFlatten(pretenure);
// Non-failing wrapper: falls back to the unflattened receiver if
// flattening failed (e.g. allocation failure).
3021 String* String::TryFlattenGetString(PretenureFlag pretenure) {
3022 MaybeObject* flat = TryFlatten(pretenure);
3023 Object* successfully_flattened;
3024 if (!flat->ToObject(&successfully_flattened)) return this;
3025 return String::cast(successfully_flattened);
// Character read dispatched on the string's representation + encoding
// tag to the concrete subclass accessor.
3029 uint16_t String::Get(int index) {
3030 ASSERT(index >= 0 && index < length());
3031 switch (StringShape(this).full_representation_tag()) {
3032 case kSeqStringTag | kOneByteStringTag:
3033 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3034 case kSeqStringTag | kTwoByteStringTag:
3035 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3036 case kConsStringTag | kOneByteStringTag:
3037 case kConsStringTag | kTwoByteStringTag:
3038 return ConsString::cast(this)->ConsStringGet(index);
3039 case kExternalStringTag | kOneByteStringTag:
3040 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
3041 case kExternalStringTag | kTwoByteStringTag:
3042 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3043 case kSlicedStringTag | kOneByteStringTag:
3044 case kSlicedStringTag | kTwoByteStringTag:
3045 return SlicedString::cast(this)->SlicedStringGet(index);
// Character write; only sequential strings are mutable in place.
3055 void String::Set(int index, uint16_t value) {
3056 ASSERT(index >= 0 && index < length());
3057 ASSERT(StringShape(this).IsSequential());
3059 return this->IsOneByteRepresentation()
3060 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3061 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
// A cons string counts as flat when its second part is empty.
3065 bool String::IsFlat() {
3066 if (!StringShape(this).IsCons()) return true;
3067 return ConsString::cast(this)->second()->length() == 0;
3071 String* String::GetUnderlying() {
3072 // Giving direct access to underlying string only makes sense if the
3073 // wrapping string is already flattened.
3074 ASSERT(this->IsFlat());
3075 ASSERT(StringShape(this).IsIndirect());
// ConsString::first and SlicedString::parent share an offset (checked
// below), so one read serves both indirect representations.
3076 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3077 const int kUnderlyingOffset = SlicedString::kParentOffset;
3078 return String::cast(READ_FIELD(this, kUnderlyingOffset));
// Iterative string-visitation loop: unwraps sliced strings (folding
// their offset into slice_offset) and delegates cons strings to
// |cons_op|, until a leaf (seq/external) chunk can be handed to
// |visitor| as raw one- or two-byte characters.
3082 template<class Visitor, class ConsOp>
3090 ASSERT(length == static_cast<unsigned>(string->length()));
3091 ASSERT(offset <= length);
3092 unsigned slice_offset = offset;
3094 ASSERT(type == string->map()->instance_type());
3096 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3097 case kSeqStringTag | kOneByteStringTag:
3098 visitor.VisitOneByteString(
3099 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3103 case kSeqStringTag | kTwoByteStringTag:
3104 visitor.VisitTwoByteString(
3105 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3109 case kExternalStringTag | kOneByteStringTag:
3110 visitor.VisitOneByteString(
3111 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3115 case kExternalStringTag | kTwoByteStringTag:
3116 visitor.VisitTwoByteString(
3117 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
// Sliced string: retarget to the parent, accumulating the offset.
3121 case kSlicedStringTag | kOneByteStringTag:
3122 case kSlicedStringTag | kTwoByteStringTag: {
3123 SlicedString* slicedString = SlicedString::cast(string);
3124 slice_offset += slicedString->offset();
3125 string = slicedString->parent();
3126 type = string->map()->instance_type();
// Cons string: let the ConsOp pick the next segment (NULL = done).
3130 case kConsStringTag | kOneByteStringTag:
3131 case kConsStringTag | kTwoByteStringTag:
3132 string = cons_op.Operate(string, &offset, &type, &length);
3133 if (string == NULL) return;
3134 slice_offset = offset;
3135 ASSERT(length == static_cast<unsigned>(string->length()));
3146 // TODO(dcarney): Remove this class after conversion to VisitFlat.
// ConsOp that does not traverse: it records the first cons string
// encountered so VisitFlat can report it to the caller.
3147 class ConsStringCaptureOp {
3149 inline ConsStringCaptureOp() : cons_string_(NULL) {}
3150 inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
3151 cons_string_ = ConsString::cast(string);
3154 ConsString* cons_string_;
// Visits the string if it is flat; otherwise returns the blocking
// ConsString (and visits nothing past it) so the caller can flatten.
3158 template<class Visitor>
3159 ConsString* String::VisitFlat(Visitor* visitor,
3164 ASSERT(length >= 0 && length == string->length());
3165 ASSERT(offset >= 0 && offset <= length);
3166 ConsStringCaptureOp op;
3167 Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
3168 return op.cons_string_;
// Sequential one-byte string: characters are raw bytes after the header.
3172 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3173 ASSERT(index >= 0 && index < length());
3174 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3178 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3179 ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3180 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3181 static_cast<byte>(value));
3185 Address SeqOneByteString::GetCharsAddress() {
3186 return FIELD_ADDR(this, kHeaderSize);
3190 uint8_t* SeqOneByteString::GetChars() {
3191 return reinterpret_cast<uint8_t*>(GetCharsAddress());
// Sequential two-byte string: UTF-16 code units after the header.
3195 Address SeqTwoByteString::GetCharsAddress() {
3196 return FIELD_ADDR(this, kHeaderSize);
3200 uc16* SeqTwoByteString::GetChars() {
3201 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3205 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3206 ASSERT(index >= 0 && index < length());
3207 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3211 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3212 ASSERT(index >= 0 && index < length());
3213 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
// Object sizes derived from length (instance_type is unused here).
3217 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3218 return SizeFor(length());
3222 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3223 return SizeFor(length());
// SlicedString: a (parent, offset) view over another string. The parent
// must itself be flat (seq or external), which set_parent asserts.
3227 String* SlicedString::parent() {
3228 return String::cast(READ_FIELD(this, kParentOffset));
3232 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3233 ASSERT(parent->IsSeqString() || parent->IsExternalString());
3234 WRITE_FIELD(this, kParentOffset, parent);
3235 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3239 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
// ConsString: lazy concatenation of (first, second). unchecked_*
// variants skip the String cast for GC-time use.
3242 String* ConsString::first() {
3243 return String::cast(READ_FIELD(this, kFirstOffset));
3247 Object* ConsString::unchecked_first() {
3248 return READ_FIELD(this, kFirstOffset);
3252 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3253 WRITE_FIELD(this, kFirstOffset, value);
3254 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3258 String* ConsString::second() {
3259 return String::cast(READ_FIELD(this, kSecondOffset));
3263 Object* ConsString::unchecked_second() {
3264 return READ_FIELD(this, kSecondOffset);
3268 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3269 WRITE_FIELD(this, kSecondOffset, value);
3270 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
// Short external strings lack the cached-data field; tested via the
// instance type bits.
3274 bool ExternalString::is_short() {
3275 InstanceType type = map()->instance_type();
3276 return (type & kShortExternalStringMask) == kShortExternalStringTag;
// External strings hold a pointer to an embedder-supplied Resource and
// (for non-short strings) a cached copy of resource()->data() so the
// characters can be read without a virtual call.
3280 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3281 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
// Refreshes the cached data pointer; short strings have no cache field.
3285 void ExternalAsciiString::update_data_cache() {
3286 if (is_short()) return;
3287 const char** data_field =
3288 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3289 *data_field = resource()->data();
3293 void ExternalAsciiString::set_resource(
3294 const ExternalAsciiString::Resource* resource) {
3295 ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3296 *reinterpret_cast<const Resource**>(
3297 FIELD_ADDR(this, kResourceOffset)) = resource;
3298 if (resource != NULL) update_data_cache();
3302 const uint8_t* ExternalAsciiString::GetChars() {
3303 return reinterpret_cast<const uint8_t*>(resource()->data());
3307 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3308 ASSERT(index >= 0 && index < length());
3309 return GetChars()[index];
// Two-byte variant, same structure as the ASCII one above.
3313 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3314 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3318 void ExternalTwoByteString::update_data_cache() {
3319 if (is_short()) return;
3320 const uint16_t** data_field =
3321 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3322 *data_field = resource()->data();
3326 void ExternalTwoByteString::set_resource(
3327 const ExternalTwoByteString::Resource* resource) {
3328 *reinterpret_cast<const Resource**>(
3329 FIELD_ADDR(this, kResourceOffset)) = resource;
3330 if (resource != NULL) update_data_cache();
3334 const uint16_t* ExternalTwoByteString::GetChars() {
3335 return resource()->data();
3339 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3340 ASSERT(index >= 0 && index < length());
3341 return GetChars()[index];
3345 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3347 return GetChars() + start;
// No-op ConsOp used when the input is known to contain no cons strings.
3351 String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {
// The iterator keeps an explicit stack of cons strings in frames_,
// indexed modulo the stack size via kDepthMask (overflow is detected
// and handled by restarting from the root; see ContinueOperation).
3356 unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
3357 return depth & kDepthMask;
3361 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3362 frames_[depth_++ & kDepthMask] = string;
// PushRight replaces the current top rather than growing the stack.
3366 void ConsStringIteratorOp::PushRight(ConsString* string) {
3368 frames_[(depth_-1) & kDepthMask] = string;
3372 void ConsStringIteratorOp::AdjustMaximumDepth() {
3373 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3377 void ConsStringIteratorOp::Pop() {
3379 ASSERT(depth_ <= maximum_depth_);
3384 bool ConsStringIteratorOp::HasMore() {
3389 void ConsStringIteratorOp::Reset() {
// Advances to the next leaf string. If the bounded stack overflowed
// ("blew"), traversal restarts from the root via Search; returns NULL
// when the whole tree has been visited.
3394 String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out,
3395 unsigned* length_out) {
3396 bool blew_stack = false;
3397 String* string = NextLeaf(&blew_stack, type_out, length_out);
3399 if (string != NULL) {
3401 ASSERT(*length_out == static_cast<unsigned>(string->length()));
3402 ASSERT(*type_out == string->map()->instance_type());
3405 // Traversal complete.
3406 if (!blew_stack) return NULL;
3407 // Restart search from root.
3408 unsigned offset_out;
3409 string = Search(&offset_out, type_out, length_out);
3411 ASSERT(string == NULL || offset_out == 0);
3412 ASSERT(string == NULL ||
3413 *length_out == static_cast<unsigned>(string->length()));
3414 ASSERT(string == NULL || *type_out == string->map()->instance_type());
// Streams characters across an arbitrary (possibly cons/sliced) string.
// buffer8_/buffer16_ alias the same cursor; is_one_byte_ selects which
// view is active for the current leaf segment.
3419 uint16_t StringCharacterStream::GetNext() {
3420 ASSERT(buffer8_ != NULL && end_ != NULL);
3421 // Advance cursor if needed.
3422 // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this.
3423 if (buffer8_ == end_) HasMore();
3424 ASSERT(buffer8_ < end_);
3425 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3429 StringCharacterStream::StringCharacterStream(String* string,
3430 ConsStringIteratorOp* op,
3432 : is_one_byte_(false),
3434 Reset(string, offset);
// Re-targets the stream: visits the first leaf segment of |string|,
// which fills the buffer pointers via Visit*String below.
3438 void StringCharacterStream::Reset(String* string, unsigned offset) {
3442 int32_t type = string->map()->instance_type();
3443 unsigned length = string->length();
3444 String::Visit(string, offset, *this, *op_, type, length);
// Refills the buffer from the next leaf when the current one is
// exhausted; returns false once the cons iterator is done.
3448 bool StringCharacterStream::HasMore() {
3449 if (buffer8_ != end_) return true;
3450 if (!op_->HasMore()) return false;
3453 String* string = op_->ContinueOperation(&type, &length);
3454 if (string == NULL) return false;
3455 ASSERT(!string->IsConsString());
3456 ASSERT(string->length() != 0);
3457 ConsStringNullOp null_op;
3458 String::Visit(string, 0, *this, null_op, type, length);
3459 ASSERT(buffer8_ != end_);
// Visitor callbacks: capture the leaf segment's raw character range.
3464 void StringCharacterStream::VisitOneByteString(
3465 const uint8_t* chars, unsigned length) {
3466 is_one_byte_ = true;
3468 end_ = chars + length;
3472 void StringCharacterStream::VisitTwoByteString(
3473 const uint16_t* chars, unsigned length) {
3474 is_one_byte_ = false;
3476 end_ = reinterpret_cast<const uint8_t*>(chars + length);
// The cache stores size and finger (cursor) as Smis in fixed slots
// before the entry area starting at kEntriesIndex.
3480 void JSFunctionResultCache::MakeZeroSize() {
3481 set_finger_index(kEntriesIndex);
3482 set_size(kEntriesIndex);
// Clears all entries to the-hole without shrinking the backing store.
3486 void JSFunctionResultCache::Clear() {
3487 int cache_size = size();
3488 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3489 MemsetPointer(entries_start,
3490 GetHeap()->the_hole_value(),
3491 cache_size - kEntriesIndex);
3496 int JSFunctionResultCache::size() {
3497 return Smi::cast(get(kCacheSizeIndex))->value();
3501 void JSFunctionResultCache::set_size(int size) {
3502 set(kCacheSizeIndex, Smi::FromInt(size));
3506 int JSFunctionResultCache::finger_index() {
3507 return Smi::cast(get(kFingerIndex))->value();
3511 void JSFunctionResultCache::set_finger_index(int finger_index) {
3512 set(kFingerIndex, Smi::FromInt(finger_index));
3516 byte ByteArray::get(int index) {
3517 ASSERT(index >= 0 && index < this->length());
3518 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3522 void ByteArray::set(int index, byte value) {
3523 ASSERT(index >= 0 && index < this->length());
3524 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3528 int ByteArray::get_int(int index) {
3529 ASSERT(index >= 0 && (index * kIntSize) < this->length());
3530 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3534 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3535 ASSERT_TAG_ALIGNED(address);
3536 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3540 Address ByteArray::GetDataStartAddress() {
3541 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3545 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3546 return reinterpret_cast<uint8_t*>(external_pointer());
3550 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3551 ASSERT((index >= 0) && (index < this->length()));
3552 uint8_t* ptr = external_uint8_clamped_pointer();
3557 MaybeObject* ExternalUint8ClampedArray::get(int index) {
3558 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3562 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3563 ASSERT((index >= 0) && (index < this->length()));
3564 uint8_t* ptr = external_uint8_clamped_pointer();
3569 void* ExternalArray::external_pointer() {
3570 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3571 return reinterpret_cast<void*>(ptr);
3575 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3576 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3577 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3581 int8_t ExternalInt8Array::get_scalar(int index) {
3582 ASSERT((index >= 0) && (index < this->length()));
3583 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3588 MaybeObject* ExternalInt8Array::get(int index) {
3589 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3593 void ExternalInt8Array::set(int index, int8_t value) {
3594 ASSERT((index >= 0) && (index < this->length()));
3595 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3600 uint8_t ExternalUint8Array::get_scalar(int index) {
3601 ASSERT((index >= 0) && (index < this->length()));
3602 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3607 MaybeObject* ExternalUint8Array::get(int index) {
3608 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3612 void ExternalUint8Array::set(int index, uint8_t value) {
3613 ASSERT((index >= 0) && (index < this->length()));
3614 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3619 int16_t ExternalInt16Array::get_scalar(int index) {
3620 ASSERT((index >= 0) && (index < this->length()));
3621 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3626 MaybeObject* ExternalInt16Array::get(int index) {
3627 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3631 void ExternalInt16Array::set(int index, int16_t value) {
3632 ASSERT((index >= 0) && (index < this->length()));
3633 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3638 uint16_t ExternalUint16Array::get_scalar(int index) {
3639 ASSERT((index >= 0) && (index < this->length()));
3640 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3645 MaybeObject* ExternalUint16Array::get(int index) {
3646 return Smi::FromInt(static_cast<int>(get_scalar(index)));
3650 void ExternalUint16Array::set(int index, uint16_t value) {
3651 ASSERT((index >= 0) && (index < this->length()));
3652 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3657 int32_t ExternalInt32Array::get_scalar(int index) {
3658 ASSERT((index >= 0) && (index < this->length()));
3659 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3664 MaybeObject* ExternalInt32Array::get(int index) {
3665 return GetHeap()->NumberFromInt32(get_scalar(index));
3669 void ExternalInt32Array::set(int index, int32_t value) {
3670 ASSERT((index >= 0) && (index < this->length()));
3671 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3676 uint32_t ExternalUint32Array::get_scalar(int index) {
3677 ASSERT((index >= 0) && (index < this->length()));
3678 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3683 MaybeObject* ExternalUint32Array::get(int index) {
3684 return GetHeap()->NumberFromUint32(get_scalar(index));
3688 void ExternalUint32Array::set(int index, uint32_t value) {
3689 ASSERT((index >= 0) && (index < this->length()));
3690 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3695 float ExternalFloat32Array::get_scalar(int index) {
3696 ASSERT((index >= 0) && (index < this->length()));
3697 float* ptr = static_cast<float*>(external_pointer());
3702 MaybeObject* ExternalFloat32Array::get(int index) {
3703 return GetHeap()->NumberFromDouble(get_scalar(index));
3707 void ExternalFloat32Array::set(int index, float value) {
3708 ASSERT((index >= 0) && (index < this->length()));
3709 float* ptr = static_cast<float*>(external_pointer());
3714 float32x4_value_t ExternalFloat32x4Array::get_scalar(int index) {
3715 ASSERT((index >= 0) && (index < this->length()));
3716 float* ptr = static_cast<float*>(external_pointer());
3717 float32x4_value_t value;
3718 value.storage[0] = ptr[index * 4 + 0];
3719 value.storage[1] = ptr[index * 4 + 1];
3720 value.storage[2] = ptr[index * 4 + 2];
3721 value.storage[3] = ptr[index * 4 + 3];
3726 MaybeObject* ExternalFloat32x4Array::get(int index) {
3727 float32x4_value_t value = get_scalar(index);
3728 return GetHeap()->AllocateFloat32x4(value);
3732 void ExternalFloat32x4Array::set(int index, const float32x4_value_t& value) {
3733 ASSERT((index >= 0) && (index < this->length()));
3734 float* ptr = static_cast<float*>(external_pointer());
3735 ptr[index * 4 + 0] = value.storage[0];
3736 ptr[index * 4 + 1] = value.storage[1];
3737 ptr[index * 4 + 2] = value.storage[2];
3738 ptr[index * 4 + 3] = value.storage[3];
3742 int32x4_value_t ExternalInt32x4Array::get_scalar(int index) {
3743 ASSERT((index >= 0) && (index < this->length()));
3744 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3745 int32x4_value_t value;
3746 value.storage[0] = ptr[index * 4 + 0];
3747 value.storage[1] = ptr[index * 4 + 1];
3748 value.storage[2] = ptr[index * 4 + 2];
3749 value.storage[3] = ptr[index * 4 + 3];
3754 MaybeObject* ExternalInt32x4Array::get(int index) {
3755 int32x4_value_t value = get_scalar(index);
3756 return GetHeap()->AllocateInt32x4(value);
3760 void ExternalInt32x4Array::set(int index, const int32x4_value_t& value) {
3761 ASSERT((index >= 0) && (index < this->length()));
3762 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3763 ptr[index * 4 + 0] = value.storage[0];
3764 ptr[index * 4 + 1] = value.storage[1];
3765 ptr[index * 4 + 2] = value.storage[2];
3766 ptr[index * 4 + 3] = value.storage[3];
3770 double ExternalFloat64Array::get_scalar(int index) {
3771 ASSERT((index >= 0) && (index < this->length()));
3772 double* ptr = static_cast<double*>(external_pointer());
3777 MaybeObject* ExternalFloat64Array::get(int index) {
3778 return GetHeap()->NumberFromDouble(get_scalar(index));
3782 void ExternalFloat64Array::set(int index, double value) {
3783 ASSERT((index >= 0) && (index < this->length()));
3784 double* ptr = static_cast<double*>(external_pointer());
3789 int FixedTypedArrayBase::size() {
3790 InstanceType instance_type = map()->instance_type();
3792 switch (instance_type) {
3793 case FIXED_UINT8_ARRAY_TYPE:
3794 case FIXED_INT8_ARRAY_TYPE:
3795 case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
3798 case FIXED_UINT16_ARRAY_TYPE:
3799 case FIXED_INT16_ARRAY_TYPE:
3802 case FIXED_UINT32_ARRAY_TYPE:
3803 case FIXED_INT32_ARRAY_TYPE:
3804 case FIXED_FLOAT32_ARRAY_TYPE:
3807 case FIXED_FLOAT64_ARRAY_TYPE:
3810 case FIXED_FLOAT32x4_ARRAY_TYPE:
3811 case FIXED_INT32x4_ARRAY_TYPE:
3818 return OBJECT_POINTER_ALIGN(kDataOffset + length() * element_size);
3822 template <class Traits>
3823 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
3824 ASSERT((index >= 0) && (index < this->length()));
3825 ElementType* ptr = reinterpret_cast<ElementType*>(
3826 FIELD_ADDR(this, kDataOffset));
3832 FixedTypedArray<Float64ArrayTraits>::ElementType
3833 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
3834 ASSERT((index >= 0) && (index < this->length()));
3835 return READ_DOUBLE_FIELD(this, ElementOffset(index));
3839 template <class Traits>
3840 void FixedTypedArray<Traits>::set(int index, ElementType value) {
3841 ASSERT((index >= 0) && (index < this->length()));
3842 ElementType* ptr = reinterpret_cast<ElementType*>(
3843 FIELD_ADDR(this, kDataOffset));
3849 void FixedTypedArray<Float64ArrayTraits>::set(
3850 int index, Float64ArrayTraits::ElementType value) {
3851 ASSERT((index >= 0) && (index < this->length()));
3852 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
3856 template <class Traits>
3857 MaybeObject* FixedTypedArray<Traits>::get(int index) {
3858 return Traits::ToObject(GetHeap(), get_scalar(index));
3861 template <class Traits>
3862 MaybeObject* FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
3863 ElementType cast_value = Traits::defaultValue();
3864 if (index < static_cast<uint32_t>(length())) {
3865 if (value->IsSmi()) {
3866 int int_value = Smi::cast(value)->value();
3867 cast_value = static_cast<ElementType>(int_value);
3868 } else if (value->IsHeapNumber()) {
3869 double double_value = HeapNumber::cast(value)->value();
3870 cast_value = static_cast<ElementType>(DoubleToInt32(double_value));
3872 // Clamp undefined to the default value. All other types have been
3873 // converted to a number type further up in the call chain.
3874 ASSERT(value->IsUndefined());
3876 set(index, cast_value);
3878 return Traits::ToObject(GetHeap(), cast_value);
3881 template <class Traits>
3882 Handle<Object> FixedTypedArray<Traits>::SetValue(
3883 Handle<FixedTypedArray<Traits> > array,
3885 Handle<Object> value) {
3886 CALL_HEAP_FUNCTION(array->GetIsolate(),
3887 array->SetValue(index, *value),
3892 MaybeObject* FixedTypedArray<Float32x4ArrayTraits>::SetValue(
3893 uint32_t index, Object* value) {
3894 float32x4_value_t cast_value;
3895 cast_value.storage[0] = static_cast<float>(OS::nan_value());
3896 cast_value.storage[1] = static_cast<float>(OS::nan_value());
3897 cast_value.storage[2] = static_cast<float>(OS::nan_value());
3898 cast_value.storage[3] = static_cast<float>(OS::nan_value());
3899 Heap* heap = GetHeap();
3900 if (index < static_cast<uint32_t>(length())) {
3901 if (value->IsFloat32x4()) {
3902 cast_value = Float32x4::cast(value)->value();
3904 // Clamp undefined to NaN (default). All other types have been
3905 // converted to a number type further up in the call chain.
3906 ASSERT(value->IsUndefined());
3908 set(index, cast_value);
3910 return heap->AllocateFloat32x4(cast_value);
3915 MaybeObject* FixedTypedArray<Int32x4ArrayTraits>::SetValue(
3916 uint32_t index, Object* value) {
3917 int32x4_value_t cast_value;
3918 cast_value.storage[0] = 0;
3919 cast_value.storage[1] = 0;
3920 cast_value.storage[2] = 0;
3921 cast_value.storage[3] = 0;
3922 Heap* heap = GetHeap();
3923 if (index < static_cast<uint32_t>(length())) {
3924 if (value->IsInt32x4()) {
3925 cast_value = Int32x4::cast(value)->value();
3927 // Clamp undefined to zero (default). All other types have been
3928 // converted to a number type further up in the call chain.
3929 ASSERT(value->IsUndefined());
3931 set(index, cast_value);
3933 return heap->AllocateInt32x4(cast_value);
3937 MaybeObject* Uint8ArrayTraits::ToObject(Heap*, uint8_t scalar) {
3938 return Smi::FromInt(scalar);
3942 MaybeObject* Uint8ClampedArrayTraits::ToObject(Heap*, uint8_t scalar) {
3943 return Smi::FromInt(scalar);
3947 MaybeObject* Int8ArrayTraits::ToObject(Heap*, int8_t scalar) {
3948 return Smi::FromInt(scalar);
3952 MaybeObject* Uint16ArrayTraits::ToObject(Heap*, uint16_t scalar) {
3953 return Smi::FromInt(scalar);
3957 MaybeObject* Int16ArrayTraits::ToObject(Heap*, int16_t scalar) {
3958 return Smi::FromInt(scalar);
3962 MaybeObject* Uint32ArrayTraits::ToObject(Heap* heap, uint32_t scalar) {
3963 return heap->NumberFromUint32(scalar);
3967 MaybeObject* Int32ArrayTraits::ToObject(Heap* heap, int32_t scalar) {
3968 return heap->NumberFromInt32(scalar);
3972 MaybeObject* Float32ArrayTraits::ToObject(Heap* heap, float scalar) {
3973 return heap->NumberFromDouble(scalar);
3977 MaybeObject* Int32x4ArrayTraits::ToObject(Heap* heap, int32x4_value_t scalar) {
3978 return heap->AllocateInt32x4(scalar);
3982 MaybeObject* Float32x4ArrayTraits::ToObject(
3983 Heap* heap, float32x4_value_t scalar) {
3984 return heap->AllocateFloat32x4(scalar);
3988 MaybeObject* Float64ArrayTraits::ToObject(Heap* heap, double scalar) {
3989 return heap->NumberFromDouble(scalar);
3993 int Map::visitor_id() {
3994 return READ_BYTE_FIELD(this, kVisitorIdOffset);
3998 void Map::set_visitor_id(int id) {
3999 ASSERT(0 <= id && id < 256);
4000 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4004 int Map::instance_size() {
4005 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
4009 int Map::inobject_properties() {
4010 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4014 int Map::pre_allocated_property_fields() {
4015 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
4019 int Map::GetInObjectPropertyOffset(int index) {
4020 // Adjust for the number of properties stored in the object.
4021 index -= inobject_properties();
4023 return instance_size() + (index * kPointerSize);
4027 int HeapObject::SizeFromMap(Map* map) {
4028 int instance_size = map->instance_size();
4029 if (instance_size != kVariableSizeSentinel) return instance_size;
4030 // Only inline the most frequent cases.
4031 int instance_type = static_cast<int>(map->instance_type());
4032 if (instance_type == FIXED_ARRAY_TYPE) {
4033 return FixedArray::BodyDescriptor::SizeOf(map, this);
4035 if (instance_type == ASCII_STRING_TYPE ||
4036 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
4037 return SeqOneByteString::SizeFor(
4038 reinterpret_cast<SeqOneByteString*>(this)->length());
4040 if (instance_type == BYTE_ARRAY_TYPE) {
4041 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4043 if (instance_type == FREE_SPACE_TYPE) {
4044 return reinterpret_cast<FreeSpace*>(this)->size();
4046 if (instance_type == STRING_TYPE ||
4047 instance_type == INTERNALIZED_STRING_TYPE) {
4048 return SeqTwoByteString::SizeFor(
4049 reinterpret_cast<SeqTwoByteString*>(this)->length());
4051 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4052 return FixedDoubleArray::SizeFor(
4053 reinterpret_cast<FixedDoubleArray*>(this)->length());
4055 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4056 return ConstantPoolArray::SizeFor(
4057 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
4058 reinterpret_cast<ConstantPoolArray*>(this)->count_of_ptr_entries(),
4059 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
4061 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4062 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4063 return reinterpret_cast<FixedTypedArrayBase*>(this)->size();
4065 ASSERT(instance_type == CODE_TYPE);
4066 return reinterpret_cast<Code*>(this)->CodeSize();
4070 void Map::set_instance_size(int value) {
4071 ASSERT_EQ(0, value & (kPointerSize - 1));
4072 value >>= kPointerSizeLog2;
4073 ASSERT(0 <= value && value < 256);
4074 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
4078 void Map::set_inobject_properties(int value) {
4079 ASSERT(0 <= value && value < 256);
4080 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4084 void Map::set_pre_allocated_property_fields(int value) {
4085 ASSERT(0 <= value && value < 256);
4086 WRITE_BYTE_FIELD(this,
4087 kPreAllocatedPropertyFieldsOffset,
4088 static_cast<byte>(value));
4092 InstanceType Map::instance_type() {
4093 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4097 void Map::set_instance_type(InstanceType value) {
4098 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4102 int Map::unused_property_fields() {
4103 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4107 void Map::set_unused_property_fields(int value) {
4108 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4112 byte Map::bit_field() {
4113 return READ_BYTE_FIELD(this, kBitFieldOffset);
4117 void Map::set_bit_field(byte value) {
4118 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4122 byte Map::bit_field2() {
4123 return READ_BYTE_FIELD(this, kBitField2Offset);
4127 void Map::set_bit_field2(byte value) {
4128 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4132 void Map::set_non_instance_prototype(bool value) {
4134 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4136 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4141 bool Map::has_non_instance_prototype() {
4142 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4146 void Map::set_function_with_prototype(bool value) {
4147 set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
4151 bool Map::function_with_prototype() {
4152 return FunctionWithPrototype::decode(bit_field3());
4156 void Map::set_is_access_check_needed(bool access_check_needed) {
4157 if (access_check_needed) {
4158 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4160 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4165 bool Map::is_access_check_needed() {
4166 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4170 void Map::set_is_extensible(bool value) {
4172 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4174 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4178 bool Map::is_extensible() {
4179 return ((1 << kIsExtensible) & bit_field2()) != 0;
4183 void Map::set_attached_to_shared_function_info(bool value) {
4185 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
4187 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
4191 bool Map::attached_to_shared_function_info() {
4192 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
4196 void Map::set_is_shared(bool value) {
4197 set_bit_field3(IsShared::update(bit_field3(), value));
4201 bool Map::is_shared() {
4202 return IsShared::decode(bit_field3());
4206 void Map::set_dictionary_map(bool value) {
4207 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4208 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4209 set_bit_field3(new_bit_field3);
4213 bool Map::is_dictionary_map() {
4214 return DictionaryMap::decode(bit_field3());
4218 Code::Flags Code::flags() {
4219 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4223 void Map::set_owns_descriptors(bool is_shared) {
4224 set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
4228 bool Map::owns_descriptors() {
4229 return OwnsDescriptors::decode(bit_field3());
4233 void Map::set_has_instance_call_handler() {
4234 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4238 bool Map::has_instance_call_handler() {
4239 return HasInstanceCallHandler::decode(bit_field3());
4243 void Map::deprecate() {
4244 set_bit_field3(Deprecated::update(bit_field3(), true));
4248 bool Map::is_deprecated() {
4249 if (!FLAG_track_fields) return false;
4250 return Deprecated::decode(bit_field3());
4254 void Map::set_migration_target(bool value) {
4255 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4259 bool Map::is_migration_target() {
4260 if (!FLAG_track_fields) return false;
4261 return IsMigrationTarget::decode(bit_field3());
4265 void Map::freeze() {
4266 set_bit_field3(IsFrozen::update(bit_field3(), true));
4270 bool Map::is_frozen() {
4271 return IsFrozen::decode(bit_field3());
4275 void Map::mark_unstable() {
4276 set_bit_field3(IsUnstable::update(bit_field3(), true));
4280 bool Map::is_stable() {
4281 return !IsUnstable::decode(bit_field3());
4285 bool Map::has_code_cache() {
4286 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4290 bool Map::CanBeDeprecated() {
4291 int descriptor = LastAdded();
4292 for (int i = 0; i <= descriptor; i++) {
4293 PropertyDetails details = instance_descriptors()->GetDetails(i);
4294 if (FLAG_track_fields && details.representation().IsNone()) {
4297 if (FLAG_track_fields && details.representation().IsSmi()) {
4300 if (FLAG_track_double_fields && details.representation().IsDouble()) {
4303 if (FLAG_track_heap_object_fields &&
4304 details.representation().IsHeapObject()) {
4307 if (FLAG_track_fields && details.type() == CONSTANT) {
4315 void Map::NotifyLeafMapLayoutChange() {
4318 dependent_code()->DeoptimizeDependentCodeGroup(
4320 DependentCode::kPrototypeCheckGroup);
4325 bool Map::CanOmitMapChecks() {
4326 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4330 int DependentCode::number_of_entries(DependencyGroup group) {
4331 if (length() == 0) return 0;
4332 return Smi::cast(get(group))->value();
4336 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4337 set(group, Smi::FromInt(value));
4341 bool DependentCode::is_code_at(int i) {
4342 return get(kCodesStartIndex + i)->IsCode();
4345 Code* DependentCode::code_at(int i) {
4346 return Code::cast(get(kCodesStartIndex + i));
4350 CompilationInfo* DependentCode::compilation_info_at(int i) {
4351 return reinterpret_cast<CompilationInfo*>(
4352 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4356 void DependentCode::set_object_at(int i, Object* object) {
4357 set(kCodesStartIndex + i, object);
4361 Object* DependentCode::object_at(int i) {
4362 return get(kCodesStartIndex + i);
4366 Object** DependentCode::slot_at(int i) {
4367 return RawFieldOfElementAt(kCodesStartIndex + i);
4371 void DependentCode::clear_at(int i) {
4372 set_undefined(kCodesStartIndex + i);
4376 void DependentCode::copy(int from, int to) {
4377 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4381 void DependentCode::ExtendGroup(DependencyGroup group) {
4382 GroupStartIndexes starts(this);
4383 for (int g = kGroupCount - 1; g > group; g--) {
4384 if (starts.at(g) < starts.at(g + 1)) {
4385 copy(starts.at(g), starts.at(g + 1));
4391 void Code::set_flags(Code::Flags flags) {
4392 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4393 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4397 Code::Kind Code::kind() {
4398 return ExtractKindFromFlags(flags());
4402 InlineCacheState Code::ic_state() {
4403 InlineCacheState result = ExtractICStateFromFlags(flags());
4404 // Only allow uninitialized or debugger states for non-IC code
4405 // objects. This is used in the debugger to determine whether or not
4406 // a call to code object has been replaced with a debug break call.
4407 ASSERT(is_inline_cache_stub() ||
4408 result == UNINITIALIZED ||
4409 result == DEBUG_STUB);
4414 ExtraICState Code::extra_ic_state() {
4415 ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind()))
4416 || ic_state() == DEBUG_STUB);
4417 return ExtractExtraICStateFromFlags(flags());
4421 ExtraICState Code::extended_extra_ic_state() {
4422 ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4423 ASSERT(needs_extended_extra_ic_state(kind()));
4424 return ExtractExtendedExtraICStateFromFlags(flags());
4428 Code::StubType Code::type() {
4429 return ExtractTypeFromFlags(flags());
4433 int Code::arguments_count() {
4434 ASSERT(kind() == STUB || is_handler());
4435 return ExtractArgumentsCountFromFlags(flags());
4439 // For initialization.
4440 void Code::set_raw_kind_specific_flags1(int value) {
4441 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4445 void Code::set_raw_kind_specific_flags2(int value) {
4446 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4450 inline bool Code::is_crankshafted() {
4451 return IsCrankshaftedField::decode(
4452 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4456 inline void Code::set_is_crankshafted(bool value) {
4457 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4458 int updated = IsCrankshaftedField::update(previous, value);
4459 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4463 int Code::major_key() {
4464 ASSERT(has_major_key());
4465 return StubMajorKeyField::decode(
4466 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4470 void Code::set_major_key(int major) {
4471 ASSERT(has_major_key());
4472 ASSERT(0 <= major && major < 256);
4473 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4474 int updated = StubMajorKeyField::update(previous, major);
4475 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4479 bool Code::has_major_key() {
4480 return kind() == STUB ||
4481 kind() == HANDLER ||
4482 kind() == BINARY_OP_IC ||
4483 kind() == COMPARE_IC ||
4484 kind() == COMPARE_NIL_IC ||
4485 kind() == LOAD_IC ||
4486 kind() == KEYED_LOAD_IC ||
4487 kind() == STORE_IC ||
4488 kind() == KEYED_STORE_IC ||
4489 kind() == TO_BOOLEAN_IC;
4493 bool Code::optimizable() {
4494 ASSERT_EQ(FUNCTION, kind());
4495 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4499 void Code::set_optimizable(bool value) {
4500 ASSERT_EQ(FUNCTION, kind());
4501 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4505 bool Code::has_deoptimization_support() {
4506 ASSERT_EQ(FUNCTION, kind());
4507 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4508 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4512 void Code::set_has_deoptimization_support(bool value) {
4513 ASSERT_EQ(FUNCTION, kind());
4514 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4515 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4516 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4520 bool Code::has_debug_break_slots() {
4521 ASSERT_EQ(FUNCTION, kind());
4522 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4523 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4527 void Code::set_has_debug_break_slots(bool value) {
4528 ASSERT_EQ(FUNCTION, kind());
4529 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4530 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4531 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4535 bool Code::is_compiled_optimizable() {
4536 ASSERT_EQ(FUNCTION, kind());
4537 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4538 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4542 void Code::set_compiled_optimizable(bool value) {
4543 ASSERT_EQ(FUNCTION, kind());
4544 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4545 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4546 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4550 int Code::allow_osr_at_loop_nesting_level() {
4551 ASSERT_EQ(FUNCTION, kind());
4552 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
4556 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4557 ASSERT_EQ(FUNCTION, kind());
4558 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
4559 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
4563 int Code::profiler_ticks() {
4564 ASSERT_EQ(FUNCTION, kind());
4565 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4569 void Code::set_profiler_ticks(int ticks) {
4570 ASSERT_EQ(FUNCTION, kind());
4571 ASSERT(ticks < 256);
4572 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4576 unsigned Code::stack_slots() {
4577 ASSERT(is_crankshafted());
4578 return StackSlotsField::decode(
4579 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4583 void Code::set_stack_slots(unsigned slots) {
4584 CHECK(slots <= (1 << kStackSlotsBitCount));
4585 ASSERT(is_crankshafted());
4586 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4587 int updated = StackSlotsField::update(previous, slots);
4588 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4592 unsigned Code::safepoint_table_offset() {
4593 ASSERT(is_crankshafted());
4594 return SafepointTableOffsetField::decode(
4595 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4599 void Code::set_safepoint_table_offset(unsigned offset) {
4600 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4601 ASSERT(is_crankshafted());
4602 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4603 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4604 int updated = SafepointTableOffsetField::update(previous, offset);
4605 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4609 unsigned Code::back_edge_table_offset() {
4610 ASSERT_EQ(FUNCTION, kind());
4611 return BackEdgeTableOffsetField::decode(
4612 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4616 void Code::set_back_edge_table_offset(unsigned offset) {
4617 ASSERT_EQ(FUNCTION, kind());
4618 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4619 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4620 int updated = BackEdgeTableOffsetField::update(previous, offset);
4621 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4625 bool Code::back_edges_patched_for_osr() {
4626 ASSERT_EQ(FUNCTION, kind());
4627 return BackEdgesPatchedForOSRField::decode(
4628 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4632 void Code::set_back_edges_patched_for_osr(bool value) {
4633 ASSERT_EQ(FUNCTION, kind());
4634 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4635 int updated = BackEdgesPatchedForOSRField::update(previous, value);
4636 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4641 byte Code::to_boolean_state() {
4642 return extended_extra_ic_state();
4646 bool Code::has_function_cache() {
4647 ASSERT(kind() == STUB);
4648 return HasFunctionCacheField::decode(
4649 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4653 void Code::set_has_function_cache(bool flag) {
4654 ASSERT(kind() == STUB);
4655 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4656 int updated = HasFunctionCacheField::update(previous, flag);
4657 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4661 bool Code::marked_for_deoptimization() {
4662 ASSERT(kind() == OPTIMIZED_FUNCTION);
4663 return MarkedForDeoptimizationField::decode(
4664 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4668 void Code::set_marked_for_deoptimization(bool flag) {
4669 ASSERT(kind() == OPTIMIZED_FUNCTION);
4670 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4671 int updated = MarkedForDeoptimizationField::update(previous, flag);
4672 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4676 bool Code::is_inline_cache_stub() {
4677 Kind kind = this->kind();
4679 #define CASE(name) case name: return true;
4682 default: return false;
4687 bool Code::is_keyed_stub() {
4688 return is_keyed_load_stub() || is_keyed_store_stub();
4692 bool Code::is_debug_stub() {
4693 return ic_state() == DEBUG_STUB;
4697 ConstantPoolArray* Code::constant_pool() {
4698 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
4702 void Code::set_constant_pool(Object* value) {
4703 ASSERT(value->IsConstantPoolArray());
4704 WRITE_FIELD(this, kConstantPoolOffset, value);
4705 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
// Packs kind, IC state, stub type, extra IC state, cache holder and (when
// the kind does not need the extended extra state) the argument count into
// a single Flags word via the per-field BitField encoders.
// NOTE(review): truncated extract — the StubType/argc parameter lines are
// missing from this view.
4709 Code::Flags Code::ComputeFlags(Kind kind,
4710 InlineCacheState ic_state,
4711 ExtraICState extra_ic_state,
4714 InlineCacheHolderFlag holder) {
4715 ASSERT(argc <= Code::kMaxArguments);
4716 // Compute the bit mask.
4717 unsigned int bits = KindField::encode(kind)
4718 | ICStateField::encode(ic_state)
4719 | TypeField::encode(type)
4720 | ExtendedExtraICStateField::encode(extra_ic_state)
4721 | CacheHolderField::encode(holder);
4722 if (!Code::needs_extended_extra_ic_state(kind)) {
4723 bits |= (argc << kArgumentsCountShift);
4725 return static_cast<Flags>(bits);

// Shorthand for ComputeFlags with the IC state fixed to MONOMORPHIC.
4729 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
4730 ExtraICState extra_ic_state,
4731 InlineCacheHolderFlag holder,
4734 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);

// The Extract* family below decodes individual fields back out of a
// packed Flags word; each is the inverse of the matching encode above.
4738 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4739 return KindField::decode(flags);

4743 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4744 return ICStateField::decode(flags);

4748 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4749 return ExtraICStateField::decode(flags);

4753 ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
4755 return ExtendedExtraICStateField::decode(flags);

4759 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4760 return TypeField::decode(flags);

// Argument count uses a manual mask/shift rather than a BitField class.
4764 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
4765 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;

4769 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
4770 return CacheHolderField::decode(flags);

// Clears only the stub-type field, keeping all other flag bits.
4774 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
4775 int bits = flags & ~TypeField::kMask;
4776 return static_cast<Flags>(bits);
// Converts an instruction-start address back into the enclosing Code
// object by subtracting the header size.
4780 Code* Code::GetCodeFromTargetAddress(Address address) {
4781 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
4782 // GetCodeFromTargetAddress might be called when marking objects during mark
4783 // sweep. reinterpret_cast is therefore used instead of the more appropriate
4784 // Code::cast. Code::cast does not work when the object's map is
4786 Code* result = reinterpret_cast<Code*>(code);

// Same idea, but reads the entry address out of memory first and returns
// the result as a plain Object*.
4791 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
4793 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
// Accessor for the map's [[Prototype]] slot.
4797 Object* Map::prototype() {
4798 return READ_FIELD(this, kPrototypeOffset);

// Setter: a valid prototype is null or a JSReceiver (asserted); the write
// barrier is applied conditionally per the caller-supplied mode.
4802 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
4803 ASSERT(value->IsNull() || value->IsJSReceiver());
4804 WRITE_FIELD(this, kPrototypeOffset, value);
4805 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
// Ensures `map` owns a full TransitionArray, allocating an empty one (and
// preserving the existing back pointer) or extending a simple transition
// to a full array as needed. Returns a failure MaybeObject on allocation
// failure. NOTE(review): truncated extract — the early-return path when a
// full array already exists is not visible here.
4809 // If the descriptor is using the empty transition array, install a new empty
4810 // transition array that will have place for an element transition.
4811 static MaybeObject* EnsureHasTransitionArray(Map* map) {
4812 TransitionArray* transitions;
4813 MaybeObject* maybe_transitions;
4814 if (!map->HasTransitionArray()) {
4815 maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
4816 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4817 transitions->set_back_pointer_storage(map->GetBackPointer());
4818 } else if (!map->transitions()->IsFullTransitionArray()) {
4819 maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
4820 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4824 map->set_transitions(transitions);
// Installs a descriptor array and records that this map owns all of its
// descriptors (own-descriptor count == array length).
4829 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
4830 int len = descriptors->number_of_descriptors();
4831 set_instance_descriptors(descriptors);
4832 SetNumberOfOwnDescriptors(len);

4836 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)

// bit_field3 is stored as a Smi; the top bit is reclaimed by
// sign-extending, so only 31 bits of payload round-trip.
4839 void Map::set_bit_field3(uint32_t bits) {
4840 // Ensure the upper 2 bits have the same value by sign extending it. This is
4841 // necessary to be able to use the 31st bit.
4842 int value = bits << 1;
4843 WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));

// Getter: decodes the Smi back into the raw bit field.
4847 uint32_t Map::bit_field3() {
4848 Object* value = READ_FIELD(this, kBitField3Offset);
4849 return Smi::cast(value)->value();
// Drops the transition array by overwriting the shared slot with the bare
// back pointer; in zap-garbage builds the old array is zapped first.
// NOTE(review): truncated extract — the zap call inside the if is missing.
4853 void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
4854 Object* back_pointer = GetBackPointer();
4856 if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
4860 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
4861 CONDITIONAL_WRITE_BARRIER(
4862 heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);

// Appends one descriptor to this map's descriptor array and bumps the
// own-descriptor count; asserts the map owned all descriptors beforehand.
4866 void Map::AppendDescriptor(Descriptor* desc,
4867 const DescriptorArray::WhitenessWitness& witness) {
4868 DescriptorArray* descriptors = instance_descriptors();
4869 int number_of_own_descriptors = NumberOfOwnDescriptors();
4870 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
4871 descriptors->Append(desc, witness);
4872 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
// The kTransitionsOrBackPointerOffset slot holds either a transition array
// (back pointer stored inside it) or the back pointer directly.
// NOTE(review): the guard tests IsDescriptorArray() but the cast is to
// TransitionArray — verify against the original; upstream V8 of this era
// tests IsTransitionArray() here. Also truncated: the direct-return path
// is not visible.
4876 Object* Map::GetBackPointer() {
4877 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4878 if (object->IsDescriptorArray()) {
4879 return TransitionArray::cast(object)->back_pointer_storage();
4881 ASSERT(object->IsMap() || object->IsUndefined());

// True when a transition array exists and it records an elements-kind
// transition.
4887 bool Map::HasElementsTransition() {
4888 return HasTransitionArray() && transitions()->HasElementsTransition();

// Discriminates the shared slot: a transition array means transitions
// exist; otherwise the slot holds the back pointer.
4892 bool Map::HasTransitionArray() {
4893 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4894 return object->IsTransitionArray();

// Looks up the elements-transition entry by its dedicated symbol key.
4898 Map* Map::elements_transition_map() {
4899 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
4900 return transitions()->GetTarget(index);

// A transition can be added as long as growing the array by one entry
// still fits in a regular (non-large-object) page.
4904 bool Map::CanHaveMoreTransitions() {
4905 if (!HasTransitionArray()) return true;
4906 return FixedArray::SizeFor(transitions()->length() +
4907 TransitionArray::kTransitionSize)
4908 <= Page::kMaxRegularHeapObjectSize;
// Adds a (key -> target) transition: copy-inserts into an existing array,
// or creates a fresh (possibly simple) transition preserving the back
// pointer. Returns a failure MaybeObject on allocation failure.
// NOTE(review): truncated extract — the Map* target parameter line is
// missing from this view.
4912 MaybeObject* Map::AddTransition(Name* key,
4914 SimpleTransitionFlag flag) {
4915 if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
4916 return TransitionArray::NewWith(flag, key, target, GetBackPointer());

// Overwrites the target map of an existing transition entry in place.
4920 void Map::SetTransition(int transition_index, Map* target) {
4921 transitions()->SetTarget(transition_index, target);

// Reads the target map of an existing transition entry.
4925 Map* Map::GetTransition(int transition_index) {
4926 return transitions()->GetTarget(transition_index);

// Records an elements-kind transition under the dedicated symbol key and
// installs the (possibly new) transition array on this map.
4930 MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
4931 TransitionArray* transitions;
4932 MaybeObject* maybe_transitions = AddTransition(
4933 GetHeap()->elements_transition_symbol(),
4936 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4937 set_transitions(transitions);
// Returns the cached prototype-transition array, or the shared empty
// fixed array when there are no transitions to report.
4942 FixedArray* Map::GetPrototypeTransitions() {
4943 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
4944 if (!transitions()->HasPrototypeTransitions()) {
4945 return GetHeap()->empty_fixed_array();
4947 return transitions()->GetPrototypeTransitions();

// Replaces the prototype-transition cache, first ensuring a transition
// array exists and zapping any old cache; the element count is carried
// over into the new array.
4951 MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
4952 MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
4953 if (allow_prototype->IsFailure()) return allow_prototype;
4954 int old_number_of_transitions = NumberOfProtoTransitions();
4956 if (HasPrototypeTransitions()) {
4957 ASSERT(GetPrototypeTransitions() != proto_transitions);
4958 ZapPrototypeTransitions();
4961 transitions()->SetPrototypeTransitions(proto_transitions);
4962 SetNumberOfProtoTransitions(old_number_of_transitions);

// True when a transition array exists and it carries a prototype cache.
4967 bool Map::HasPrototypeTransitions() {
4968 return HasTransitionArray() && transitions()->HasPrototypeTransitions();

// Checked accessor for the transition array; callers must first verify
// HasTransitionArray() (asserted here).
4972 TransitionArray* Map::transitions() {
4973 ASSERT(HasTransitionArray());
4974 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4975 return TransitionArray::cast(object);
// Installs a replacement transition array. In debug mode, verifies every
// old transition whose target shares this map's descriptors survives in
// the new array, then the old array is zapped (not shared, so it must not
// keep its referents alive). NOTE(review): truncated extract — the zap
// call and some closing braces of the debug loop are not visible here.
4979 void Map::set_transitions(TransitionArray* transition_array,
4980 WriteBarrierMode mode) {
4981 // Transition arrays are not shared. When one is replaced, it should not
4982 // keep referenced objects alive, so we zap it.
4983 // When there is another reference to the array somewhere (e.g. a handle),
4984 // not zapping turns from a waste of memory into a source of crashes.
4985 if (HasTransitionArray()) {
4987 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
4988 Map* target = transitions()->GetTarget(i);
4989 if (target->instance_descriptors() == instance_descriptors()) {
4990 Name* key = transitions()->GetKey(i);
4991 int new_target_index = transition_array->Search(key);
4992 ASSERT(new_target_index != TransitionArray::kNotFound);
4993 ASSERT(transition_array->GetTarget(new_target_index) == target);
4997 ASSERT(transitions() != transition_array);
5001 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5002 CONDITIONAL_WRITE_BARRIER(
5003 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
// Initialization-only store of undefined into the shared slot; no write
// barrier is needed because undefined is immortal.
5007 void Map::init_back_pointer(Object* undefined) {
5008 ASSERT(undefined->IsUndefined());
5009 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);

// Sets the back pointer, storing it inside the transition array when one
// exists, otherwise directly in the shared slot. Asserts the transition is
// one-way: undefined<->map only.
5013 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5014 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5015 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5016 (value->IsMap() && GetBackPointer()->IsUndefined()));
5017 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5018 if (object->IsTransitionArray()) {
5019 TransitionArray::cast(object)->set_back_pointer_storage(value);
5021 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5022 CONDITIONAL_WRITE_BARRIER(
5023 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
// Raw, unchecked view of the transitions slot for use mid-GC, when the
// array header may be temporarily overwritten (see comment below).
5028 // Can either be Smi (no transitions), normal transition array, or a transition
5029 // array with the header overwritten as a Smi (thus iterating).
5030 TransitionArray* Map::unchecked_transition_array() {
5031 Object* object = *HeapObject::RawField(this,
5032 Map::kTransitionsOrBackPointerOffset);
5033 TransitionArray* transition_array = static_cast<TransitionArray*>(object);
5034 return transition_array;

// Unchecked companion for the prototype-transition cache; still asserts
// the expected structure in debug builds.
5038 HeapObject* Map::UncheckedPrototypeTransitions() {
5039 ASSERT(HasTransitionArray());
5040 ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
5041 return unchecked_transition_array()->UncheckedPrototypeTransitions();
// Macro-generated field accessors. Each ACCESSORS(h, name, type, offset)
// expands to a typed getter and a write-barriered setter for the field at
// `offset`; ACCESSORS_TO_SMI stores the value as a Smi; BOOL_ACCESSORS
// packs a bool into one bit of a Smi field.
5045 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5046 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5047 ACCESSORS(Map, constructor, Object, kConstructorOffset)
5049 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5050 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5051 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5053 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5054 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5055 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5056 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
5058 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
// Accessor/interceptor/template info objects (embedder API metadata).
5060 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5061 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5062 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5063 kExpectedReceiverTypeOffset)
5065 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5066 kSerializedDataOffset)
5068 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5071 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5072 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5073 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5075 ACCESSORS(Box, value, Object, kValueOffset)
5077 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5078 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5079 ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)
5081 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5082 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5083 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5085 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5086 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5087 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5088 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5089 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5090 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5092 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5093 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5095 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5096 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5097 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5099 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5100 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5101 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5102 kPrototypeTemplateOffset)
5103 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5104 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5105 kNamedPropertyHandlerOffset)
5106 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5107 kIndexedPropertyHandlerOffset)
5108 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5109 kInstanceTemplateOffset)
5110 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5111 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5112 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5113 kInstanceCallHandlerOffset)
5114 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5115 kAccessCheckInfoOffset)
5116 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5118 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5119 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5120 kInternalFieldCountOffset)
5122 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5123 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5125 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
// Allocation-site tracking fields.
5127 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5128 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5129 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5130 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5131 kPretenureCreateCountOffset)
5132 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5133 kDependentCodeOffset)
5134 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5135 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
// Script object fields.
5137 ACCESSORS(Script, source, Object, kSourceOffset)
5138 ACCESSORS(Script, name, Object, kNameOffset)
5139 ACCESSORS(Script, id, Smi, kIdOffset)
5140 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5141 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5142 ACCESSORS(Script, data, Object, kDataOffset)
5143 ACCESSORS(Script, context_data, Object, kContextOffset)
5144 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
5145 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5146 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5147 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5148 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5149 kEvalFrominstructionsOffsetOffset)
5150 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5151 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
// compilation_type/compilation_state are single bits of the Script flags
// Smi, exposed as two-valued enums.
5153 Script::CompilationType Script::compilation_type() {
5154 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5155 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;

5157 void Script::set_compilation_type(CompilationType type) {
5158 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5159 type == COMPILATION_TYPE_EVAL));

5161 Script::CompilationState Script::compilation_state() {
5162 return BooleanBit::get(flags(), kCompilationStateBit) ?
5163 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;

5165 void Script::set_compilation_state(CompilationState state) {
5166 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5167 state == COMPILATION_STATE_COMPILED));
// Debugger-only accessor definitions.
// NOTE(review): truncated extract — the matching #endif is not visible.
5171 #ifdef ENABLE_DEBUGGER_SUPPORT
5172 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5173 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5174 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5175 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5177 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5178 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5179 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5180 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
// SharedFunctionInfo field accessors, plus bit accessors packed into the
// flag / start_position_and_type / compiler_hints Smi fields.
5183 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5184 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5185 kOptimizedCodeMapOffset)
5186 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5187 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
5188 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5189 kInstanceClassNameOffset)
5190 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5191 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5192 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5193 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5194 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5197 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5198 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5199 kHiddenPrototypeBit)
5200 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5201 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5202 kNeedsAccessCheckBit)
5203 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5204 kReadOnlyPrototypeBit)
5205 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5206 kRemovePrototypeBit)
// NOTE(review): truncated extract — several BOOL_ACCESSORS argument lines
// (bit-name arguments) are missing between here and line 5229.
5207 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5209 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5211 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5214 BOOL_ACCESSORS(SharedFunctionInfo,
5216 allows_lazy_compilation,
5217 kAllowLazyCompilation)
5218 BOOL_ACCESSORS(SharedFunctionInfo,
5220 allows_lazy_compilation_without_context,
5221 kAllowLazyCompilationWithoutContext)
5222 BOOL_ACCESSORS(SharedFunctionInfo,
5226 BOOL_ACCESSORS(SharedFunctionInfo,
5228 has_duplicate_parameters,
5229 kHasDuplicateParameters)
// On 32-bit hosts these int fields fit in Smis and use plain accessors;
// the 64-bit path uses the PSEUDO_SMI machinery defined further below.
5232 #if V8_HOST_ARCH_32_BIT
5233 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5234 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5235 kFormalParameterCountOffset)
5236 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5237 kExpectedNofPropertiesOffset)
5238 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5239 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5240 kStartPositionAndTypeOffset)
5241 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5242 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5243 kFunctionTokenPositionOffset)
5244 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5245 kCompilerHintsOffset)
5246 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5247 kOptCountAndBailoutReasonOffset)
5248 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
// On 64-bit hosts, pairs of 32-bit ints share one pointer-sized slot. The
// LO half (pointer-aligned) is stored shifted left by one so its low bit
// is clear — making the whole word look like a Smi to the GC; the HI half
// (at +kIntSize) is a plain int. The 0xC0000000 assert bounds the value so
// the shift cannot overflow.
5252 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5253 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5254 int holder::name() { \
5255 int value = READ_INT_FIELD(this, offset); \
5256 ASSERT(kHeapObjectTag == 1); \
5257 ASSERT((value & kHeapObjectTag) == 0); \
5258 return value >> 1; \
5260 void holder::set_##name(int value) { \
5261 ASSERT(kHeapObjectTag == 1); \
5262 ASSERT((value & 0xC0000000) == 0xC0000000 || \
5263 (value & 0xC0000000) == 0x000000000); \
5264 WRITE_INT_FIELD(this, \
5266 (value << 1) & ~kHeapObjectTag); \
5269 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5270 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5271 INT_ACCESSORS(holder, name, offset)

// Field pairing below: each LO/HI pair occupies one pointer-sized slot.
5274 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5275 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5276 formal_parameter_count,
5277 kFormalParameterCountOffset)
5279 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5280 expected_nof_properties,
5281 kExpectedNofPropertiesOffset)
5282 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5284 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5285 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5286 start_position_and_type,
5287 kStartPositionAndTypeOffset)
5289 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5290 function_token_position,
5291 kFunctionTokenPositionOffset)
5292 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5294 kCompilerHintsOffset)
5296 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5297 opt_count_and_bailout_reason,
5298 kOptCountAndBailoutReasonOffset)
5300 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
// construction_count is a single raw byte field (range 0..255, asserted
// in the setter).
5305 int SharedFunctionInfo::construction_count() {
5306 return READ_BYTE_FIELD(this, kConstructionCountOffset);

5310 void SharedFunctionInfo::set_construction_count(int value) {
5311 ASSERT(0 <= value && value < 256);
5312 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));

// NOTE(review): truncated extract — the bit-source field argument of this
// BOOL_ACCESSORS invocation is on a missing line.
5316 BOOL_ACCESSORS(SharedFunctionInfo,
5318 live_objects_may_exist,
5319 kLiveObjectsMayExist)

// In-object slack tracking is in progress while initial_map is still set
// (it is cleared to undefined when tracking finishes).
5322 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
5323 return initial_map() != GetHeap()->undefined_value();
5333 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5334 set_compiler_hints(BooleanBit::set(compiler_hints(),
5335 kOptimizationDisabled,
5337 // If disabling optimizations we reflect that in the code object so
5338 // it will not be counted as optimizable code.
5339 if ((code()->kind() == Code::FUNCTION) && disable) {
5340 code()->set_optimizable(false);
5345 int SharedFunctionInfo::profiler_ticks() {
5346 if (code()->kind() != Code::FUNCTION) return 0;
5347 return code()->profiler_ticks();
// Language mode is encoded in two compiler-hint bits: extended mode
// implies strict mode (asserted); otherwise the strict bit alone decides
// strict vs. classic.
5351 LanguageMode SharedFunctionInfo::language_mode() {
5352 int hints = compiler_hints();
5353 if (BooleanBit::get(hints, kExtendedModeFunction)) {
5354 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
5355 return EXTENDED_MODE;
5357 return BooleanBit::get(hints, kStrictModeFunction)
5358 ? STRICT_MODE : CLASSIC_MODE;

// Writes both bits consistently; transitions may only keep the mode or
// move up the CLASSIC -> STRICT -> EXTENDED chain (asserted).
5362 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5363 // We only allow language mode transitions that go set the same language mode
5364 // again or go up in the chain:
5365 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
5366 ASSERT(this->language_mode() == CLASSIC_MODE ||
5367 this->language_mode() == language_mode ||
5368 language_mode == EXTENDED_MODE);
5369 int hints = compiler_hints();
5370 hints = BooleanBit::set(
5371 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
5372 hints = BooleanBit::set(
5373 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
5374 set_compiler_hints(hints);
// Classic mode is simply the absence of the strict bit.
5378 bool SharedFunctionInfo::is_classic_mode() {
5379 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);

// Remaining compiler_hints bit accessors. NOTE(review): truncated extract
// — the bit-name argument of two invocations below (lines 5386 and 5394)
// is missing from this view.
5382 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
5383 kExtendedModeFunction)
5384 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5385 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5387 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5388 name_should_print_as_anonymous,
5389 kNameShouldPrintAsAnonymous)
5390 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5391 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5392 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5393 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
5395 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
5396 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5397 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5398 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
// GC hook: detach the initial map if in-object slack tracking is still
// running before the object's pointers are visited.
5400 void SharedFunctionInfo::BeforeVisitingPointers() {
5401 if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();

5405 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5406 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5408 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
// A script's source is invalid only if it is an external string whose
// backing resource has been released (NULL); non-string or non-external
// sources are always considered valid.
// NOTE(review): truncated extract — the trailing default return is not
// visible here.
5410 bool Script::HasValidSource() {
5411 Object* src = this->source();
5412 if (!src->IsString()) return true;
5413 String* src_str = String::cast(src);
5414 if (!StringShape(src_str).IsExternal()) return true;
5415 if (src_str->IsOneByteRepresentation()) {
5416 return ExternalAsciiString::cast(src)->resource() != NULL;
5417 } else if (src_str->IsTwoByteRepresentation()) {
5418 return ExternalTwoByteString::cast(src)->resource() != NULL;
// Marks a builtin as not requiring argument adaption by storing the
// sentinel parameter count.
5424 void SharedFunctionInfo::DontAdaptArguments() {
5425 ASSERT(code()->kind() == Code::BUILTIN);
5426 set_formal_parameter_count(kDontAdaptArgumentsSentinel);

// start_position shares its int field with type bits; shift/mask on
// access.
5430 int SharedFunctionInfo::start_position() {
5431 return start_position_and_type() >> kStartPositionShift;

5435 void SharedFunctionInfo::set_start_position(int start_position) {
5436 set_start_position_and_type((start_position << kStartPositionShift)
5437 | (start_position_and_type() & ~kStartPositionMask))
// The unoptimized code object shared by all instances of this function.
5441 Code* SharedFunctionInfo::code() {
5442 return Code::cast(READ_FIELD(this, kCodeOffset));

// Setter: optimized code must never be installed here (asserted); a
// conditional write barrier keeps the GC informed.
5446 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5447 ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
5448 WRITE_FIELD(this, kCodeOffset, value);
5449 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);

// Replaces the code, first evicting this function from the code-flushing
// candidate list if it was enqueued (gc_metadata non-NULL marks that).
// NOTE(review): truncated extract — the final set_code call is missing.
5453 void SharedFunctionInfo::ReplaceCode(Code* value) {
5454 // If the GC metadata field is already used then the function was
5455 // enqueued as a code flushing candidate and we remove it now.
5456 if (code()->gc_metadata() != NULL) {
5457 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5458 flusher->EvictCandidate(this);
5461 ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

// scope_info is stored as a raw field; reinterpret on read/write.
5466 ScopeInfo* SharedFunctionInfo::scope_info() {
5467 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));

5471 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5472 WriteBarrierMode mode) {
5473 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5474 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5477 reinterpret_cast<Object*>(value),
// Compiled means the code is something other than the lazy-compile stub.
// NOTE(review): truncated extract — the comparison's left-hand side line
// is missing.
5482 bool SharedFunctionInfo::is_compiled() {
5484 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);

// function_data doubles as: FunctionTemplateInfo for API functions, or a
// Smi builtin-function id.
5488 bool SharedFunctionInfo::IsApiFunction() {
5489 return function_data()->IsFunctionTemplateInfo();

5493 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5494 ASSERT(IsApiFunction());
5495 return FunctionTemplateInfo::cast(function_data());

5499 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5500 return function_data()->IsSmi();

5504 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5505 ASSERT(HasBuiltinFunctionId());
5506 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
// The `counters` Smi packs three sub-fields via BitField classes:
// IC age, deopt count, and opt-reenable tries.
5510 int SharedFunctionInfo::ic_age() {
5511 return ICAgeBits::decode(counters());

5515 void SharedFunctionInfo::set_ic_age(int ic_age) {
5516 set_counters(ICAgeBits::update(counters(), ic_age));

5520 int SharedFunctionInfo::deopt_count() {
5521 return DeoptCountBits::decode(counters());

5525 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5526 set_counters(DeoptCountBits::update(counters(), deopt_count));

// Saturating-free increment: wraps within the field width via kMax mask.
5530 void SharedFunctionInfo::increment_deopt_count() {
5531 int value = counters();
5532 int deopt_count = DeoptCountBits::decode(value);
5533 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5534 set_counters(DeoptCountBits::update(value, deopt_count));

5538 int SharedFunctionInfo::opt_reenable_tries() {
5539 return OptReenableTriesBits::decode(counters());

5543 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5544 set_counters(OptReenableTriesBits::update(counters(), tries));

// opt_count_and_bailout_reason similarly packs the opt count and the
// disable-optimization bailout reason.
5548 int SharedFunctionInfo::opt_count() {
5549 return OptCountBits::decode(opt_count_and_bailout_reason());

5553 void SharedFunctionInfo::set_opt_count(int opt_count) {
5554 set_opt_count_and_bailout_reason(
5555 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));

// NOTE(review): truncated extract — the return statement is missing.
5559 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
5560 BailoutReason reason = static_cast<BailoutReason>(
5561 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
// Deopt support only exists on unoptimized FUNCTION code that was built
// with it.
5566 bool SharedFunctionInfo::has_deoptimization_support() {
5567 Code* code = this->code();
5568 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();

// Bumps the try counter (wrapping in-field) and re-enables optimization
// when the previous count is a power of two >= 16 — an exponential
// backoff. NOTE(review): truncated extract — a line between re-enabling
// and set_optimizable is missing.
5572 void SharedFunctionInfo::TryReenableOptimization() {
5573 int tries = opt_reenable_tries();
5574 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
5575 // We reenable optimization whenever the number of tries is a large
5576 // enough power of 2.
5577 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5578 set_optimization_disabled(false);
5581 code()->set_optimizable(true);
// A builtin function lives in the builtins global object.
5586 bool JSFunction::IsBuiltin() {
5587 return context()->global_object()->IsJSBuiltinsObject();

// Needs adaption unless the shared info carries the don't-adapt sentinel.
5591 bool JSFunction::NeedsArgumentsAdaption() {
5592 return shared()->formal_parameter_count() !=
5593 SharedFunctionInfo::kDontAdaptArgumentsSentinel;

// Optimization status is derived from the installed code object's kind
// (or, below, from which compile builtin is currently installed).
5597 bool JSFunction::IsOptimized() {
5598 return code()->kind() == Code::OPTIMIZED_FUNCTION;

5602 bool JSFunction::IsOptimizable() {
5603 return code()->kind() == Code::FUNCTION && code()->optimizable();

5607 bool JSFunction::IsMarkedForOptimization() {
5608 return code() == GetIsolate()->builtins()->builtin(
5609 Builtins::kCompileOptimized);

5613 bool JSFunction::IsMarkedForConcurrentOptimization() {
5614 return code() == GetIsolate()->builtins()->builtin(
5615 Builtins::kCompileOptimizedConcurrent);

5619 bool JSFunction::IsInOptimizationQueue() {
5620 return code() == GetIsolate()->builtins()->builtin(
5621 Builtins::kInOptimizationQueue);
// JSFunction stores a raw code *entry address*, not a tagged pointer;
// code() recovers the Code object from that address.
// NOTE(review): truncated extract — the cast wrapping this return and the
// final RecordWriteOfCodeEntry argument line are missing below.
5625 Code* JSFunction::code() {
5627 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));

// Setter: writes the entry address and notifies incremental marking
// (code entries are not covered by the normal write barrier).
5631 void JSFunction::set_code(Code* value) {
5632 ASSERT(!GetHeap()->InNewSpace(value));
5633 Address entry = value->entry();
5634 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5635 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5637 HeapObject::RawField(this, kCodeEntryOffset),

// Barrier-free variant — only safe when the caller guarantees the marker
// need not be informed (value must not be in new space, asserted).
5642 void JSFunction::set_code_no_write_barrier(Code* value) {
5643 ASSERT(!GetHeap()->InNewSpace(value));
5644 Address entry = value->entry();
5645 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
// Swaps in new code and keeps the native context's optimized-function
// list in sync with the optimized/unoptimized state change. When
// replacing one optimized code object with another, the old one is
// evicted from the shared optimized-code map first.
// NOTE(review): truncated extract — the set_code call between the evict
// step and the list update is not visible.
5649 void JSFunction::ReplaceCode(Code* code) {
5650 bool was_optimized = IsOptimized();
5651 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5653 if (was_optimized && is_optimized) {
5654 shared()->EvictFromOptimizedCodeMap(
5655 this->code(), "Replacing with another optimized code");
5660 // Add/remove the function from the list of optimized functions for this
5661 // context based on the state change.
5662 if (!was_optimized && is_optimized) {
5663 context()->native_context()->AddOptimizedFunction(this);
5665 if (was_optimized && !is_optimized) {
5666 // TODO(titzer): linear in the number of optimized functions; fix!
5667 context()->native_context()->RemoveOptimizedFunction(this);
// The function's creation context.
5672 Context* JSFunction::context() {
5673 return Context::cast(READ_FIELD(this, kContextOffset));

// Setter: undefined is allowed transiently during setup (asserted),
// otherwise a Context; always write-barriered.
5677 void JSFunction::set_context(Object* value) {
5678 ASSERT(value->IsUndefined() || value->IsContext());
5679 WRITE_FIELD(this, kContextOffset, value);
5680 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);

5683 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5684 kPrototypeOrInitialMapOffset)
// Accessors around the overloaded prototype_or_initial_map slot.
// The slot holds a Map once the function has been used as a constructor;
// otherwise it holds the prototype object (or the hole).
5687 Map* JSFunction::initial_map() {
5688 return Map::cast(prototype_or_initial_map());
5692 void JSFunction::set_initial_map(Map* value) {
5693 set_prototype_or_initial_map(value);
// True iff the overloaded slot currently holds a Map.
5697 bool JSFunction::has_initial_map() {
5698 return prototype_or_initial_map()->IsMap();
// An instance prototype exists when there is an initial map, or the slot
// holds a real prototype (anything but the hole).
5702 bool JSFunction::has_instance_prototype() {
5703 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
// Non-instance prototypes (non-JSObject values) are tracked on the map.
5707 bool JSFunction::has_prototype() {
5708 return map()->has_non_instance_prototype() || has_instance_prototype();
5712 Object* JSFunction::instance_prototype() {
5713 ASSERT(has_instance_prototype());
5714 if (has_initial_map()) return initial_map()->prototype();
5715 // When there is no initial map and the prototype is a JSObject, the
5716 // initial map field is used for the prototype field.
5717 return prototype_or_initial_map();
5721 Object* JSFunction::prototype() {
5722 ASSERT(has_prototype());
5723 // If the function's prototype property has been set to a non-JSObject
5724 // value, that value is stored in the constructor field of the map.
5725 if (map()->has_non_instance_prototype()) return map()->constructor();
5726 return instance_prototype();
// Whether this kind of function is expected to carry a prototype at all
// (e.g. ordinary functions yes, many builtins no).
5730 bool JSFunction::should_have_prototype() {
5731 return map()->function_with_prototype();
// A function counts as compiled when its code is anything other than the
// lazy CompileUnoptimized builtin stub.
// NOTE(review): the comparison/return line between 5735 and 5737 was lost
// in extraction; code text below is untouched.
5735 bool JSFunction::is_compiled() {
5737 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
// The literals_or_bindings slot is overloaded: literals array for normal
// functions, bindings array for bound functions (shared()->bound()).
5741 FixedArray* JSFunction::literals() {
5742 ASSERT(!shared()->bound());
5743 return literals_or_bindings();
5747 void JSFunction::set_literals(FixedArray* literals) {
5748 ASSERT(!shared()->bound());
5749 set_literals_or_bindings(literals);
5753 FixedArray* JSFunction::function_bindings() {
5754 ASSERT(shared()->bound());
5755 return literals_or_bindings();
5759 void JSFunction::set_function_bindings(FixedArray* bindings) {
5760 ASSERT(shared()->bound());
5761 // Bound function literal may be initialized to the empty fixed array
5762 // before the bindings are set.
5763 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
5764 bindings->map() == GetHeap()->fixed_cow_array_map());
5765 set_literals_or_bindings(bindings);
// Number of literals; only meaningful for unbound functions.
5769 int JSFunction::NumberOfLiterals() {
5770 ASSERT(!shared()->bound());
5771 return literals()->length();
// Indexed access to the JavaScript builtin functions stored inline in the
// builtins object; offsets are computed per-id.
5775 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
5776 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5777 return READ_FIELD(this, OffsetOfFunctionWithId(id));
5781 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
5783 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5784 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
5785 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
// Parallel table of the builtins' Code objects.
5789 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
5790 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5791 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
// No write barrier is emitted here; the assert documents why it is safe:
// the Code value must not live in new space.
5795 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
5797 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5798 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
5799 ASSERT(!GetHeap()->InNewSpace(value));
// Plain tagged-field accessors for proxy and collection objects.
5803 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
5804 ACCESSORS(JSProxy, hash, Object, kHashOffset)
5805 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
5806 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
// Fills every pointer slot after the header with `value`. Write barriers
// are skipped, which is why the assert requires value to be a non-heap
// object or not in new space.
5809 void JSProxy::InitializeBody(int object_size, Object* value) {
5810 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5811 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
5812 WRITE_FIELD(this, offset, value);
// Backing-table accessors for JS collections.
5817 ACCESSORS(JSSet, table, Object, kTableOffset)
5818 ACCESSORS(JSMap, table, Object, kTableOffset)
5819 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
5820 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
// Foreign wraps a raw machine address stored as an untagged intptr field.
5823 Address Foreign::foreign_address() {
5824 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
5828 void Foreign::set_foreign_address(Address value) {
5829 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
// Generator object state: function, context, receiver, resume point
// (continuation), saved operand stack and stack-handler index.
5833 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
5834 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
5835 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
5836 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
5837 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
5838 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
// Checked-cast helpers: each asserts both the type predicate and the exact
// instance size before reinterpret_cast'ing the tagged pointer.
5841 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
5842 ASSERT(obj->IsJSGeneratorObject());
5843 ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
5844 return reinterpret_cast<JSGeneratorObject*>(obj);
5848 ACCESSORS(JSModule, context, Object, kContextOffset)
5849 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
5852 JSModule* JSModule::cast(Object* obj) {
5853 ASSERT(obj->IsJSModule());
5854 ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
5855 return reinterpret_cast<JSModule*>(obj);
5859 ACCESSORS(JSValue, value, Object, kValueOffset)
5862 JSValue* JSValue::cast(Object* obj) {
5863 ASSERT(obj->IsJSValue());
5864 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
5865 return reinterpret_cast<JSValue*>(obj);
// JSDate caches broken-down date components alongside the raw value;
// cache_stamp tracks validity of the cached fields.
5869 ACCESSORS(JSDate, value, Object, kValueOffset)
5870 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
5871 ACCESSORS(JSDate, year, Object, kYearOffset)
5872 ACCESSORS(JSDate, month, Object, kMonthOffset)
5873 ACCESSORS(JSDate, day, Object, kDayOffset)
5874 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
5875 ACCESSORS(JSDate, hour, Object, kHourOffset)
5876 ACCESSORS(JSDate, min, Object, kMinOffset)
5877 ACCESSORS(JSDate, sec, Object, kSecOffset)
5880 JSDate* JSDate::cast(Object* obj) {
5881 ASSERT(obj->IsJSDate());
5882 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
5883 return reinterpret_cast<JSDate*>(obj);
// Error/message object payload: type tag, arguments, originating script,
// stack info and source position range.
5887 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
5888 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
5889 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
5890 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
5891 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
5892 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
5893 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
5896 JSMessageObject* JSMessageObject::cast(Object* obj) {
5897 ASSERT(obj->IsJSMessageObject());
5898 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
5899 return reinterpret_cast<JSMessageObject*>(obj);
// Code object header fields.
5903 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
5904 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
5905 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
5906 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
5907 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
5908 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
// Clears header pointer fields (used when the Code object's metadata must
// be dropped); a Smi in the feedback slot is a minor key and is preserved.
5911 void Code::WipeOutHeader() {
5912 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
5913 WRITE_FIELD(this, kHandlerTableOffset, NULL);
5914 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
5915 // Do not wipe out e.g. a minor key.
5916 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
5917 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
// The kTypeFeedbackInfoOffset slot is overloaded by code kind:
// FUNCTION -> type feedback info; OPTIMIZED_FUNCTION -> next_code_link;
// various IC kinds -> a Smi stub_info. The asserts/checks below enforce
// which interpretation is legal.
5922 Object* Code::type_feedback_info() {
5923 ASSERT(kind() == FUNCTION);
5924 return raw_type_feedback_info();
5928 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
5929 ASSERT(kind() == FUNCTION);
5930 set_raw_type_feedback_info(value, mode);
5931 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5936 Object* Code::next_code_link() {
5937 CHECK(kind() == OPTIMIZED_FUNCTION);
5938 return raw_type_feedback_info();
5942 void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
5943 CHECK(kind() == OPTIMIZED_FUNCTION);
5944 set_raw_type_feedback_info(value);
5945 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5950 int Code::stub_info() {
5951 ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
5952 kind() == BINARY_OP_IC || kind() == LOAD_IC);
5953 return Smi::cast(raw_type_feedback_info())->value();
// NOTE(review): the setter's assert admits more IC kinds than the getter's
// — presumably intentional asymmetry upstream; verify against the full
// file before tightening either list.
5957 void Code::set_stub_info(int value) {
5958 ASSERT(kind() == COMPARE_IC ||
5959 kind() == COMPARE_NIL_IC ||
5960 kind() == BINARY_OP_IC ||
5962 kind() == LOAD_IC ||
5963 kind() == KEYED_LOAD_IC ||
5964 kind() == STORE_IC ||
5965 kind() == KEYED_STORE_IC);
5966 set_raw_type_feedback_info(Smi::FromInt(value));
5970 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
5971 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
// Instruction stream geometry: instructions start right after the header
// and run for instruction_size() bytes.
5974 byte* Code::instruction_start() {
5975 return FIELD_ADDR(this, kHeaderSize);
5979 byte* Code::instruction_end() {
5980 return instruction_start() + instruction_size();
// Body size is the instruction size rounded up to object alignment.
5984 int Code::body_size() {
5985 return RoundUp(instruction_size(), kObjectAlignment);
// "unchecked" = reinterpret without the type assert; used during GC when
// the map may not be trustworthy.
5989 ByteArray* Code::unchecked_relocation_info() {
5990 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
5994 byte* Code::relocation_start() {
5995 return unchecked_relocation_info()->GetDataStartAddress();
5999 int Code::relocation_size() {
6000 return unchecked_relocation_info()->length();
6004 byte* Code::entry() {
6005 return instruction_start();
// Inner-pointer test over the whole object extent (end is inclusive here).
6009 bool Code::contains(byte* inner_pointer) {
6010 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6014 ACCESSORS(JSArray, length, Object, kLengthOffset)
// The backing store is a raw (untagged) pointer stored as an intptr field,
// so no write barrier is involved.
6017 void* JSArrayBuffer::backing_store() {
6018 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6019 return reinterpret_cast<void*>(ptr);
6023 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6024 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6025 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6029 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6030 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
// Boolean flags packed into the Smi flag field.
6033 bool JSArrayBuffer::is_external() {
6034 return BooleanBit::get(flag(), kIsExternalBit);
6038 void JSArrayBuffer::set_is_external(bool value) {
6039 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6043 bool JSArrayBuffer::should_be_freed() {
6044 return BooleanBit::get(flag(), kShouldBeFreed);
6048 void JSArrayBuffer::set_should_be_freed(bool value) {
6049 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
// Weak lists linking buffers and their views for GC bookkeeping.
6053 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6054 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6057 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6058 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6059 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6060 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6061 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
6063 ACCESSORS(JSRegExp, data, Object, kDataOffset)
// The regexp's data is a FixedArray with fixed slots: tag, source, flags,
// plus implementation-specific slots starting at kDataIndex.
6066 JSRegExp::Type JSRegExp::TypeTag() {
6067 Object* data = this->data();
6068 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6069 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6070 return static_cast<JSRegExp::Type>(smi->value());
// Capture count depends on type; only the IRREGEXP arm is visible in this
// excerpt (the switch's other cases were lost to extraction).
6074 int JSRegExp::CaptureCount() {
6075 switch (TypeTag()) {
6079 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6087 JSRegExp::Flags JSRegExp::GetFlags() {
6088 ASSERT(this->data()->IsFixedArray());
6089 Object* data = this->data();
6090 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6091 return Flags(smi->value());
6095 String* JSRegExp::Pattern() {
6096 ASSERT(this->data()->IsFixedArray());
6097 Object* data = this->data();
6098 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
// Raw slot access; only legal on compiled regexps.
6103 Object* JSRegExp::DataAt(int index) {
6104 ASSERT(TypeTag() != NOT_COMPILED);
6105 return FixedArray::cast(data())->get(index);
6109 void JSRegExp::SetDataAt(int index, Object* value) {
6110 ASSERT(TypeTag() != NOT_COMPILED);
6111 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
6112 FixedArray::cast(data())->set(index, value);
// Reads the elements kind from the map and, in debug builds, cross-checks
// that the actual elements backing store matches the kind. The checks are
// guarded by ElementsAreSafeToExamine() because a GC during construction
// can leave the elements pointer at a one-pointer filler.
6116 ElementsKind JSObject::GetElementsKind() {
6117 ElementsKind kind = map()->elements_kind();
6119 FixedArrayBase* fixed_array =
6120 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6122 // If a GC was caused while constructing this object, the elements
6123 // pointer may point to a one pointer filler map.
6124 if (ElementsAreSafeToExamine()) {
6125 Map* map = fixed_array->map();
6126 ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
6127 (map == GetHeap()->fixed_array_map() ||
6128 map == GetHeap()->fixed_cow_array_map())) ||
6129 (IsFastDoubleElementsKind(kind) &&
6130 (fixed_array->IsFixedDoubleArray() ||
6131 fixed_array == GetHeap()->empty_fixed_array())) ||
6132 (kind == DICTIONARY_ELEMENTS &&
6133 fixed_array->IsFixedArray() &&
6134 fixed_array->IsDictionary()) ||
6135 (kind > DICTIONARY_ELEMENTS));
6136 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
6137 (elements()->IsFixedArray() && elements()->length() >= 2));
// Dispatch table lookup for the current elements kind.
6144 ElementsAccessor* JSObject::GetElementsAccessor() {
6145 return ElementsAccessor::ForKind(GetElementsKind());
// Thin predicates over GetElementsKind(); each forwards to the
// corresponding ElementsKind classifier.
6149 bool JSObject::HasFastObjectElements() {
6150 return IsFastObjectElementsKind(GetElementsKind());
6154 bool JSObject::HasFastSmiElements() {
6155 return IsFastSmiElementsKind(GetElementsKind());
6159 bool JSObject::HasFastSmiOrObjectElements() {
6160 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6164 bool JSObject::HasFastDoubleElements() {
6165 return IsFastDoubleElementsKind(GetElementsKind());
6169 bool JSObject::HasFastHoleyElements() {
6170 return IsFastHoleyElementsKind(GetElementsKind());
6174 bool JSObject::HasFastElements() {
6175 return IsFastElementsKind(GetElementsKind());
6179 bool JSObject::HasDictionaryElements() {
6180 return GetElementsKind() == DICTIONARY_ELEMENTS;
6184 bool JSObject::HasNonStrictArgumentsElements() {
6185 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
// These inspect the backing store's type rather than the map's kind.
6189 bool JSObject::HasExternalArrayElements() {
6190 HeapObject* array = elements();
6191 ASSERT(array != NULL);
6192 return array->IsExternalArray();
// Per-type external-array predicates, stamped out for every typed-array
// element type via TYPED_ARRAYS.
6196 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6197 bool JSObject::HasExternal##Type##Elements() { \
6198 HeapObject* array = elements(); \
6199 ASSERT(array != NULL); \
6200 if (!array->IsHeapObject()) \
6202 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6205 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6207 #undef EXTERNAL_ELEMENTS_CHECK
6210 bool JSObject::HasFixedTypedArrayElements() {
6211 HeapObject* array = elements();
6212 ASSERT(array != NULL);
6213 return array->IsFixedTypedArrayBase();
// Interceptor presence is recorded on the map.
6217 bool JSObject::HasNamedInterceptor() {
6218 return map()->has_named_interceptor();
6222 bool JSObject::HasIndexedInterceptor() {
6223 return map()->has_indexed_interceptor();
// Copy-on-write support: if the elements array is the shared COW array,
// replace it with a writable copy before mutation. Returns the (possibly
// new) elements array, or a failure from the allocation.
6227 MaybeObject* JSObject::EnsureWritableFastElements() {
6228 ASSERT(HasFastSmiOrObjectElements());
6229 FixedArray* elems = FixedArray::cast(elements());
6230 Isolate* isolate = GetIsolate();
6231 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
6232 Object* writable_elems;
6233 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
6234 elems, isolate->heap()->fixed_array_map());
6235 if (!maybe_writable_elems->ToObject(&writable_elems)) {
6236 return maybe_writable_elems;
6239 set_elements(FixedArray::cast(writable_elems));
6240 isolate->counters()->cow_arrays_converted()->Increment();
6241 return writable_elems;
// Typed views of the properties/elements stores when in dictionary mode.
6245 NameDictionary* JSObject::property_dictionary() {
6246 ASSERT(!HasFastProperties());
6247 return NameDictionary::cast(properties());
6251 SeededNumberDictionary* JSObject::element_dictionary() {
6252 ASSERT(HasDictionaryElements());
6253 return SeededNumberDictionary::cast(elements());
// Hash-field bookkeeping: the hash is cached in hash_field(), with a
// not-yet-computed sentinel bit.
6257 bool Name::IsHashFieldComputed(uint32_t field) {
6258 return (field & kHashNotComputedMask) == 0;
6262 bool Name::HasHashCode() {
6263 return IsHashFieldComputed(hash_field());
6267 uint32_t Name::Hash() {
6268 // Fast case: has hash code already been computed?
6269 uint32_t field = hash_field();
6270 if (IsHashFieldComputed(field)) return field >> kHashShift;
6271 // Slow case: compute hash code and set it. Has to be a string.
6272 return String::cast(this)->ComputeAndSetHash();
// Incremental string hasher; tracks whether the string could still be a
// valid array index as characters are fed in.
6276 StringHasher::StringHasher(int length, uint32_t seed)
6278 raw_running_hash_(seed),
6280 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6281 is_first_char_(true) {
6282 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
// Strings longer than kMaxHashCalcLength get a trivial (length-based) hash.
6286 bool StringHasher::has_trivial_hash() {
6287 return length_ > String::kMaxHashCalcLength;
// Per-character mixing step (see comment at 6311-6312: Jenkins
// one-at-a-time). NOTE(review): the initial `running_hash += c;` line
// appears to have been lost between 6291 and 6293 — verify upstream.
6291 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6293 running_hash += (running_hash << 10);
6294 running_hash ^= (running_hash >> 6);
6295 return running_hash;
// Finalization avalanche; a zero hash (masked) is remapped — the
// replacement value's line was lost in extraction.
6299 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6300 running_hash += (running_hash << 3);
6301 running_hash ^= (running_hash >> 11);
6302 running_hash += (running_hash << 15);
6303 if ((running_hash & String::kHashBitMask) == 0) {
6306 return running_hash;
6310 void StringHasher::AddCharacter(uint16_t c) {
6311 // Use the Jenkins one-at-a-time hash function to update the hash
6312 // for the given character.
6313 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
// Maintains the running array-index interpretation: digits only, no
// leading zero (except "0" itself), and no overflow past 2^32-2.
6317 bool StringHasher::UpdateIndex(uint16_t c) {
6318 ASSERT(is_array_index_);
6319 if (c < '0' || c > '9') {
6320 is_array_index_ = false;
6324 if (is_first_char_) {
6325 is_first_char_ = false;
6326 if (c == '0' && length_ > 1) {
6327 is_array_index_ = false;
// 429496729 == 2^32/10 (floor); the (d+2)>>3 term handles the last digit
// boundary for kMaxArrayIndex.
6331 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6332 is_array_index_ = false;
6335 array_index_ = array_index_ * 10 + d;
// Feed a run of characters; tracks the index state until it fails, then
// hashes the remainder without index bookkeeping.
6340 template<typename Char>
6341 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6342 ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
6344 if (is_array_index_) {
6345 for (; i < length; i++) {
6346 AddCharacter(chars[i]);
6347 if (!UpdateIndex(chars[i])) {
6353 for (; i < length; i++) {
6354 ASSERT(!is_array_index_);
6355 AddCharacter(chars[i]);
// One-shot hash of a sequential string; skips character mixing entirely
// for trivial-hash (over-long) strings.
6360 template <typename schar>
6361 uint32_t StringHasher::HashSequentialString(const schar* chars,
6364 StringHasher hasher(length, seed);
6365 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6366 return hasher.GetHashField();
// Only strings can be array indices; symbols never are.
6370 bool Name::AsArrayIndex(uint32_t* index) {
6371 return IsString() && String::cast(this)->AsArrayIndex(index);
// Fast path uses the cached hash field's "is not array index" bit; slow
// path re-parses the string.
6375 bool String::AsArrayIndex(uint32_t* index) {
6376 uint32_t field = hash_field();
6377 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6380 return SlowAsArrayIndex(index);
// Prototype and constructor both live on the map.
6384 Object* JSReceiver::GetPrototype() {
6385 return map()->prototype();
6389 Object* JSReceiver::GetConstructor() {
6390 return map()->constructor();
// Property/element existence and attribute queries. Each routes proxies
// through their handler trap and ordinary objects through the attribute
// lookup (ABSENT means "not present").
6394 bool JSReceiver::HasProperty(Handle<JSReceiver> object,
6395 Handle<Name> name) {
6396 if (object->IsJSProxy()) {
6397 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6398 return JSProxy::HasPropertyWithHandler(proxy, name);
6400 return object->GetPropertyAttribute(*name) != ABSENT;
// "Local" = own property only (no prototype chain walk).
6404 bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
6405 Handle<Name> name) {
6406 if (object->IsJSProxy()) {
6407 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6408 return JSProxy::HasPropertyWithHandler(proxy, name);
6410 return object->GetLocalPropertyAttribute(*name) != ABSENT;
// Names that parse as array indices are dispatched to the element path.
6414 PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
6416 if (IsJSObject() && key->AsArrayIndex(&index)) {
6417 return GetElementAttribute(index);
6419 return GetPropertyAttributeWithReceiver(this, key);
6423 PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
6425 return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
6427 return JSObject::cast(this)->GetElementAttributeWithReceiver(
6432 bool JSGlobalObject::IsDetached() {
6433 return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
// A global proxy is detached when it no longer has the global as its
// prototype.
6437 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
6438 return GetPrototype() != global;
6442 Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6443 return object->IsJSProxy()
6444 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6445 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6449 Object* JSReceiver::GetIdentityHash() {
6451 ? JSProxy::cast(this)->GetIdentityHash()
6452 : JSObject::cast(this)->GetIdentityHash();
// Element existence checks; the boolean argument to
// GetElementAttributeWithReceiver selects whether the prototype chain is
// consulted (true for HasElement, false for HasLocalElement).
6456 bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6457 if (object->IsJSProxy()) {
6458 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6459 return JSProxy::HasElementWithHandler(proxy, index);
6461 return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
6462 *object, index, true) != ABSENT;
6466 bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
6467 if (object->IsJSProxy()) {
6468 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6469 return JSProxy::HasElementWithHandler(proxy, index);
6471 return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
6472 *object, index, false) != ABSENT;
6476 PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
6478 return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
6480 return JSObject::cast(this)->GetElementAttributeWithReceiver(
6481 this, index, false);
// AccessorInfo access-control bits, packed into the Smi flag field.
6485 bool AccessorInfo::all_can_read() {
6486 return BooleanBit::get(flag(), kAllCanReadBit);
6490 void AccessorInfo::set_all_can_read(bool value) {
6491 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6495 bool AccessorInfo::all_can_write() {
6496 return BooleanBit::get(flag(), kAllCanWriteBit);
6500 void AccessorInfo::set_all_can_write(bool value) {
6501 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6505 bool AccessorInfo::prohibits_overwriting() {
6506 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
6510 void AccessorInfo::set_prohibits_overwriting(bool value) {
6511 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
// Property attributes share the same flag Smi, via a bit-field codec.
6515 PropertyAttributes AccessorInfo::property_attributes() {
6516 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6520 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6521 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
// Receiver compatibility: if an expected receiver template is installed,
// the receiver must be an instance of (a template derived from) it.
6525 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6526 Object* function_template = expected_receiver_type();
6527 if (!function_template->IsFunctionTemplateInfo()) return true;
6528 return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
// Translates a v8::AccessControl mask into the AccessorPair's packed
// access-flags Smi.
6532 void AccessorPair::set_access_flags(v8::AccessControl access_control) {
6533 int current = access_flags()->value();
6534 current = BooleanBit::set(current,
6535 kProhibitsOverwritingBit,
6536 access_control & PROHIBITS_OVERWRITING);
6537 current = BooleanBit::set(current,
6539 access_control & ALL_CAN_READ);
6540 current = BooleanBit::set(current,
6542 access_control & ALL_CAN_WRITE);
6543 set_access_flags(Smi::FromInt(current));
6547 bool AccessorPair::all_can_read() {
6548 return BooleanBit::get(access_flags(), kAllCanReadBit);
6552 bool AccessorPair::all_can_write() {
6553 return BooleanBit::get(access_flags(), kAllCanWriteBit);
6557 bool AccessorPair::prohibits_overwriting() {
6558 return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
// Convenience overload: store with default (zero) property details.
6562 template<typename Shape, typename Key>
6563 void Dictionary<Shape, Key>::SetEntry(int entry,
6566 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
// Writes the (key, value, details) triple into the entry's three
// consecutive FixedArray slots, sharing one write-barrier mode computed
// under DisallowHeapAllocation.
6570 template<typename Shape, typename Key>
6571 void Dictionary<Shape, Key>::SetEntry(int entry,
6574 PropertyDetails details) {
6575 ASSERT(!key->IsName() ||
6576 details.IsDeleted() ||
6577 details.dictionary_index() > 0);
6578 int index = HashTable<Shape, Key>::EntryToIndex(entry);
6579 DisallowHeapAllocation no_gc;
6580 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
6581 FixedArray::set(index, key, mode);
6582 FixedArray::set(index+1, value, mode);
6583 FixedArray::set(index+2, details.AsSmi());
// Shape policy classes used by the HashTable/Dictionary templates: each
// supplies IsMatch/Hash/HashForObject/AsObject for its key type.
6587 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6588 ASSERT(other->IsNumber());
6589 return key == static_cast<uint32_t>(other->Number());
// Unseeded variant: plain integer hash with seed 0.
6593 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
6594 return ComputeIntegerHash(key, 0);
6598 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6600 ASSERT(other->IsNumber());
6601 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
// Seeded variant: hash mixed with the heap's hash seed.
6604 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
6605 return ComputeIntegerHash(key, seed);
6608 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
6611 ASSERT(other->IsNumber());
6612 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6615 MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
6616 return heap->NumberFromUint32(key);
6620 bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
6621 // We know that all entries in a hash table had their hash keys created.
6622 // Use that knowledge to have fast failure.
6623 if (key->Hash() != Name::cast(other)->Hash()) return false;
6624 return key->Equals(Name::cast(other));
6628 uint32_t NameDictionaryShape::Hash(Name* key) {
6633 uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
6634 return Name::cast(other)->Hash();
// Keys stored in a NameDictionary must already be unique names.
6638 MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
6639 ASSERT(key->IsUniqueName());
// Object hash tables key on SameValue identity with the object's own
// identity hash.
6644 template <int entrysize>
6645 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
6646 return key->SameValue(other);
6650 template <int entrysize>
6651 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
6652 return Smi::cast(key->GetHash())->value();
6656 template <int entrysize>
6657 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
6659 return Smi::cast(other->GetHash())->value();
6663 template <int entrysize>
6664 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
// Weak hash tables hash the key's address directly (truncated to 32 bits);
// this relies on keys not moving, or the table being rehashed after GC —
// NOTE(review): that invariant is not visible in this excerpt.
6670 template <int entrysize>
6671 bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
6672 return key->SameValue(other);
6676 template <int entrysize>
6677 uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
6678 intptr_t hash = reinterpret_cast<intptr_t>(key);
6679 return (uint32_t)(hash & 0xFFFFFFFF);
6683 template <int entrysize>
6684 uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
6686 intptr_t hash = reinterpret_cast<intptr_t>(other);
6687 return (uint32_t)(hash & 0xFFFFFFFF);
6691 template <int entrysize>
6692 MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap,
6698 void Map::ClearCodeCache(Heap* heap) {
6699 // No write barrier is needed since empty_fixed_array is not in new space.
6700 // Please note this function is used during marking:
6701 // - MarkCompactCollector::MarkUnmarkedObject
6702 // - IncrementalMarking::Step
6703 ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
6704 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
// Grows the backing store when too small (with 1/8 slack to amortize
// repeated growth); otherwise may re-expand a small old-space array back
// into new space for locality.
6708 void JSArray::EnsureSize(int required_size) {
6709 ASSERT(HasFastSmiOrObjectElements());
6710 FixedArray* elts = FixedArray::cast(elements());
6711 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6712 if (elts->length() < required_size) {
6713 // Doubling in size would be overkill, but leave some slack to avoid
6714 // constantly growing.
6715 Expand(required_size + (required_size >> 3));
6716 // It's a performance benefit to keep a frequently used array in new-space.
6717 } else if (!GetHeap()->new_space()->Contains(elts) &&
6718 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6719 // Expand will allocate a new backing store in new space even if the size
6720 // we asked for isn't larger than what we had before.
6721 Expand(required_size);
// Smi overload of set_length: Smis need no write barrier.
6726 void JSArray::set_length(Smi* length) {
6727 // Don't need a write barrier for a Smi.
6728 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
// Length is only settable when elements are in-heap fixed arrays (i.e. not
// external typed-array storage) — the assert documents the equivalence.
6732 bool JSArray::AllowsSetElementsLength() {
6733 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6734 ASSERT(result == !HasExternalArrayElements());
// Replaces the array's backing store wholesale, after ensuring the current
// elements kind can represent the new storage's contents; updates length.
6739 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
6740 MaybeObject* maybe_result = EnsureCanContainElements(
6741 storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
6742 if (maybe_result->IsFailure()) return maybe_result;
6743 ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
6744 IsFastDoubleElementsKind(GetElementsKind())) ||
6745 ((storage->map() != GetHeap()->fixed_double_array_map()) &&
6746 (IsFastObjectElementsKind(GetElementsKind()) ||
6747 (IsFastSmiElementsKind(GetElementsKind()) &&
6748 FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
6749 set_elements(storage);
6750 set_length(Smi::FromInt(storage->length()));
// Copy helpers: the canonical empty array is shared, so length 0 returns
// `this` instead of allocating.
6755 MaybeObject* FixedArray::Copy() {
6756 if (length() == 0) return this;
6757 return GetHeap()->CopyFixedArray(this);
6761 MaybeObject* FixedDoubleArray::Copy() {
6762 if (length() == 0) return this;
6763 return GetHeap()->CopyFixedDoubleArray(this);
6767 MaybeObject* ConstantPoolArray::Copy() {
6768 if (length() == 0) return this;
6769 return GetHeap()->CopyConstantPoolArray(this);
// TypeFeedbackCells layout: entries are (cell, ast-id) pairs stored at
// even/odd slots — cell at index*2, AST id Smi at 1 + index*2.
6773 void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
6774 set(1 + index * 2, Smi::FromInt(id.ToInt()));
6778 TypeFeedbackId TypeFeedbackCells::AstId(int index) {
6779 return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
6783 void TypeFeedbackCells::SetCell(int index, Cell* cell) {
6784 set(index * 2, cell);
6788 Cell* TypeFeedbackCells::GetCell(int index) {
6789 return Cell::cast(get(index * 2));
// Sentinel values distinguishing IC states: the hole = uninitialized,
// undefined = megamorphic, an ElementsKind Smi = monomorphic array.
6793 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
6794 return isolate->factory()->the_hole_value();
6798 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
6799 return isolate->factory()->undefined_value();
6803 Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate,
6804 ElementsKind elements_kind) {
6805 return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
// Raw (handle-free) variant of the uninitialized sentinel, for GC-time use.
6809 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
6810 return heap->the_hole_value();
// TypeFeedbackInfo packs several counters/checksums into two Smi storage
// fields via BitField codecs (ICTotalCountField, OwnTypeChangeChecksum in
// storage1; ICsWithTypeInfoCountField, InlinedTypeChangeChecksum in
// storage2).
6814 int TypeFeedbackInfo::ic_total_count() {
6815 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6816 return ICTotalCountField::decode(current);
6820 void TypeFeedbackInfo::set_ic_total_count(int count) {
6821 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6822 value = ICTotalCountField::update(value,
6823 ICTotalCountField::decode(count));
6824 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6828 int TypeFeedbackInfo::ic_with_type_info_count() {
6829 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6830 return ICsWithTypeInfoCountField::decode(current);
// Applies a signed delta to the typed-IC count; negative results (possible
// after a debugger shallow-copy of the code object) are skipped, per the
// original comment below.
6834 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
6835 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6836 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
6837 // We can get negative count here when the type-feedback info is
6838 // shared between two code objects. The can only happen when
6839 // the debugger made a shallow copy of code object (see Heap::CopyCode).
6840 // Since we do not optimize when the debugger is active, we can skip
6841 // this counter update.
6842 if (new_count >= 0) {
6843 new_count &= ICsWithTypeInfoCountField::kMask;
6844 value = ICsWithTypeInfoCountField::update(value, new_count);
6845 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6850 void TypeFeedbackInfo::initialize_storage() {
6851 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
6852 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
// Bumps the own-type-change checksum modulo 2^kTypeChangeChecksumBits,
// then clamps the packed word back into Smi range.
6856 void TypeFeedbackInfo::change_own_type_change_checksum() {
6857 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6858 int checksum = OwnTypeChangeChecksum::decode(value);
6859 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
6860 value = OwnTypeChangeChecksum::update(value, checksum);
6861 // Ensure packed bit field is in Smi range.
6862 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
6863 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
6864 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6868 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
6869 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6870 int mask = (1 << kTypeChangeChecksumBits) - 1;
6871 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
6872 // Ensure packed bit field is in Smi range.
6873 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
6874 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
6875 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6879 int TypeFeedbackInfo::own_type_change_checksum() {
6880 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6881 return OwnTypeChangeChecksum::decode(value);
// Compares stored inlined checksum against the caller's, masked to the
// checksum width.
6885 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
6886 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6887 int mask = (1 << kTypeChangeChecksumBits) - 1;
6888 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
6892 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
6893 kTypeFeedbackCellsOffset)
6896 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
6899 Relocatable::Relocatable(Isolate* isolate) {
6901 prev_ = isolate->relocatable_top();
6902 isolate->set_relocatable_top(this);
6906 Relocatable::~Relocatable() {
6907 ASSERT_EQ(isolate_->relocatable_top(), this);
6908 isolate_->set_relocatable_top(prev_);
6912 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
6913 return map->instance_size();
6917 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
6918 v->VisitExternalReference(
6919 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
6923 template<typename StaticVisitor>
6924 void Foreign::ForeignIterateBody() {
6925 StaticVisitor::VisitExternalReference(
6926 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
6930 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
6931 typedef v8::String::ExternalAsciiStringResource Resource;
6932 v->VisitExternalAsciiString(
6933 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6937 template<typename StaticVisitor>
6938 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
6939 typedef v8::String::ExternalAsciiStringResource Resource;
6940 StaticVisitor::VisitExternalAsciiString(
6941 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6945 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
6946 typedef v8::String::ExternalStringResource Resource;
6947 v->VisitExternalTwoByteString(
6948 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6952 template<typename StaticVisitor>
6953 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
6954 typedef v8::String::ExternalStringResource Resource;
6955 StaticVisitor::VisitExternalTwoByteString(
6956 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6960 template<int start_offset, int end_offset, int size>
6961 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
6964 v->VisitPointers(HeapObject::RawField(obj, start_offset),
6965 HeapObject::RawField(obj, end_offset));
6969 template<int start_offset>
6970 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
6973 v->VisitPointers(HeapObject::RawField(obj, start_offset),
6974 HeapObject::RawField(obj, object_size));
6979 #undef CAST_ACCESSOR
6980 #undef INT_ACCESSORS
6982 #undef ACCESSORS_TO_SMI
6983 #undef SMI_ACCESSORS
6985 #undef BOOL_ACCESSORS
6989 #undef WRITE_BARRIER
6990 #undef CONDITIONAL_WRITE_BARRIER
6991 #undef READ_DOUBLE_FIELD
6992 #undef WRITE_DOUBLE_FIELD
6993 #undef READ_INT_FIELD
6994 #undef WRITE_INT_FIELD
6995 #undef READ_INTPTR_FIELD
6996 #undef WRITE_INTPTR_FIELD
6997 #undef READ_UINT32_FIELD
6998 #undef WRITE_UINT32_FIELD
6999 #undef READ_SHORT_FIELD
7000 #undef WRITE_SHORT_FIELD
7001 #undef READ_BYTE_FIELD
7002 #undef WRITE_BYTE_FIELD
7004 } } // namespace v8::internal
7006 #endif // V8_OBJECTS_INL_H_