// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "src/base/atomicops.h"
#include "src/elements.h"
#include "src/objects.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/field-index-inl.h"
#include "src/isolate.h"
#include "src/heap-inl.h"
#include "src/property.h"
#include "src/spaces.h"
#include "src/store-buffer.h"
#include "src/v8memory.h"
#include "src/factory.h"
#include "src/incremental-marking.h"
#include "src/transitions-inl.h"
#include "src/objects-visiting.h"
#include "src/lookup.h"
namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}


PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
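// Worked example (comment added for clarity, not in the original): the shift
// pair in AsSmi() sign-extends bit 30 into bit 31. With 32-bit ints and
// value_ = 0x40000000 (bit 30 set):
//   value_ << 1  ==  0x80000000
//   value  >> 1  ==  0xC0000000   (arithmetic shift copies the sign bit)
// Afterwards bits 30 and 31 agree, so the value survives the Smi tag/untag
// round trip that Smi::FromInt performs.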
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


#define CAST_ACCESSOR(type)                 \
  type* type::cast(Object* object) {        \
    SLOW_ASSERT(object->Is##type());        \
    return reinterpret_cast<type*>(object); \
  }
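// For illustration (comment added, not in the original): the invocation
// TYPE_CHECKER(Symbol, SYMBOL_TYPE) used further down expands to roughly
//
//   bool Object::IsSymbol() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == SYMBOL_TYPE;
//   }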
#define INT_ACCESSORS(holder, name, offset)                   \
  int holder::name() { return READ_INT_FIELD(this, offset); } \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }
// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                         \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                  \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() {                              \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() {                       \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() {                            \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
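// Note (comment added for clarity, not in the original): the three Smi
// accessor flavors differ only in memory ordering. SMI_ACCESSORS compiles to
// a plain load/store, SYNCHRONIZED_SMI_ACCESSORS pairs an acquire load with
// a release store so a thread that observes the new value also observes
// writes made before it, and NOBARRIER_SMI_ACCESSORS uses relaxed atomics:
// tear-free, but with no ordering guarantees.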
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() {                          \
    return BooleanBit::get(field(), offset);     \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}


bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
bool Object::IsString() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  if (object->IsUninitialized()) {
    return isolate->factory()->NewHeapNumber(0);
  }
  return isolate->factory()->NewHeapNumber(object->Number());
}
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalAsciiRepresentationTag);

STATIC_ASSERT(v8::String::ASCII_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
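// Illustrative note (comment added, not in the original): the instance type
// packs the representation and the encoding into disjoint bit fields, so a
// full tag is just the bitwise OR of one tag from each group. For example,
// an external one-byte string satisfies
//   full_representation_tag() == (kExternalStringTag | kOneByteStringTag)
// which is exactly what IsExternalAscii() above tests.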
uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() V8_OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
};
template<class Char>
class SubStringKey : public HashTableKey {
 public:
  SubStringKey(Handle<String> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    if (string_->IsSlicedString()) {
      string_ = Handle<String>(Unslice(*string_, &from_));
    }
    ASSERT(string_->IsSeqString() || string->IsExternalString());
  }

  virtual uint32_t Hash() V8_OVERRIDE {
    ASSERT(length_ >= 0);
    ASSERT(from_ + length_ <= string_->length());
    const Char* chars = GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual bool IsMatch(Object* string) V8_OVERRIDE;
  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;

 private:
  const Char* GetChars();
  String* Unslice(String* string, int* offset) {
    while (string->IsSlicedString()) {
      SlicedString* sliced = SlicedString::cast(string);
      *offset += sliced->offset();
      string = sliced->parent();
    }
    return string;
  }

  Handle<String> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
};
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  virtual uint32_t Hash() V8_OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
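// Usage sketch (comment added; illustrative only, names like "bytes"/"len"
// are placeholders): the string table consults a key before internalizing:
//
//   OneByteStringKey key(Vector<const uint8_t>(bytes, len),
//                        isolate->heap()->HashSeed());
//   // The table calls key.Hash() to pick a bucket, key.IsMatch(candidate)
//   // on colliding entries, and key.AsHandle(isolate) to materialize the
//   // string only when no existing entry matches.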
bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}
bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)      \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE)  \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER


bool Object::IsFixedTypedArrayBase() {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}
bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
bool Object::IsJSWeakCollection() {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
bool Object::IsContext() {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());
}


bool Object::IsNativeContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

TYPE_CHECKER(Float32x4, FLOAT32x4_TYPE)
TYPE_CHECKER(Float64x2, FLOAT64x2_TYPE)
TYPE_CHECKER(Int32x4, INT32x4_TYPE)


bool Object::IsJSArrayBufferView() {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsStringTable() {
  return IsHashTable();
}


bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
bool Object::IsNormalizedMapCache() {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


bool NormalizedMapCache::IsNormalizedMapCache(Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(obj)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsObjectHashTable() {
  return IsHashTable();
}


bool Object::IsOrderedHashTable() {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global =
        proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() {                                             \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}


double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}
bool Object::IsNaN() {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
    }
  }
  return Handle<Smi>();
}
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);
}


MaybeHandle<Object> Object::GetElement(Isolate* isolate,
                                       Handle<Object> object,
                                       uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(AllowHeapAllocation::IsAllowed());
  return Object::GetElementWithReceiver(isolate, object, object, index);
}


MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  uint32_t index;
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
  return GetProperty(object, name);
}


MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
                                        Handle<Object> object,
                                        const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  ASSERT(!str.is_null());
#ifdef DEBUG
  uint32_t index;  // Assert that the name is not an array index.
  ASSERT(!str->AsArrayIndex(&index));
#endif  // DEBUG
  return GetProperty(object, str);
}
MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
}


MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   uint32_t index,
                                                   Handle<Object> value,
                                                   StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(
      proxy, receiver, name, value, NONE, strict_mode);
}


bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
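// Note (comment added for clarity, not in the original): the two barrier
// macros keep the GC's invariants intact after a raw pointer store.
// RecordWrite on the incremental marker ensures a black (fully scanned)
// object never ends up pointing at an unmarked white object, and the store
// buffer entry added by heap->RecordWrite lets the scavenger find
// old-to-new pointers without scanning the whole old generation.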
#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS
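// Note (comment added for clarity, not in the original): HeapNumber fields
// are only guaranteed pointer alignment, which on 32-bit MIPS may be 4
// bytes. The ldc1/sdc1 instructions require 8-byte alignment, so the helpers
// above copy the double as two 32-bit words through a union instead of
// letting gcc emit a single 64-bit load or store.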
#define READ_FLOAT32x4_FIELD(p, offset) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT64x2_FIELD(p, offset) \
  (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT64x2_FIELD(p, offset, value) \
  (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32x4_FIELD(p, offset) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
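// Worked example (comment added for clarity, not in the original): on a
// 32-bit target with kSmiTagSize == 1 and kSmiShiftSize == 0, Smi::FromInt(5)
// stores the payload in the upper 31 bits with a 0 tag bit at the bottom:
//   (5 << 1) | kSmiTag  ==  0xA
// HAS_SMI_TAG simply tests that low bit, which is why Object::IsSmi needs no
// memory access at all.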
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
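// Note (comment added for clarity, not in the original): subtracting
// kHeapObjectTag in FromForwardingAddress clears the heap-object tag bit, so
// a forwarding address looks like a Smi while a real map pointer does not;
// that is exactly the distinction IsForwardingAddress draws with
// HAS_SMI_TAG.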
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  SLOW_ASSERT(heap != NULL);
  return heap;
}
Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}
MapWord HeapObject::map_word() {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}
int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


ACCESSORS(Float32x4, value, Object, kValueOffset)
ACCESSORS(Float64x2, value, Object, kValueOffset)
ACCESSORS(Int32x4, value, Object, kValueOffset)
const char* Float32x4::Name() {
  return "float32x4";
}


int Float32x4::kRuntimeAllocatorId() {
  return Runtime::kAllocateFloat32x4;
}


float Float32x4::getAt(int index) {
  ASSERT(index >= 0 && index < kLanes);
  return get().storage[index];
}


float32x4_value_t Float32x4::get() {
  return FixedFloat32x4Array::cast(value())->get_scalar(0);
}


void Float32x4::set(float32x4_value_t f32x4) {
  FixedFloat32x4Array::cast(value())->set(0, f32x4);
}


const char* Float64x2::Name() {
  return "float64x2";
}


int Float64x2::kRuntimeAllocatorId() {
  return Runtime::kAllocateFloat64x2;
}


double Float64x2::getAt(int index) {
  ASSERT(index >= 0 && index < kLanes);
  return get().storage[index];
}


float64x2_value_t Float64x2::get() {
  return FixedFloat64x2Array::cast(value())->get_scalar(0);
}


void Float64x2::set(float64x2_value_t f64x2) {
  FixedFloat64x2Array::cast(value())->set(0, f64x2);
}


const char* Int32x4::Name() {
  return "int32x4";
}


int Int32x4::kRuntimeAllocatorId() {
  return Runtime::kAllocateInt32x4;
}


int32_t Int32x4::getAt(int index) {
  ASSERT(index >= 0 && index < kLanes);
  return get().storage[index];
}


int32x4_value_t Int32x4::get() {
  return FixedInt32x4Array::cast(value())->get_scalar(0);
}


void Int32x4::set(int32x4_value_t i32x4) {
  FixedInt32x4Array::cast(value())->set(0, i32x4);
}
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}
FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
  }
#endif
}
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


void AllocationSite::MarkZombie() {
  ASSERT(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}


inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
    Reason reason) {
  switch (reason) {
    case TENURING:
      return DependentCode::kAllocationSiteTenuringChangedGroup;
      break;
    case TRANSITIONS:
      return DependentCode::kAllocationSiteTransitionChangedGroup;
      break;
  }
  UNREACHABLE();
  return DependentCode::kAllocationSiteTransitionChangedGroup;
}
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  ASSERT((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  ASSERT(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}

inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  ASSERT(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
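// Note (comment added for clarity, not in the original): the pretenuring
// feedback loop runs per GC cycle. Allocation bumps memento_create_count,
// the scavenger bumps memento_found_count for every AllocationMemento it
// encounters, and DigestPretenuringFeedback compares found/created against
// kPretenureRatio to move the site through kUndecided -> kMaybeTenure ->
// kTenure, deoptimizing dependent code when the decision flips to tenure.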
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    ASSERT(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  ASSERT((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  ASSERT((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}


Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
  ASSERT(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}


Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case FLOAT32x4_TYPE:
      return Float32x4::kSize;
    case FLOAT64x2_TYPE:
      return Float64x2::kSize;
    case INT32x4_TYPE:
      return Int32x4::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2060 // Access fast-case object properties at index. The use of these routines
2061 // is needed to correctly distinguish between properties stored in-object and
2062 // properties stored in the properties array.
2063 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2064 if (index.is_inobject()) {
2065 return READ_FIELD(this, index.offset());
2067 return properties()->get(index.outobject_array_index());
2072 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2073 if (index.is_inobject()) {
2074 int offset = index.offset();
2075 WRITE_FIELD(this, offset, value);
2076 WRITE_BARRIER(GetHeap(), this, offset, value);
2078 properties()->set(index.outobject_array_index(), value);
2083 int JSObject::GetInObjectPropertyOffset(int index) {
2084 return map()->GetInObjectPropertyOffset(index);
2088 Object* JSObject::InObjectPropertyAt(int index) {
2089 int offset = GetInObjectPropertyOffset(index);
2090 return READ_FIELD(this, offset);
2094 Object* JSObject::InObjectPropertyAtPut(int index,
2095 Object* value,
2096 WriteBarrierMode mode) {
2097 // Adjust for the number of properties stored in the object.
2098 int offset = GetInObjectPropertyOffset(index);
2099 WRITE_FIELD(this, offset, value);
2100 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2101 return value;
2102 }
2106 void JSObject::InitializeBody(Map* map,
2107 Object* pre_allocated_value,
2108 Object* filler_value) {
2109 ASSERT(!filler_value->IsHeapObject() ||
2110 !GetHeap()->InNewSpace(filler_value));
2111 ASSERT(!pre_allocated_value->IsHeapObject() ||
2112 !GetHeap()->InNewSpace(pre_allocated_value));
2113 int size = map->instance_size();
2114 int offset = kHeaderSize;
2115 if (filler_value != pre_allocated_value) {
2116 int pre_allocated = map->pre_allocated_property_fields();
2117 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
2118 for (int i = 0; i < pre_allocated; i++) {
2119 WRITE_FIELD(this, offset, pre_allocated_value);
2120 offset += kPointerSize;
2121 }
2122 }
2123 while (offset < size) {
2124 WRITE_FIELD(this, offset, filler_value);
2125 offset += kPointerSize;
2126 }
2127 }
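// Worked example (illustrative): if the map reports 2 pre-allocated property
// fields, the for loop above writes pre_allocated_value into the first two
// property slots after the header, and the while loop fills every remaining
// slot up to instance_size with filler_value. When both values are equal the
// pre-allocated loop is skipped and the while loop does all the work.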
2130 bool JSObject::HasFastProperties() {
2131 ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
2132 return !properties()->IsDictionary();
2136 bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
2137 // Allow extra fast properties if the object has more than
2138 // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
2139 // very unlikely that the object is being used as a dictionary and there is a
2140 // good chance that allowing more map transitions will be worth it.
2141 Map* map = this->map();
2142 if (map->unused_property_fields() != 0) return false;
2144 int inobject = map->inobject_properties();
2146 int limit;
2147 if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
2148 limit = Max(inobject, kMaxFastProperties);
2149 } else {
2150 limit = Max(inobject, kFastPropertiesSoftLimit);
2151 }
2152 return properties()->length() > limit;
2153 }
2156 void Struct::InitializeBody(int object_size) {
2157 Object* value = GetHeap()->undefined_value();
2158 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2159 WRITE_FIELD(this, offset, value);
2164 bool Object::ToArrayIndex(uint32_t* index) {
2165 if (IsSmi()) {
2166 int value = Smi::cast(this)->value();
2167 if (value < 0) return false;
2168 *index = value;
2169 return true;
2170 }
2171 if (IsHeapNumber()) {
2172 double value = HeapNumber::cast(this)->value();
2173 uint32_t uint_value = static_cast<uint32_t>(value);
2174 if (value == static_cast<double>(uint_value)) {
2175 *index = uint_value;
2176 return true;
2177 }
2178 }
2179 return false;
2180 }
2183 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2184 if (!this->IsJSValue()) return false;
2186 JSValue* js_value = JSValue::cast(this);
2187 if (!js_value->value()->IsString()) return false;
2189 String* str = String::cast(js_value->value());
2190 if (index >= static_cast<uint32_t>(str->length())) return false;
2192 return true;
2193 }
2196 void Object::VerifyApiCallResultType() {
2197 #if ENABLE_EXTRA_CHECKS
2198 if (!(IsSmi() ||
2199 IsString() ||
2200 IsSpecObject() ||
2201 IsHeapNumber() ||
2202 IsUndefined() ||
2203 IsTheHole() ||
2204 IsNull() ||
2205 IsBoolean())) {
2207 FATAL("API call returned invalid object");
2208 }
2209 #endif // ENABLE_EXTRA_CHECKS
2210 }
2213 FixedArrayBase* FixedArrayBase::cast(Object* object) {
2214 ASSERT(object->IsFixedArrayBase());
2215 return reinterpret_cast<FixedArrayBase*>(object);
2219 Object* FixedArray::get(int index) {
2220 SLOW_ASSERT(index >= 0 && index < this->length());
2221 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2225 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2226 return handle(array->get(index), array->GetIsolate());
2230 bool FixedArray::is_the_hole(int index) {
2231 return get(index) == GetHeap()->the_hole_value();
2235 void FixedArray::set(int index, Smi* value) {
2236 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2237 ASSERT(index >= 0 && index < this->length());
2238 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
2239 int offset = kHeaderSize + index * kPointerSize;
2240 WRITE_FIELD(this, offset, value);
2244 void FixedArray::set(int index, Object* value) {
2245 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2246 ASSERT(index >= 0 && index < this->length());
2247 int offset = kHeaderSize + index * kPointerSize;
2248 WRITE_FIELD(this, offset, value);
2249 WRITE_BARRIER(GetHeap(), this, offset, value);
2253 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2254 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
2258 inline double FixedDoubleArray::hole_nan_as_double() {
2259 return BitCast<double, uint64_t>(kHoleNanInt64);
2263 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2264 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
2265 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
2266 return OS::nan_value();
2267 }
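// Illustrative note (not from the original source): holes are one specific
// NaN bit pattern, so hole tests must compare raw bits, never doubles:
//
//   double hole = hole_nan_as_double();
//   bool a = (hole == hole);                              // false: NaN != NaN
//   bool b = (BitCast<uint64_t>(hole) == kHoleNanInt64);  // true: exact bits
//
// This is also why set() below canonicalizes incoming NaNs: a user-supplied
// NaN must never alias the hole pattern.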
2270 double FixedDoubleArray::get_scalar(int index) {
2271 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2272 map() != GetHeap()->fixed_array_map());
2273 ASSERT(index >= 0 && index < this->length());
2274 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2275 ASSERT(!is_the_hole_nan(result));
2276 return result;
2277 }
2279 int64_t FixedDoubleArray::get_representation(int index) {
2280 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2281 map() != GetHeap()->fixed_array_map());
2282 ASSERT(index >= 0 && index < this->length());
2283 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2287 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2288 int index) {
2289 if (array->is_the_hole(index)) {
2290 return array->GetIsolate()->factory()->the_hole_value();
2291 } else {
2292 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2293 }
2294 }
2297 void FixedDoubleArray::set(int index, double value) {
2298 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2299 map() != GetHeap()->fixed_array_map());
2300 int offset = kHeaderSize + index * kDoubleSize;
2301 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2302 WRITE_DOUBLE_FIELD(this, offset, value);
2306 void FixedDoubleArray::set_the_hole(int index) {
2307 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
2308 map() != GetHeap()->fixed_array_map());
2309 int offset = kHeaderSize + index * kDoubleSize;
2310 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2314 bool FixedDoubleArray::is_the_hole(int index) {
2315 int offset = kHeaderSize + index * kDoubleSize;
2316 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2320 double* FixedDoubleArray::data_start() {
2321 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2325 void FixedDoubleArray::FillWithHoles(int from, int to) {
2326 for (int i = from; i < to; i++) {
2327 set_the_hole(i);
2328 }
2329 }
2332 bool ConstantPoolArray::is_extended_layout() {
2333 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2334 return IsExtendedField::decode(small_layout_1);
2338 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2339 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
2343 int ConstantPoolArray::first_extended_section_index() {
2344 ASSERT(is_extended_layout());
2345 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2346 return TotalCountField::decode(small_layout_2);
2350 int ConstantPoolArray::get_extended_section_header_offset() {
2351 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
2355 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2356 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2357 return WeakObjectStateField::decode(small_layout_2);
2361 void ConstantPoolArray::set_weak_object_state(
2362 ConstantPoolArray::WeakObjectState state) {
2363 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2364 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2365 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2369 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2370 int index = 0;
2371 if (section == EXTENDED_SECTION) {
2372 ASSERT(is_extended_layout());
2373 index += first_extended_section_index();
2374 }
2376 for (Type type_iter = FIRST_TYPE; type_iter < type;
2377 type_iter = next_type(type_iter)) {
2378 index += number_of_entries(type_iter, section);
2379 }
2380 return index;
2381 }
2385 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2386 return first_index(type, section) + number_of_entries(type, section) - 1;
2390 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2391 if (section == SMALL_SECTION) {
2392 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2393 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2394 switch (type) {
2395 case INT64:
2396 return Int64CountField::decode(small_layout_1);
2397 case CODE_PTR:
2398 return CodePtrCountField::decode(small_layout_1);
2399 case HEAP_PTR:
2400 return HeapPtrCountField::decode(small_layout_1);
2401 case INT32:
2402 return Int32CountField::decode(small_layout_2);
2403 default:
2404 UNREACHABLE();
2405 }
2406 return 0;
2407 } else {
2408 ASSERT(section == EXTENDED_SECTION && is_extended_layout());
2409 int offset = get_extended_section_header_offset();
2410 switch (type) {
2411 case INT64:
2412 offset += kExtendedInt64CountOffset;
2413 break;
2414 case CODE_PTR:
2415 offset += kExtendedCodePtrCountOffset;
2416 break;
2417 case HEAP_PTR:
2418 offset += kExtendedHeapPtrCountOffset;
2419 break;
2420 case INT32:
2421 offset += kExtendedInt32CountOffset;
2422 break;
2423 default:
2424 UNREACHABLE();
2425 }
2426 return READ_INT_FIELD(this, offset);
2427 }
2428 }
2431 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2432 LayoutSection section;
2433 if (is_extended_layout() && index >= first_extended_section_index()) {
2434 section = EXTENDED_SECTION;
2435 } else {
2436 section = SMALL_SECTION;
2437 }
2439 Type type = FIRST_TYPE;
2440 while (index > last_index(type, section)) {
2441 type = next_type(type);
2442 }
2443 ASSERT(type <= LAST_TYPE);
2444 return type;
2445 }
2448 int64_t ConstantPoolArray::get_int64_entry(int index) {
2449 ASSERT(map() == GetHeap()->constant_pool_array_map());
2450 ASSERT(get_type(index) == INT64);
2451 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2455 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2456 STATIC_ASSERT(kDoubleSize == kInt64Size);
2457 ASSERT(map() == GetHeap()->constant_pool_array_map());
2458 ASSERT(get_type(index) == INT64);
2459 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2463 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2464 ASSERT(map() == GetHeap()->constant_pool_array_map());
2465 ASSERT(get_type(index) == CODE_PTR);
2466 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2470 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2471 ASSERT(map() == GetHeap()->constant_pool_array_map());
2472 ASSERT(get_type(index) == HEAP_PTR);
2473 return READ_FIELD(this, OffsetOfElementAt(index));
2477 int32_t ConstantPoolArray::get_int32_entry(int index) {
2478 ASSERT(map() == GetHeap()->constant_pool_array_map());
2479 ASSERT(get_type(index) == INT32);
2480 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2484 void ConstantPoolArray::set(int index, int64_t value) {
2485 ASSERT(map() == GetHeap()->constant_pool_array_map());
2486 ASSERT(get_type(index) == INT64);
2487 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2491 void ConstantPoolArray::set(int index, double value) {
2492 STATIC_ASSERT(kDoubleSize == kInt64Size);
2493 ASSERT(map() == GetHeap()->constant_pool_array_map());
2494 ASSERT(get_type(index) == INT64);
2495 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2499 void ConstantPoolArray::set(int index, Address value) {
2500 ASSERT(map() == GetHeap()->constant_pool_array_map());
2501 ASSERT(get_type(index) == CODE_PTR);
2502 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2506 void ConstantPoolArray::set(int index, Object* value) {
2507 ASSERT(map() == GetHeap()->constant_pool_array_map());
2508 ASSERT(get_type(index) == HEAP_PTR);
2509 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2510 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2514 void ConstantPoolArray::set(int index, int32_t value) {
2515 ASSERT(map() == GetHeap()->constant_pool_array_map());
2516 ASSERT(get_type(index) == INT32);
2517 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2521 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2522 uint32_t small_layout_1 =
2523 Int64CountField::encode(small.count_of(INT64)) |
2524 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2525 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2526 IsExtendedField::encode(false);
2527 uint32_t small_layout_2 =
2528 Int32CountField::encode(small.count_of(INT32)) |
2529 TotalCountField::encode(small.total_count()) |
2530 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2531 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2532 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2533 if (kHeaderSize != kFirstEntryOffset) {
2534 ASSERT(kFirstEntryOffset - kHeaderSize == kInt32Size);
2535 WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
2540 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2541 const NumberOfEntries& extended) {
2542 // Initialize small layout fields first.
2543 Init(small);
2545 // Set is_extended_layout field.
2546 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2547 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2548 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2550 // Initialize the extended layout fields.
2551 int extended_header_offset = get_extended_section_header_offset();
2552 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2553 extended.count_of(INT64));
2554 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2555 extended.count_of(CODE_PTR));
2556 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2557 extended.count_of(HEAP_PTR));
2558 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2559 extended.count_of(INT32));
2563 int ConstantPoolArray::size() {
2564 NumberOfEntries small(this, SMALL_SECTION);
2565 if (!is_extended_layout()) {
2566 return SizeFor(small);
2567 } else {
2568 NumberOfEntries extended(this, EXTENDED_SECTION);
2569 return SizeForExtended(small, extended);
2570 }
2571 }
2574 int ConstantPoolArray::length() {
2575 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2576 int length = TotalCountField::decode(small_layout_2);
2577 if (is_extended_layout()) {
2578 length += number_of_entries(INT64, EXTENDED_SECTION) +
2579 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2580 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2581 number_of_entries(INT32, EXTENDED_SECTION);
2582 }
2583 return length;
2584 }
2587 int ConstantPoolArray::Iterator::next_index() {
2588 ASSERT(!is_finished());
2589 int ret = next_index_++;
2590 update_section();
2591 return ret;
2592 }
2595 bool ConstantPoolArray::Iterator::is_finished() {
2596 return next_index_ > array_->last_index(type_, final_section_);
2600 void ConstantPoolArray::Iterator::update_section() {
2601 if (next_index_ > array_->last_index(type_, current_section_) &&
2602 current_section_ != final_section_) {
2603 ASSERT(final_section_ == EXTENDED_SECTION);
2604 current_section_ = EXTENDED_SECTION;
2605 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
2606 }
2607 }
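// Usage sketch (illustrative; the Iterator constructor is assumed from the
// accessors above): walking all INT64 entries, small section first, then the
// extended section when present:
//
//   ConstantPoolArray::Iterator it(constant_pool, ConstantPoolArray::INT64);
//   while (!it.is_finished()) {
//     int64_t entry = constant_pool->get_int64_entry(it.next_index());
//   }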
2610 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2611 const DisallowHeapAllocation& promise) {
2612 Heap* heap = GetHeap();
2613 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2614 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2615 return UPDATE_WRITE_BARRIER;
2616 }
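// Illustrative sketch (not from the original source): callers hoist the
// barrier decision out of hot loops. The DisallowHeapAllocation witness is
// what makes this sound -- the answer is only valid while no allocation
// (hence no GC or marking-state change) can occur:
//
//   DisallowHeapAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   for (int i = 0; i < n; i++) array->set(i, values[i], mode);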
2619 void FixedArray::set(int index,
2620 Object* value,
2621 WriteBarrierMode mode) {
2622 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2623 ASSERT(index >= 0 && index < this->length());
2624 int offset = kHeaderSize + index * kPointerSize;
2625 WRITE_FIELD(this, offset, value);
2626 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2630 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2631 int index,
2632 Object* value) {
2633 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2634 ASSERT(index >= 0 && index < array->length());
2635 int offset = kHeaderSize + index * kPointerSize;
2636 WRITE_FIELD(array, offset, value);
2637 Heap* heap = array->GetHeap();
2638 if (heap->InNewSpace(value)) {
2639 heap->RecordWrite(array->address(), offset);
2640 }
2641 }
2644 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2645 int index,
2646 Object* value) {
2647 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2648 ASSERT(index >= 0 && index < array->length());
2649 ASSERT(!array->GetHeap()->InNewSpace(value));
2650 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2654 void FixedArray::set_undefined(int index) {
2655 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2656 ASSERT(index >= 0 && index < this->length());
2657 ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2658 WRITE_FIELD(this,
2659 kHeaderSize + index * kPointerSize,
2660 GetHeap()->undefined_value());
2661 }
2664 void FixedArray::set_null(int index) {
2665 ASSERT(index >= 0 && index < this->length());
2666 ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2667 WRITE_FIELD(this,
2668 kHeaderSize + index * kPointerSize,
2669 GetHeap()->null_value());
2670 }
2673 void FixedArray::set_the_hole(int index) {
2674 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2675 ASSERT(index >= 0 && index < this->length());
2676 ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2677 WRITE_FIELD(this,
2678 kHeaderSize + index * kPointerSize,
2679 GetHeap()->the_hole_value());
2680 }
2683 void FixedArray::FillWithHoles(int from, int to) {
2684 for (int i = from; i < to; i++) {
2685 set_the_hole(i);
2686 }
2687 }
2690 Object** FixedArray::data_start() {
2691 return HeapObject::RawField(this, kHeaderSize);
2695 bool DescriptorArray::IsEmpty() {
2696 ASSERT(length() >= kFirstIndex ||
2697 this == GetHeap()->empty_descriptor_array());
2698 return length() < kFirstIndex;
2702 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2703 WRITE_FIELD(
2704 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2705 }
2708 // Perform a binary search in a fixed array. Low and high are entry indices. If
2709 // there are three entries in this array it should be called with low=0 and
2710 // high=2.
2711 template<SearchMode search_mode, typename T>
2712 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2713 uint32_t hash = name->Hash();
2714 int limit = high;
2716 ASSERT(low <= high);
2718 while (low != high) {
2719 int mid = (low + high) / 2;
2720 Name* mid_name = array->GetSortedKey(mid);
2721 uint32_t mid_hash = mid_name->Hash();
2723 if (mid_hash >= hash) {
2724 high = mid;
2725 } else {
2726 low = mid + 1;
2727 }
2728 }
2730 for (; low <= limit; ++low) {
2731 int sort_index = array->GetSortedKeyIndex(low);
2732 Name* entry = array->GetKey(sort_index);
2733 if (entry->Hash() != hash) break;
2734 if (entry->Equals(name)) {
2735 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2736 return sort_index;
2737 }
2738 return T::kNotFound;
2739 }
2740 }
2742 return T::kNotFound;
2743 }
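// Illustrative walk-through: descriptors are kept in hash order through the
// sorted-key indirection, so for entries with hashes [3, 7, 7, 9] a search
// for hash 7 converges on the first 7, then the trailing loop steps across
// the hash collision until it finds an Equals() match or a different hash.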
2746 // Perform a linear search in this fixed array. len is the number of entry
2747 // indices that are valid.
2748 template<SearchMode search_mode, typename T>
2749 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2750 uint32_t hash = name->Hash();
2751 if (search_mode == ALL_ENTRIES) {
2752 for (int number = 0; number < len; number++) {
2753 int sorted_index = array->GetSortedKeyIndex(number);
2754 Name* entry = array->GetKey(sorted_index);
2755 uint32_t current_hash = entry->Hash();
2756 if (current_hash > hash) break;
2757 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2758 }
2759 } else {
2760 ASSERT(len >= valid_entries);
2761 for (int number = 0; number < valid_entries; number++) {
2762 Name* entry = array->GetKey(number);
2763 uint32_t current_hash = entry->Hash();
2764 if (current_hash == hash && entry->Equals(name)) return number;
2765 }
2766 }
2767 return T::kNotFound;
2768 }
2771 template<SearchMode search_mode, typename T>
2772 int Search(T* array, Name* name, int valid_entries) {
2773 if (search_mode == VALID_ENTRIES) {
2774 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2775 } else {
2776 SLOW_ASSERT(array->IsSortedNoDuplicates());
2777 }
2779 int nof = array->number_of_entries();
2780 if (nof == 0) return T::kNotFound;
2782 // Fast case: do linear search for small arrays.
2783 const int kMaxElementsForLinearSearch = 8;
2784 if ((search_mode == ALL_ENTRIES &&
2785 nof <= kMaxElementsForLinearSearch) ||
2786 (search_mode == VALID_ENTRIES &&
2787 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2788 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2789 }
2791 // Slow case: perform binary search.
2792 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2793 }
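// Worked example (illustrative): with kMaxElementsForLinearSearch == 8, an
// ALL_ENTRIES search over up to 8 entries stays linear, while 9 entries
// switch to BinarySearch. VALID_ENTRIES keeps the linear path up to 24
// valid entries (8 * 3), presumably because that mode scans keys directly
// without the sorted-key indirection.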
2796 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2797 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
2801 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2802 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2803 if (number_of_own_descriptors == 0) return kNotFound;
2805 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2806 int number = cache->Lookup(map, name);
2808 if (number == DescriptorLookupCache::kAbsent) {
2809 number = Search(name, number_of_own_descriptors);
2810 cache->Update(map, name, number);
2811 }
2813 return number;
2814 }
2817 PropertyDetails Map::GetLastDescriptorDetails() {
2818 return instance_descriptors()->GetDetails(LastAdded());
2822 void Map::LookupDescriptor(JSObject* holder,
2823 Name* name,
2824 LookupResult* result) {
2825 DescriptorArray* descriptors = this->instance_descriptors();
2826 int number = descriptors->SearchWithCache(name, this);
2827 if (number == DescriptorArray::kNotFound) return result->NotFound();
2828 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2832 void Map::LookupTransition(JSObject* holder,
2833 Name* name,
2834 LookupResult* result) {
2835 int transition_index = this->SearchTransition(name);
2836 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
2837 result->TransitionResult(holder, this->GetTransition(transition_index));
2841 FixedArrayBase* Map::GetInitialElements() {
2842 if (has_fast_smi_or_object_elements() ||
2843 has_fast_double_elements()) {
2844 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2845 return GetHeap()->empty_fixed_array();
2846 } else if (has_external_array_elements()) {
2847 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
2848 ASSERT(!GetHeap()->InNewSpace(empty_array));
2849 return empty_array;
2850 } else if (has_fixed_typed_array_elements()) {
2851 FixedTypedArrayBase* empty_array =
2852 GetHeap()->EmptyFixedTypedArrayForMap(this);
2853 ASSERT(!GetHeap()->InNewSpace(empty_array));
2854 return empty_array;
2855 } else if (has_dictionary_elements()) {
2856 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_slow_element_dictionary()));
2857 return GetHeap()->empty_slow_element_dictionary();
2858 } else {
2859 UNREACHABLE();
2860 }
2861 return NULL;
2862 }
2865 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2866 ASSERT(descriptor_number < number_of_descriptors());
2867 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2871 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2872 return GetKeySlot(descriptor_number);
2876 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2877 return GetValueSlot(descriptor_number - 1) + 1;
2881 Name* DescriptorArray::GetKey(int descriptor_number) {
2882 ASSERT(descriptor_number < number_of_descriptors());
2883 return Name::cast(get(ToKeyIndex(descriptor_number)));
2887 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2888 return GetDetails(descriptor_number).pointer();
2892 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2893 return GetKey(GetSortedKeyIndex(descriptor_number));
2897 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2898 PropertyDetails details = GetDetails(descriptor_index);
2899 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2903 void DescriptorArray::SetRepresentation(int descriptor_index,
2904 Representation representation) {
2905 ASSERT(!representation.IsNone());
2906 PropertyDetails details = GetDetails(descriptor_index);
2907 set(ToDetailsIndex(descriptor_index),
2908 details.CopyWithRepresentation(representation).AsSmi());
2912 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2913 ASSERT(descriptor_number < number_of_descriptors());
2914 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2918 Object* DescriptorArray::GetValue(int descriptor_number) {
2919 ASSERT(descriptor_number < number_of_descriptors());
2920 return get(ToValueIndex(descriptor_number));
2924 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
2925 set(ToValueIndex(descriptor_index), value);
2929 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2930 ASSERT(descriptor_number < number_of_descriptors());
2931 Object* details = get(ToDetailsIndex(descriptor_number));
2932 return PropertyDetails(Smi::cast(details));
2936 PropertyType DescriptorArray::GetType(int descriptor_number) {
2937 return GetDetails(descriptor_number).type();
2941 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2942 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2943 return GetDetails(descriptor_number).field_index();
2947 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
2948 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2949 return HeapType::cast(GetValue(descriptor_number));
2953 Object* DescriptorArray::GetConstant(int descriptor_number) {
2954 return GetValue(descriptor_number);
2958 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2959 ASSERT(GetType(descriptor_number) == CALLBACKS);
2960 return GetValue(descriptor_number);
2964 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
2965 ASSERT(GetType(descriptor_number) == CALLBACKS);
2966 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2967 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2971 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2972 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
2973 handle(GetValue(descriptor_number), GetIsolate()),
2974 GetDetails(descriptor_number));
2978 void DescriptorArray::Set(int descriptor_number,
2979 Descriptor* desc,
2980 const WhitenessWitness&) {
2982 ASSERT(descriptor_number < number_of_descriptors());
2984 NoIncrementalWriteBarrierSet(this,
2985 ToKeyIndex(descriptor_number),
2986 *desc->GetKey());
2987 NoIncrementalWriteBarrierSet(this,
2988 ToValueIndex(descriptor_number),
2989 *desc->GetValue());
2990 NoIncrementalWriteBarrierSet(this,
2991 ToDetailsIndex(descriptor_number),
2992 desc->GetDetails().AsSmi());
2996 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2998 ASSERT(descriptor_number < number_of_descriptors());
3000 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3001 set(ToValueIndex(descriptor_number), *desc->GetValue());
3002 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3006 void DescriptorArray::Append(Descriptor* desc,
3007 const WhitenessWitness& witness) {
3008 DisallowHeapAllocation no_gc;
3009 int descriptor_number = number_of_descriptors();
3010 SetNumberOfDescriptors(descriptor_number + 1);
3011 Set(descriptor_number, desc, witness);
3013 uint32_t hash = desc->GetKey()->Hash();
3015 int insertion;
3017 for (insertion = descriptor_number; insertion > 0; --insertion) {
3018 Name* key = GetSortedKey(insertion - 1);
3019 if (key->Hash() <= hash) break;
3020 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3021 }
3023 SetSortedKey(insertion, descriptor_number);
3024 }
3027 void DescriptorArray::Append(Descriptor* desc) {
3028 DisallowHeapAllocation no_gc;
3029 int descriptor_number = number_of_descriptors();
3030 SetNumberOfDescriptors(descriptor_number + 1);
3031 Set(descriptor_number, desc);
3033 uint32_t hash = desc->GetKey()->Hash();
3035 int insertion;
3037 for (insertion = descriptor_number; insertion > 0; --insertion) {
3038 Name* key = GetSortedKey(insertion - 1);
3039 if (key->Hash() <= hash) break;
3040 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3041 }
3043 SetSortedKey(insertion, descriptor_number);
3044 }
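// Illustrative walk-through: Append writes the new descriptor at the end and
// insertion-sorts only the parallel sorted-key table. Appending a key with
// hash 5 next to existing hashes [2, 8, 9] shifts the sorted indices of 8
// and 9 up by one and records the new descriptor's index in the freed slot;
// the descriptors themselves never move.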
3047 void DescriptorArray::SwapSortedKeys(int first, int second) {
3048 int first_key = GetSortedKeyIndex(first);
3049 SetSortedKey(first, GetSortedKeyIndex(second));
3050 SetSortedKey(second, first_key);
3054 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3055 : marking_(array->GetHeap()->incremental_marking()) {
3056 marking_->EnterNoMarkingScope();
3057 ASSERT(!marking_->IsMarking() ||
3058 Marking::Color(array) == Marking::WHITE_OBJECT);
3062 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3063 marking_->LeaveNoMarkingScope();
3067 template<typename Derived, typename Shape, typename Key>
3068 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3069 const int kMinCapacity = 32;
3070 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
3071 if (capacity < kMinCapacity) {
3072 capacity = kMinCapacity; // Guarantee min capacity.
3073 }
3074 return capacity;
3075 }
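// Worked example (illustrative): at_least_space_for == 50 doubles to 100 and
// rounds up to a capacity of 128; anything up to 16 lands on the floor of
// 32. Doubling keeps the table at most half full, which the probe loop in
// FindEntry below relies on to terminate.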
3078 template<typename Derived, typename Shape, typename Key>
3079 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3080 return FindEntry(GetIsolate(), key);
3084 // Find entry for key otherwise return kNotFound.
3085 template<typename Derived, typename Shape, typename Key>
3086 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3087 uint32_t capacity = Capacity();
3088 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3089 uint32_t count = 1;
3090 // EnsureCapacity will guarantee the hash table is never full.
3091 while (true) {
3092 Object* element = KeyAt(entry);
3093 // Empty entry. Uses raw unchecked accessors because it is called by the
3094 // string table during bootstrapping.
3095 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3096 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3097 Shape::IsMatch(key, element)) return entry;
3098 entry = NextProbe(entry, count++, capacity);
3099 }
3100 return kNotFound;
3101 }
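// Illustrative note: this is classic open addressing. An undefined slot was
// never used, so the probe chain ends and the key is absent; a the_hole slot
// is a deletion that must be skipped, since the sought key may still live
// further along the same probe sequence.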
3104 bool SeededNumberDictionary::requires_slow_elements() {
3105 Object* max_index_object = get(kMaxNumberKeyIndex);
3106 if (!max_index_object->IsSmi()) return false;
3107 return 0 !=
3108 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3109 }
3111 uint32_t SeededNumberDictionary::max_number_key() {
3112 ASSERT(!requires_slow_elements());
3113 Object* max_index_object = get(kMaxNumberKeyIndex);
3114 if (!max_index_object->IsSmi()) return 0;
3115 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3116 return value >> kRequiresSlowElementsTagSize;
3117 }
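// Illustrative bit layout: the Smi at kMaxNumberKeyIndex packs
// (max_number_key << kRequiresSlowElementsTagSize) | requires_slow_bit, so
// max_number_key() shifts the tag away while set_requires_slow_elements()
// stores only the mask bit.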
3119 void SeededNumberDictionary::set_requires_slow_elements() {
3120 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3124 // ------------------------------------
3125 // Cast operations
3128 CAST_ACCESSOR(FixedArray)
3129 CAST_ACCESSOR(FixedDoubleArray)
3130 CAST_ACCESSOR(FixedTypedArrayBase)
3131 CAST_ACCESSOR(ConstantPoolArray)
3132 CAST_ACCESSOR(DescriptorArray)
3133 CAST_ACCESSOR(DeoptimizationInputData)
3134 CAST_ACCESSOR(DeoptimizationOutputData)
3135 CAST_ACCESSOR(DependentCode)
3136 CAST_ACCESSOR(StringTable)
3137 CAST_ACCESSOR(JSFunctionResultCache)
3138 CAST_ACCESSOR(NormalizedMapCache)
3139 CAST_ACCESSOR(ScopeInfo)
3140 CAST_ACCESSOR(CompilationCacheTable)
3141 CAST_ACCESSOR(CodeCacheHashTable)
3142 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3143 CAST_ACCESSOR(MapCache)
3144 CAST_ACCESSOR(String)
3145 CAST_ACCESSOR(SeqString)
3146 CAST_ACCESSOR(SeqOneByteString)
3147 CAST_ACCESSOR(SeqTwoByteString)
3148 CAST_ACCESSOR(SlicedString)
3149 CAST_ACCESSOR(ConsString)
3150 CAST_ACCESSOR(ExternalString)
3151 CAST_ACCESSOR(ExternalAsciiString)
3152 CAST_ACCESSOR(ExternalTwoByteString)
3153 CAST_ACCESSOR(Symbol)
3155 CAST_ACCESSOR(JSReceiver)
3156 CAST_ACCESSOR(JSObject)
3158 CAST_ACCESSOR(HeapObject)
3159 CAST_ACCESSOR(HeapNumber)
3160 CAST_ACCESSOR(Float32x4)
3161 CAST_ACCESSOR(Float64x2)
3162 CAST_ACCESSOR(Int32x4)
3163 CAST_ACCESSOR(Oddball)
3165 CAST_ACCESSOR(PropertyCell)
3166 CAST_ACCESSOR(SharedFunctionInfo)
3168 CAST_ACCESSOR(JSFunction)
3169 CAST_ACCESSOR(GlobalObject)
3170 CAST_ACCESSOR(JSGlobalProxy)
3171 CAST_ACCESSOR(JSGlobalObject)
3172 CAST_ACCESSOR(JSBuiltinsObject)
3174 CAST_ACCESSOR(JSArray)
3175 CAST_ACCESSOR(JSArrayBuffer)
3176 CAST_ACCESSOR(JSArrayBufferView)
3177 CAST_ACCESSOR(JSTypedArray)
3178 CAST_ACCESSOR(JSDataView)
3179 CAST_ACCESSOR(JSRegExp)
3180 CAST_ACCESSOR(JSProxy)
3181 CAST_ACCESSOR(JSFunctionProxy)
3182 CAST_ACCESSOR(JSSet)
3183 CAST_ACCESSOR(JSMap)
3184 CAST_ACCESSOR(JSSetIterator)
3185 CAST_ACCESSOR(JSMapIterator)
3186 CAST_ACCESSOR(JSWeakMap)
3187 CAST_ACCESSOR(JSWeakSet)
3188 CAST_ACCESSOR(Foreign)
3189 CAST_ACCESSOR(ByteArray)
3190 CAST_ACCESSOR(FreeSpace)
3191 CAST_ACCESSOR(ExternalArray)
3192 CAST_ACCESSOR(ExternalInt8Array)
3193 CAST_ACCESSOR(ExternalUint8Array)
3194 CAST_ACCESSOR(ExternalInt16Array)
3195 CAST_ACCESSOR(ExternalUint16Array)
3196 CAST_ACCESSOR(ExternalInt32Array)
3197 CAST_ACCESSOR(ExternalInt32x4Array)
3198 CAST_ACCESSOR(ExternalUint32Array)
3199 CAST_ACCESSOR(ExternalFloat32Array)
3200 CAST_ACCESSOR(ExternalFloat32x4Array)
3201 CAST_ACCESSOR(ExternalFloat64Array)
3202 CAST_ACCESSOR(ExternalFloat64x2Array)
3203 CAST_ACCESSOR(ExternalUint8ClampedArray)
3204 CAST_ACCESSOR(Struct)
3205 CAST_ACCESSOR(AccessorInfo)
3207 template <class Traits>
3208 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3209 SLOW_ASSERT(object->IsHeapObject() &&
3210 HeapObject::cast(object)->map()->instance_type() ==
3211 Traits::kInstanceType);
3212 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3216 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3217 STRUCT_LIST(MAKE_STRUCT_CAST)
3218 #undef MAKE_STRUCT_CAST
3221 template <typename Derived, typename Shape, typename Key>
3222 HashTable<Derived, Shape, Key>*
3223 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3224 ASSERT(obj->IsHashTable());
3225 return reinterpret_cast<HashTable*>(obj);
3229 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3230 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3232 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3233 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3235 SMI_ACCESSORS(String, length, kLengthOffset)
3236 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3239 uint32_t Name::hash_field() {
3240 return READ_UINT32_FIELD(this, kHashFieldOffset);
3244 void Name::set_hash_field(uint32_t value) {
3245 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3246 #if V8_HOST_ARCH_64_BIT
3247 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
3248 #endif
3249 }
3252 bool Name::Equals(Name* other) {
3253 if (other == this) return true;
3254 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3255 this->IsSymbol() || other->IsSymbol()) {
3256 return false;
3257 }
3258 return String::cast(this)->SlowEquals(String::cast(other));
3259 }
3262 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3263 if (one.is_identical_to(two)) return true;
3264 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3265 one->IsSymbol() || two->IsSymbol()) {
3266 return false;
3267 }
3268 return String::SlowEquals(Handle<String>::cast(one),
3269 Handle<String>::cast(two));
3273 ACCESSORS(Symbol, name, Object, kNameOffset)
3274 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3275 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3278 bool String::Equals(String* other) {
3279 if (other == this) return true;
3280 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3281 return false;
3282 }
3283 return SlowEquals(other);
3284 }
3287 bool String::Equals(Handle<String> one, Handle<String> two) {
3288 if (one.is_identical_to(two)) return true;
3289 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3290 return false;
3291 }
3292 return SlowEquals(one, two);
3293 }
3296 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3297 if (!string->IsConsString()) return string;
3298 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3299 if (cons->IsFlat()) return handle(cons->first());
3300 return SlowFlatten(cons, pretenure);
3301 }
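// Usage sketch (illustrative): flatten once before repeated random access so
// Get() hits sequential or external storage instead of walking a cons tree:
//
//   string = String::Flatten(string);
//   for (int i = 0; i < string->length(); i++) Process(string->Get(i));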
3304 uint16_t String::Get(int index) {
3305 ASSERT(index >= 0 && index < length());
3306 switch (StringShape(this).full_representation_tag()) {
3307 case kSeqStringTag | kOneByteStringTag:
3308 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3309 case kSeqStringTag | kTwoByteStringTag:
3310 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3311 case kConsStringTag | kOneByteStringTag:
3312 case kConsStringTag | kTwoByteStringTag:
3313 return ConsString::cast(this)->ConsStringGet(index);
3314 case kExternalStringTag | kOneByteStringTag:
3315 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
3316 case kExternalStringTag | kTwoByteStringTag:
3317 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3318 case kSlicedStringTag | kOneByteStringTag:
3319 case kSlicedStringTag | kTwoByteStringTag:
3320 return SlicedString::cast(this)->SlicedStringGet(index);
3321 default:
3322 break;
3323 }
3325 UNREACHABLE();
3326 return 0;
3327 }
3330 void String::Set(int index, uint16_t value) {
3331 ASSERT(index >= 0 && index < length());
3332 ASSERT(StringShape(this).IsSequential());
3334 return this->IsOneByteRepresentation()
3335 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3336 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3340 bool String::IsFlat() {
3341 if (!StringShape(this).IsCons()) return true;
3342 return ConsString::cast(this)->second()->length() == 0;
3346 String* String::GetUnderlying() {
3347 // Giving direct access to underlying string only makes sense if the
3348 // wrapping string is already flattened.
3349 ASSERT(this->IsFlat());
3350 ASSERT(StringShape(this).IsIndirect());
3351 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3352 const int kUnderlyingOffset = SlicedString::kParentOffset;
3353 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3357 template<class Visitor>
3358 ConsString* String::VisitFlat(Visitor* visitor,
3359 String* string,
3360 const int offset) {
3361 int slice_offset = offset;
3362 const int length = string->length();
3363 ASSERT(offset <= length);
3364 while (true) {
3365 int32_t type = string->map()->instance_type();
3366 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3367 case kSeqStringTag | kOneByteStringTag:
3368 visitor->VisitOneByteString(
3369 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3370 length - offset);
3371 return NULL;
3373 case kSeqStringTag | kTwoByteStringTag:
3374 visitor->VisitTwoByteString(
3375 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3376 length - offset);
3377 return NULL;
3379 case kExternalStringTag | kOneByteStringTag:
3380 visitor->VisitOneByteString(
3381 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3382 length - offset);
3383 return NULL;
3385 case kExternalStringTag | kTwoByteStringTag:
3386 visitor->VisitTwoByteString(
3387 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3388 length - offset);
3389 return NULL;
3391 case kSlicedStringTag | kOneByteStringTag:
3392 case kSlicedStringTag | kTwoByteStringTag: {
3393 SlicedString* slicedString = SlicedString::cast(string);
3394 slice_offset += slicedString->offset();
3395 string = slicedString->parent();
3396 continue;
3397 }
3399 case kConsStringTag | kOneByteStringTag:
3400 case kConsStringTag | kTwoByteStringTag:
3401 return ConsString::cast(string);
3402 default:
3403 UNREACHABLE();
3404 return NULL;
3405 }
3406 }
3407 }
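// Example visitor (illustrative sketch; SpaceCounter is not part of the
// original source). VisitFlat returns NULL once the flat content has been
// visited, or the unflattened ConsString it stopped at:
//
//   class SpaceCounter {
//    public:
//     SpaceCounter() : spaces(0) {}
//     void VisitOneByteString(const uint8_t* chars, int length) {
//       for (int i = 0; i < length; i++) if (chars[i] == ' ') spaces++;
//     }
//     void VisitTwoByteString(const uint16_t* chars, int length) {
//       for (int i = 0; i < length; i++) if (chars[i] == ' ') spaces++;
//     }
//     int spaces;
//   };
//
//   SpaceCounter counter;
//   if (String::VisitFlat(&counter, string) != NULL) {
//     // string was an unflattened ConsString: flatten it and retry.
//   }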
3411 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3412 ASSERT(index >= 0 && index < length());
3413 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3417 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3418 ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3419 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3420 static_cast<byte>(value));
3424 Address SeqOneByteString::GetCharsAddress() {
3425 return FIELD_ADDR(this, kHeaderSize);
3429 uint8_t* SeqOneByteString::GetChars() {
3430 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3434 Address SeqTwoByteString::GetCharsAddress() {
3435 return FIELD_ADDR(this, kHeaderSize);
3439 uc16* SeqTwoByteString::GetChars() {
3440 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3444 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3445 ASSERT(index >= 0 && index < length());
3446 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3450 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3451 ASSERT(index >= 0 && index < length());
3452 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3456 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3457 return SizeFor(length());
3461 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3462 return SizeFor(length());
3466 String* SlicedString::parent() {
3467 return String::cast(READ_FIELD(this, kParentOffset));
3471 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3472 ASSERT(parent->IsSeqString() || parent->IsExternalString());
3473 WRITE_FIELD(this, kParentOffset, parent);
3474 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3478 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3481 String* ConsString::first() {
3482 return String::cast(READ_FIELD(this, kFirstOffset));
3486 Object* ConsString::unchecked_first() {
3487 return READ_FIELD(this, kFirstOffset);
3491 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3492 WRITE_FIELD(this, kFirstOffset, value);
3493 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3497 String* ConsString::second() {
3498 return String::cast(READ_FIELD(this, kSecondOffset));
3502 Object* ConsString::unchecked_second() {
3503 return READ_FIELD(this, kSecondOffset);
3507 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3508 WRITE_FIELD(this, kSecondOffset, value);
3509 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3513 bool ExternalString::is_short() {
3514 InstanceType type = map()->instance_type();
3515 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3519 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3520 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3524 void ExternalAsciiString::update_data_cache() {
3525 if (is_short()) return;
3526 const char** data_field =
3527 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3528 *data_field = resource()->data();
3532 void ExternalAsciiString::set_resource(
3533 const ExternalAsciiString::Resource* resource) {
3534 ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3535 *reinterpret_cast<const Resource**>(
3536 FIELD_ADDR(this, kResourceOffset)) = resource;
3537 if (resource != NULL) update_data_cache();
3541 const uint8_t* ExternalAsciiString::GetChars() {
3542 return reinterpret_cast<const uint8_t*>(resource()->data());
3546 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3547 ASSERT(index >= 0 && index < length());
3548 return GetChars()[index];
3552 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3553 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3557 void ExternalTwoByteString::update_data_cache() {
3558 if (is_short()) return;
3559 const uint16_t** data_field =
3560 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3561 *data_field = resource()->data();
3565 void ExternalTwoByteString::set_resource(
3566 const ExternalTwoByteString::Resource* resource) {
3567 *reinterpret_cast<const Resource**>(
3568 FIELD_ADDR(this, kResourceOffset)) = resource;
3569 if (resource != NULL) update_data_cache();
3573 const uint16_t* ExternalTwoByteString::GetChars() {
3574 return resource()->data();
3578 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3579 ASSERT(index >= 0 && index < length());
3580 return GetChars()[index];
3584 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3586 return GetChars() + start;
3590 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3591 return depth & kDepthMask;
3595 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3596 frames_[depth_++ & kDepthMask] = string;
3600 void ConsStringIteratorOp::PushRight(ConsString* string) {
3602 frames_[(depth_-1) & kDepthMask] = string;
3606 void ConsStringIteratorOp::AdjustMaximumDepth() {
3607 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3611 void ConsStringIteratorOp::Pop() {
3612 ASSERT(depth_ > 0);
3613 ASSERT(depth_ <= maximum_depth_);
3614 depth_--;
3615 }
3618 uint16_t StringCharacterStream::GetNext() {
3619 ASSERT(buffer8_ != NULL && end_ != NULL);
3620 // Advance cursor if needed.
3621 if (buffer8_ == end_) HasMore();
3622 ASSERT(buffer8_ < end_);
3623 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3627 StringCharacterStream::StringCharacterStream(String* string,
3628 ConsStringIteratorOp* op,
3629 int offset)
3630 : is_one_byte_(false),
3631 op_(op) {
3632 Reset(string, offset);
3633 }
3636 void StringCharacterStream::Reset(String* string, int offset) {
3637 buffer8_ = NULL;
3638 end_ = NULL;
3639 ConsString* cons_string = String::VisitFlat(this, string, offset);
3640 op_->Reset(cons_string, offset);
3641 if (cons_string != NULL) {
3642 string = op_->Next(&offset);
3643 if (string != NULL) String::VisitFlat(this, string, offset);
3648 bool StringCharacterStream::HasMore() {
3649 if (buffer8_ != end_) return true;
3650 int offset;
3651 String* string = op_->Next(&offset);
3652 ASSERT_EQ(offset, 0);
3653 if (string == NULL) return false;
3654 String::VisitFlat(this, string);
3655 ASSERT(buffer8_ != end_);
3656 return true;
3657 }
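// Usage sketch (illustrative): streaming characters from an arbitrary,
// possibly cons, string without flattening it first:
//
//   ConsStringIteratorOp op;
//   StringCharacterStream stream(string, &op);
//   while (stream.HasMore()) {
//     uint16_t c = stream.GetNext();
//   }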
3660 void StringCharacterStream::VisitOneByteString(
3661 const uint8_t* chars, int length) {
3662 is_one_byte_ = true;
3663 buffer8_ = chars;
3664 end_ = chars + length;
3665 }
3668 void StringCharacterStream::VisitTwoByteString(
3669 const uint16_t* chars, int length) {
3670 is_one_byte_ = false;
3671 buffer16_ = chars;
3672 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3673 }
3676 void JSFunctionResultCache::MakeZeroSize() {
3677 set_finger_index(kEntriesIndex);
3678 set_size(kEntriesIndex);
3682 void JSFunctionResultCache::Clear() {
3683 int cache_size = size();
3684 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3685 MemsetPointer(entries_start,
3686 GetHeap()->the_hole_value(),
3687 cache_size - kEntriesIndex);
3688 MakeZeroSize();
3689 }
3692 int JSFunctionResultCache::size() {
3693 return Smi::cast(get(kCacheSizeIndex))->value();
3697 void JSFunctionResultCache::set_size(int size) {
3698 set(kCacheSizeIndex, Smi::FromInt(size));
3702 int JSFunctionResultCache::finger_index() {
3703 return Smi::cast(get(kFingerIndex))->value();
3707 void JSFunctionResultCache::set_finger_index(int finger_index) {
3708 set(kFingerIndex, Smi::FromInt(finger_index));
3712 byte ByteArray::get(int index) {
3713 ASSERT(index >= 0 && index < this->length());
3714 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3718 void ByteArray::set(int index, byte value) {
3719 ASSERT(index >= 0 && index < this->length());
3720 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3724 int ByteArray::get_int(int index) {
3725 ASSERT(index >= 0 && (index * kIntSize) < this->length());
3726 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3730 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3731 ASSERT_TAG_ALIGNED(address);
3732 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3736 Address ByteArray::GetDataStartAddress() {
3737 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3738 }
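// Illustrative invariant: the two conversions above are inverses, i.e. for
// any byte array b,
//
//   ByteArray::FromDataStartAddress(b->GetDataStartAddress()) == b
//
// since both merely add or subtract the header size and the heap-object tag.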
3741 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3742 return reinterpret_cast<uint8_t*>(external_pointer());
3746 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3747 ASSERT((index >= 0) && (index < this->length()));
3748 uint8_t* ptr = external_uint8_clamped_pointer();
3749 return ptr[index];
3750 }
3753 Handle<Object> ExternalUint8ClampedArray::get(
3754 Handle<ExternalUint8ClampedArray> array,
3755 int index) {
3756 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3757 array->GetIsolate());
3758 }
3761 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3762 ASSERT((index >= 0) && (index < this->length()));
3763 uint8_t* ptr = external_uint8_clamped_pointer();
3764 ptr[index] = value;
3765 }
3768 void* ExternalArray::external_pointer() {
3769 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3770 return reinterpret_cast<void*>(ptr);
3774 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3775 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3776 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3780 int8_t ExternalInt8Array::get_scalar(int index) {
3781 ASSERT((index >= 0) && (index < this->length()));
3782 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3783 return ptr[index];
3784 }
3787 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3788 int index) {
3789 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3790 array->GetIsolate());
3791 }
3794 void ExternalInt8Array::set(int index, int8_t value) {
3795 ASSERT((index >= 0) && (index < this->length()));
3796 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3797 ptr[index] = value;
3798 }
3801 uint8_t ExternalUint8Array::get_scalar(int index) {
3802 ASSERT((index >= 0) && (index < this->length()));
3803 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3804 return ptr[index];
3805 }
3808 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3809 int index) {
3810 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3811 array->GetIsolate());
3812 }
3815 void ExternalUint8Array::set(int index, uint8_t value) {
3816 ASSERT((index >= 0) && (index < this->length()));
3817 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3818 ptr[index] = value;
3819 }
3822 int16_t ExternalInt16Array::get_scalar(int index) {
3823 ASSERT((index >= 0) && (index < this->length()));
3824 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3825 return ptr[index];
3826 }
3829 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
3830 int index) {
3831 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3832 array->GetIsolate());
3833 }
3836 void ExternalInt16Array::set(int index, int16_t value) {
3837 ASSERT((index >= 0) && (index < this->length()));
3838 int16_t* ptr = static_cast<int16_t*>(external_pointer());
3839 ptr[index] = value;
3840 }
3843 uint16_t ExternalUint16Array::get_scalar(int index) {
3844 ASSERT((index >= 0) && (index < this->length()));
3845 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3846 return ptr[index];
3847 }
3850 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
3851 int index) {
3852 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3853 array->GetIsolate());
3854 }
3857 void ExternalUint16Array::set(int index, uint16_t value) {
3858 ASSERT((index >= 0) && (index < this->length()));
3859 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
3860 ptr[index] = value;
3861 }
3864 int32_t ExternalInt32Array::get_scalar(int index) {
3865 ASSERT((index >= 0) && (index < this->length()));
3866 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3867 return ptr[index];
3868 }
3871 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
3872 int index) {
3873 return array->GetIsolate()->factory()->
3874 NewNumberFromInt(array->get_scalar(index));
3875 }
3878 void ExternalInt32Array::set(int index, int32_t value) {
3879 ASSERT((index >= 0) && (index < this->length()));
3880 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3881 ptr[index] = value;
3882 }
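// Illustrative note: element types up to 16 bits always fit in a Smi, so the
// getters above box them directly. 32-bit values can exceed Smi range
// (Smis carry 31 bits on 32-bit targets), so the Int32/Uint32 getters go
// through the factory, which allocates a HeapNumber when necessary.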
3885 uint32_t ExternalUint32Array::get_scalar(int index) {
3886 ASSERT((index >= 0) && (index < this->length()));
3887 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3888 return ptr[index];
3889 }
3892 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
3893 int index) {
3894 return array->GetIsolate()->factory()->
3895 NewNumberFromUint(array->get_scalar(index));
3896 }
3899 void ExternalUint32Array::set(int index, uint32_t value) {
3900 ASSERT((index >= 0) && (index < this->length()));
3901 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
3902 ptr[index] = value;
3903 }
3906 float ExternalFloat32Array::get_scalar(int index) {
3907 ASSERT((index >= 0) && (index < this->length()));
3908 float* ptr = static_cast<float*>(external_pointer());
3909 return ptr[index];
3910 }
3913 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
3914 int index) {
3915 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3916 }
3919 void ExternalFloat32Array::set(int index, float value) {
3920 ASSERT((index >= 0) && (index < this->length()));
3921 float* ptr = static_cast<float*>(external_pointer());
3922 ptr[index] = value;
3923 }
3926 float32x4_value_t ExternalFloat32x4Array::get_scalar(int index) {
3927 ASSERT((index >= 0) && (index < this->length()));
3928 float* ptr = static_cast<float*>(external_pointer());
3929 float32x4_value_t value;
3930 value.storage[0] = ptr[index * 4 + 0];
3931 value.storage[1] = ptr[index * 4 + 1];
3932 value.storage[2] = ptr[index * 4 + 2];
3933 value.storage[3] = ptr[index * 4 + 3];
3934 return value;
3935 }
3938 Handle<Object> ExternalFloat32x4Array::get(Handle<ExternalFloat32x4Array> array,
3939 int index) {
3940 float32x4_value_t value = array->get_scalar(index);
3941 return array->GetIsolate()->factory()->NewFloat32x4(value);
3945 void ExternalFloat32x4Array::set(int index, const float32x4_value_t& value) {
3946 ASSERT((index >= 0) && (index < this->length()));
3947 float* ptr = static_cast<float*>(external_pointer());
3948 ptr[index * 4 + 0] = value.storage[0];
3949 ptr[index * 4 + 1] = value.storage[1];
3950 ptr[index * 4 + 2] = value.storage[2];
3951 ptr[index * 4 + 3] = value.storage[3];
3955 float64x2_value_t ExternalFloat64x2Array::get_scalar(int index) {
3956 ASSERT((index >= 0) && (index < this->length()));
3957 double* ptr = static_cast<double*>(external_pointer());
3958 float64x2_value_t value;
3959 value.storage[0] = ptr[index * 2 + 0];
3960 value.storage[1] = ptr[index * 2 + 1];
3961 return value;
3962 }
3965 Handle<Object> ExternalFloat64x2Array::get(Handle<ExternalFloat64x2Array> array,
3966 int index) {
3967 float64x2_value_t value = array->get_scalar(index);
3968 return array->GetIsolate()->factory()->NewFloat64x2(value);
3972 void ExternalFloat64x2Array::set(int index, const float64x2_value_t& value) {
3973 ASSERT((index >= 0) && (index < this->length()));
3974 double* ptr = static_cast<double*>(external_pointer());
3975 ptr[index * 2 + 0] = value.storage[0];
3976 ptr[index * 2 + 1] = value.storage[1];
3980 int32x4_value_t ExternalInt32x4Array::get_scalar(int index) {
3981 ASSERT((index >= 0) && (index < this->length()));
3982 int32_t* ptr = static_cast<int32_t*>(external_pointer());
3983 int32x4_value_t value;
3984 value.storage[0] = ptr[index * 4 + 0];
3985 value.storage[1] = ptr[index * 4 + 1];
3986 value.storage[2] = ptr[index * 4 + 2];
3987 value.storage[3] = ptr[index * 4 + 3];
3988 return value;
3989 }
3992 Handle<Object> ExternalInt32x4Array::get(Handle<ExternalInt32x4Array> array,
3993 int index) {
3994 int32x4_value_t value = array->get_scalar(index);
3995 return array->GetIsolate()->factory()->NewInt32x4(value);
3999 void ExternalInt32x4Array::set(int index, const int32x4_value_t& value) {
4000 ASSERT((index >= 0) && (index < this->length()));
4001 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4002 ptr[index * 4 + 0] = value.storage[0];
4003 ptr[index * 4 + 1] = value.storage[1];
4004 ptr[index * 4 + 2] = value.storage[2];
4005 ptr[index * 4 + 3] = value.storage[3];
4009 double ExternalFloat64Array::get_scalar(int index) {
4010 ASSERT((index >= 0) && (index < this->length()));
4011 double* ptr = static_cast<double*>(external_pointer());
4012 return ptr[index];
4013 }
4016 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
4017 int index) {
4018 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4019 }
4022 void ExternalFloat64Array::set(int index, double value) {
4023 ASSERT((index >= 0) && (index < this->length()));
4024 double* ptr = static_cast<double*>(external_pointer());
4025 ptr[index] = value;
4026 }
4029 void* FixedTypedArrayBase::DataPtr() {
4030 return FIELD_ADDR(this, kDataOffset);
4034 int FixedTypedArrayBase::DataSize(InstanceType type) {
4035 int element_size;
4036 switch (type) {
4037 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4038 case FIXED_##TYPE##_ARRAY_TYPE: \
4039 element_size = size; \
4040 break;
4042 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4043 #undef TYPED_ARRAY_CASE
4044 default:
4045 UNREACHABLE();
4046 return 0;
4047 }
4048 return length() * element_size;
4049 }
4052 int FixedTypedArrayBase::DataSize() {
4053 return DataSize(map()->instance_type());
4057 int FixedTypedArrayBase::size() {
4058 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4062 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4063 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4064 }
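// Worked example (illustrative): a fixed Float64 array of length 3 has
// DataSize == 3 * 8 == 24 bytes, so its total object size is
// OBJECT_POINTER_ALIGN(kDataOffset + 24): header plus payload, rounded up to
// pointer alignment.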
4067 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4070 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4073 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4076 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4079 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4082 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4085 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4088 float Float32ArrayTraits::defaultValue() {
4089 return static_cast<float>(OS::nan_value());
4093 double Float64ArrayTraits::defaultValue() { return OS::nan_value(); }
4096 template <class Traits>
4097 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4098 ASSERT((index >= 0) && (index < this->length()));
4099 ElementType* ptr = reinterpret_cast<ElementType*>(
4100 FIELD_ADDR(this, kDataOffset));
4101 return ptr[index];
4102 }
4105 template<> inline
4106 FixedTypedArray<Float64ArrayTraits>::ElementType
4107 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
4108 ASSERT((index >= 0) && (index < this->length()));
4109 return READ_DOUBLE_FIELD(this, ElementOffset(index));


template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  ASSERT((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  ptr[index] = value;
}


template<> inline
void FixedTypedArray<Float64ArrayTraits>::set(
    int index, Float64ArrayTraits::ElementType value) {
  ASSERT((index >= 0) && (index < this->length()));
  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}


template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}
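
// Illustrative values (not exercised here): from_double(-3.5) returns 0,
// from_double(300.0) returns 255, and from_double(2.5) goes through lrint,
// which rounds halfway cases to even under the default rounding mode and
// returns 2 -- matching Uint8ClampedArray semantics.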


template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}


template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


template <class Traits>
Handle<Object> FixedTypedArray<Traits>::SetValue(
    Handle<FixedTypedArray<Traits> > array,
    uint32_t index,
    Handle<Object> value) {
  ElementType cast_value = Traits::defaultValue();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = from_int(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = from_double(double_value);
    } else {
      // Clamp undefined to the default value. All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Traits::ToHandle(array->GetIsolate(), cast_value);
}
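
// Dispatch sketch (illustrative): storing the Smi 7 into a
// FixedTypedArray<Int8ArrayTraits> goes through from_int(7); storing a
// HeapNumber 1.9 goes through from_double(1.9), which truncates to 1 via
// DoubleToInt32 for the integral traits; anything else must already be
// undefined and leaves the default value in place.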


template<> inline
Handle<Object> FixedTypedArray<Float32x4ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Float32x4ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  float32x4_value_t cast_value;
  cast_value.storage[0] = static_cast<float>(OS::nan_value());
  cast_value.storage[1] = static_cast<float>(OS::nan_value());
  cast_value.storage[2] = static_cast<float>(OS::nan_value());
  cast_value.storage[3] = static_cast<float>(OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat32x4()) {
      cast_value = Handle<Float32x4>::cast(value)->get();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Float32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
}


template<> inline
Handle<Object> FixedTypedArray<Float64x2ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Float64x2ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  float64x2_value_t cast_value;
  cast_value.storage[0] = OS::nan_value();
  cast_value.storage[1] = OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat64x2()) {
      cast_value = Handle<Float64x2>::cast(value)->get();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Float64x2ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
}


template<> inline
Handle<Object> FixedTypedArray<Int32x4ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Int32x4ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  int32x4_value_t cast_value;
  cast_value.storage[0] = 0;
  cast_value.storage[1] = 0;
  cast_value.storage[2] = 0;
  cast_value.storage[3] = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsInt32x4()) {
      cast_value = Handle<Int32x4>::cast(value)->get();
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Int32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
}


Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Int32x4ArrayTraits::ToHandle(
    Isolate* isolate, int32x4_value_t scalar) {
  return isolate->factory()->NewInt32x4(scalar);
}


Handle<Object> Float32x4ArrayTraits::ToHandle(
    Isolate* isolate, float32x4_value_t scalar) {
  return isolate->factory()->NewFloat32x4(scalar);
}


Handle<Object> Float64x2ArrayTraits::ToHandle(
    Isolate* isolate, float64x2_value_t scalar) {
  return isolate->factory()->NewFloat64x2(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}


int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}


int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  ASSERT(index <= 0);
  return instance_size() + (index * kPointerSize);
}
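
// Worked example (illustrative): with instance_size() == 64,
// inobject_properties() == 4 and 8-byte pointers, in-object property 0
// lives at offset 64 - 4 * 8 == 32 and property 3 at offset 56; the
// in-object fields always occupy the tail of the instance.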


int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE ||
      instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return reinterpret_cast<ConstantPoolArray*>(this)->size();
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
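
// Illustrative case: maps of variable-sized objects store
// kVariableSizeSentinel in their instance size byte, so e.g. a
// SeqOneByteString computes its size from its own length field via
// SeqOneByteString::SizeFor instead of reading it from the map.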


void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}


void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}


bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}


bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}


bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_is_shared(bool value) {
  set_bit_field3(IsShared::update(bit_field3(), value));
}


bool Map::is_shared() {
  return IsShared::decode(bit_field3());
}


void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}


Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}


bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}


void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}


void Map::set_done_inobject_slack_tracking(bool value) {
  set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
}


bool Map::done_inobject_slack_tracking() {
  return DoneInobjectSlackTracking::decode(bit_field3());
}


void Map::set_construction_count(int value) {
  set_bit_field3(ConstructionCount::update(bit_field3(), value));
}


int Map::construction_count() {
  return ConstructionCount::decode(bit_field3());
}


void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}


bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}


void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}


bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == CONSTANT) return true;
  }
  return false;
}


void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}


bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}


int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}


void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}


bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}


Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}


CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


Object** DependentCode::slot_at(int i) {
  return RawFieldOfElementAt(kCodesStartIndex + i);
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}


void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
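
// Layout note (restating the scheme above): slots [0, kGroupCount) hold the
// per-group entry counts as Smis, and code/object entries follow from
// kCodesStartIndex onward, grouped contiguously in DependencyGroup order.
// ExtendGroup moves the first element of each later group to the slot just
// past that group's end, opening one free slot at the end of |group|.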


void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether a call
  // to the code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}


ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}


Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}


// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::major_key() {
  ASSERT(has_major_key());
  return StubMajorKeyField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_major_key(int major) {
  ASSERT(has_major_key());
  ASSERT(0 <= major && major < 256);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = StubMajorKeyField::update(previous, major);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


bool Code::has_major_key() {
  return kind() == STUB ||
         kind() == HANDLER ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == CALL_IC ||
         kind() == KEYED_STORE_IC ||
         kind() == TO_BOOLEAN_IC;
}


bool Code::optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


bool Code::has_deoptimization_support() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::is_compiled_optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}


void Code::set_compiled_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


int Code::allow_osr_at_loop_nesting_level() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


int Code::profiler_ticks() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}


void Code::set_profiler_ticks(int ticks) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(ticks < 256);
  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
}


unsigned Code::stack_slots() {
  ASSERT(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  ASSERT(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


unsigned Code::safepoint_table_offset() {
  ASSERT(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  ASSERT(is_crankshafted());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


unsigned Code::back_edge_table_offset() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_back_edge_table_offset(unsigned offset) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


bool Code::back_edges_patched_for_osr() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgesPatchedForOSRField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_back_edges_patched_for_osr(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgesPatchedForOSRField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


byte Code::to_boolean_state() {
  return extra_ic_state();
}


bool Code::has_function_cache() {
  ASSERT(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_has_function_cache(bool flag) {
  ASSERT(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::marked_for_deoptimization() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_weak_stub() {
  return CanBeWeakStub() && WeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::mark_as_weak_stub() {
  ASSERT(CanBeWeakStub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = WeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_invalidated_weak_stub() {
  return is_weak_stub() && InvalidatedWeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::mark_as_invalidated_weak_stub() {
  ASSERT(is_inline_cache_stub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = InvalidatedWeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}


bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}


bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}


ConstantPoolArray* Code::constant_pool() {
  return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
}


void Code::set_constant_pool(Object* value) {
  ASSERT(value->IsConstantPoolArray());
  WRITE_FIELD(this, kConstantPoolOffset, value);
  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
}


Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               StubType type,
                               InlineCacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}
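
// Round-trip sketch (illustrative; relies on the default arguments in the
// declaration): the Extract* helpers below invert this encoding, e.g.
// ExtractKindFromFlags(ComputeFlags(STUB)) == STUB, and likewise for the
// ic_state, type, extra_ic_state and holder fields.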


Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}


Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
                                      StubType type,
                                      InlineCacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}


Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}


Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}


Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (!FLAG_collect_maps) return false;
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsJSObject() ||
      (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  return false;
}


class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    ASSERT(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
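
// Usage sketch (hypothetical caller; Code::FindAndReplace is declared
// elsewhere): fill in up to kMaxCount pairs, then apply the pattern to a
// code object to rewrite the embedded maps:
//   Code::FindAndReplacePattern pattern;
//   pattern.Add(map_to_find, object_to_replace);
//   code->FindAndReplace(pattern);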


bool Code::IsWeakObjectInIC(Object* object) {
  return object->IsMap() && Map::cast(object)->CanTransition() &&
         FLAG_collect_maps &&
         FLAG_weak_embedded_maps_in_ic;
}


Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}


// If the map is using the empty transition array, install a new transition
// array with room for an element transition.
static void EnsureHasTransitionArray(Handle<Map> map) {
  Handle<TransitionArray> transitions;
  if (!map->HasTransitionArray()) {
    transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    transitions = TransitionArray::ExtendToFullTransitionArray(map);
  } else {
    return;
  }
  map->set_transitions(*transitions);
}


void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}


ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)


void Map::set_bit_field3(uint32_t bits) {
  // When the uint32 field shares a pointer-sized slot, clear the upper half
  // so the whole word stays well-defined.
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}


void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}


Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    ASSERT(object->IsMap() || object->IsUndefined());
    return object;
  }
}


bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}


bool Map::HasTransitionArray() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}


Map* Map::elements_transition_map() {
  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}


bool Map::CanHaveMoreTransitions() {
  if (!HasTransitionArray()) return true;
  return FixedArray::SizeFor(transitions()->length() +
                             TransitionArray::kTransitionSize)
      <= Page::kMaxRegularHeapObjectSize;
}


Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}


int Map::SearchTransition(Name* name) {
  if (HasTransitionArray()) return transitions()->Search(name);
  return TransitionArray::kNotFound;
}


FixedArray* Map::GetPrototypeTransitions() {
  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
  if (!transitions()->HasPrototypeTransitions()) {
    return GetHeap()->empty_fixed_array();
  }
  return transitions()->GetPrototypeTransitions();
}


void Map::SetPrototypeTransitions(
    Handle<Map> map, Handle<FixedArray> proto_transitions) {
  EnsureHasTransitionArray(map);
  int old_number_of_transitions = map->NumberOfProtoTransitions();
#ifdef DEBUG
  if (map->HasPrototypeTransitions()) {
    ASSERT(map->GetPrototypeTransitions() != *proto_transitions);
    map->ZapPrototypeTransitions();
  }
#endif
  map->transitions()->SetPrototypeTransitions(*proto_transitions);
  map->SetNumberOfProtoTransitions(old_number_of_transitions);
}


bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}


TransitionArray* Map::transitions() {
  ASSERT(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}


void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        ASSERT(new_target_index != TransitionArray::kNotFound);
        ASSERT(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    ASSERT(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}


void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}


void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}
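
// Note (restating the invariant above): the kTransitionsOrBackPointerOffset
// slot is overloaded. It holds either a TransitionArray, which then carries
// the back pointer in its back_pointer_storage field, or the back pointer
// itself (a Map, or undefined for a root map); IsTransitionArray()
// disambiguates the two cases.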


ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)


Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}


void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}


Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}


void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}


ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
          kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)

SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)

BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)


#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)       \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);        \
  int holder::name() {                                      \
    int value = READ_INT_FIELD(this, offset);               \
    ASSERT(kHeapObjectTag == 1);                            \
    ASSERT((value & kHeapObjectTag) == 0);                  \
    return value >> 1;                                      \
  }                                                         \
  void holder::set_##name(int value) {                      \
    ASSERT(kHeapObjectTag == 1);                            \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||            \
           (value & 0xC0000000) == 0x0);                    \
    WRITE_INT_FIELD(this,                                   \
                    offset,                                 \
                    (value << 1) & ~kHeapObjectTag);        \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)       \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
  INT_ACCESSORS(holder, name, offset)
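
// Worked example of the tagging trick above (illustrative): with
// kHeapObjectTag == 1, set_foo(-5) stores (-5 << 1) & ~1 == -10, and foo()
// recovers -10 >> 1 == -5. The always-clear low bit makes the raw word look
// like a Smi rather than a heap pointer, so the GC leaves it alone.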


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


StrictMode SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction)
      ? STRICT : SLOPPY;
}


void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
  // We only allow mode transitions from sloppy to strict.
  ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
  set_compiler_hints(hints);
}


BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)


bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalAsciiString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}


void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}


Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
  set_code(value);
}


ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


bool SharedFunctionInfo::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
  BailoutReason reason = static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
  return reason;
}


bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}
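
// The check above fires exactly when |tries| is a power of two >= 16
// ((tries - 1) & tries == 0 only for powers of two), so re-enable attempts
// happen at 16, 32, 64, ... tries -- an exponential backoff.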


bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


bool JSFunction::IsNative() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  ASSERT(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
      initial_map()->construction_count() != JSFunction::kNoSlackTracking;
}


Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}


FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}

void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}

Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}

void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!GetHeap()->InNewSpace(value));
}

ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)

void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)

#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() {       \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS

ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)

Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}

void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}

ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

bool JSGeneratorObject::is_suspended() {
  ASSERT_LT(kGeneratorExecuting, kGeneratorClosed);
  ASSERT_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

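// Taken together with the asserts in is_suspended(), the continuation Smi
// encodes the whole generator state: kGeneratorExecuting is a negative
// sentinel, kGeneratorClosed is 0, and any positive value is the offset at
// which a suspended generator resumes. Illustrative reading (the exact
// sentinel value is an implementation detail):
//
//   continuation() <  0  ->  executing
//   continuation() == 0  ->  closed
//   continuation() >  0  ->  suspended at that offset
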
JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}

ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)

JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}

ACCESSORS(JSValue, value, Object, kValueOffset)

JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}

ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)

JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}

ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)

JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)

void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out e.g. a minor key.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}

Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return raw_type_feedback_info();
}

void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}

int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC || kind() == CALL_IC);
  return Smi::cast(raw_type_feedback_info())->value();
}

void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == CALL_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  set_raw_type_feedback_info(Smi::FromInt(value));
}

ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)

byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}

byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}

int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}

ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}

int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}

byte* Code::entry() {
  return instruction_start();
}

bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

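// Layout sketch implied by the accessors above (not to scale):
//
//   address()           instruction_start() == entry()
//   |                   |
//   [ Code header ..... ][ instructions, instruction_size() bytes ][ reloc ]
//
// body_size() rounds the instruction area up to kObjectAlignment, and
// contains() uses an inclusive upper bound, presumably so that a pointer
// just past the last instruction (such as a return address) still counts as
// being inside this Code object.
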
ACCESSORS(JSArray, length, Object, kLengthOffset)

void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}

void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}

ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)

bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}

void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}

bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}

void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}

ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)

ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)

JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}

int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}

JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}

String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}

Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}

void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}

ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    ASSERT((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}

ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}

bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}

bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}

bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}

bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}

bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}

bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}

bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}

bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}

bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}

#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)            \
  bool JSObject::HasExternal##Type##Elements() {                          \
    HeapObject* array = elements();                                       \
    ASSERT(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK

bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsFixedTypedArrayBase();
}

#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    ASSERT(array != NULL);                                             \
    if (!array->IsHeapObject())                                        \
      return false;                                                    \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK

bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}

bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}

NameDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return NameDictionary::cast(properties());
}

SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}

bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}

bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}

uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}

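// Sketch of the hash field layout these helpers assume: the low bits are
// flags (kHashNotComputedMask, kIsNotArrayIndexMask, ...) and the hash
// proper lives above them, hence `field >> kHashShift` on the fast path.
// Minimal illustrative use, assuming the hash was already computed:
//
//   uint32_t field = name->hash_field();
//   if (Name::IsHashFieldComputed(field)) {
//     uint32_t hash = field >> Name::kHashShift;
//   }
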
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}

bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}

uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}

uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}

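// Note on the kZeroHash branch above: the finalization never returns a value
// whose String::kHashBitMask bits are all zero; the arbitrary nonzero
// constant kZeroHash is substituted, presumably so that a zero hash can
// never be confused with an uncomputed hash field elsewhere.
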
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}

bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}

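// Worked example of the overflow guard above: 429496729U == 0xFFFFFFFF / 10,
// and ((d + 2) >> 3) is a branch-free "1 if d > 5, else 0". Appending a
// digit computes array_index_ * 10 + d, so at array_index_ == 429496729 the
// digits 6..9 (exactly those with ((d + 2) >> 3) == 1) would push the result
// past 0xFFFFFFFF; the guard rejects precisely those cases, keeping the
// index within uint32 range.
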
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}

template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}

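// Minimal usage sketch (illustrative; `chars`, `length` and `seed` are
// assumed to be supplied by the caller, with the seed normally taken from
// the heap's hash seed):
//
//   uint32_t field = StringHasher::HashSequentialString(chars, length, seed);
//
// The result is a complete hash *field*, i.e. the hash bits plus flag bits
// such as the array-index bit, not a bare hash value.
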
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}

bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}

Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}

Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}

bool JSReceiver::HasProperty(Handle<JSReceiver> object,
                             Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return GetPropertyAttributes(object, name) != ABSENT;
}

bool JSReceiver::HasOwnProperty(Handle<JSReceiver> object, Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return GetOwnPropertyAttributes(object, name) != ABSENT;
}

PropertyAttributes JSReceiver::GetPropertyAttributes(Handle<JSReceiver> object,
                                                     Handle<Name> key) {
  uint32_t index;
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  }
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);
}

PropertyAttributes JSReceiver::GetElementAttribute(Handle<JSReceiver> object,
                                                   uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
}

bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
}

bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
  return GetPrototype() != global;
}

Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}

Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}

bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true) != ABSENT;
}

bool JSReceiver::HasOwnElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false) != ABSENT;
}

PropertyAttributes JSReceiver::GetOwnElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
}

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}

void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}

bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}

void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}

PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}

void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}

bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
}

void ExecutableAccessorInfo::clear_setter() {
  set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
}

void AccessorPair::set_access_flags(v8::AccessControl access_control) {
  int current = access_flags()->value();
  current = BooleanBit::set(current,
                            kAllCanReadBit,
                            access_control & ALL_CAN_READ);
  current = BooleanBit::set(current,
                            kAllCanWriteBit,
                            access_control & ALL_CAN_WRITE);
  set_access_flags(Smi::FromInt(current));
}

bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}

bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}

template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}

template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = DerivedHashTable::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, *key, mode);
  FixedArray::set(index + 1, *value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}

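// Entry layout, as the three writes above spell out: each dictionary entry
// occupies three consecutive FixedArray slots starting at
// DerivedHashTable::EntryToIndex(entry):
//
//   [index + 0]  key
//   [index + 1]  value
//   [index + 2]  PropertyDetails, packed as a Smi
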
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}

uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}

uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}

bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}

uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}

uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}

Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  ASSERT(key->IsUniqueName());
  return key;
}

void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}

bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}

uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}

uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}

Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}

Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}

template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash = reinterpret_cast<intptr_t>(*key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}

template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}

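// Note that both Hash() and HashForObject() above hash the *address* of the
// key object rather than its contents, while matching is SameValue-based.
// This presumes the key's address stays stable while it is in the table (or
// that the table is rehashed if the GC moves keys); contents-equal but
// distinct objects can therefore land in different buckets, as expected for
// an identity-keyed weak table.
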
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}

void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
  ASSERT(array->HasFastSmiOrObjectElements());
  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(array, required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(array, required_size);
  }
}

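// Worked example of the growth policy above (illustrative numbers): with a
// backing store of length 96 and required_size == 100, the array grows to
// 100 + (100 >> 3) == 112 elements; some slack, but far less than doubling.
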
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}

bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}

void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  ASSERT((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}

Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->uninitialized_symbol();
}

Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->megamorphic_symbol();
}

Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
    ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}

Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
  return heap->uninitialized_symbol();
}

int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}

void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}

int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}

void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}

void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}

void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}

void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}

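// The two Smi-range fix-ups above fold an out-of-range packed value back
// into [Smi::kMinValue, Smi::kMaxValue] by adjusting only the high bits:
// OR-ing in Smi::kMinValue pulls an overflow negative, and masking it off
// lifts an underflow back up. That is harmless here because consumers such
// as matches_inlined_type_change_checksum() below compare only the low
// kTypeChangeChecksumBits bits of the checksum.
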
int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}

bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}

SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)

Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}

Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}

int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}

void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}

template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}

void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
}

template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}

#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_