// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/elements.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
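// Why the shift pair above works: value_ has 31 significant bits, and a Smi
// payload is also only 31 bits wide on 32-bit targets, so bit 31 must be a
// copy of bit 30 before Smi::FromInt() is applied. For example (assuming a
// 32-bit int): value_ = 0x40000001 -> after << 1: 0x80000002 -> after the
// arithmetic >> 1: 0xC0000001, whose top two bits now agree, as required.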
PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
#define TYPE_CHECKER(type, instancetype)                                   \
  bool Object::Is##type() const {                                          \
    return Object::IsHeapObject() &&                                       \
        HeapObject::cast(this)->map()->instance_type() == instancetype;    \
  }


#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
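// For illustration, TYPE_CHECKER(Map, MAP_TYPE) expands to
//
//   bool Object::IsMap() const {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
//   }
//
// and CAST_ACCESSOR(Map) stamps out the matching checked Map::cast()
// overloads for mutable and const objects the same way.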
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }
// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
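// The three Smi accessor flavors differ only in memory ordering:
// SMI_ACCESSORS compiles to plain loads/stores, SYNCHRONIZED_SMI_ACCESSORS
// uses acquire/release atomics for fields that other threads may read
// concurrently, and NOBARRIER_SMI_ACCESSORS uses relaxed atomics that merely
// guarantee the word is read or written in one piece.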
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() const {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}
bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() const {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
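// The masked compare above checks two attributes at once: the type word must
// have the string bits (kStringTag) set and must not have the
// not-internalized bit set. kInternalizedTag is the zero pattern for that
// bit, which is why the expected value is just (kStringTag | kInternalizedTag).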
bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}
bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
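// In other words: a field about to hold a Smi can be pre-initialized with the
// Smi zero, a field with double representation always gets its own MUTABLE
// HeapNumber box (so the field can later be updated in place), and any other
// value is stored unchanged.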
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);


bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
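// A string's instance type packs three orthogonal attributes into one word:
// representation (seq/cons/sliced/external, kStringRepresentationMask),
// encoding (one-byte vs. two-byte, kStringEncodingMask), and whether the
// string is internalized. The STATIC_ASSERTs above pin this bit layout to
// the copies of the constants in the public Internals class (include/v8.h),
// which the API relies on for its inline fast paths.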
uc32 FlatStringReader::Get(int index) {
  DCHECK(0 <= index && index <= length_);
  if (is_one_byte_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};


class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};
class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  virtual uint32_t Hash() OVERRIDE {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual bool IsMatch(Object* string) OVERRIDE;
  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  virtual uint32_t Hash() OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
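// These key classes implement the HashTableKey protocol used for string
// table lookups: IsMatch() compares the key against an existing entry,
// Hash() computes (and caches) the key's hash field, HashForObject() hashes
// an existing entry the same way, and AsHandle() materializes the key as an
// internalized string when it has to be inserted on a miss.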
bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


bool Object::IsExternalArray() const {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)      \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE)  \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER
bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}


bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)


bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsTransitionArray() const {
  return IsFixedArray();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // kDeoptEntrySize.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
          map == heap->catch_context_map() ||
          map == heap->with_context_map() ||
          map == heap->native_context_map() ||
          map == heap->block_context_map() ||
          map == heap->module_context_map() ||
          map == heap->global_context_map());
}


bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->scope_info_map();
}
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

TYPE_CHECKER(Float32x4, FLOAT32x4_TYPE)
TYPE_CHECKER(Float64x2, FLOAT64x2_TYPE)
TYPE_CHECKER(Int32x4, INT32x4_TYPE)


bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsWeakHashTable() const {
  return IsHashTable();
}


bool Object::IsDictionary() const {
  return IsHashTable() &&
         this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsStringTable() const {
  return IsHashTable();
}
bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}


bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}
bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsMapCache() const {
  return IsHashTable();
}


bool Object::IsObjectHashTable() const {
  return IsHashTable();
}


bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}


bool Object::IsPrimitive() const {
  return IsOddball() || IsNumber() || IsString();
}
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}


bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() const {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() const {                                         \
    return Object::IsHeapObject()                                         \
        && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
double Object::Number() {
  DCHECK(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
    }
  }
  return Handle<Smi>();
}


MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);
}


MaybeHandle<Object> Object::GetElement(Isolate* isolate,
                                       Handle<Object> object,
                                       uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This DCHECK is here to catch
  // leftover incorrect uses.
  DCHECK(AllowHeapAllocation::IsAllowed());
  return Object::GetElementWithReceiver(isolate, object, object, index);
}


MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  uint32_t index;
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
  return GetProperty(object, name);
}


MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
                                        Handle<Object> object,
                                        const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  DCHECK(!str.is_null());
#ifdef DEBUG
  uint32_t index;  // Assert that the name is not an array index.
  DCHECK(!str->AsArrayIndex(&index));
#endif  // DEBUG
  return GetProperty(object, str);
}
MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
}


MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   uint32_t index,
                                                   Handle<Object> value,
                                                   StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
}


Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
                                           uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));
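// All field accessors bottom out in FIELD_ADDR: a heap object pointer is
// tagged with kHeapObjectTag, so the raw slot address is p + offset -
// kHeapObjectTag. The ACQUIRE_/RELEASE_ and NOBARRIER_ variants route the
// same slot access through base's atomic ops; "barrier" in these names
// refers to compiler/CPU memory ordering, not the GC write barrier below.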
#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
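// The GC write barrier has two halves: the incremental marker's
// RecordWrite() keeps marking consistent when a pointer is written into an
// already-visited object, and heap->RecordWrite() logs old-to-new-space
// pointers in the store buffer so that a scavenge can find them without
// scanning the whole old generation. CONDITIONAL_WRITE_BARRIER does the same
// but only for mode == UPDATE_WRITE_BARRIER, letting callers pass
// SKIP_WRITE_BARRIER for values (such as Smis) that can never need it.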
#ifndef V8_TARGET_ARCH_MIPS
#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using load-double (mips ldc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline double read_double_field(const void* p, int offset) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = (*reinterpret_cast<const uint32_t*>(
      FIELD_ADDR_CONST(p, offset)));
  c.u[1] = (*reinterpret_cast<const uint32_t*>(
      FIELD_ADDR_CONST(p, offset + 4)));
  return c.d;
}
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS
#ifndef V8_TARGET_ARCH_MIPS
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using store-double (mips sdc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS
#define READ_FLOAT32x4_FIELD(p, offset) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT64x2_FIELD(p, offset) \
  (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT64x2_FIELD(p, offset, value) \
  (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32x4_FIELD(p, offset) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


int Smi::value() const {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  DCHECK(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  DCHECK(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
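// Smi encoding in brief: the payload lives in the upper bits of the word and
// the low kSmiTagSize bit(s) hold kSmiTag (0). On 32-bit targets
// kSmiShiftSize is 0, so FromIntptr() computes (value << 1) | 0; e.g. 5
// encodes as 0xA. On 64-bit targets kSmiShiftSize is 31 and the payload
// occupies the upper 32 bits of the word.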
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
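// During evacuation the GC overwrites a moved object's map slot with a
// forwarding pointer to the new copy. A real Map pointer is tagged with
// kHeapObjectTag (low bit 1), while FromForwardingAddress() stores an
// untagged, word-aligned address (low bit 0), so a map word that passes
// HAS_SMI_TAG is by construction a forwarding address.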
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}


Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}
bool HeapObject::MayContainRawValues() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return false;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides
    // the map word).
    return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
  }
  // The ConstantPoolArray contains heap pointers, but also raw values.
  if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
  return (type <= LAST_DATA_TYPE);
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


ACCESSORS(Float32x4, value, Object, kValueOffset)
ACCESSORS(Float64x2, value, Object, kValueOffset)
ACCESSORS(Int32x4, value, Object, kValueOffset)
const char* Float32x4::Name() {
  return "float32x4";
}


int Float32x4::kRuntimeAllocatorId() {
  return Runtime::kAllocateFloat32x4;
}


float Float32x4::getAt(int index) {
  DCHECK(index >= 0 && index < kLanes);
  return get().storage[index];
}


float32x4_value_t Float32x4::get() {
  return FixedFloat32x4Array::cast(value())->get_scalar(0);
}


void Float32x4::set(float32x4_value_t f32x4) {
  FixedFloat32x4Array::cast(value())->set(0, f32x4);
}


const char* Float64x2::Name() {
  return "float64x2";
}


int Float64x2::kRuntimeAllocatorId() {
  return Runtime::kAllocateFloat64x2;
}


double Float64x2::getAt(int index) {
  DCHECK(index >= 0 && index < kLanes);
  return get().storage[index];
}


float64x2_value_t Float64x2::get() {
  return FixedFloat64x2Array::cast(value())->get_scalar(0);
}


void Float64x2::set(float64x2_value_t f64x2) {
  FixedFloat64x2Array::cast(value())->set(0, f64x2);
}


const char* Int32x4::Name() {
  return "int32x4";
}


int Int32x4::kRuntimeAllocatorId() {
  return Runtime::kAllocateInt32x4;
}


int32_t Int32x4::getAt(int index) {
  DCHECK(index >= 0 && index < kLanes);
  return get().storage[index];
}


int32x4_value_t Int32x4::get() {
  return FixedInt32x4Array::cast(value())->get_scalar(0);
}


void Int32x4::set(int32x4_value_t i32x4) {
  FixedInt32x4Array::cast(value())->set(0, i32x4);
}
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
  }
#endif
}
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
           type == JS_OBJECT_TYPE ||
           type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}


inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
    Reason reason) {
  switch (reason) {
    case TENURING:
      return DependentCode::kAllocationSiteTenuringChangedGroup;
    case TRANSITIONS:
      return DependentCode::kAllocationSiteTransitionChangedGroup;
  }
  UNREACHABLE();
  return DependentCode::kAllocationSiteTransitionChangedGroup;
}
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}


inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}
inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}


inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
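// Worked example of the feedback math above (illustrative numbers): if a
// scavenge found 60 mementos out of 80 created, ratio = 60/80 = 0.75. Only a
// ratio of at least kPretenureRatio moves the site toward kTenure (and the
// final transition happens only when the semi-space was at maximum
// capacity); otherwise the site drifts to kDontTenure.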
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
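// Note the one-way generalization above: scanning the stored values can only
// widen target_kind (packed -> holey, SMI -> DOUBLE or plain OBJECT
// elements), never narrow it. E.g. seeing the values {1, the_hole, 2.5}
// under ALLOW_CONVERTED_DOUBLE_ELEMENTS on a FAST_SMI_ELEMENTS object yields
// FAST_HOLEY_DOUBLE_ELEMENTS.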
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}


Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
  DCHECK(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}


Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* Cell::value() const {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  DCHECK(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

Object* PropertyCell::type_raw() const {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case FLOAT32x4_TYPE:
      return Float32x4::kSize;
    case FLOAT64x2_TYPE:
      return Float64x2::kSize;
    case INT32x4_TYPE:
      return Int32x4::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}
2091 int JSObject::GetInternalFieldCount() {
2092 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2093 // Make sure to adjust for the number of in-object properties. These
2094 // properties do contribute to the size, but are not internal fields.
2095 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2096 map()->inobject_properties();
2097 }
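// Worked example (editorial): for an object of instance size
// kHeaderSize + 5 * kPointerSize with 2 in-object properties,
// (Size() - GetHeaderSize()) >> kPointerSizeLog2 is 5, so the object has
// 5 - 2 = 3 internal fields.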
2100 int JSObject::GetInternalFieldOffset(int index) {
2101 DCHECK(index < GetInternalFieldCount() && index >= 0);
2102 return GetHeaderSize() + (kPointerSize * index);
2106 Object* JSObject::GetInternalField(int index) {
2107 DCHECK(index < GetInternalFieldCount() && index >= 0);
2108 // Internal objects do follow immediately after the header, whereas in-object
2109 // properties are at the end of the object. Therefore there is no need
2110 // to adjust the index here.
2111 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2115 void JSObject::SetInternalField(int index, Object* value) {
2116 DCHECK(index < GetInternalFieldCount() && index >= 0);
2117 // Internal objects do follow immediately after the header, whereas in-object
2118 // properties are at the end of the object. Therefore there is no need
2119 // to adjust the index here.
2120 int offset = GetHeaderSize() + (kPointerSize * index);
2121 WRITE_FIELD(this, offset, value);
2122 WRITE_BARRIER(GetHeap(), this, offset, value);
2126 void JSObject::SetInternalField(int index, Smi* value) {
2127 DCHECK(index < GetInternalFieldCount() && index >= 0);
2128 // Internal objects do follow immediately after the header, whereas in-object
2129 // properties are at the end of the object. Therefore there is no need
2130 // to adjust the index here.
2131 int offset = GetHeaderSize() + (kPointerSize * index);
2132 WRITE_FIELD(this, offset, value);
2133 }
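// Editorial note: the Smi overload above deliberately omits WRITE_BARRIER.
// Smis are tagged immediates rather than heap pointers, so storing one can
// never create an old-to-new pointer that the store buffer or incremental
// marker would need to record.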
2136 // Access fast-case object properties at index. These routines are needed
2137 // to correctly distinguish between properties stored in-object and
2138 // properties stored in the properties array.
2139 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2140 if (index.is_inobject()) {
2141 return READ_FIELD(this, index.offset());
2142 } else {
2143 return properties()->get(index.outobject_array_index());
2144 }
2145 }
2148 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2149 if (index.is_inobject()) {
2150 int offset = index.offset();
2151 WRITE_FIELD(this, offset, value);
2152 WRITE_BARRIER(GetHeap(), this, offset, value);
2153 } else {
2154 properties()->set(index.outobject_array_index(), value);
2155 }
2156 }
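// Editorial usage sketch: FieldIndex encodes both storage locations
// uniformly, so callers do not branch on the location themselves, e.g.
// (assuming a live `map` and descriptor number; FieldIndex::ForDescriptor
// comes from field-index-inl.h):
//   FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
//   Object* value = object->RawFastPropertyAt(index);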
2159 int JSObject::GetInObjectPropertyOffset(int index) {
2160 return map()->GetInObjectPropertyOffset(index);
2164 Object* JSObject::InObjectPropertyAt(int index) {
2165 int offset = GetInObjectPropertyOffset(index);
2166 return READ_FIELD(this, offset);
2170 Object* JSObject::InObjectPropertyAtPut(int index,
2171 Object* value,
2172 WriteBarrierMode mode) {
2173 // Adjust for the number of properties stored in the object.
2174 int offset = GetInObjectPropertyOffset(index);
2175 WRITE_FIELD(this, offset, value);
2176 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2177 return value;
2178 }
2182 void JSObject::InitializeBody(Map* map,
2183 Object* pre_allocated_value,
2184 Object* filler_value) {
2185 DCHECK(!filler_value->IsHeapObject() ||
2186 !GetHeap()->InNewSpace(filler_value));
2187 DCHECK(!pre_allocated_value->IsHeapObject() ||
2188 !GetHeap()->InNewSpace(pre_allocated_value));
2189 int size = map->instance_size();
2190 int offset = kHeaderSize;
2191 if (filler_value != pre_allocated_value) {
2192 int pre_allocated = map->pre_allocated_property_fields();
2193 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2194 for (int i = 0; i < pre_allocated; i++) {
2195 WRITE_FIELD(this, offset, pre_allocated_value);
2196 offset += kPointerSize;
2197 }
2198 }
2199 while (offset < size) {
2200 WRITE_FIELD(this, offset, filler_value);
2201 offset += kPointerSize;
2202 }
2203 }
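// Editorial note: when pre_allocated_value differs from filler_value, the
// first pre_allocated_property_fields() slots are seeded with the real
// initial value and the remaining slack gets the filler. The DCHECKs above
// exist because no write barrier is taken here: both values must be
// immediates or old-space objects.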
2206 bool JSObject::HasFastProperties() {
2207 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2208 return !properties()->IsDictionary();
2212 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2213 if (unused_property_fields() != 0) return false;
2214 if (is_prototype_map()) return false;
2215 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2216 int limit = Max(minimum, inobject_properties());
2217 int external = NumberOfFields() - inobject_properties();
2218 return external > limit;
2219 }
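// Worked example (editorial): with 8 in-object properties and a non-keyed
// store, minimum = 12 and limit = Max(12, 8) = 12, so the map is only
// normalized once more than 12 fields would have to live outside the object.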
2222 void Struct::InitializeBody(int object_size) {
2223 Object* value = GetHeap()->undefined_value();
2224 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2225 WRITE_FIELD(this, offset, value);
2230 bool Object::ToArrayIndex(uint32_t* index) {
2231 if (IsSmi()) {
2232 int value = Smi::cast(this)->value();
2233 if (value < 0) return false;
2234 *index = value;
2235 return true;
2236 }
2237 if (IsHeapNumber()) {
2238 double value = HeapNumber::cast(this)->value();
2239 uint32_t uint_value = static_cast<uint32_t>(value);
2240 if (value == static_cast<double>(uint_value)) {
2241 *index = uint_value;
2242 return true;
2243 }
2244 }
2245 return false;
2246 }
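// Examples (editorial): Smi 7 stores 7 into *index; HeapNumber 3.0 succeeds
// because 3.0 survives the round trip through uint32_t; 3.5 and -1 both
// fail because the uint32_t cast does not round-trip their values.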
2249 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2250 if (!this->IsJSValue()) return false;
2252 JSValue* js_value = JSValue::cast(this);
2253 if (!js_value->value()->IsString()) return false;
2255 String* str = String::cast(js_value->value());
2256 if (index >= static_cast<uint32_t>(str->length())) return false;
2257 return true;
2258 }
2262 void Object::VerifyApiCallResultType() {
2263 #if ENABLE_EXTRA_CHECKS
2264 if (!(IsSmi() ||
2265 IsString() ||
2266 IsSymbol() ||
2267 IsSpecObject() ||
2268 IsHeapNumber() ||
2269 IsUndefined() ||
2270 IsTrue() ||
2271 IsFalse() ||
2272 IsNull())) {
2273 FATAL("API call returned invalid object");
2274 }
2275 #endif // ENABLE_EXTRA_CHECKS
2276 }
2279 Object* FixedArray::get(int index) {
2280 SLOW_DCHECK(index >= 0 && index < this->length());
2281 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2285 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2286 return handle(array->get(index), array->GetIsolate());
2290 bool FixedArray::is_the_hole(int index) {
2291 return get(index) == GetHeap()->the_hole_value();
2295 void FixedArray::set(int index, Smi* value) {
2296 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2297 DCHECK(index >= 0 && index < this->length());
2298 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2299 int offset = kHeaderSize + index * kPointerSize;
2300 WRITE_FIELD(this, offset, value);
2304 void FixedArray::set(int index, Object* value) {
2305 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2306 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2307 DCHECK(index >= 0 && index < this->length());
2308 int offset = kHeaderSize + index * kPointerSize;
2309 WRITE_FIELD(this, offset, value);
2310 WRITE_BARRIER(GetHeap(), this, offset, value);
2314 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2315 return bit_cast<uint64_t, double>(value) == kHoleNanInt64;
2319 inline double FixedDoubleArray::hole_nan_as_double() {
2320 return bit_cast<double, uint64_t>(kHoleNanInt64);
2324 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2325 DCHECK(bit_cast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
2326 DCHECK((bit_cast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
2327 return base::OS::nan_value();
2331 double FixedDoubleArray::get_scalar(int index) {
2332 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2333 map() != GetHeap()->fixed_array_map());
2334 DCHECK(index >= 0 && index < this->length());
2335 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2336 DCHECK(!is_the_hole_nan(result));
2337 return result;
2338 }
2340 int64_t FixedDoubleArray::get_representation(int index) {
2341 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2342 map() != GetHeap()->fixed_array_map());
2343 DCHECK(index >= 0 && index < this->length());
2344 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
2348 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2349 int index) {
2350 if (array->is_the_hole(index)) {
2351 return array->GetIsolate()->factory()->the_hole_value();
2352 } else {
2353 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2354 }
2355 }
2358 void FixedDoubleArray::set(int index, double value) {
2359 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2360 map() != GetHeap()->fixed_array_map());
2361 int offset = kHeaderSize + index * kDoubleSize;
2362 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2363 WRITE_DOUBLE_FIELD(this, offset, value);
2367 void FixedDoubleArray::set_the_hole(int index) {
2368 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2369 map() != GetHeap()->fixed_array_map());
2370 int offset = kHeaderSize + index * kDoubleSize;
2371 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
2375 bool FixedDoubleArray::is_the_hole(int index) {
2376 int offset = kHeaderSize + index * kDoubleSize;
2377 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
2378 }
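// Editorial note: the hole is one specific NaN bit pattern (kHoleNanInt64),
// so hole tests compare raw int64 representations -- any C++ floating-point
// comparison involving NaN is false. set() canonicalizes incoming NaNs so a
// user-supplied NaN can never alias the hole pattern.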
2381 double* FixedDoubleArray::data_start() {
2382 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2386 void FixedDoubleArray::FillWithHoles(int from, int to) {
2387 for (int i = from; i < to; i++) {
2388 set_the_hole(i);
2389 }
2390 }
2393 void ConstantPoolArray::NumberOfEntries::increment(Type type) {
2394 DCHECK(type < NUMBER_OF_TYPES);
2395 element_counts_[type]++;
2399 bool ConstantPoolArray::NumberOfEntries::equals(
2400 const ConstantPoolArray::NumberOfEntries& other) const {
2401 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2402 if (element_counts_[i] != other.element_counts_[i]) return false;
2403 }
2404 return true;
2405 }
2408 bool ConstantPoolArray::NumberOfEntries::is_empty() const {
2409 return total_count() == 0;
2413 int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
2414 DCHECK(type < NUMBER_OF_TYPES);
2415 return element_counts_[type];
2419 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2420 int base = 0;
2421 DCHECK(type < NUMBER_OF_TYPES);
2422 for (int i = 0; i < type; i++) {
2423 base += element_counts_[i];
2424 }
2425 return base;
2426 }
2429 int ConstantPoolArray::NumberOfEntries::total_count() const {
2430 int count = 0;
2431 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2432 count += element_counts_[i];
2433 }
2434 return count;
2435 }
2438 bool ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2439 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2440 if (element_counts_[i] < min || element_counts_[i] > max) {
2441 return false;
2442 }
2443 }
2444 return true;
2445 }
2448 int ConstantPoolArray::Iterator::next_index() {
2449 DCHECK(!is_finished());
2450 int ret = next_index_++;
2451 update_section();
2452 return ret;
2453 }
2456 bool ConstantPoolArray::Iterator::is_finished() {
2457 return next_index_ > array_->last_index(type_, final_section_);
2461 void ConstantPoolArray::Iterator::update_section() {
2462 if (next_index_ > array_->last_index(type_, current_section_) &&
2463 current_section_ != final_section_) {
2464 DCHECK(final_section_ == EXTENDED_SECTION);
2465 current_section_ = EXTENDED_SECTION;
2466 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
2467 }
2468 }
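// Editorial usage sketch (constructor shape assumed from objects.h): walk
// all INT64 entries without tracking the small/extended boundary by hand:
//   ConstantPoolArray::Iterator it(pool, ConstantPoolArray::INT64);
//   while (!it.is_finished()) {
//     int64_t v = pool->get_int64_entry(it.next_index());
//   }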
2471 bool ConstantPoolArray::is_extended_layout() {
2472 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2473 return IsExtendedField::decode(small_layout_1);
2477 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2478 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
2482 int ConstantPoolArray::first_extended_section_index() {
2483 DCHECK(is_extended_layout());
2484 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2485 return TotalCountField::decode(small_layout_2);
2489 int ConstantPoolArray::get_extended_section_header_offset() {
2490 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
2494 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2495 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2496 return WeakObjectStateField::decode(small_layout_2);
2500 void ConstantPoolArray::set_weak_object_state(
2501 ConstantPoolArray::WeakObjectState state) {
2502 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2503 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2504 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2508 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2509 int index = 0;
2510 if (section == EXTENDED_SECTION) {
2511 DCHECK(is_extended_layout());
2512 index += first_extended_section_index();
2513 }
2515 for (Type type_iter = FIRST_TYPE; type_iter < type;
2516 type_iter = next_type(type_iter)) {
2517 index += number_of_entries(type_iter, section);
2518 }
2519 return index;
2520 }
2524 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2525 return first_index(type, section) + number_of_entries(type, section) - 1;
2526 }
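// Worked example (editorial, hypothetical counts): with a small section of
// 2 INT64, 1 CODE_PTR and 3 HEAP_PTR entries, first_index(HEAP_PTR,
// SMALL_SECTION) == 3 and last_index(HEAP_PTR, SMALL_SECTION) == 5; types
// are laid out contiguously in declaration order.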
2529 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2530 if (section == SMALL_SECTION) {
2531 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2532 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2533 switch (type) {
2534 case INT64:
2535 return Int64CountField::decode(small_layout_1);
2536 case CODE_PTR:
2537 return CodePtrCountField::decode(small_layout_1);
2538 case HEAP_PTR:
2539 return HeapPtrCountField::decode(small_layout_1);
2540 case INT32:
2541 return Int32CountField::decode(small_layout_2);
2542 default:
2543 UNREACHABLE();
2544 return 0;
2545 }
2546 } else {
2547 DCHECK(section == EXTENDED_SECTION && is_extended_layout());
2548 int offset = get_extended_section_header_offset();
2549 switch (type) {
2550 case INT64:
2551 offset += kExtendedInt64CountOffset;
2552 break;
2553 case CODE_PTR:
2554 offset += kExtendedCodePtrCountOffset;
2555 break;
2556 case HEAP_PTR:
2557 offset += kExtendedHeapPtrCountOffset;
2558 break;
2559 case INT32:
2560 offset += kExtendedInt32CountOffset;
2561 break;
2562 default:
2563 UNREACHABLE();
2564 }
2565 return READ_INT_FIELD(this, offset);
2566 }
2567 }
2570 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2571 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2572 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2573 (is_extended_layout() &&
2574 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2575 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
2579 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2580 LayoutSection section;
2581 if (is_extended_layout() && index >= first_extended_section_index()) {
2582 section = EXTENDED_SECTION;
2583 } else {
2584 section = SMALL_SECTION;
2585 }
2587 Type type = FIRST_TYPE;
2588 while (index > last_index(type, section)) {
2589 type = next_type(type);
2590 }
2591 DCHECK(type <= LAST_TYPE);
2592 return type;
2593 }
2596 int64_t ConstantPoolArray::get_int64_entry(int index) {
2597 DCHECK(map() == GetHeap()->constant_pool_array_map());
2598 DCHECK(get_type(index) == INT64);
2599 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2603 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2604 STATIC_ASSERT(kDoubleSize == kInt64Size);
2605 DCHECK(map() == GetHeap()->constant_pool_array_map());
2606 DCHECK(get_type(index) == INT64);
2607 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2611 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2612 DCHECK(map() == GetHeap()->constant_pool_array_map());
2613 DCHECK(get_type(index) == CODE_PTR);
2614 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2618 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2619 DCHECK(map() == GetHeap()->constant_pool_array_map());
2620 DCHECK(get_type(index) == HEAP_PTR);
2621 return READ_FIELD(this, OffsetOfElementAt(index));
2625 int32_t ConstantPoolArray::get_int32_entry(int index) {
2626 DCHECK(map() == GetHeap()->constant_pool_array_map());
2627 DCHECK(get_type(index) == INT32);
2628 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2632 void ConstantPoolArray::set(int index, int64_t value) {
2633 DCHECK(map() == GetHeap()->constant_pool_array_map());
2634 DCHECK(get_type(index) == INT64);
2635 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2639 void ConstantPoolArray::set(int index, double value) {
2640 STATIC_ASSERT(kDoubleSize == kInt64Size);
2641 DCHECK(map() == GetHeap()->constant_pool_array_map());
2642 DCHECK(get_type(index) == INT64);
2643 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2647 void ConstantPoolArray::set(int index, Address value) {
2648 DCHECK(map() == GetHeap()->constant_pool_array_map());
2649 DCHECK(get_type(index) == CODE_PTR);
2650 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2654 void ConstantPoolArray::set(int index, Object* value) {
2655 DCHECK(map() == GetHeap()->constant_pool_array_map());
2656 DCHECK(!GetHeap()->InNewSpace(value));
2657 DCHECK(get_type(index) == HEAP_PTR);
2658 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2659 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2663 void ConstantPoolArray::set(int index, int32_t value) {
2664 DCHECK(map() == GetHeap()->constant_pool_array_map());
2665 DCHECK(get_type(index) == INT32);
2666 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2670 void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
2671 DCHECK(map() == GetHeap()->constant_pool_array_map());
2672 DCHECK(offset_is_type(offset, INT32));
2673 WRITE_INT32_FIELD(this, offset, value);
2677 void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
2678 DCHECK(map() == GetHeap()->constant_pool_array_map());
2679 DCHECK(offset_is_type(offset, INT64));
2680 WRITE_INT64_FIELD(this, offset, value);
2684 void ConstantPoolArray::set_at_offset(int offset, double value) {
2685 DCHECK(map() == GetHeap()->constant_pool_array_map());
2686 DCHECK(offset_is_type(offset, INT64));
2687 WRITE_DOUBLE_FIELD(this, offset, value);
2691 void ConstantPoolArray::set_at_offset(int offset, Address value) {
2692 DCHECK(map() == GetHeap()->constant_pool_array_map());
2693 DCHECK(offset_is_type(offset, CODE_PTR));
2694 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
2695 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
2699 void ConstantPoolArray::set_at_offset(int offset, Object* value) {
2700 DCHECK(map() == GetHeap()->constant_pool_array_map());
2701 DCHECK(!GetHeap()->InNewSpace(value));
2702 DCHECK(offset_is_type(offset, HEAP_PTR));
2703 WRITE_FIELD(this, offset, value);
2704 WRITE_BARRIER(GetHeap(), this, offset, value);
2708 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2709 uint32_t small_layout_1 =
2710 Int64CountField::encode(small.count_of(INT64)) |
2711 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2712 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2713 IsExtendedField::encode(false);
2714 uint32_t small_layout_2 =
2715 Int32CountField::encode(small.count_of(INT32)) |
2716 TotalCountField::encode(small.total_count()) |
2717 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2718 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2719 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2720 if (kHeaderSize != kFirstEntryOffset) {
2721 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
2722 WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
2723 }
2724 }
2727 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2728 const NumberOfEntries& extended) {
2729 // Initialize small layout fields first.
2730 Init(small);
2732 // Set is_extended_layout field.
2733 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2734 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2735 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2737 // Initialize the extended layout fields.
2738 int extended_header_offset = get_extended_section_header_offset();
2739 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2740 extended.count_of(INT64));
2741 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2742 extended.count_of(CODE_PTR));
2743 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2744 extended.count_of(HEAP_PTR));
2745 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2746 extended.count_of(INT32));
2747 }
2750 int ConstantPoolArray::size() {
2751 NumberOfEntries small(this, SMALL_SECTION);
2752 if (!is_extended_layout()) {
2753 return SizeFor(small);
2754 } else {
2755 NumberOfEntries extended(this, EXTENDED_SECTION);
2756 return SizeForExtended(small, extended);
2757 }
2758 }
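// Editorial note: a pool starts in the small layout, whose four entry
// counts are bit-packed into the two 32-bit header words written by Init().
// The extended layout appends a second, 8-byte-aligned header with plain
// int counts, which is why size() and length() must sum both sections.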
2761 int ConstantPoolArray::length() {
2762 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2763 int length = TotalCountField::decode(small_layout_2);
2764 if (is_extended_layout()) {
2765 length += number_of_entries(INT64, EXTENDED_SECTION) +
2766 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2767 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2768 number_of_entries(INT32, EXTENDED_SECTION);
2769 }
2770 return length;
2771 }
2774 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2775 const DisallowHeapAllocation& promise) {
2776 Heap* heap = GetHeap();
2777 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2778 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2779 return UPDATE_WRITE_BARRIER;
2783 void FixedArray::set(int index,
2784 Object* value,
2785 WriteBarrierMode mode) {
2786 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2787 DCHECK(index >= 0 && index < this->length());
2788 int offset = kHeaderSize + index * kPointerSize;
2789 WRITE_FIELD(this, offset, value);
2790 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2791 }
2794 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2795 int index,
2796 Object* value) {
2797 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2798 DCHECK(index >= 0 && index < array->length());
2799 int offset = kHeaderSize + index * kPointerSize;
2800 WRITE_FIELD(array, offset, value);
2801 Heap* heap = array->GetHeap();
2802 if (heap->InNewSpace(value)) {
2803 heap->RecordWrite(array->address(), offset);
2804 }
2805 }
2808 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2809 int index,
2810 Object* value) {
2811 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2812 DCHECK(index >= 0 && index < array->length());
2813 DCHECK(!array->GetHeap()->InNewSpace(value));
2814 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2815 }
2818 void FixedArray::set_undefined(int index) {
2819 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2820 DCHECK(index >= 0 && index < this->length());
2821 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2822 WRITE_FIELD(this,
2823 kHeaderSize + index * kPointerSize,
2824 GetHeap()->undefined_value());
2825 }
2828 void FixedArray::set_null(int index) {
2829 DCHECK(index >= 0 && index < this->length());
2830 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2831 WRITE_FIELD(this,
2832 kHeaderSize + index * kPointerSize,
2833 GetHeap()->null_value());
2834 }
2837 void FixedArray::set_the_hole(int index) {
2838 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2839 DCHECK(index >= 0 && index < this->length());
2840 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2841 WRITE_FIELD(this,
2842 kHeaderSize + index * kPointerSize,
2843 GetHeap()->the_hole_value());
2844 }
2847 void FixedArray::FillWithHoles(int from, int to) {
2848 for (int i = from; i < to; i++) {
2849 set_the_hole(i);
2850 }
2851 }
2854 Object** FixedArray::data_start() {
2855 return HeapObject::RawField(this, kHeaderSize);
2859 bool DescriptorArray::IsEmpty() {
2860 DCHECK(length() >= kFirstIndex ||
2861 this == GetHeap()->empty_descriptor_array());
2862 return length() < kFirstIndex;
2866 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2867 WRITE_FIELD(
2868 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2869 }
2872 // Perform a binary search in a fixed array. Low and high are entry indices. If
2873 // there are three entries in this array it should be called with low=0 and
2874 // high=2.
2875 template<SearchMode search_mode, typename T>
2876 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2877 uint32_t hash = name->Hash();
2878 int limit = high;
2880 DCHECK(low <= high);
2882 while (low != high) {
2883 int mid = (low + high) / 2;
2884 Name* mid_name = array->GetSortedKey(mid);
2885 uint32_t mid_hash = mid_name->Hash();
2887 if (mid_hash >= hash) {
2888 high = mid;
2889 } else {
2890 low = mid + 1;
2891 }
2892 }
2894 for (; low <= limit; ++low) {
2895 int sort_index = array->GetSortedKeyIndex(low);
2896 Name* entry = array->GetKey(sort_index);
2897 if (entry->Hash() != hash) break;
2898 if (entry->Equals(name)) {
2899 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2900 return sort_index;
2901 }
2902 return T::kNotFound;
2903 }
2904 }
2906 return T::kNotFound;
2907 }
2910 // Perform a linear search in this fixed array. len is the number of entry
2911 // indices that are valid.
2912 template<SearchMode search_mode, typename T>
2913 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2914 uint32_t hash = name->Hash();
2915 if (search_mode == ALL_ENTRIES) {
2916 for (int number = 0; number < len; number++) {
2917 int sorted_index = array->GetSortedKeyIndex(number);
2918 Name* entry = array->GetKey(sorted_index);
2919 uint32_t current_hash = entry->Hash();
2920 if (current_hash > hash) break;
2921 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2922 }
2923 } else {
2924 DCHECK(len >= valid_entries);
2925 for (int number = 0; number < valid_entries; number++) {
2926 Name* entry = array->GetKey(number);
2927 uint32_t current_hash = entry->Hash();
2928 if (current_hash == hash && entry->Equals(name)) return number;
2929 }
2930 }
2931 return T::kNotFound;
2932 }
2935 template<SearchMode search_mode, typename T>
2936 int Search(T* array, Name* name, int valid_entries) {
2937 if (search_mode == VALID_ENTRIES) {
2938 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2939 } else {
2940 SLOW_DCHECK(array->IsSortedNoDuplicates());
2941 }
2943 int nof = array->number_of_entries();
2944 if (nof == 0) return T::kNotFound;
2946 // Fast case: do linear search for small arrays.
2947 const int kMaxElementsForLinearSearch = 8;
2948 if ((search_mode == ALL_ENTRIES &&
2949 nof <= kMaxElementsForLinearSearch) ||
2950 (search_mode == VALID_ENTRIES &&
2951 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2952 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2953 }
2955 // Slow case: perform binary search.
2956 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2957 }
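// Editorial note: keys are ordered by hash, so lookup is O(log n) via
// BinarySearch, but for small inputs (up to 8 entries, or up to 24 valid
// entries in VALID_ENTRIES mode) LinearSearch is cheaper than the binary
// search bookkeeping.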
2960 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2961 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
2965 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2966 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2967 if (number_of_own_descriptors == 0) return kNotFound;
2969 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2970 int number = cache->Lookup(map, name);
2972 if (number == DescriptorLookupCache::kAbsent) {
2973 number = Search(name, number_of_own_descriptors);
2974 cache->Update(map, name, number);
2975 }
2977 return number;
2978 }
2981 PropertyDetails Map::GetLastDescriptorDetails() {
2982 return instance_descriptors()->GetDetails(LastAdded());
2986 void Map::LookupDescriptor(JSObject* holder,
2987 Name* name,
2988 LookupResult* result) {
2989 DescriptorArray* descriptors = this->instance_descriptors();
2990 int number = descriptors->SearchWithCache(name, this);
2991 if (number == DescriptorArray::kNotFound) return result->NotFound();
2992 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2993 }
2996 void Map::LookupTransition(JSObject* holder,
2997 Name* name,
2998 LookupResult* result) {
2999 int transition_index = this->SearchTransition(name);
3000 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
3001 result->TransitionResult(holder, this->GetTransition(transition_index));
3002 }
3005 FixedArrayBase* Map::GetInitialElements() {
3006 if (has_fast_smi_or_object_elements() ||
3007 has_fast_double_elements()) {
3008 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
3009 return GetHeap()->empty_fixed_array();
3010 } else if (has_external_array_elements()) {
3011 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
3012 DCHECK(!GetHeap()->InNewSpace(empty_array));
3013 return empty_array;
3014 } else if (has_fixed_typed_array_elements()) {
3015 FixedTypedArrayBase* empty_array =
3016 GetHeap()->EmptyFixedTypedArrayForMap(this);
3017 DCHECK(!GetHeap()->InNewSpace(empty_array));
3018 return empty_array;
3019 } else {
3020 UNREACHABLE();
3021 }
3022 return NULL;
3023 }
3026 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
3027 DCHECK(descriptor_number < number_of_descriptors());
3028 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
3032 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
3033 return GetKeySlot(descriptor_number);
3037 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
3038 return GetValueSlot(descriptor_number - 1) + 1;
3042 Name* DescriptorArray::GetKey(int descriptor_number) {
3043 DCHECK(descriptor_number < number_of_descriptors());
3044 return Name::cast(get(ToKeyIndex(descriptor_number)));
3048 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
3049 return GetDetails(descriptor_number).pointer();
3053 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
3054 return GetKey(GetSortedKeyIndex(descriptor_number));
3058 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
3059 PropertyDetails details = GetDetails(descriptor_index);
3060 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
3064 void DescriptorArray::SetRepresentation(int descriptor_index,
3065 Representation representation) {
3066 DCHECK(!representation.IsNone());
3067 PropertyDetails details = GetDetails(descriptor_index);
3068 set(ToDetailsIndex(descriptor_index),
3069 details.CopyWithRepresentation(representation).AsSmi());
3073 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3074 DCHECK(descriptor_number < number_of_descriptors());
3075 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3079 int DescriptorArray::GetValueOffset(int descriptor_number) {
3080 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3084 Object* DescriptorArray::GetValue(int descriptor_number) {
3085 DCHECK(descriptor_number < number_of_descriptors());
3086 return get(ToValueIndex(descriptor_number));
3090 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3091 set(ToValueIndex(descriptor_index), value);
3095 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3096 DCHECK(descriptor_number < number_of_descriptors());
3097 Object* details = get(ToDetailsIndex(descriptor_number));
3098 return PropertyDetails(Smi::cast(details));
3102 PropertyType DescriptorArray::GetType(int descriptor_number) {
3103 return GetDetails(descriptor_number).type();
3107 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3108 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3109 return GetDetails(descriptor_number).field_index();
3113 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3114 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3115 return HeapType::cast(GetValue(descriptor_number));
3119 Object* DescriptorArray::GetConstant(int descriptor_number) {
3120 return GetValue(descriptor_number);
3124 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3125 DCHECK(GetType(descriptor_number) == CALLBACKS);
3126 return GetValue(descriptor_number);
3130 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3131 DCHECK(GetType(descriptor_number) == CALLBACKS);
3132 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3133 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3137 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3138 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3139 handle(GetValue(descriptor_number), GetIsolate()),
3140 GetDetails(descriptor_number));
3144 void DescriptorArray::Set(int descriptor_number,
3145 Descriptor* desc,
3146 const WhitenessWitness&) {
3147 // Range check.
3148 DCHECK(descriptor_number < number_of_descriptors());
3150 NoIncrementalWriteBarrierSet(this,
3151 ToKeyIndex(descriptor_number),
3152 *desc->GetKey());
3153 NoIncrementalWriteBarrierSet(this,
3154 ToValueIndex(descriptor_number),
3155 *desc->GetValue());
3156 NoIncrementalWriteBarrierSet(this,
3157 ToDetailsIndex(descriptor_number),
3158 desc->GetDetails().AsSmi());
3159 }
3162 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3164 DCHECK(descriptor_number < number_of_descriptors());
3166 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3167 set(ToValueIndex(descriptor_number), *desc->GetValue());
3168 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3172 void DescriptorArray::Append(Descriptor* desc) {
3173 DisallowHeapAllocation no_gc;
3174 int descriptor_number = number_of_descriptors();
3175 SetNumberOfDescriptors(descriptor_number + 1);
3176 Set(descriptor_number, desc);
3178 uint32_t hash = desc->GetKey()->Hash();
3180 int insertion;
3182 for (insertion = descriptor_number; insertion > 0; --insertion) {
3183 Name* key = GetSortedKey(insertion - 1);
3184 if (key->Hash() <= hash) break;
3185 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3186 }
3188 SetSortedKey(insertion, descriptor_number);
3189 }
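// Editorial note: Append() keeps only the sorted-key *index* table in hash
// order with one insertion-sort pass; the descriptor entries themselves
// stay at their append positions, so existing descriptor numbers remain
// stable while lookups stay binary-searchable.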
3192 void DescriptorArray::SwapSortedKeys(int first, int second) {
3193 int first_key = GetSortedKeyIndex(first);
3194 SetSortedKey(first, GetSortedKeyIndex(second));
3195 SetSortedKey(second, first_key);
3199 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3200 : marking_(array->GetHeap()->incremental_marking()) {
3201 marking_->EnterNoMarkingScope();
3202 DCHECK(!marking_->IsMarking() ||
3203 Marking::Color(array) == Marking::WHITE_OBJECT);
3207 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3208 marking_->LeaveNoMarkingScope();
3212 template<typename Derived, typename Shape, typename Key>
3213 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3214 const int kMinCapacity = 32;
3215 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3216 if (capacity < kMinCapacity) {
3217 capacity = kMinCapacity; // Guarantee min capacity.
3218 }
3219 return capacity;
3220 }
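// Worked example (editorial): at_least_space_for == 20 yields
// RoundUpToPowerOfTwo32(40) == 64, keeping the table at most half full on
// construction; anything at or below 16 is clamped to the minimum of 32.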
3223 template<typename Derived, typename Shape, typename Key>
3224 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3225 return FindEntry(GetIsolate(), key);
3229 // Find entry for key otherwise return kNotFound.
3230 template<typename Derived, typename Shape, typename Key>
3231 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3232 uint32_t capacity = Capacity();
3233 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3234 uint32_t count = 1;
3235 // EnsureCapacity will guarantee the hash table is never full.
3236 while (true) {
3237 Object* element = KeyAt(entry);
3238 // Empty entry. Uses raw unchecked accessors because it is called by the
3239 // string table during bootstrapping.
3240 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3241 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3242 Shape::IsMatch(key, element)) return entry;
3243 entry = NextProbe(entry, count++, capacity);
3244 }
3245 return kNotFound;
3246 }
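// Editorial note: this is open addressing. An undefined slot ends the probe
// sequence (the key cannot be present beyond it), while the-hole marks a
// deleted entry that must be probed past, because a matching key may sit
// further along the same probe chain.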
3249 bool SeededNumberDictionary::requires_slow_elements() {
3250 Object* max_index_object = get(kMaxNumberKeyIndex);
3251 if (!max_index_object->IsSmi()) return false;
3252 return 0 !=
3253 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3254 }
3256 uint32_t SeededNumberDictionary::max_number_key() {
3257 DCHECK(!requires_slow_elements());
3258 Object* max_index_object = get(kMaxNumberKeyIndex);
3259 if (!max_index_object->IsSmi()) return 0;
3260 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3261 return value >> kRequiresSlowElementsTagSize;
3264 void SeededNumberDictionary::set_requires_slow_elements() {
3265 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3269 // ------------------------------------
3273 CAST_ACCESSOR(AccessorInfo)
3274 CAST_ACCESSOR(ByteArray)
3277 CAST_ACCESSOR(CodeCacheHashTable)
3278 CAST_ACCESSOR(CompilationCacheTable)
3279 CAST_ACCESSOR(ConsString)
3280 CAST_ACCESSOR(ConstantPoolArray)
3281 CAST_ACCESSOR(DeoptimizationInputData)
3282 CAST_ACCESSOR(DeoptimizationOutputData)
3283 CAST_ACCESSOR(DependentCode)
3284 CAST_ACCESSOR(DescriptorArray)
3285 CAST_ACCESSOR(ExternalArray)
3286 CAST_ACCESSOR(ExternalOneByteString)
3287 CAST_ACCESSOR(ExternalFloat32Array)
3288 CAST_ACCESSOR(ExternalFloat32x4Array)
3289 CAST_ACCESSOR(ExternalFloat64Array)
3290 CAST_ACCESSOR(ExternalFloat64x2Array)
3291 CAST_ACCESSOR(ExternalInt16Array)
3292 CAST_ACCESSOR(ExternalInt32Array)
3293 CAST_ACCESSOR(ExternalInt32x4Array)
3294 CAST_ACCESSOR(ExternalInt8Array)
3295 CAST_ACCESSOR(ExternalString)
3296 CAST_ACCESSOR(ExternalTwoByteString)
3297 CAST_ACCESSOR(ExternalUint16Array)
3298 CAST_ACCESSOR(ExternalUint32Array)
3299 CAST_ACCESSOR(ExternalUint8Array)
3300 CAST_ACCESSOR(ExternalUint8ClampedArray)
3301 CAST_ACCESSOR(FixedArray)
3302 CAST_ACCESSOR(FixedArrayBase)
3303 CAST_ACCESSOR(FixedDoubleArray)
3304 CAST_ACCESSOR(FixedTypedArrayBase)
3305 CAST_ACCESSOR(Foreign)
3306 CAST_ACCESSOR(FreeSpace)
3307 CAST_ACCESSOR(GlobalObject)
3308 CAST_ACCESSOR(HeapObject)
3309 CAST_ACCESSOR(Float32x4)
3310 CAST_ACCESSOR(Float64x2)
3311 CAST_ACCESSOR(Int32x4)
3312 CAST_ACCESSOR(JSArray)
3313 CAST_ACCESSOR(JSArrayBuffer)
3314 CAST_ACCESSOR(JSArrayBufferView)
3315 CAST_ACCESSOR(JSBuiltinsObject)
3316 CAST_ACCESSOR(JSDataView)
3317 CAST_ACCESSOR(JSDate)
3318 CAST_ACCESSOR(JSFunction)
3319 CAST_ACCESSOR(JSFunctionProxy)
3320 CAST_ACCESSOR(JSFunctionResultCache)
3321 CAST_ACCESSOR(JSGeneratorObject)
3322 CAST_ACCESSOR(JSGlobalObject)
3323 CAST_ACCESSOR(JSGlobalProxy)
3324 CAST_ACCESSOR(JSMap)
3325 CAST_ACCESSOR(JSMapIterator)
3326 CAST_ACCESSOR(JSMessageObject)
3327 CAST_ACCESSOR(JSModule)
3328 CAST_ACCESSOR(JSObject)
3329 CAST_ACCESSOR(JSProxy)
3330 CAST_ACCESSOR(JSReceiver)
3331 CAST_ACCESSOR(JSRegExp)
3332 CAST_ACCESSOR(JSSet)
3333 CAST_ACCESSOR(JSSetIterator)
3334 CAST_ACCESSOR(JSTypedArray)
3335 CAST_ACCESSOR(JSValue)
3336 CAST_ACCESSOR(JSWeakMap)
3337 CAST_ACCESSOR(JSWeakSet)
3339 CAST_ACCESSOR(MapCache)
3341 CAST_ACCESSOR(NameDictionary)
3342 CAST_ACCESSOR(NormalizedMapCache)
3343 CAST_ACCESSOR(Object)
3344 CAST_ACCESSOR(ObjectHashTable)
3345 CAST_ACCESSOR(Oddball)
3346 CAST_ACCESSOR(OrderedHashMap)
3347 CAST_ACCESSOR(OrderedHashSet)
3348 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3349 CAST_ACCESSOR(PropertyCell)
3350 CAST_ACCESSOR(ScopeInfo)
3351 CAST_ACCESSOR(SeededNumberDictionary)
3352 CAST_ACCESSOR(SeqOneByteString)
3353 CAST_ACCESSOR(SeqString)
3354 CAST_ACCESSOR(SeqTwoByteString)
3355 CAST_ACCESSOR(SharedFunctionInfo)
3356 CAST_ACCESSOR(SlicedString)
3358 CAST_ACCESSOR(String)
3359 CAST_ACCESSOR(StringTable)
3360 CAST_ACCESSOR(Struct)
3361 CAST_ACCESSOR(Symbol)
3362 CAST_ACCESSOR(UnseededNumberDictionary)
3363 CAST_ACCESSOR(WeakHashTable)
3366 template <class Traits>
3367 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3368 SLOW_DCHECK(object->IsHeapObject() &&
3369 HeapObject::cast(object)->map()->instance_type() ==
3370 Traits::kInstanceType);
3371 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3375 template <class Traits>
3376 const FixedTypedArray<Traits>*
3377 FixedTypedArray<Traits>::cast(const Object* object) {
3378 SLOW_DCHECK(object->IsHeapObject() &&
3379 HeapObject::cast(object)->map()->instance_type() ==
3380 Traits::kInstanceType);
3381 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3385 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3386 STRUCT_LIST(MAKE_STRUCT_CAST)
3387 #undef MAKE_STRUCT_CAST
3390 template <typename Derived, typename Shape, typename Key>
3391 HashTable<Derived, Shape, Key>*
3392 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3393 SLOW_DCHECK(obj->IsHashTable());
3394 return reinterpret_cast<HashTable*>(obj);
3398 template <typename Derived, typename Shape, typename Key>
3399 const HashTable<Derived, Shape, Key>*
3400 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3401 SLOW_DCHECK(obj->IsHashTable());
3402 return reinterpret_cast<const HashTable*>(obj);
3406 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3407 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3409 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3410 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3412 SMI_ACCESSORS(String, length, kLengthOffset)
3413 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3416 uint32_t Name::hash_field() {
3417 return READ_UINT32_FIELD(this, kHashFieldOffset);
3421 void Name::set_hash_field(uint32_t value) {
3422 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3423 #if V8_HOST_ARCH_64_BIT
3424 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
3425 #endif
3426 }
3429 bool Name::Equals(Name* other) {
3430 if (other == this) return true;
3431 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3432 this->IsSymbol() || other->IsSymbol()) {
3433 return false;
3434 }
3435 return String::cast(this)->SlowEquals(String::cast(other));
3436 }
3439 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3440 if (one.is_identical_to(two)) return true;
3441 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3442 one->IsSymbol() || two->IsSymbol()) {
3443 return false;
3444 }
3445 return String::SlowEquals(Handle<String>::cast(one),
3446 Handle<String>::cast(two));
3447 }
3450 ACCESSORS(Symbol, name, Object, kNameOffset)
3451 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3452 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3453 BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
3456 bool String::Equals(String* other) {
3457 if (other == this) return true;
3458 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3459 return false;
3460 }
3461 return SlowEquals(other);
3462 }
3465 bool String::Equals(Handle<String> one, Handle<String> two) {
3466 if (one.is_identical_to(two)) return true;
3467 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3468 return false;
3469 }
3470 return SlowEquals(one, two);
3471 }
3474 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3475 if (!string->IsConsString()) return string;
3476 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3477 if (cons->IsFlat()) return handle(cons->first());
3478 return SlowFlatten(cons, pretenure);
3479 }
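// Editorial usage sketch: flatten once before repeated random access so
// Get() dispatches to a sequential or external representation instead of
// walking a cons tree per character:
//   string = String::Flatten(string);
//   for (int i = 0; i < string->length(); i++) Process(string->Get(i));
// `Process` is a placeholder for the caller's per-character work.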
3482 uint16_t String::Get(int index) {
3483 DCHECK(index >= 0 && index < length());
3484 switch (StringShape(this).full_representation_tag()) {
3485 case kSeqStringTag | kOneByteStringTag:
3486 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3487 case kSeqStringTag | kTwoByteStringTag:
3488 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3489 case kConsStringTag | kOneByteStringTag:
3490 case kConsStringTag | kTwoByteStringTag:
3491 return ConsString::cast(this)->ConsStringGet(index);
3492 case kExternalStringTag | kOneByteStringTag:
3493 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3494 case kExternalStringTag | kTwoByteStringTag:
3495 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3496 case kSlicedStringTag | kOneByteStringTag:
3497 case kSlicedStringTag | kTwoByteStringTag:
3498 return SlicedString::cast(this)->SlicedStringGet(index);
3499 default:
3500 break;
3501 }
3503 UNREACHABLE();
3504 return 0;
3505 }
3508 void String::Set(int index, uint16_t value) {
3509 DCHECK(index >= 0 && index < length());
3510 DCHECK(StringShape(this).IsSequential());
3512 return this->IsOneByteRepresentation()
3513 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3514 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3518 bool String::IsFlat() {
3519 if (!StringShape(this).IsCons()) return true;
3520 return ConsString::cast(this)->second()->length() == 0;
3524 String* String::GetUnderlying() {
3525 // Giving direct access to underlying string only makes sense if the
3526 // wrapping string is already flattened.
3527 DCHECK(this->IsFlat());
3528 DCHECK(StringShape(this).IsIndirect());
3529 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3530 const int kUnderlyingOffset = SlicedString::kParentOffset;
3531 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3535 template<class Visitor>
3536 ConsString* String::VisitFlat(Visitor* visitor,
3537 String* string,
3538 const int offset) {
3539 int slice_offset = offset;
3540 const int length = string->length();
3541 DCHECK(offset <= length);
3542 while (true) {
3543 int32_t type = string->map()->instance_type();
3544 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3545 case kSeqStringTag | kOneByteStringTag:
3546 visitor->VisitOneByteString(
3547 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3548 length - offset);
3549 return NULL;
3551 case kSeqStringTag | kTwoByteStringTag:
3552 visitor->VisitTwoByteString(
3553 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3554 length - offset);
3555 return NULL;
3557 case kExternalStringTag | kOneByteStringTag:
3558 visitor->VisitOneByteString(
3559 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3560 length - offset);
3561 return NULL;
3563 case kExternalStringTag | kTwoByteStringTag:
3564 visitor->VisitTwoByteString(
3565 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3566 length - offset);
3567 return NULL;
3569 case kSlicedStringTag | kOneByteStringTag:
3570 case kSlicedStringTag | kTwoByteStringTag: {
3571 SlicedString* slicedString = SlicedString::cast(string);
3572 slice_offset += slicedString->offset();
3573 string = slicedString->parent();
3574 continue;
3575 }
3577 case kConsStringTag | kOneByteStringTag:
3578 case kConsStringTag | kTwoByteStringTag:
3579 return ConsString::cast(string);
3581 default:
3582 UNREACHABLE();
3583 return NULL;
3584 }
3585 }
3586 }
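// Editorial note: VisitFlat peels sliced wrappers iteratively and hands the
// visitor one raw character range once a flat representation is reached,
// returning NULL; if it hits a non-flat ConsString it returns that string,
// so callers such as StringCharacterStream::Reset can fall back to the
// cons-string iterator.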
3589 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3590 DCHECK(index >= 0 && index < length());
3591 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3595 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3596 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3597 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3598 static_cast<byte>(value));
3602 Address SeqOneByteString::GetCharsAddress() {
3603 return FIELD_ADDR(this, kHeaderSize);
3607 uint8_t* SeqOneByteString::GetChars() {
3608 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3612 Address SeqTwoByteString::GetCharsAddress() {
3613 return FIELD_ADDR(this, kHeaderSize);
3617 uc16* SeqTwoByteString::GetChars() {
3618 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3622 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3623 DCHECK(index >= 0 && index < length());
3624 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3628 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3629 DCHECK(index >= 0 && index < length());
3630 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3634 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3635 return SizeFor(length());
3639 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3640 return SizeFor(length());
3644 String* SlicedString::parent() {
3645 return String::cast(READ_FIELD(this, kParentOffset));
3649 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3650 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3651 WRITE_FIELD(this, kParentOffset, parent);
3652 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3656 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3659 String* ConsString::first() {
3660 return String::cast(READ_FIELD(this, kFirstOffset));
3664 Object* ConsString::unchecked_first() {
3665 return READ_FIELD(this, kFirstOffset);
3669 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3670 WRITE_FIELD(this, kFirstOffset, value);
3671 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3675 String* ConsString::second() {
3676 return String::cast(READ_FIELD(this, kSecondOffset));
3680 Object* ConsString::unchecked_second() {
3681 return READ_FIELD(this, kSecondOffset);
3685 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3686 WRITE_FIELD(this, kSecondOffset, value);
3687 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3691 bool ExternalString::is_short() {
3692 InstanceType type = map()->instance_type();
3693 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3697 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3698 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3702 void ExternalOneByteString::update_data_cache() {
3703 if (is_short()) return;
3704 const char** data_field =
3705 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3706 *data_field = resource()->data();
3710 void ExternalOneByteString::set_resource(
3711 const ExternalOneByteString::Resource* resource) {
3712 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3713 *reinterpret_cast<const Resource**>(
3714 FIELD_ADDR(this, kResourceOffset)) = resource;
3715 if (resource != NULL) update_data_cache();
3719 const uint8_t* ExternalOneByteString::GetChars() {
3720 return reinterpret_cast<const uint8_t*>(resource()->data());
3724 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3725 DCHECK(index >= 0 && index < length());
3726 return GetChars()[index];
3730 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3731 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3735 void ExternalTwoByteString::update_data_cache() {
3736 if (is_short()) return;
3737 const uint16_t** data_field =
3738 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3739 *data_field = resource()->data();
3743 void ExternalTwoByteString::set_resource(
3744 const ExternalTwoByteString::Resource* resource) {
3745 *reinterpret_cast<const Resource**>(
3746 FIELD_ADDR(this, kResourceOffset)) = resource;
3747 if (resource != NULL) update_data_cache();
3751 const uint16_t* ExternalTwoByteString::GetChars() {
3752 return resource()->data();
3756 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3757 DCHECK(index >= 0 && index < length());
3758 return GetChars()[index];
3762 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3763 int start) {
3764 return GetChars() + start;
3765 }
3768 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3769 return depth & kDepthMask;
3773 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3774 frames_[depth_++ & kDepthMask] = string;
3778 void ConsStringIteratorOp::PushRight(ConsString* string) {
3780 frames_[(depth_-1) & kDepthMask] = string;
3784 void ConsStringIteratorOp::AdjustMaximumDepth() {
3785 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3789 void ConsStringIteratorOp::Pop() {
3790 DCHECK(depth_ > 0);
3791 DCHECK(depth_ <= maximum_depth_);
3792 depth_--;
3793 }
3796 uint16_t StringCharacterStream::GetNext() {
3797 DCHECK(buffer8_ != NULL && end_ != NULL);
3798 // Advance cursor if needed.
3799 if (buffer8_ == end_) HasMore();
3800 DCHECK(buffer8_ < end_);
3801 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3805 StringCharacterStream::StringCharacterStream(String* string,
3806 ConsStringIteratorOp* op,
3807 int offset)
3808 : is_one_byte_(false),
3809 op_(op) {
3810 Reset(string, offset);
3811 }
3814 void StringCharacterStream::Reset(String* string, int offset) {
3815 buffer8_ = NULL;
3816 end_ = NULL;
3817 ConsString* cons_string = String::VisitFlat(this, string, offset);
3818 op_->Reset(cons_string, offset);
3819 if (cons_string != NULL) {
3820 string = op_->Next(&offset);
3821 if (string != NULL) String::VisitFlat(this, string, offset);
3822 }
3823 }
3826 bool StringCharacterStream::HasMore() {
3827 if (buffer8_ != end_) return true;
3828 int offset;
3829 String* string = op_->Next(&offset);
3830 DCHECK_EQ(offset, 0);
3831 if (string == NULL) return false;
3832 String::VisitFlat(this, string);
3833 DCHECK(buffer8_ != end_);
3834 return true;
3835 }
3838 void StringCharacterStream::VisitOneByteString(
3839 const uint8_t* chars, int length) {
3840 is_one_byte_ = true;
3841 buffer8_ = chars;
3842 end_ = chars + length;
3843 }
3846 void StringCharacterStream::VisitTwoByteString(
3847 const uint16_t* chars, int length) {
3848 is_one_byte_ = false;
3849 buffer16_ = chars;
3850 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3851 }
3854 void JSFunctionResultCache::MakeZeroSize() {
3855 set_finger_index(kEntriesIndex);
3856 set_size(kEntriesIndex);
3860 void JSFunctionResultCache::Clear() {
3861 int cache_size = size();
3862 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3863 MemsetPointer(entries_start,
3864 GetHeap()->the_hole_value(),
3865 cache_size - kEntriesIndex);
3866 MakeZeroSize();
3867 }
3870 int JSFunctionResultCache::size() {
3871 return Smi::cast(get(kCacheSizeIndex))->value();
3875 void JSFunctionResultCache::set_size(int size) {
3876 set(kCacheSizeIndex, Smi::FromInt(size));
3880 int JSFunctionResultCache::finger_index() {
3881 return Smi::cast(get(kFingerIndex))->value();
3885 void JSFunctionResultCache::set_finger_index(int finger_index) {
3886 set(kFingerIndex, Smi::FromInt(finger_index));
3890 byte ByteArray::get(int index) {
3891 DCHECK(index >= 0 && index < this->length());
3892 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3896 void ByteArray::set(int index, byte value) {
3897 DCHECK(index >= 0 && index < this->length());
3898 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3902 int ByteArray::get_int(int index) {
3903 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3904 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3908 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3909 DCHECK_TAG_ALIGNED(address);
3910 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3914 Address ByteArray::GetDataStartAddress() {
3915 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3916 }
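// Editorial note: tagged HeapObject pointers carry kHeapObjectTag in the
// low bits, so converting between a ByteArray and the address of its first
// data byte is pure address arithmetic: strip the tag and step over the
// header, or invert both steps as in FromDataStartAddress above.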
3919 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3920 return reinterpret_cast<uint8_t*>(external_pointer());
3924 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3925 DCHECK((index >= 0) && (index < this->length()));
3926 uint8_t* ptr = external_uint8_clamped_pointer();
3927 return ptr[index];
3928 }
3931 Handle<Object> ExternalUint8ClampedArray::get(
3932 Handle<ExternalUint8ClampedArray> array,
3933 int index) {
3934 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3935 array->GetIsolate());
3936 }
3939 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3940 DCHECK((index >= 0) && (index < this->length()));
3941 uint8_t* ptr = external_uint8_clamped_pointer();
3942 ptr[index] = value;
3943 }
3946 void* ExternalArray::external_pointer() const {
3947 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3948 return reinterpret_cast<void*>(ptr);
3952 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3953 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3954 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
3958 int8_t ExternalInt8Array::get_scalar(int index) {
3959 DCHECK((index >= 0) && (index < this->length()));
3960 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3961 return ptr[index];
3962 }
3965 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
3966 int index) {
3967 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3968 array->GetIsolate());
3969 }
3972 void ExternalInt8Array::set(int index, int8_t value) {
3973 DCHECK((index >= 0) && (index < this->length()));
3974 int8_t* ptr = static_cast<int8_t*>(external_pointer());
3975 ptr[index] = value;
3976 }
3979 uint8_t ExternalUint8Array::get_scalar(int index) {
3980 DCHECK((index >= 0) && (index < this->length()));
3981 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3982 return ptr[index];
3983 }
3986 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
3987 int index) {
3988 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3989 array->GetIsolate());
3990 }
3993 void ExternalUint8Array::set(int index, uint8_t value) {
3994 DCHECK((index >= 0) && (index < this->length()));
3995 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
3996 ptr[index] = value;
3997 }
4000 int16_t ExternalInt16Array::get_scalar(int index) {
4001 DCHECK((index >= 0) && (index < this->length()));
4002 int16_t* ptr = static_cast<int16_t*>(external_pointer());
4003 return ptr[index];
4004 }
4007 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
4008 int index) {
4009 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4010 array->GetIsolate());
4011 }
4014 void ExternalInt16Array::set(int index, int16_t value) {
4015 DCHECK((index >= 0) && (index < this->length()));
4016 int16_t* ptr = static_cast<int16_t*>(external_pointer());
4017 ptr[index] = value;
4018 }
4021 uint16_t ExternalUint16Array::get_scalar(int index) {
4022 DCHECK((index >= 0) && (index < this->length()));
4023 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
4024 return ptr[index];
4025 }
4028 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
4029 int index) {
4030 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4031 array->GetIsolate());
4032 }
4035 void ExternalUint16Array::set(int index, uint16_t value) {
4036 DCHECK((index >= 0) && (index < this->length()));
4037 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
4038 ptr[index] = value;
4039 }
4042 int32_t ExternalInt32Array::get_scalar(int index) {
4043 DCHECK((index >= 0) && (index < this->length()));
4044 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4045 return ptr[index];
4046 }
4049 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
4050 int index) {
4051 return array->GetIsolate()->factory()->
4052 NewNumberFromInt(array->get_scalar(index));
4053 }
4056 void ExternalInt32Array::set(int index, int32_t value) {
4057 DCHECK((index >= 0) && (index < this->length()));
4058 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4059 ptr[index] = value;
4060 }
4063 uint32_t ExternalUint32Array::get_scalar(int index) {
4064 DCHECK((index >= 0) && (index < this->length()));
4065 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4066 return ptr[index];
4067 }
4070 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
4071 int index) {
4072 return array->GetIsolate()->factory()->
4073 NewNumberFromUint(array->get_scalar(index));
4074 }
4077 void ExternalUint32Array::set(int index, uint32_t value) {
4078 DCHECK((index >= 0) && (index < this->length()));
4079 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
float ExternalFloat32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


void ExternalFloat32Array::set(int index, float value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}


float32x4_value_t ExternalFloat32x4Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  float32x4_value_t value;
  value.storage[0] = ptr[index * 4 + 0];
  value.storage[1] = ptr[index * 4 + 1];
  value.storage[2] = ptr[index * 4 + 2];
  value.storage[3] = ptr[index * 4 + 3];
  return value;
}


Handle<Object> ExternalFloat32x4Array::get(Handle<ExternalFloat32x4Array> array,
                                           int index) {
  float32x4_value_t value = array->get_scalar(index);
  return array->GetIsolate()->factory()->NewFloat32x4(value);
}


void ExternalFloat32x4Array::set(int index, const float32x4_value_t& value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index * 4 + 0] = value.storage[0];
  ptr[index * 4 + 1] = value.storage[1];
  ptr[index * 4 + 2] = value.storage[2];
  ptr[index * 4 + 3] = value.storage[3];
}


float64x2_value_t ExternalFloat64x2Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  float64x2_value_t value;
  value.storage[0] = ptr[index * 2 + 0];
  value.storage[1] = ptr[index * 2 + 1];
  return value;
}


Handle<Object> ExternalFloat64x2Array::get(Handle<ExternalFloat64x2Array> array,
                                           int index) {
  float64x2_value_t value = array->get_scalar(index);
  return array->GetIsolate()->factory()->NewFloat64x2(value);
}


void ExternalFloat64x2Array::set(int index, const float64x2_value_t& value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index * 2 + 0] = value.storage[0];
  ptr[index * 2 + 1] = value.storage[1];
}


int32x4_value_t ExternalInt32x4Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  int32x4_value_t value;
  value.storage[0] = ptr[index * 4 + 0];
  value.storage[1] = ptr[index * 4 + 1];
  value.storage[2] = ptr[index * 4 + 2];
  value.storage[3] = ptr[index * 4 + 3];
  return value;
}


Handle<Object> ExternalInt32x4Array::get(Handle<ExternalInt32x4Array> array,
                                         int index) {
  int32x4_value_t value = array->get_scalar(index);
  return array->GetIsolate()->factory()->NewInt32x4(value);
}


void ExternalInt32x4Array::set(int index, const int32x4_value_t& value) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index * 4 + 0] = value.storage[0];
  ptr[index * 4 + 1] = value.storage[1];
  ptr[index * 4 + 2] = value.storage[2];
  ptr[index * 4 + 3] = value.storage[3];
}


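// Lane-layout sketch for the SIMD accessors above: each value is flattened
// into consecutive scalar lanes, so element i of a Float32x4/Int32x4 array
// starts at backing-store index i * 4 (i * 2 for Float64x2). For a
// hypothetical ExternalInt32x4Array* a:
//
//   int32x4_value_t v = a->get_scalar(2);
//   // v.storage[0..3] were read from lanes 8..11 of the external buffer.

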
double ExternalFloat64Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


void ExternalFloat64Array::set(int index, double value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}


void* FixedTypedArrayBase::DataPtr() {
  return FIELD_ADDR(this, kDataOffset);
}


int FixedTypedArrayBase::DataSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                      \
    case FIXED_##TYPE##_ARRAY_TYPE:                                          \
      element_size = size;                                                   \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return length() * element_size;
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


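// Worked example of the size computation: a FIXED_UINT8_ARRAY_TYPE array of
// length 10 has DataSize() == 10 bytes, so size() is
// OBJECT_POINTER_ALIGN(kDataOffset + 10), i.e. kDataOffset + 10 rounded up
// to the next kPointerSize boundary (kDataOffset + 16 on a 64-bit target).

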
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return static_cast<float>(base::OS::nan_value());
}


double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  return ptr[index];
}


template<> inline
FixedTypedArray<Float64ArrayTraits>::ElementType
    FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  return READ_DOUBLE_FIELD(this, ElementOffset(index));
}


template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  ptr[index] = value;
}


template<> inline
void FixedTypedArray<Float64ArrayTraits>::set(
    int index, Float64ArrayTraits::ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}


template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}


template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}


template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}


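// Clamping illustration for the Uint8ClampedArray specializations above:
//
//   from_int(-5)       == 0
//   from_int(300)      == 255
//   from_double(260.0) == 255
//   from_double(2.5)   == 2    // lrint rounds halfway cases to even.

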
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


template <class Traits>
Handle<Object> FixedTypedArray<Traits>::SetValue(
    Handle<FixedTypedArray<Traits> > array,
    uint32_t index,
    Handle<Object> value) {
  ElementType cast_value = Traits::defaultValue();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = from_int(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = from_double(double_value);
    } else {
      // Clamp undefined to the default value. All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Traits::ToHandle(array->GetIsolate(), cast_value);
}


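// Conversion sketch for SetValue, assuming an Isolate* isolate and a
// Handle<FixedTypedArray<Int8ArrayTraits> > array are in scope (the names
// here are illustrative):
//
//   Handle<Object> v = handle(Smi::FromInt(200), isolate);
//   FixedTypedArray<Int8ArrayTraits>::SetValue(array, 0, v);
//   // Dispatches to from_int(200), which wraps to the int8_t value -56.

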
template<> inline
Handle<Object> FixedTypedArray<Float32x4ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Float32x4ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  float32x4_value_t cast_value;
  cast_value.storage[0] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[1] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[2] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[3] = static_cast<float>(base::OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat32x4()) {
      cast_value = Handle<Float32x4>::cast(value)->get();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Float32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
}


template<> inline
Handle<Object> FixedTypedArray<Float64x2ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Float64x2ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  float64x2_value_t cast_value;
  cast_value.storage[0] = base::OS::nan_value();
  cast_value.storage[1] = base::OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat64x2()) {
      cast_value = Handle<Float64x2>::cast(value)->get();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Float64x2ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
}


template<> inline
Handle<Object> FixedTypedArray<Int32x4ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Int32x4ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  int32x4_value_t cast_value;
  cast_value.storage[0] = 0;
  cast_value.storage[1] = 0;
  cast_value.storage[2] = 0;
  cast_value.storage[3] = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsInt32x4()) {
      cast_value = Handle<Int32x4>::cast(value)->get();
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Int32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
}


Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Int32x4ArrayTraits::ToHandle(
    Isolate* isolate, int32x4_value_t scalar) {
  return isolate->factory()->NewInt32x4(scalar);
}


Handle<Object> Float32x4ArrayTraits::ToHandle(
    Isolate* isolate, float32x4_value_t scalar) {
  return isolate->factory()->NewFloat32x4(scalar);
}


Handle<Object> Float64x2ArrayTraits::ToHandle(
    Isolate* isolate, float64x2_value_t scalar) {
  return isolate->factory()->NewFloat64x2(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}


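// Note the boxing split among the ToHandle overloads above: 8- and 16-bit
// scalars always fit in a Smi and are wrapped directly, while 32-bit and
// floating-point scalars go through the factory (NewNumberFromInt,
// NewNumberFromUint, NewNumber), which may have to allocate a HeapNumber,
// e.g. for 0xFFFFFFFF or any fractional double.

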
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}


int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}


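// Worked example: in-object properties sit at the end of the instance, so
// for a 64-bit map with instance_size() == 64 and inobject_properties() == 4,
// GetInObjectPropertyOffset(1) == (1 - 4) * 8 + 64 == 40, one of the last
// four pointer-sized slots of the object.

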
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return reinterpret_cast<ConstantPoolArray*>(this)->size();
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}


void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}


void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}


bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}


bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}


bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}


bool Map::is_prototype_map() {
  return IsPrototypeMapBits::decode(bit_field2());
}


void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}


Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}


bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}


void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}


void Map::set_done_inobject_slack_tracking(bool value) {
  set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
}


bool Map::done_inobject_slack_tracking() {
  return DoneInobjectSlackTracking::decode(bit_field3());
}


void Map::set_construction_count(int value) {
  set_bit_field3(ConstructionCount::update(bit_field3(), value));
}


int Map::construction_count() {
  return ConstructionCount::decode(bit_field3());
}


void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}


bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}


void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}


bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == CONSTANT) return true;
  }
  return false;
}


void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}


bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}


int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}


void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}


bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}


Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}


CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


Object** DependentCode::slot_at(int i) {
  return RawFieldOfElementAt(kCodesStartIndex + i);
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}


void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}


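// DependentCode keeps all groups in one flat array, with starts.at(g) giving
// the first slot of group g. ExtendGroup frees one slot at the start of
// `group` by moving each later group's first element to that group's old end
// slot, working from the last group backwards. Schematically, for two groups
// A and B (the freed slot is marked __):
//
//   [ A0 A1 | B0 B1 ]  --ExtendGroup(A)-->  [ A0 A1 __ | B1 B0 ]
//
// Only boundary elements move; element order within a group is not preserved
// and is not relied upon.

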
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


bool Code::IsCodeStubOrIC() {
  return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
         kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == TO_BOOLEAN_IC;
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to a code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}


ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}


Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}


// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}


inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


inline bool Code::is_turbofanned() {
  DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::is_compiled_optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}


void Code::set_compiled_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}


void Code::set_profiler_ticks(int ticks) {
  DCHECK(ticks < 256);
  if (kind() == FUNCTION) {
    WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
  }
}


int Code::builtin_index() {
  DCHECK_EQ(BUILTIN, kind());
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}


void Code::set_builtin_index(int index) {
  DCHECK_EQ(BUILTIN, kind());
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}


unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}


void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


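// The stored value is pre-scaled: because the back edge table is
// pointer-aligned, the low kPointerSizeLog2 bits carry no information and
// are shifted out before encoding. E.g. on x64 (kPointerSizeLog2 == 3),
// set_back_edge_table_offset(0x100) stores 0x20 in the bit field, and
// back_edge_table_offset() shifts it back to 0x100.

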
bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}


byte Code::to_boolean_state() {
  return extra_ic_state();
}


bool Code::has_function_cache() {
  DCHECK(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_has_function_cache(bool flag) {
  DCHECK(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_weak_stub() {
  return CanBeWeakStub() && WeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::mark_as_weak_stub() {
  DCHECK(CanBeWeakStub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = WeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_invalidated_weak_stub() {
  return is_weak_stub() && InvalidatedWeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::mark_as_invalidated_weak_stub() {
  DCHECK(is_inline_cache_stub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = InvalidatedWeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}


bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}


bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}


ConstantPoolArray* Code::constant_pool() {
  return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
}


void Code::set_constant_pool(Object* value) {
  DCHECK(value->IsConstantPoolArray());
  WRITE_FIELD(this, kConstantPoolOffset, value);
  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
}


Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
                               ExtraICState extra_ic_state, StubType type,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}


Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}


Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}


Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}


Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}


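// The Extract* helpers decode exactly what ComputeFlags encoded, so a round
// trip recovers each component. Illustrative check (the enum values are
// assumed from the surrounding declarations):
//
//   Code::Flags f = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC,
//                                      kNoExtraICState, Code::NORMAL,
//                                      kCacheOnReceiver);
//   DCHECK_EQ(Code::LOAD_IC, Code::ExtractKindFromFlags(f));
//   DCHECK_EQ(MONOMORPHIC, Code::ExtractICStateFromFlags(f));

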
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}


Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (!FLAG_collect_maps) return false;
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsJSObject() ||
      (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  return false;
}


class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};


bool Code::IsWeakObjectInIC(Object* object) {
  return object->IsMap() && Map::cast(object)->CanTransition() &&
         FLAG_collect_maps &&
         FLAG_weak_embedded_maps_in_ic;
}


Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}


// If the map is using the empty transition array, install a new empty
// transition array that has room for an element transition.
static void EnsureHasTransitionArray(Handle<Map> map) {
  Handle<TransitionArray> transitions;
  if (!map->HasTransitionArray()) {
    transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    transitions = TransitionArray::ExtendToFullTransitionArray(map);
  } else {
    return;
  }
  map->set_transitions(*transitions);
}


void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}


ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)


void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}


void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}


Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsDescriptorArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    DCHECK(object->IsMap() || object->IsUndefined());
    return object;
  }
}


bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}


bool Map::HasTransitionArray() const {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}


Map* Map::elements_transition_map() {
  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}


bool Map::CanHaveMoreTransitions() {
  if (!HasTransitionArray()) return true;
  return FixedArray::SizeFor(transitions()->length() +
                             TransitionArray::kTransitionSize)
      <= Page::kMaxRegularHeapObjectSize;
}


Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}


int Map::SearchTransition(Name* name) {
  if (HasTransitionArray()) return transitions()->Search(name);
  return TransitionArray::kNotFound;
}


FixedArray* Map::GetPrototypeTransitions() {
  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
  if (!transitions()->HasPrototypeTransitions()) {
    return GetHeap()->empty_fixed_array();
  }
  return transitions()->GetPrototypeTransitions();
}


void Map::SetPrototypeTransitions(
    Handle<Map> map, Handle<FixedArray> proto_transitions) {
  EnsureHasTransitionArray(map);
  int old_number_of_transitions = map->NumberOfProtoTransitions();
  if (map->HasPrototypeTransitions()) {
    DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
    map->ZapPrototypeTransitions();
  }
  map->transitions()->SetPrototypeTransitions(*proto_transitions);
  map->SetNumberOfProtoTransitions(old_number_of_transitions);
}


bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}


TransitionArray* Map::transitions() const {
  DCHECK(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}


void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // zapping turns from a memory saving into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        DCHECK(new_target_index != TransitionArray::kNotFound);
        DCHECK(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    DCHECK(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}


void Map::init_back_pointer(Object* undefined) {
  DCHECK(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}


void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}


ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)


ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)


Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}


ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)


SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)


#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else


#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() const {                                      \
    int value = READ_INT_FIELD(this, offset);                     \
    DCHECK(kHeapObjectTag == 1);                                  \
    DCHECK((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    DCHECK(kHeapObjectTag == 1);                                  \
    DCHECK((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x0);                          \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


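// On little-endian 64-bit targets these macros pack two ints into each
// pointer-sized field: the LO half is stored shifted left by one with the
// heap-object tag bit kept clear (so the whole word still reads as a Smi),
// and the HI half is a plain int in the upper word. E.g. set_length(5)
// writes the raw bits 0xA (5 << 1) into the low word, and length() shifts
// them back down to 5.

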
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


StrictMode SharedFunctionInfo::strict_mode() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction)
      ? STRICT : SLOPPY;
}


void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
  // We only allow mode transitions from sloppy to strict.
  DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
  set_compiler_hints(hints);
}


FunctionKind SharedFunctionInfo::kind() {
  return FunctionKindBits::decode(compiler_hints());
}


void SharedFunctionInfo::set_kind(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  int hints = compiler_hints();
  hints = FunctionKindBits::update(hints, kind);
  set_compiler_hints(hints);
}


BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)


ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)


bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalOneByteString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}


void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}


Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
  set_code(value);
}


ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


bool SharedFunctionInfo::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


5988 int SharedFunctionInfo::ic_age() {
5989 return ICAgeBits::decode(counters());
5993 void SharedFunctionInfo::set_ic_age(int ic_age) {
5994 set_counters(ICAgeBits::update(counters(), ic_age));
5998 int SharedFunctionInfo::deopt_count() {
5999 return DeoptCountBits::decode(counters());
6003 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
6004 set_counters(DeoptCountBits::update(counters(), deopt_count));
6008 void SharedFunctionInfo::increment_deopt_count() {
6009 int value = counters();
6010 int deopt_count = DeoptCountBits::decode(value);
6011 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6012 set_counters(DeoptCountBits::update(value, deopt_count));
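// Illustrative note (not part of the original source): ic_age, deopt_count
// and opt_reenable_tries are BitFields packed into the single `counters`
// Smi, so an increment is a decode/update round-trip. With a hypothetical
// 4-bit field (kMax == 15):
//
//   deopt_count = (15 + 1) & 15;  // == 0: wraps around instead of
//                                 // overflowing into the neighboring field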
int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
  BailoutReason reason = static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
  return reason;
}


bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}
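// Illustrative note (not part of the original source):
// `((tries - 1) & tries) == 0` is the standard power-of-two test, e.g.
// 31 & 32 == 0 while 32 & 33 == 32. Together with `tries >= 16` this
// re-enables optimization on the 16th, 32nd, 64th, ... try, so functions
// that keep deoptimizing are retried with exponentially decaying frequency.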
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


bool JSFunction::IsFromNativeScript() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  DCHECK(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}


bool JSFunction::IsFromExtensionScript() {
  Object* script = shared()->script();
  return script->IsScript() &&
         Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
         SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
         initial_map()->construction_count() != JSFunction::kNoSlackTracking;
}
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}


void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}
FixedArray* JSFunction::literals() {
  DCHECK(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  DCHECK(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  DCHECK(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  DCHECK(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  DCHECK(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  DCHECK(!shared()->bound());
  return literals()->length();
}
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  DCHECK(!GetHeap()->InNewSpace(value));
}
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSCollection, table, Object, kTableOffset)


#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}
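// Illustrative note (not part of the original source): the three predicates
// above partition the continuation value, relying on the DCHECKed constants
// (kGeneratorClosed == 0, kGeneratorExecuting negative):
//
//   continuation() <  0  ->  executing
//   continuation() == 0  ->  closed
//   continuation() >  0  ->  suspended at that resume point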
ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


ACCESSORS(JSValue, value, Object, kValueOffset)


HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}


Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}


void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)
byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}
ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}


void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}


ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
}
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)          \
bool JSObject::HasExternal##Type##Elements() {                          \
  HeapObject* array = elements();                                       \
  DCHECK(array != NULL);                                                \
  if (!array->IsHeapObject())                                           \
    return false;                                                       \
  return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
}

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}


#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)    \
bool JSObject::HasFixed##Type##Elements() {                          \
  HeapObject* array = elements();                                    \
  DCHECK(array != NULL);                                             \
  if (!array->IsHeapObject())                                        \
    return false;                                                    \
  return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
}

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  return NameDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}
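// Illustrative note (not part of the original source): hash_field() packs
// the hash and its bookkeeping flags into one word; in this vintage of the
// code the two low bits are flags and the hash sits above them:
//
//   bit 0      - hash not yet computed
//   bit 1      - string is not an array index
//   bits 2..31 - the hash itself (hence `field >> kHashShift`)
//
// That layout is what makes the fast path a single load, mask and shift.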
bool Name::IsOwn() {
  return this->IsSymbol() && Symbol::cast(this)->is_own();
}


StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
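// Illustrative sketch (not part of the original source): a free-standing
// rendering of the Jenkins one-at-a-time hash assembled from the two cores
// above:
//
//   uint32_t hash = seed;
//   for (int i = 0; i < length; i++) {
//     hash += chars[i];   // AddCharacterCore
//     hash += hash << 10;
//     hash ^= hash >> 6;
//   }
//   hash += hash << 3;    // GetHashCore finalization
//   hash ^= hash >> 11;
//   hash += hash << 15;
//
// A finalized value whose hash bits are all zero is remapped to kZeroHash so
// that a stored hash can never be confused with "not yet computed".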
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
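// Illustrative note (not part of the original source): the guard keeps
// `array_index_ * 10 + d` from overflowing uint32. 429496729 is
// 4294967296 / 10 rounded down, and `(d + 2) >> 3` is a branch-free way to
// split the digits: it is 0 for d <= 5 and 1 for d >= 6. Since
// 429496729 * 10 + 5 == 4294967295 still fits but adding 6 would wrap, the
// threshold must be one lower exactly when the incoming digit exceeds 5.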
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  // The string was flat.
  if (cons_string == NULL) return hasher.GetHashField();
  // This is a ConsString, iterate across it.
  ConsStringIteratorOp op(cons_string);
  int offset = 0;
  while (NULL != (string = op.Next(&offset))) {
    String::VisitFlat(&hasher, string, offset);
  }
  return hasher.GetHashField();
}


void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldOffset, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}
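// Illustrative note (not part of the original source): this trick leans on
// pointer tagging. A computed hash field is a plain integer with the low bit
// clear, while a stored HeapObject pointer carries the heap-object tag in
// its low bit, so overwriting the hash field with `canonical` simultaneously
// records the forwarding target and makes HasHashCode() report false.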
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldOffset));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}


Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}


Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> key) {
  uint32_t index;
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  }
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
}


bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}


Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}
Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
}
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}


void ExecutableAccessorInfo::clear_setter() {
  set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
}
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  DCHECK(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = DerivedHashTable::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, *key, mode);
  FixedArray::set(index + 1, *value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}
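// Illustrative note (not part of the original source): each dictionary entry
// occupies three consecutive FixedArray slots starting at
// DerivedHashTable::EntryToIndex(entry):
//
//   [index + 0]  key
//   [index + 1]  value
//   [index + 2]  PropertyDetails encoded as a Smi
//
// Only the key and value need a write barrier; the details slot is always a
// Smi and can never point into the heap.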
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  DCHECK(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}


bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}


void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}


Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash = reinterpret_cast<intptr_t>(*key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}
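// Illustrative note (not part of the original source): WeakHashTableShape
// hashes by object identity rather than by contents; the tagged pointer
// itself is truncated to 32 bits and used as the hash:
//
//   intptr_t raw = reinterpret_cast<intptr_t>(*key);
//   uint32_t hash = static_cast<uint32_t>(raw & 0xFFFFFFFF);
//
// This is only sound if the keys do not move, or if the table is rebuilt
// whenever a moving collection relocates them.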
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
  DCHECK(array->HasFastSmiOrObjectElements());
  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(array, required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(array, required_size);
  }
}
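// Illustrative note (not part of the original source): the growth policy
// adds one eighth of slack rather than doubling. For required_size == 80:
//
//   new_capacity = 80 + (80 >> 3) = 90
//
// so repeated appends still grow the backing store geometrically (factor
// 1.125) while bounding wasted space to roughly 12%.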
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasExternalArrayElements());
  return result;
}


void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}


void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
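// Illustrative note (not part of the original source): the two range guards
// above fold the packed word back into Smi range without touching the low
// payload bits. ORing in Smi::kMinValue forces the sign bits on when the
// value has grown past Smi::kMaxValue; ANDing with ~Smi::kMinValue clears
// them when it has dropped below Smi::kMinValue. Either way the bit fields
// packed below the sign survive intact.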
int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalOneByteStringResource Resource;
  v->VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template <typename StaticVisitor>
void ExternalOneByteString::ExternalOneByteStringIterateBody() {
  typedef v8::String::ExternalOneByteStringResource Resource;
  StaticVisitor::VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}


void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}


void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}


Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_