1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/contexts.h"
17 #include "src/conversions-inl.h"
18 #include "src/elements.h"
19 #include "src/factory.h"
20 #include "src/field-index-inl.h"
21 #include "src/heap/heap-inl.h"
22 #include "src/heap/heap.h"
23 #include "src/heap/incremental-marking.h"
24 #include "src/heap/objects-visiting.h"
25 #include "src/heap/spaces.h"
26 #include "src/heap/store-buffer.h"
27 #include "src/isolate.h"
28 #include "src/lookup.h"
29 #include "src/objects.h"
30 #include "src/property.h"
31 #include "src/prototype.h"
32 #include "src/transitions-inl.h"
33 #include "src/v8memory.h"
38 PropertyDetails::PropertyDetails(Smi* smi) {
39 value_ = smi->value();
43 Smi* PropertyDetails::AsSmi() const {
44 // Ensure the upper 2 bits have the same value by sign extending it. This is
45 // necessary to be able to use the 31st bit of the property details.
46 int value = value_ << 1;
47 return Smi::FromInt(value >> 1);
51 PropertyDetails PropertyDetails::AsDeleted() const {
52 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
53 return PropertyDetails(smi);
// Defines Object::Is<type>() as an exact instance-type comparison.
#define TYPE_CHECKER(type, instancetype)                                   \
  bool Object::Is##type() const {                                          \
    return Object::IsHeapObject() &&                                       \
           HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }
// Defines checked downcasts (const and non-const) for a heap object type.
// The check is a SLOW_DCHECK, so release builds pay nothing.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
// Defines a raw-int getter/setter pair for a fixed field offset (no
// tagging, no write barrier).
#define INT_ACCESSORS(holder, name, offset) \
int holder::name() const { return READ_INT_FIELD(this, offset); } \
void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// Defines tagged-pointer accessors; the setter emits a conditional write
// barrier so the GC sees old-to-new pointers.
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }
// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// Smis are immediates, so the setter needs no write barrier.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }
// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
// Smi-as-int accessors with acquire/release semantics, for fields read
// concurrently by other threads.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
// Smi-as-int accessors using relaxed atomics (no memory ordering).
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
// Read-only accessor for one bit of the given bitfield getter.
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }
// Read/write accessors for one bit of the given bitfield.
#define BOOL_ACCESSORS(holder, field, name, offset)    \
  bool holder::name() const {                          \
    return BooleanBit::get(field(), offset);           \
  }                                                    \
  void holder::set_##name(bool value) {                \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
139 bool Object::IsFixedArrayBase() const {
140 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
141 IsFixedTypedArrayBase() || IsExternalArray();
145 // External objects are not extensible, so the map check is enough.
146 bool Object::IsExternal() const {
147 return Object::IsHeapObject() &&
148 HeapObject::cast(this)->map() ==
149 HeapObject::cast(this)->GetHeap()->external_map();
153 bool Object::IsAccessorInfo() const {
154 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
158 bool Object::IsSmi() const {
159 return HAS_SMI_TAG(this);
163 bool Object::IsHeapObject() const {
164 return Internals::HasHeapObjectTag(this);
// Exact instance-type predicates for the primitive heap types.
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
173 bool Object::IsString() const {
174 return Object::IsHeapObject()
175 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
179 bool Object::IsName() const {
180 return IsString() || IsSymbol();
184 bool Object::IsUniqueName() const {
185 return IsInternalizedString() || IsSymbol();
189 bool Object::IsSpecObject() const {
190 return Object::IsHeapObject()
191 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
195 bool Object::IsSpecFunction() const {
196 if (!Object::IsHeapObject()) return false;
197 InstanceType type = HeapObject::cast(this)->map()->instance_type();
198 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
202 bool Object::IsTemplateInfo() const {
203 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
207 bool Object::IsInternalizedString() const {
208 if (!this->IsHeapObject()) return false;
209 uint32_t type = HeapObject::cast(this)->map()->instance_type();
210 STATIC_ASSERT(kNotInternalizedTag != 0);
211 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
212 (kStringTag | kInternalizedTag);
216 bool Object::IsConsString() const {
217 if (!IsString()) return false;
218 return StringShape(String::cast(this)).IsCons();
222 bool Object::IsSlicedString() const {
223 if (!IsString()) return false;
224 return StringShape(String::cast(this)).IsSliced();
228 bool Object::IsSeqString() const {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsSequential();
234 bool Object::IsSeqOneByteString() const {
235 if (!IsString()) return false;
236 return StringShape(String::cast(this)).IsSequential() &&
237 String::cast(this)->IsOneByteRepresentation();
241 bool Object::IsSeqTwoByteString() const {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsSequential() &&
244 String::cast(this)->IsTwoByteRepresentation();
248 bool Object::IsExternalString() const {
249 if (!IsString()) return false;
250 return StringShape(String::cast(this)).IsExternal();
254 bool Object::IsExternalAsciiString() const {
255 if (!IsString()) return false;
256 return StringShape(String::cast(this)).IsExternal() &&
257 String::cast(this)->IsOneByteRepresentation();
261 bool Object::IsExternalTwoByteString() const {
262 if (!IsString()) return false;
263 return StringShape(String::cast(this)).IsExternal() &&
264 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
271 IsFixedTypedArrayBase();
// Allocates (or reuses) storage appropriate for holding `object` under the
// given field representation: Smi 0 for uninitialized Smi fields, a fresh
// mutable HeapNumber for double fields, and the object itself otherwise.
// NOTE(review): several interior lines appear to be missing from this chunk
// (the closing brace of the first `if`, the declaration of `value`, and the
// uninitialized-case assignment) — restore from upstream before compiling.
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  // Non-double representations can hold the value directly.
  if (!representation.IsDouble()) return object;
  if (object->IsUninitialized()) {
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
    value = object->Number();
  // Field doubles are boxed in MUTABLE heap numbers so stores can update
  // them in place.
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
// Wraps a raw field value for reading: double fields are boxed into a fresh
// (immutable) HeapNumber; everything else is returned as-is.
// NOTE(review): the early `return object;` and closing braces appear to be
// missing from this chunk — restore from upstream before compiling.
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
// Caches the instance type of a string for cheap repeated shape queries.
// NOTE(review): a line appears to be missing between the initializer list and
// the DCHECK (likely the debug-only set_valid() call) — confirm upstream.
StringShape::StringShape(const String* str)
  : type_(str->map()->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
// Builds a shape from a string map without touching the string itself.
// NOTE(review): likely missing a debug-only set_valid() line — confirm.
StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
// Builds a shape directly from a raw instance type.
// NOTE(review): likely missing a debug-only set_valid() line — confirm.
StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
// True when both the string bit and the internalized bit match.
// NOTE(review): a leading line (likely DCHECK(valid())) appears to be
// missing from this chunk — confirm against upstream.
bool StringShape::IsInternalized() {
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
335 bool String::IsOneByteRepresentation() const {
336 uint32_t type = map()->instance_type();
337 return (type & kStringEncodingMask) == kOneByteStringTag;
341 bool String::IsTwoByteRepresentation() const {
342 uint32_t type = map()->instance_type();
343 return (type & kStringEncodingMask) == kTwoByteStringTag;
347 bool String::IsOneByteRepresentationUnderneath() {
348 uint32_t type = map()->instance_type();
349 STATIC_ASSERT(kIsIndirectStringTag != 0);
350 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
352 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
353 case kOneByteStringTag:
355 case kTwoByteStringTag:
357 default: // Cons or sliced string. Need to go deeper.
358 return GetUnderlying()->IsOneByteRepresentation();
363 bool String::IsTwoByteRepresentationUnderneath() {
364 uint32_t type = map()->instance_type();
365 STATIC_ASSERT(kIsIndirectStringTag != 0);
366 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
368 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
369 case kOneByteStringTag:
371 case kTwoByteStringTag:
373 default: // Cons or sliced string. Need to go deeper.
374 return GetUnderlying()->IsTwoByteRepresentation();
379 bool String::HasOnlyOneByteChars() {
380 uint32_t type = map()->instance_type();
381 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
382 IsOneByteRepresentation();
386 bool StringShape::IsCons() {
387 return (type_ & kStringRepresentationMask) == kConsStringTag;
391 bool StringShape::IsSliced() {
392 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
396 bool StringShape::IsIndirect() {
397 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
401 bool StringShape::IsExternal() {
402 return (type_ & kStringRepresentationMask) == kExternalStringTag;
406 bool StringShape::IsSequential() {
407 return (type_ & kStringRepresentationMask) == kSeqStringTag;
411 StringRepresentationTag StringShape::representation_tag() {
412 uint32_t tag = (type_ & kStringRepresentationMask);
413 return static_cast<StringRepresentationTag>(tag);
417 uint32_t StringShape::encoding_tag() {
418 return type_ & kStringEncodingMask;
422 uint32_t StringShape::full_representation_tag() {
423 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
// The string layout bits must agree with the copies published in v8.h so
// the public API can inspect string shapes without calling into the VM.
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
    Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
    Internals::kStringEncodingMask);
434 bool StringShape::IsSequentialAscii() {
435 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
439 bool StringShape::IsSequentialTwoByte() {
440 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
444 bool StringShape::IsExternalAscii() {
445 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
// Keep the external one-byte tag in sync with the values published to the
// embedder API.
STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
    Internals::kExternalAsciiRepresentationTag);

STATIC_ASSERT(v8::String::ASCII_ENCODING == kOneByteStringTag);
455 bool StringShape::IsExternalTwoByte() {
456 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
// Keep the external two-byte tag in sync with the values published to the
// embedder API.
STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
    Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
// Reads one character from the flattened string data.
// NOTE(review): the branch condition selecting between the one-byte and
// two-byte reads appears to be missing from this chunk — restore from
// upstream. Also note the DCHECK permits index == length_, which would read
// one past the end of the character data; verify this is intentional.
uc32 FlatStringReader::Get(int index) {
  DCHECK(0 <= index && index <= length_);
  return static_cast<const byte*>(start_)[index];
  return static_cast<const uc16*>(start_)[index];
475 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
476 return key->AsHandle(isolate);
480 Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
481 return key->AsHandle(isolate);
485 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
487 return key->AsHandle(isolate);
491 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
493 return key->AsHandle(isolate);
// Base key class for interning a sequential character vector into the
// string table; hashes lazily with the isolate's seed.
// NOTE(review): this class body is visibly truncated in this chunk (the
// access specifiers, the remaining Hash() arguments/return, and the seed_
// member and closing brace are missing) — restore from upstream.
template <typename Char>
class SequentialStringKey : public HashTableKey {
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual uint32_t Hash() V8_OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();

  Vector<const Char> string_;
  uint32_t hash_field_;
523 class OneByteStringKey : public SequentialStringKey<uint8_t> {
525 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
526 : SequentialStringKey<uint8_t>(str, seed) { }
528 virtual bool IsMatch(Object* string) V8_OVERRIDE {
529 return String::cast(string)->IsOneByteEqualTo(string_);
532 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
// Key for interning a substring of an existing flat string without first
// copying the characters out.
// NOTE(review): this class is visibly truncated in this chunk (the template
// header over Char, access specifiers, the Unslice() return, the from_ and
// length_ members and the closing brace are missing) — restore from
// upstream before compiling.
class SubStringKey : public HashTableKey {
  SubStringKey(Handle<String> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    // Slices are unwrapped eagerly so GetChars() can read directly from a
    // sequential or external parent.
    if (string_->IsSlicedString()) {
      string_ = Handle<String>(Unslice(*string_, &from_));
    DCHECK(string_->IsSeqString() || string->IsExternalString());

  virtual uint32_t Hash() V8_OVERRIDE {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const Char* chars = GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();

  virtual bool IsMatch(Object* string) V8_OVERRIDE;
  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;

  const Char* GetChars();
  // Follows the parent chain of sliced strings, accumulating offsets.
  String* Unslice(String* string, int* offset) {
    while (string->IsSlicedString()) {
      SlicedString* sliced = SlicedString::cast(string);
      *offset += sliced->offset();
      string = sliced->parent();

  Handle<String> string_;
  uint32_t hash_field_;
583 class TwoByteStringKey : public SequentialStringKey<uc16> {
585 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
586 : SequentialStringKey<uc16>(str, seed) { }
588 virtual bool IsMatch(Object* string) V8_OVERRIDE {
589 return String::cast(string)->IsTwoByteEqualTo(string_);
592 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
// Utf8StringKey carries a vector of chars as key.
// NOTE(review): this class is visibly truncated in this chunk (access
// specifiers, Hash()'s return statement, the seed_ member and the closing
// brace are missing) — restore from upstream before compiling.
class Utf8StringKey : public HashTableKey {
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);

  // Hash is computed lazily and cached in hash_field_.
  virtual uint32_t Hash() V8_OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
631 bool Object::IsNumber() const {
632 return IsSmi() || IsHeapNumber();
// Exact instance-type predicates for raw data objects.
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
640 bool Object::IsFiller() const {
641 if (!Object::IsHeapObject()) return false;
642 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
643 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
647 bool Object::IsExternalArray() const {
648 if (!Object::IsHeapObject())
650 InstanceType instance_type =
651 HeapObject::cast(this)->map()->instance_type();
652 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
653 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
// Expands TYPE_CHECKER for both the external and the in-heap (fixed)
// variant of every typed-array element kind.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER
665 bool Object::IsFixedTypedArrayBase() const {
666 if (!Object::IsHeapObject()) return false;
668 InstanceType instance_type =
669 HeapObject::cast(this)->map()->instance_type();
670 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
671 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
675 bool Object::IsJSReceiver() const {
676 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
677 return IsHeapObject() &&
678 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
682 bool Object::IsJSObject() const {
683 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
684 return IsHeapObject() &&
685 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
689 bool Object::IsJSProxy() const {
690 if (!Object::IsHeapObject()) return false;
691 return HeapObject::cast(this)->map()->IsJSProxyMap();
// Exact instance-type predicates for collections and array-like internals.
TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
709 bool Object::IsJSWeakCollection() const {
710 return IsJSWeakMap() || IsJSWeakSet();
714 bool Object::IsDescriptorArray() const {
715 return IsFixedArray();
719 bool Object::IsTransitionArray() const {
720 return IsFixedArray();
// Heuristic check used by asserts: deopt input data is a fixed array whose
// length matches the header plus entry-count-derived payload.
// NOTE(review): several interior lines (comment continuation, the deopt/
// patch entry count extraction and closing parentheses) are missing from
// this chunk — restore from upstream before compiling.
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;
  if (length < DeoptimizationInputData::kFirstDeoptEntryIndex) return false;

  FixedArray* self = FixedArray::cast(const_cast<Object*>(this));
      Smi::cast(self->get(DeoptimizationInputData::kDeoptEntryCountIndex))
          DeoptimizationInputData::kReturnAddressPatchEntryCountIndex))
  return length == DeoptimizationInputData::LengthFor(deopt_count, patch_count);
750 bool Object::IsDeoptimizationOutputData() const {
751 if (!IsFixedArray()) return false;
752 // There's actually no way to see the difference between a fixed array and
753 // a deoptimization data array. Since this is used for asserts we can check
754 // that the length is plausible though.
755 if (FixedArray::cast(this)->length() % 2 != 0) return false;
760 bool Object::IsDependentCode() const {
761 if (!IsFixedArray()) return false;
762 // There's actually no way to see the difference between a fixed array and
763 // a dependent codes array.
768 bool Object::IsContext() const {
769 if (!Object::IsHeapObject()) return false;
770 Map* map = HeapObject::cast(this)->map();
771 Heap* heap = map->GetHeap();
772 return (map == heap->function_context_map() ||
773 map == heap->catch_context_map() ||
774 map == heap->with_context_map() ||
775 map == heap->native_context_map() ||
776 map == heap->block_context_map() ||
777 map == heap->module_context_map() ||
778 map == heap->global_context_map());
782 bool Object::IsNativeContext() const {
783 return Object::IsHeapObject() &&
784 HeapObject::cast(this)->map() ==
785 HeapObject::cast(this)->GetHeap()->native_context_map();
789 bool Object::IsScopeInfo() const {
790 return Object::IsHeapObject() &&
791 HeapObject::cast(this)->map() ==
792 HeapObject::cast(this)->GetHeap()->scope_info_map();
// Exact instance-type predicate for plain JS functions.
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
799 template <> inline bool Is<JSFunction>(Object* obj) {
800 return obj->IsJSFunction();
// Exact instance-type predicates for code, cells and wrapper objects.
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
816 bool Object::IsStringWrapper() const {
817 return IsJSValue() && JSValue::cast(this)->value()->IsString();
// Exact instance-type predicate for foreign (embedder pointer) objects.
TYPE_CHECKER(Foreign, FOREIGN_TYPE)
824 bool Object::IsBoolean() const {
825 return IsOddball() &&
826 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
// Exact instance-type predicates for arrays, buffers and SIMD values.
TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

TYPE_CHECKER(Float32x4, FLOAT32x4_TYPE)
TYPE_CHECKER(Float64x2, FLOAT64x2_TYPE)
TYPE_CHECKER(Int32x4, INT32x4_TYPE)
839 bool Object::IsJSArrayBufferView() const {
840 return IsJSDataView() || IsJSTypedArray();
// Exact instance-type predicate for regexp objects.
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
847 template <> inline bool Is<JSArray>(Object* obj) {
848 return obj->IsJSArray();
852 bool Object::IsHashTable() const {
853 return Object::IsHeapObject() &&
854 HeapObject::cast(this)->map() ==
855 HeapObject::cast(this)->GetHeap()->hash_table_map();
859 bool Object::IsWeakHashTable() const {
860 return IsHashTable();
864 bool Object::IsDictionary() const {
865 return IsHashTable() &&
866 this != HeapObject::cast(this)->GetHeap()->string_table();
870 bool Object::IsNameDictionary() const {
871 return IsDictionary();
875 bool Object::IsSeededNumberDictionary() const {
876 return IsDictionary();
880 bool Object::IsUnseededNumberDictionary() const {
881 return IsDictionary();
885 bool Object::IsStringTable() const {
886 return IsHashTable();
// Heuristic check: a function result cache is a fixed array with the cache
// header plus a whole number of fixed-size entries.
// NOTE(review): interior lines (the early `return false;`, the VERIFY_HEAP
// preprocessor guards and the final `return true;`) are missing from this
// chunk — restore from upstream before compiling.
bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();
912 bool Object::IsNormalizedMapCache() const {
913 return NormalizedMapCache::IsNormalizedMapCache(this);
917 int NormalizedMapCache::GetIndex(Handle<Map> map) {
918 return map->Hash() % NormalizedMapCache::kEntries;
// Heuristic check: a normalized map cache is a fixed array of exactly
// kEntries elements.
// NOTE(review): interior lines (the early `return false;`, VERIFY_HEAP
// guards and final `return true;`) are missing from this chunk — restore
// from upstream before compiling.
bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
937 bool Object::IsCompilationCacheTable() const {
938 return IsHashTable();
942 bool Object::IsCodeCacheHashTable() const {
943 return IsHashTable();
947 bool Object::IsPolymorphicCodeCacheHashTable() const {
948 return IsHashTable();
952 bool Object::IsMapCache() const {
953 return IsHashTable();
957 bool Object::IsObjectHashTable() const {
958 return IsHashTable();
962 bool Object::IsOrderedHashTable() const {
963 return IsHeapObject() &&
964 HeapObject::cast(this)->map() ==
965 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
969 bool Object::IsOrderedHashSet() const {
970 return IsOrderedHashTable();
974 bool Object::IsOrderedHashMap() const {
975 return IsOrderedHashTable();
979 bool Object::IsPrimitive() const {
980 return IsOddball() || IsNumber() || IsString();
984 bool Object::IsJSGlobalProxy() const {
985 bool result = IsHeapObject() &&
986 (HeapObject::cast(this)->map()->instance_type() ==
987 JS_GLOBAL_PROXY_TYPE);
989 HeapObject::cast(this)->map()->is_access_check_needed());
994 bool Object::IsGlobalObject() const {
995 if (!IsHeapObject()) return false;
997 InstanceType type = HeapObject::cast(this)->map()->instance_type();
998 return type == JS_GLOBAL_OBJECT_TYPE ||
999 type == JS_BUILTINS_OBJECT_TYPE;
// Exact instance-type predicates for the two concrete global object kinds.
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
1007 bool Object::IsUndetectableObject() const {
1008 return IsHeapObject()
1009 && HeapObject::cast(this)->map()->is_undetectable();
1013 bool Object::IsAccessCheckNeeded() const {
1014 if (!IsHeapObject()) return false;
1015 if (IsJSGlobalProxy()) {
1016 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
1017 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
1018 return proxy->IsDetachedFrom(global);
1020 return HeapObject::cast(this)->map()->is_access_check_needed();
1024 bool Object::IsStruct() const {
1025 if (!IsHeapObject()) return false;
1026 switch (HeapObject::cast(this)->map()->instance_type()) {
1027 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1028 STRUCT_LIST(MAKE_STRUCT_CASE)
1029 #undef MAKE_STRUCT_CASE
1030 default: return false;
1035 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1036 bool Object::Is##Name() const { \
1037 return Object::IsHeapObject() \
1038 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1040 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1041 #undef MAKE_STRUCT_PREDICATE
1044 bool Object::IsUndefined() const {
1045 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1049 bool Object::IsNull() const {
1050 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1054 bool Object::IsTheHole() const {
1055 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1059 bool Object::IsException() const {
1060 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1064 bool Object::IsUninitialized() const {
1065 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1069 bool Object::IsTrue() const {
1070 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1074 bool Object::IsFalse() const {
1075 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1079 bool Object::IsArgumentsMarker() const {
1080 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1084 double Object::Number() {
1087 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1088 : reinterpret_cast<HeapNumber*>(this)->value();
1092 bool Object::IsNaN() const {
1093 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1097 bool Object::IsMinusZero() const {
1098 return this->IsHeapNumber() &&
1099 i::IsMinusZero(HeapNumber::cast(this)->value());
1103 MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
1104 if (object->IsSmi()) return Handle<Smi>::cast(object);
1105 if (object->IsHeapNumber()) {
1106 double value = Handle<HeapNumber>::cast(object)->value();
1107 int int_value = FastD2I(value);
1108 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1109 return handle(Smi::FromInt(int_value), isolate);
1112 return Handle<Smi>();
1116 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1117 Handle<Object> object) {
1119 isolate, object, handle(isolate->context()->native_context(), isolate));
1123 bool Object::HasSpecificClassOf(String* name) {
1124 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1128 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1129 Handle<Name> name) {
1130 LookupIterator it(object, name);
1131 return GetProperty(&it);
1135 MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1136 Handle<Object> object,
1138 // GetElement can trigger a getter which can cause allocation.
1139 // This was not always the case. This DCHECK is here to catch
1140 // leftover incorrect uses.
1141 DCHECK(AllowHeapAllocation::IsAllowed());
1142 return Object::GetElementWithReceiver(isolate, object, object, index);
1146 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1147 Handle<Name> name) {
1149 Isolate* isolate = name->GetIsolate();
1150 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1151 return GetProperty(object, name);
1155 MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1156 Handle<Object> object,
1158 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1159 DCHECK(!str.is_null());
1161 uint32_t index; // Assert that the name is not an array index.
1162 DCHECK(!str->AsArrayIndex(&index));
1164 return GetProperty(object, str);
1168 MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
1169 Handle<Object> receiver,
1171 return GetPropertyWithHandler(
1172 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1176 MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
1177 Handle<JSReceiver> receiver,
1179 Handle<Object> value,
1180 StrictMode strict_mode) {
1181 Isolate* isolate = proxy->GetIsolate();
1182 Handle<String> name = isolate->factory()->Uint32ToString(index);
1183 return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
1187 Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
1189 Isolate* isolate = proxy->GetIsolate();
1190 Handle<String> name = isolate->factory()->Uint32ToString(index);
1191 return HasPropertyWithHandler(proxy, name);
// Address of a field, compensating for the heap-object tag bit.
#define FIELD_ADDR(p, offset) \
(reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Const variant of FIELD_ADDR.
#define FIELD_ADDR_CONST(p, offset) \
(reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

// Plain (non-atomic) tagged-pointer read.
#define READ_FIELD(p, offset) \
(*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Tagged-pointer read with acquire semantics (pairs with RELEASE_WRITE_FIELD).
#define ACQUIRE_READ_FIELD(p, offset)           \
reinterpret_cast<Object*>(base::Acquire_Load( \
    reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Relaxed-atomic tagged-pointer read (no ordering guarantees).
#define NOBARRIER_READ_FIELD(p, offset)           \
reinterpret_cast<Object*>(base::NoBarrier_Load( \
    reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Plain (non-atomic) tagged-pointer write.
#define WRITE_FIELD(p, offset, value) \
(*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Tagged-pointer write with release semantics (pairs with ACQUIRE_READ_FIELD).
#define RELEASE_WRITE_FIELD(p, offset, value)                     \
base::Release_Store(                                            \
    reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
    reinterpret_cast<base::AtomicWord>(value));

// Relaxed-atomic tagged-pointer write (no ordering guarantees).
#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
base::NoBarrier_Store(                                          \
    reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
    reinterpret_cast<base::AtomicWord>(value));
1225 #define WRITE_BARRIER(heap, object, offset, value) \
1226 heap->incremental_marking()->RecordWrite( \
1227 object, HeapObject::RawField(object, offset), value); \
1228 if (heap->InNewSpace(value)) { \
1229 heap->RecordWrite(object->address(), offset); \
1232 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1233 if (mode == UPDATE_WRITE_BARRIER) { \
1234 heap->incremental_marking()->RecordWrite( \
1235 object, HeapObject::RawField(object, offset), value); \
1236 if (heap->InNewSpace(value)) { \
1237 heap->RecordWrite(object->address(), offset); \
1241 #ifndef V8_TARGET_ARCH_MIPS
1242 #define READ_DOUBLE_FIELD(p, offset) \
1243 (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
1244 #else // V8_TARGET_ARCH_MIPS
1245 // Prevent gcc from using load-double (mips ldc1) on (possibly)
1246 // non-64-bit aligned HeapNumber::value.
1247 static inline double read_double_field(const void* p, int offset) {
1252 c.u[0] = (*reinterpret_cast<const uint32_t*>(
1253 FIELD_ADDR_CONST(p, offset)));
1254 c.u[1] = (*reinterpret_cast<const uint32_t*>(
1255 FIELD_ADDR_CONST(p, offset + 4)));
1258 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1259 #endif // V8_TARGET_ARCH_MIPS
1261 #ifndef V8_TARGET_ARCH_MIPS
1262 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1263 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1264 #else // V8_TARGET_ARCH_MIPS
1265 // Prevent gcc from using store-double (mips sdc1) on (possibly)
1266 // non-64-bit aligned HeapNumber::value.
1267 static inline void write_double_field(void* p, int offset,
1274 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1275 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1277 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1278 write_double_field(p, offset, value)
1279 #endif // V8_TARGET_ARCH_MIPS
// Raw accessors for SIMD vector values embedded in heap objects.
#define READ_FLOAT32x4_FIELD(p, offset) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT64x2_FIELD(p, offset) \
  (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT64x2_FIELD(p, offset, value) \
  (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32x4_FIELD(p, offset) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32x4_FIELD(p, offset, value) \
  (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)) = value)

// Raw accessors for scalar fields of various widths. The READ_* variants
// that take FIELD_ADDR_CONST are usable from const member functions.
#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

// Relaxed-atomic byte accessors for fields that may be touched concurrently.
#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

// NOTE(review): ends in a semicolon inside the macro — statement position only.
#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1356 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1357 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1361 int Smi::value() const {
1362 return Internals::SmiValue(this);
1366 Smi* Smi::FromInt(int value) {
1367 DCHECK(Smi::IsValid(value));
1368 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1372 Smi* Smi::FromIntptr(intptr_t value) {
1373 DCHECK(Smi::IsValid(value));
1374 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1375 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1379 bool Smi::IsValid(intptr_t value) {
1380 bool result = Internals::IsValidSmi(value);
1381 DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
1386 MapWord MapWord::FromMap(const Map* map) {
1387 return MapWord(reinterpret_cast<uintptr_t>(map));
1391 Map* MapWord::ToMap() {
1392 return reinterpret_cast<Map*>(value_);
1396 bool MapWord::IsForwardingAddress() {
1397 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1401 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1402 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1403 return MapWord(reinterpret_cast<uintptr_t>(raw));
1407 HeapObject* MapWord::ToForwardingAddress() {
1408 DCHECK(IsForwardingAddress());
1409 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1414 void HeapObject::VerifyObjectField(int offset) {
1415 VerifyPointer(READ_FIELD(this, offset));
1418 void HeapObject::VerifySmiField(int offset) {
1419 CHECK(READ_FIELD(this, offset)->IsSmi());
1424 Heap* HeapObject::GetHeap() const {
1426 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1427 SLOW_DCHECK(heap != NULL);
1432 Isolate* HeapObject::GetIsolate() const {
1433 return GetHeap()->isolate();
1437 Map* HeapObject::map() const {
1439 // Clear mark potentially added by PathTracer.
1440 uintptr_t raw_value =
1441 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1442 return MapWord::FromRawValue(raw_value).ToMap();
1444 return map_word().ToMap();
1449 void HeapObject::set_map(Map* value) {
1450 set_map_word(MapWord::FromMap(value));
1451 if (value != NULL) {
1452 // TODO(1600) We are passing NULL as a slot because maps can never be on
1453 // evacuation candidate.
1454 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1459 Map* HeapObject::synchronized_map() {
1460 return synchronized_map_word().ToMap();
1464 void HeapObject::synchronized_set_map(Map* value) {
1465 synchronized_set_map_word(MapWord::FromMap(value));
1466 if (value != NULL) {
1467 // TODO(1600) We are passing NULL as a slot because maps can never be on
1468 // evacuation candidate.
1469 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1474 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1475 synchronized_set_map_word(MapWord::FromMap(value));
1479 // Unsafe accessor omitting write barrier.
1480 void HeapObject::set_map_no_write_barrier(Map* value) {
1481 set_map_word(MapWord::FromMap(value));
1485 MapWord HeapObject::map_word() const {
1487 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1491 void HeapObject::set_map_word(MapWord map_word) {
1492 NOBARRIER_WRITE_FIELD(
1493 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1497 MapWord HeapObject::synchronized_map_word() const {
1499 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1503 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1504 RELEASE_WRITE_FIELD(
1505 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1509 HeapObject* HeapObject::FromAddress(Address address) {
1510 DCHECK_TAG_ALIGNED(address);
1511 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1515 Address HeapObject::address() {
1516 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1520 int HeapObject::Size() {
1521 return SizeFromMap(map());
1525 bool HeapObject::MayContainNewSpacePointers() {
1526 InstanceType type = map()->instance_type();
1527 if (type <= LAST_NAME_TYPE) {
1528 if (type == SYMBOL_TYPE) {
1531 DCHECK(type < FIRST_NONSTRING_TYPE);
1532 // There are four string representations: sequential strings, external
1533 // strings, cons strings, and sliced strings.
1534 // Only the latter two contain non-map-word pointers to heap objects.
1535 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag);
1537 // The ConstantPoolArray contains heap pointers, but not new space pointers.
1538 if (type == CONSTANT_POOL_ARRAY_TYPE) return false;
1539 return (type > LAST_DATA_TYPE);
1543 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1544 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1545 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1549 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1550 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1554 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1555 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1559 double HeapNumber::value() const {
1560 return READ_DOUBLE_FIELD(this, kValueOffset);
1564 void HeapNumber::set_value(double value) {
1565 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1569 int HeapNumber::get_exponent() {
1570 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1571 kExponentShift) - kExponentBias;
1575 int HeapNumber::get_sign() {
1576 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1580 ACCESSORS(Float32x4, value, Object, kValueOffset)
1581 ACCESSORS(Float64x2, value, Object, kValueOffset)
1582 ACCESSORS(Int32x4, value, Object, kValueOffset)
1585 const char* Float32x4::Name() {
1590 int Float32x4::kRuntimeAllocatorId() {
1591 return Runtime::kAllocateFloat32x4;
1595 float Float32x4::getAt(int index) {
1596 DCHECK(index >= 0 && index < kLanes);
1597 return get().storage[index];
1601 float32x4_value_t Float32x4::get() {
1602 return FixedFloat32x4Array::cast(value())->get_scalar(0);
1606 void Float32x4::set(float32x4_value_t f32x4) {
1607 FixedFloat32x4Array::cast(value())->set(0, f32x4);
1611 const char* Float64x2::Name() {
1616 int Float64x2::kRuntimeAllocatorId() {
1617 return Runtime::kAllocateFloat64x2;
1621 double Float64x2::getAt(int index) {
1622 DCHECK(index >= 0 && index < kLanes);
1623 return get().storage[index];
1626 float64x2_value_t Float64x2::get() {
1627 return FixedFloat64x2Array::cast(value())->get_scalar(0);
1631 void Float64x2::set(float64x2_value_t f64x2) {
1632 FixedFloat64x2Array::cast(value())->set(0, f64x2);
1636 const char* Int32x4::Name() {
1641 int Int32x4::kRuntimeAllocatorId() {
1642 return Runtime::kAllocateInt32x4;
1646 int32_t Int32x4::getAt(int index) {
1647 DCHECK(index >= 0 && index < kLanes);
1648 return get().storage[index];;
1652 int32x4_value_t Int32x4::get() {
1653 return FixedInt32x4Array::cast(value())->get_scalar(0);
1657 void Int32x4::set(int32x4_value_t i32x4) {
1658 FixedInt32x4Array::cast(value())->set(0, i32x4);
1662 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1665 Object** FixedArray::GetFirstElementAddress() {
1666 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1670 bool FixedArray::ContainsOnlySmisOrHoles() {
1671 Object* the_hole = GetHeap()->the_hole_value();
1672 Object** current = GetFirstElementAddress();
1673 for (int i = 0; i < length(); ++i) {
1674 Object* candidate = *current++;
1675 if (!candidate->IsSmi() && candidate != the_hole) return false;
1681 FixedArrayBase* JSObject::elements() const {
1682 Object* array = READ_FIELD(this, kElementsOffset);
1683 return static_cast<FixedArrayBase*>(array);
1687 void JSObject::ValidateElements(Handle<JSObject> object) {
1688 #ifdef ENABLE_SLOW_DCHECKS
1689 if (FLAG_enable_slow_asserts) {
1690 ElementsAccessor* accessor = object->GetElementsAccessor();
1691 accessor->Validate(object);
1697 void AllocationSite::Initialize() {
1698 set_transition_info(Smi::FromInt(0));
1699 SetElementsKind(GetInitialFastElementsKind());
1700 set_nested_site(Smi::FromInt(0));
1701 set_pretenure_data(Smi::FromInt(0));
1702 set_pretenure_create_count(Smi::FromInt(0));
1703 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1704 SKIP_WRITE_BARRIER);
1708 void AllocationSite::MarkZombie() {
1709 DCHECK(!IsZombie());
1711 set_pretenure_decision(kZombie);
1715 // Heuristic: We only need to create allocation site info if the boilerplate
1716 // elements kind is the initial elements kind.
1717 AllocationSiteMode AllocationSite::GetMode(
1718 ElementsKind boilerplate_elements_kind) {
1719 if (FLAG_pretenuring_call_new ||
1720 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1721 return TRACK_ALLOCATION_SITE;
1724 return DONT_TRACK_ALLOCATION_SITE;
1728 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1730 if (FLAG_pretenuring_call_new ||
1731 (IsFastSmiElementsKind(from) &&
1732 IsMoreGeneralElementsKindTransition(from, to))) {
1733 return TRACK_ALLOCATION_SITE;
1736 return DONT_TRACK_ALLOCATION_SITE;
1740 inline bool AllocationSite::CanTrack(InstanceType type) {
1741 if (FLAG_allocation_site_pretenuring) {
1742 return type == JS_ARRAY_TYPE ||
1743 type == JS_OBJECT_TYPE ||
1744 type < FIRST_NONSTRING_TYPE;
1746 return type == JS_ARRAY_TYPE;
1750 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1754 return DependentCode::kAllocationSiteTenuringChangedGroup;
1757 return DependentCode::kAllocationSiteTransitionChangedGroup;
1761 return DependentCode::kAllocationSiteTransitionChangedGroup;
1765 inline void AllocationSite::set_memento_found_count(int count) {
1766 int value = pretenure_data()->value();
1767 // Verify that we can count more mementos than we can possibly find in one
1768 // new space collection.
1769 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1770 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1771 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1772 DCHECK(count < MementoFoundCountBits::kMax);
1774 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1775 SKIP_WRITE_BARRIER);
1778 inline bool AllocationSite::IncrementMementoFoundCount() {
1779 if (IsZombie()) return false;
1781 int value = memento_found_count();
1782 set_memento_found_count(value + 1);
1783 return memento_found_count() == kPretenureMinimumCreated;
1787 inline void AllocationSite::IncrementMementoCreateCount() {
1788 DCHECK(FLAG_allocation_site_pretenuring);
1789 int value = memento_create_count();
1790 set_memento_create_count(value + 1);
1794 inline bool AllocationSite::MakePretenureDecision(
1795 PretenureDecision current_decision,
1797 bool maximum_size_scavenge) {
1798 // Here we just allow state transitions from undecided or maybe tenure
1799 // to don't tenure, maybe tenure, or tenure.
1800 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1801 if (ratio >= kPretenureRatio) {
1802 // We just transition into tenure state when the semi-space was at
1803 // maximum capacity.
1804 if (maximum_size_scavenge) {
1805 set_deopt_dependent_code(true);
1806 set_pretenure_decision(kTenure);
1807 // Currently we just need to deopt when we make a state transition to
1811 set_pretenure_decision(kMaybeTenure);
1813 set_pretenure_decision(kDontTenure);
1820 inline bool AllocationSite::DigestPretenuringFeedback(
1821 bool maximum_size_scavenge) {
1823 int create_count = memento_create_count();
1824 int found_count = memento_found_count();
1825 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1827 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1828 static_cast<double>(found_count) / create_count : 0.0;
1829 PretenureDecision current_decision = pretenure_decision();
1831 if (minimum_mementos_created) {
1832 deopt = MakePretenureDecision(
1833 current_decision, ratio, maximum_size_scavenge);
1836 if (FLAG_trace_pretenuring_statistics) {
1838 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1839 static_cast<void*>(this), create_count, found_count, ratio,
1840 PretenureDecisionName(current_decision),
1841 PretenureDecisionName(pretenure_decision()));
1844 // Clear feedback calculation fields until the next gc.
1845 set_memento_found_count(0);
1846 set_memento_create_count(0);
1851 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1852 JSObject::ValidateElements(object);
1853 ElementsKind elements_kind = object->map()->elements_kind();
1854 if (!IsFastObjectElementsKind(elements_kind)) {
1855 if (IsFastHoleyElementsKind(elements_kind)) {
1856 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1858 TransitionElementsKind(object, FAST_ELEMENTS);
1864 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1867 EnsureElementsMode mode) {
1868 ElementsKind current_kind = object->map()->elements_kind();
1869 ElementsKind target_kind = current_kind;
1871 DisallowHeapAllocation no_allocation;
1872 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1873 bool is_holey = IsFastHoleyElementsKind(current_kind);
1874 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1875 Heap* heap = object->GetHeap();
1876 Object* the_hole = heap->the_hole_value();
1877 for (uint32_t i = 0; i < count; ++i) {
1878 Object* current = *objects++;
1879 if (current == the_hole) {
1881 target_kind = GetHoleyElementsKind(target_kind);
1882 } else if (!current->IsSmi()) {
1883 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1884 if (IsFastSmiElementsKind(target_kind)) {
1886 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1888 target_kind = FAST_DOUBLE_ELEMENTS;
1891 } else if (is_holey) {
1892 target_kind = FAST_HOLEY_ELEMENTS;
1895 target_kind = FAST_ELEMENTS;
1900 if (target_kind != current_kind) {
1901 TransitionElementsKind(object, target_kind);
1906 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1907 Handle<FixedArrayBase> elements,
1909 EnsureElementsMode mode) {
1910 Heap* heap = object->GetHeap();
1911 if (elements->map() != heap->fixed_double_array_map()) {
1912 DCHECK(elements->map() == heap->fixed_array_map() ||
1913 elements->map() == heap->fixed_cow_array_map());
1914 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1915 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1918 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1919 EnsureCanContainElements(object, objects, length, mode);
1923 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1924 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1925 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1926 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1927 Handle<FixedDoubleArray> double_array =
1928 Handle<FixedDoubleArray>::cast(elements);
1929 for (uint32_t i = 0; i < length; ++i) {
1930 if (double_array->is_the_hole(i)) {
1931 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1935 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1940 void JSObject::SetMapAndElements(Handle<JSObject> object,
1941 Handle<Map> new_map,
1942 Handle<FixedArrayBase> value) {
1943 JSObject::MigrateToMap(object, new_map);
1944 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1945 (*value == object->GetHeap()->empty_fixed_array())) ==
1946 (value->map() == object->GetHeap()->fixed_array_map() ||
1947 value->map() == object->GetHeap()->fixed_cow_array_map()));
1948 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1949 (object->map()->has_fast_double_elements() ==
1950 value->IsFixedDoubleArray()));
1951 object->set_elements(*value);
1955 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1956 WRITE_FIELD(this, kElementsOffset, value);
1957 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1961 void JSObject::initialize_properties() {
1962 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1963 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1967 void JSObject::initialize_elements() {
1968 FixedArrayBase* elements = map()->GetInitialElements();
1969 WRITE_FIELD(this, kElementsOffset, elements);
1973 Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
1974 DisallowHeapAllocation no_gc;
1975 if (!map->HasTransitionArray()) return Handle<String>::null();
1976 TransitionArray* transitions = map->transitions();
1977 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1978 int transition = TransitionArray::kSimpleTransitionIndex;
1979 PropertyDetails details = transitions->GetTargetDetails(transition);
1980 Name* name = transitions->GetKey(transition);
1981 if (details.type() != FIELD) return Handle<String>::null();
1982 if (details.attributes() != NONE) return Handle<String>::null();
1983 if (!name->IsString()) return Handle<String>::null();
1984 return Handle<String>(String::cast(name));
1988 Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
1989 DCHECK(!ExpectedTransitionKey(map).is_null());
1990 return Handle<Map>(map->transitions()->GetTarget(
1991 TransitionArray::kSimpleTransitionIndex));
1995 Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1996 DisallowHeapAllocation no_allocation;
1997 if (!map->HasTransitionArray()) return Handle<Map>::null();
1998 TransitionArray* transitions = map->transitions();
1999 int transition = transitions->Search(*key);
2000 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
2001 PropertyDetails target_details = transitions->GetTargetDetails(transition);
2002 if (target_details.type() != FIELD) return Handle<Map>::null();
2003 if (target_details.attributes() != NONE) return Handle<Map>::null();
2004 return Handle<Map>(transitions->GetTarget(transition));
2008 ACCESSORS(Oddball, to_string, String, kToStringOffset)
2009 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
2012 byte Oddball::kind() const {
2013 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
2017 void Oddball::set_kind(byte value) {
2018 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
2022 Object* Cell::value() const {
2023 return READ_FIELD(this, kValueOffset);
2027 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
2028 // The write barrier is not used for global property cells.
2029 DCHECK(!val->IsPropertyCell() && !val->IsCell());
2030 WRITE_FIELD(this, kValueOffset, val);
2033 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
2035 Object* PropertyCell::type_raw() const {
2036 return READ_FIELD(this, kTypeOffset);
2040 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
2041 WRITE_FIELD(this, kTypeOffset, val);
2045 int JSObject::GetHeaderSize() {
2046 InstanceType type = map()->instance_type();
2047 // Check for the most common kind of JavaScript object before
2048 // falling into the generic switch. This speeds up the internal
2049 // field operations considerably on average.
2050 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2052 case JS_GENERATOR_OBJECT_TYPE:
2053 return JSGeneratorObject::kSize;
2054 case JS_MODULE_TYPE:
2055 return JSModule::kSize;
2056 case JS_GLOBAL_PROXY_TYPE:
2057 return JSGlobalProxy::kSize;
2058 case JS_GLOBAL_OBJECT_TYPE:
2059 return JSGlobalObject::kSize;
2060 case JS_BUILTINS_OBJECT_TYPE:
2061 return JSBuiltinsObject::kSize;
2062 case JS_FUNCTION_TYPE:
2063 return JSFunction::kSize;
2065 return JSValue::kSize;
2067 return JSDate::kSize;
2069 return JSArray::kSize;
2070 case JS_ARRAY_BUFFER_TYPE:
2071 return JSArrayBuffer::kSize;
2072 case JS_TYPED_ARRAY_TYPE:
2073 return JSTypedArray::kSize;
2074 case JS_DATA_VIEW_TYPE:
2075 return JSDataView::kSize;
2076 case FLOAT32x4_TYPE:
2077 return Float32x4::kSize;
2078 case FLOAT64x2_TYPE:
2079 return Float64x2::kSize;
2081 return Int32x4::kSize;
2083 return JSSet::kSize;
2085 return JSMap::kSize;
2086 case JS_SET_ITERATOR_TYPE:
2087 return JSSetIterator::kSize;
2088 case JS_MAP_ITERATOR_TYPE:
2089 return JSMapIterator::kSize;
2090 case JS_WEAK_MAP_TYPE:
2091 return JSWeakMap::kSize;
2092 case JS_WEAK_SET_TYPE:
2093 return JSWeakSet::kSize;
2094 case JS_REGEXP_TYPE:
2095 return JSRegExp::kSize;
2096 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2097 return JSObject::kHeaderSize;
2098 case JS_MESSAGE_OBJECT_TYPE:
2099 return JSMessageObject::kSize;
2101 // TODO(jkummerow): Re-enable this. Blink currently hits this
2102 // from its CustomElementConstructorBuilder.
2109 int JSObject::GetInternalFieldCount() {
2110 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2111 // Make sure to adjust for the number of in-object properties. These
2112 // properties do contribute to the size, but are not internal fields.
2113 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2114 map()->inobject_properties();
2118 int JSObject::GetInternalFieldOffset(int index) {
2119 DCHECK(index < GetInternalFieldCount() && index >= 0);
2120 return GetHeaderSize() + (kPointerSize * index);
2124 Object* JSObject::GetInternalField(int index) {
2125 DCHECK(index < GetInternalFieldCount() && index >= 0);
2126 // Internal objects do follow immediately after the header, whereas in-object
2127 // properties are at the end of the object. Therefore there is no need
2128 // to adjust the index here.
2129 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2133 void JSObject::SetInternalField(int index, Object* value) {
2134 DCHECK(index < GetInternalFieldCount() && index >= 0);
2135 // Internal objects do follow immediately after the header, whereas in-object
2136 // properties are at the end of the object. Therefore there is no need
2137 // to adjust the index here.
2138 int offset = GetHeaderSize() + (kPointerSize * index);
2139 WRITE_FIELD(this, offset, value);
2140 WRITE_BARRIER(GetHeap(), this, offset, value);
2144 void JSObject::SetInternalField(int index, Smi* value) {
2145 DCHECK(index < GetInternalFieldCount() && index >= 0);
2146 // Internal objects do follow immediately after the header, whereas in-object
2147 // properties are at the end of the object. Therefore there is no need
2148 // to adjust the index here.
2149 int offset = GetHeaderSize() + (kPointerSize * index);
2150 WRITE_FIELD(this, offset, value);
2154 // Access fast-case object properties at index. The use of these routines
2155 // is needed to correctly distinguish between properties stored in-object and
2156 // properties stored in the properties array.
2157 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2158 if (index.is_inobject()) {
2159 return READ_FIELD(this, index.offset());
2161 return properties()->get(index.outobject_array_index());
2166 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2167 if (index.is_inobject()) {
2168 int offset = index.offset();
2169 WRITE_FIELD(this, offset, value);
2170 WRITE_BARRIER(GetHeap(), this, offset, value);
2172 properties()->set(index.outobject_array_index(), value);
2177 int JSObject::GetInObjectPropertyOffset(int index) {
2178 return map()->GetInObjectPropertyOffset(index);
2182 Object* JSObject::InObjectPropertyAt(int index) {
2183 int offset = GetInObjectPropertyOffset(index);
2184 return READ_FIELD(this, offset);
2188 Object* JSObject::InObjectPropertyAtPut(int index,
2190 WriteBarrierMode mode) {
2191 // Adjust for the number of properties stored in the object.
2192 int offset = GetInObjectPropertyOffset(index);
2193 WRITE_FIELD(this, offset, value);
2194 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2200 void JSObject::InitializeBody(Map* map,
2201 Object* pre_allocated_value,
2202 Object* filler_value) {
2203 DCHECK(!filler_value->IsHeapObject() ||
2204 !GetHeap()->InNewSpace(filler_value));
2205 DCHECK(!pre_allocated_value->IsHeapObject() ||
2206 !GetHeap()->InNewSpace(pre_allocated_value));
2207 int size = map->instance_size();
2208 int offset = kHeaderSize;
2209 if (filler_value != pre_allocated_value) {
2210 int pre_allocated = map->pre_allocated_property_fields();
2211 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2212 for (int i = 0; i < pre_allocated; i++) {
2213 WRITE_FIELD(this, offset, pre_allocated_value);
2214 offset += kPointerSize;
2217 while (offset < size) {
2218 WRITE_FIELD(this, offset, filler_value);
2219 offset += kPointerSize;
2224 bool JSObject::HasFastProperties() {
2225 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2226 return !properties()->IsDictionary();
2230 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2231 if (unused_property_fields() != 0) return false;
2232 if (is_prototype_map()) return false;
2233 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2234 int limit = Max(minimum, inobject_properties());
2235 int external = NumberOfFields() - inobject_properties();
2236 return external > limit;
2240 void Struct::InitializeBody(int object_size) {
2241 Object* value = GetHeap()->undefined_value();
2242 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2243 WRITE_FIELD(this, offset, value);
2248 bool Object::ToArrayIndex(uint32_t* index) {
2250 int value = Smi::cast(this)->value();
2251 if (value < 0) return false;
2255 if (IsHeapNumber()) {
2256 double value = HeapNumber::cast(this)->value();
2257 uint32_t uint_value = static_cast<uint32_t>(value);
2258 if (value == static_cast<double>(uint_value)) {
2259 *index = uint_value;
2267 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2268 if (!this->IsJSValue()) return false;
2270 JSValue* js_value = JSValue::cast(this);
2271 if (!js_value->value()->IsString()) return false;
2273 String* str = String::cast(js_value->value());
2274 if (index >= static_cast<uint32_t>(str->length())) return false;
2280 void Object::VerifyApiCallResultType() {
2281 #if ENABLE_EXTRA_CHECKS
2291 FATAL("API call returned invalid object");
2293 #endif // ENABLE_EXTRA_CHECKS
// FixedArray::get: raw element read at kHeaderSize + index * kPointerSize.
2297 Object* FixedArray::get(int index) {
2298 SLOW_DCHECK(index >= 0 && index < this->length());
2299 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
// Handle-based overload: wraps the raw element in a Handle for GC safety.
2303 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2304 return handle(array->get(index), array->GetIsolate());
// True iff the slot holds the one-of-a-kind the-hole sentinel.
2308 bool FixedArray::is_the_hole(int index) {
2309 return get(index) == GetHeap()->the_hole_value();
// Smi store: Smis are never heap pointers, so no write barrier is needed.
// COW arrays must not be mutated in place, hence the map check.
2313 void FixedArray::set(int index, Smi* value) {
2314 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2315 DCHECK(index >= 0 && index < this->length());
2316 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2317 int offset = kHeaderSize + index * kPointerSize;
2318 WRITE_FIELD(this, offset, value);
// General object store: may write a heap pointer, so an unconditional
// write barrier records the slot for the incremental/generational GC.
2322 void FixedArray::set(int index, Object* value) {
2323 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2324 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2325 DCHECK(index >= 0 && index < this->length());
2326 int offset = kHeaderSize + index * kPointerSize;
2327 WRITE_FIELD(this, offset, value);
2328 WRITE_BARRIER(GetHeap(), this, offset, value);
// FixedDoubleArray stores holes as one specific NaN bit pattern
// (kHoleNanInt64); comparisons must therefore be bitwise, since any
// NaN == NaN float compare is false.
2332 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2333 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
// The hole sentinel reinterpreted as a double, for writing.
2337 inline double FixedDoubleArray::hole_nan_as_double() {
2338 return BitCast<double, uint64_t>(kHoleNanInt64);
// A canonical NaN that is guaranteed NOT to collide with the hole pattern,
// used to sanitize incoming NaNs before storage.
2342 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2343 DCHECK(BitCast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
2344 DCHECK((BitCast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
2345 return base::OS::nan_value();
// Raw double read; caller must have ruled out the hole (DCHECK'd here).
2349 double FixedDoubleArray::get_scalar(int index) {
2350 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2351 map() != GetHeap()->fixed_array_map());
2352 DCHECK(index >= 0 && index < this->length());
2353 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2354 DCHECK(!is_the_hole_nan(result));
// Bit-exact read (int64), usable even when the slot holds the hole NaN.
2358 int64_t FixedDoubleArray::get_representation(int index) {
2359 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2360 map() != GetHeap()->fixed_array_map());
2361 DCHECK(index >= 0 && index < this->length());
2362 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
// Handle-based read: maps the hole slot to the the-hole oddball, otherwise
// boxes the double as a Number.
2366 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2368 if (array->is_the_hole(index)) {
2369 return array->GetIsolate()->factory()->the_hole_value();
2371 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
// Store: incoming NaNs are canonicalized so a user NaN can never be
// mistaken for the hole sentinel. Doubles need no write barrier.
2376 void FixedDoubleArray::set(int index, double value) {
2377 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2378 map() != GetHeap()->fixed_array_map());
2379 int offset = kHeaderSize + index * kDoubleSize;
2380 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2381 WRITE_DOUBLE_FIELD(this, offset, value);
// Writes the hole bit pattern into the slot.
2385 void FixedDoubleArray::set_the_hole(int index) {
2386 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2387 map() != GetHeap()->fixed_array_map());
2388 int offset = kHeaderSize + index * kDoubleSize;
2389 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
// Bitwise hole test on the raw slot contents.
2393 bool FixedDoubleArray::is_the_hole(int index) {
2394 int offset = kHeaderSize + index * kDoubleSize;
2395 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
// Address of the first element, for bulk memory operations.
2399 double* FixedDoubleArray::data_start() {
2400 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
// Fills [from, to) with holes. (Loop body elided in this sampled view;
// presumably set_the_hole(i) — confirm against full source.)
2404 void FixedDoubleArray::FillWithHoles(int from, int to) {
2405 for (int i = from; i < to; i++) {
// Bumps the per-type entry count for |type|.
2411 void ConstantPoolArray::NumberOfEntries::increment(Type type) {
2412 DCHECK(type < NUMBER_OF_TYPES);
2413 element_counts_[type]++;
2417 int ConstantPoolArray::NumberOfEntries::equals(
2418 const ConstantPoolArray::NumberOfEntries& other) const {
2419 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2420 if (element_counts_[i] != other.element_counts_[i]) return false;
// True when the record holds no entries of any type.
2426 bool ConstantPoolArray::NumberOfEntries::is_empty() const {
2427 return total_count() == 0;
// Entry count for one type.
2431 int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
2432 DCHECK(type < NUMBER_OF_TYPES);
2433 return element_counts_[type];
// First index at which entries of |type| start: the sum of the counts of
// all lower-numbered types. (The 'base' accumulator declaration and the
// final return are elided in this sampled view.)
2437 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2439 DCHECK(type < NUMBER_OF_TYPES);
2440 for (int i = 0; i < type; i++) {
2441 base += element_counts_[i];
// Sum of all per-type counts. (Accumulator declaration/return elided.)
2447 int ConstantPoolArray::NumberOfEntries::total_count() const {
2449 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2450 count += element_counts_[i];
2456 int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2457 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2458 if (element_counts_[i] < min || element_counts_[i] > max) {
// Returns the current index and advances. (A call to update_section() after
// the increment appears to be elided in this sampled view — confirm.)
2466 int ConstantPoolArray::Iterator::next_index() {
2467 DCHECK(!is_finished());
2468 int ret = next_index_++;
// Iteration ends once next_index_ passes the last index of this type in
// the final (small or extended) section.
2474 bool ConstantPoolArray::Iterator::is_finished() {
2475 return next_index_ > array_->last_index(type_, final_section_);
// When the small section for this type is exhausted, jump to the first
// index of the same type in the extended section.
2479 void ConstantPoolArray::Iterator::update_section() {
2480 if (next_index_ > array_->last_index(type_, current_section_) &&
2481 current_section_ != final_section_) {
2482 DCHECK(final_section_ == EXTENDED_SECTION);
2483 current_section_ = EXTENDED_SECTION;
2484 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
// Layout flag: decoded from the packed small_layout_1 bitfield word.
2489 bool ConstantPoolArray::is_extended_layout() {
2490 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2491 return IsExtendedField::decode(small_layout_1);
// The last section present in this array.
2495 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2496 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
// First entry index of the extended section == total entries in the small
// section (stored in small_layout_2's TotalCountField).
2500 int ConstantPoolArray::first_extended_section_index() {
2501 DCHECK(is_extended_layout());
2502 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2503 return TotalCountField::decode(small_layout_2);
// Byte offset of the extended-section header, aligned to kInt64Size so the
// int64 entries that follow are naturally aligned.
2507 int ConstantPoolArray::get_extended_section_header_offset() {
2508 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
// Weak-object state, packed into small_layout_2.
2512 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2513 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2514 return WeakObjectStateField::decode(small_layout_2);
// Read-modify-write of the weak-object state bits; other bits preserved.
2518 void ConstantPoolArray::set_weak_object_state(
2519 ConstantPoolArray::WeakObjectState state) {
2520 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2521 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2522 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
// First entry index of |type| within |section|: the section's base index
// plus the counts of all lower-numbered types in that section. (The local
// 'index' initialization is elided in this sampled view.)
2526 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2528 if (section == EXTENDED_SECTION) {
2529 DCHECK(is_extended_layout());
2530 index += first_extended_section_index();
2533 for (Type type_iter = FIRST_TYPE; type_iter < type;
2534 type_iter = next_type(type_iter)) {
2535 index += number_of_entries(type_iter, section);
// Last (inclusive) index of |type| in |section|.
2542 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2543 return first_index(type, section) + number_of_entries(type, section) - 1;
// Per-type entry count. Small section: decoded from the packed layout
// words. Extended section: read from the header that follows the small
// section. (The switch-on-type scaffolding is elided in this view; the
// surviving lines show the per-type decode/offset cases.)
2547 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2548 if (section == SMALL_SECTION) {
2549 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2550 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2553 return Int64CountField::decode(small_layout_1);
2555 return CodePtrCountField::decode(small_layout_1);
2557 return HeapPtrCountField::decode(small_layout_1);
2559 return Int32CountField::decode(small_layout_2);
2565 DCHECK(section == EXTENDED_SECTION && is_extended_layout());
2566 int offset = get_extended_section_header_offset();
2569 offset += kExtendedInt64CountOffset;
2572 offset += kExtendedCodePtrCountOffset;
2575 offset += kExtendedHeapPtrCountOffset;
2578 offset += kExtendedInt32CountOffset;
2583 return READ_INT_FIELD(this, offset);
// True iff the byte offset falls inside |type|'s range in either section.
2588 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2589 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2590 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2591 (is_extended_layout() &&
2592 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2593 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
// Maps an entry index back to its Type by scanning types in order until the
// index falls within a type's range for the owning section.
2597 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2598 LayoutSection section;
2599 if (is_extended_layout() && index >= first_extended_section_index()) {
2600 section = EXTENDED_SECTION;
2602 section = SMALL_SECTION;
2605 Type type = FIRST_TYPE;
2606 while (index > last_index(type, section)) {
2607 type = next_type(type);
2609 DCHECK(type <= LAST_TYPE);
// Typed entry readers. Each asserts both the array's map and that the
// requested index actually holds an entry of the expected type.
2614 int64_t ConstantPoolArray::get_int64_entry(int index) {
2615 DCHECK(map() == GetHeap()->constant_pool_array_map());
2616 DCHECK(get_type(index) == INT64);
2617 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
// Reads an INT64 slot reinterpreted as a double (same 8-byte storage).
2621 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2622 STATIC_ASSERT(kDoubleSize == kInt64Size);
2623 DCHECK(map() == GetHeap()->constant_pool_array_map());
2624 DCHECK(get_type(index) == INT64);
2625 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2629 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2630 DCHECK(map() == GetHeap()->constant_pool_array_map());
2631 DCHECK(get_type(index) == CODE_PTR);
2632 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2636 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2637 DCHECK(map() == GetHeap()->constant_pool_array_map());
2638 DCHECK(get_type(index) == HEAP_PTR);
2639 return READ_FIELD(this, OffsetOfElementAt(index));
2643 int32_t ConstantPoolArray::get_int32_entry(int index) {
2644 DCHECK(map() == GetHeap()->constant_pool_array_map());
2645 DCHECK(get_type(index) == INT32);
2646 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
// Typed entry writers; non-pointer payloads need no write barrier.
2650 void ConstantPoolArray::set(int index, int64_t value) {
2651 DCHECK(map() == GetHeap()->constant_pool_array_map());
2652 DCHECK(get_type(index) == INT64);
2653 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2657 void ConstantPoolArray::set(int index, double value) {
2658 STATIC_ASSERT(kDoubleSize == kInt64Size);
2659 DCHECK(map() == GetHeap()->constant_pool_array_map());
2660 DCHECK(get_type(index) == INT64);
2661 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
// Code pointers are raw addresses, not tagged objects: no write barrier.
2665 void ConstantPoolArray::set(int index, Address value) {
2666 DCHECK(map() == GetHeap()->constant_pool_array_map());
2667 DCHECK(get_type(index) == CODE_PTR);
2668 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
// Heap pointer store: requires the value to be outside new space (asserted)
// and still records the slot via the write barrier.
2672 void ConstantPoolArray::set(int index, Object* value) {
2673 DCHECK(map() == GetHeap()->constant_pool_array_map());
2674 DCHECK(!GetHeap()->InNewSpace(value));
2675 DCHECK(get_type(index) == HEAP_PTR);
2676 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2677 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2681 void ConstantPoolArray::set(int index, int32_t value) {
2682 DCHECK(map() == GetHeap()->constant_pool_array_map());
2683 DCHECK(get_type(index) == INT32);
2684 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
// Offset-based writers: like the index-based set() overloads, but addressed
// by raw byte offset; offset_is_type() validates the offset lands in the
// correct typed region.
2688 void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
2689 DCHECK(map() == GetHeap()->constant_pool_array_map());
2690 DCHECK(offset_is_type(offset, INT32));
2691 WRITE_INT32_FIELD(this, offset, value);
2695 void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
2696 DCHECK(map() == GetHeap()->constant_pool_array_map());
2697 DCHECK(offset_is_type(offset, INT64));
2698 WRITE_INT64_FIELD(this, offset, value);
// Doubles share the INT64 storage region.
2702 void ConstantPoolArray::set_at_offset(int offset, double value) {
2703 DCHECK(map() == GetHeap()->constant_pool_array_map());
2704 DCHECK(offset_is_type(offset, INT64));
2705 WRITE_DOUBLE_FIELD(this, offset, value);
// Code pointer write; barrier applied to the (reinterpreted) stored value.
2709 void ConstantPoolArray::set_at_offset(int offset, Address value) {
2710 DCHECK(map() == GetHeap()->constant_pool_array_map());
2711 DCHECK(offset_is_type(offset, CODE_PTR));
2712 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
2713 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
// Heap pointer write; value must not live in new space.
2717 void ConstantPoolArray::set_at_offset(int offset, Object* value) {
2718 DCHECK(map() == GetHeap()->constant_pool_array_map());
2719 DCHECK(!GetHeap()->InNewSpace(value));
2720 DCHECK(offset_is_type(offset, HEAP_PTR));
2721 WRITE_FIELD(this, offset, value);
2722 WRITE_BARRIER(GetHeap(), this, offset, value);
// Packs the small-section entry counts into the two 32-bit layout words and
// zeroes any header padding between them and the first entry.
2726 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2727 uint32_t small_layout_1 =
2728 Int64CountField::encode(small.count_of(INT64)) |
2729 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2730 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2731 IsExtendedField::encode(false);
2732 uint32_t small_layout_2 =
2733 Int32CountField::encode(small.count_of(INT32)) |
2734 TotalCountField::encode(small.total_count()) |
2735 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2736 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2737 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2738 if (kHeaderSize != kFirstEntryOffset) {
2739 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
2740 WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
// Extended-layout initialization: small section first (the Init(small) call
// is elided in this sampled view), then flips the is_extended bit and
// writes the extended section's per-type counts into its header.
2745 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2746 const NumberOfEntries& extended) {
2747 // Initialize small layout fields first.
2750 // Set is_extended_layout field.
2751 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2752 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2753 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2755 // Initialize the extended layout fields.
2756 int extended_header_offset = get_extended_section_header_offset();
2757 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2758 extended.count_of(INT64));
2759 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2760 extended.count_of(CODE_PTR));
2761 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2762 extended.count_of(HEAP_PTR));
2763 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2764 extended.count_of(INT32));
// Total object size in bytes: small section only, or small + extended.
2768 int ConstantPoolArray::size() {
2769 NumberOfEntries small(this, SMALL_SECTION);
2770 if (!is_extended_layout()) {
2771 return SizeFor(small);
2773 NumberOfEntries extended(this, EXTENDED_SECTION);
2774 return SizeForExtended(small, extended);
// Total entry count across both sections. (The final 'return length;' is
// elided in this sampled view.)
2779 int ConstantPoolArray::length() {
2780 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2781 int length = TotalCountField::decode(small_layout_2);
2782 if (is_extended_layout()) {
2783 length += number_of_entries(INT64, EXTENDED_SECTION) +
2784 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2785 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2786 number_of_entries(INT32, EXTENDED_SECTION);
// Decides whether stores into this object may skip the write barrier. The
// barrier may only be skipped for new-space objects while incremental
// marking is NOT running; the DisallowHeapAllocation witness guarantees no
// GC can move the object between this check and the subsequent stores.
2792 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2793 const DisallowHeapAllocation& promise) {
2794 Heap* heap = GetHeap();
2795 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2796 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2797 return UPDATE_WRITE_BARRIER;
// Store with caller-chosen barrier mode (from GetWriteBarrierMode above).
2801 void FixedArray::set(int index,
2803 WriteBarrierMode mode) {
2804 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2805 DCHECK(index >= 0 && index < this->length());
2806 int offset = kHeaderSize + index * kPointerSize;
2807 WRITE_FIELD(this, offset, value);
2808 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Store that skips the incremental-marking barrier but still records
// old-to-new pointers in the store buffer (RecordWrite). Only safe when
// the array is known white to the marker (see WhitenessWitness).
2812 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2815 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2816 DCHECK(index >= 0 && index < array->length());
2817 int offset = kHeaderSize + index * kPointerSize;
2818 WRITE_FIELD(array, offset, value);
2819 Heap* heap = array->GetHeap();
2820 if (heap->InNewSpace(value)) {
2821 heap->RecordWrite(array->address(), offset);
// Fully barrier-free store; asserts the value is not in new space so no
// old-to-new pointer can be created.
2826 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2829 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2830 DCHECK(index >= 0 && index < array->length());
2831 DCHECK(!array->GetHeap()->InNewSpace(value));
2832 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
// Sentinel stores. All three immortal immovable roots (undefined, null,
// the-hole) live outside new space (DCHECK'd), so the barrier-free setter
// is safe. (The NoWriteBarrierSet(this, ...) call line is partially elided
// in this sampled view.)
2836 void FixedArray::set_undefined(int index) {
2837 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2838 DCHECK(index >= 0 && index < this->length());
2839 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2841 kHeaderSize + index * kPointerSize,
2842 GetHeap()->undefined_value());
2846 void FixedArray::set_null(int index) {
2847 DCHECK(index >= 0 && index < this->length());
2848 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2850 kHeaderSize + index * kPointerSize,
2851 GetHeap()->null_value());
2855 void FixedArray::set_the_hole(int index) {
2856 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2857 DCHECK(index >= 0 && index < this->length());
2858 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2860 kHeaderSize + index * kPointerSize,
2861 GetHeap()->the_hole_value());
// Fills [from, to) with the-hole. (Loop body elided; presumably
// set_the_hole(i) — confirm against full source.)
2865 void FixedArray::FillWithHoles(int from, int to) {
2866 for (int i = from; i < to; i++) {
// Address of element 0, for bulk copies and iteration.
2872 Object** FixedArray::data_start() {
2873 return HeapObject::RawField(this, kHeaderSize);
// A descriptor array shorter than kFirstIndex holds no descriptors; only
// the canonical empty_descriptor_array may legitimately be that short.
2877 bool DescriptorArray::IsEmpty() {
2878 DCHECK(length() >= kFirstIndex ||
2879 this == GetHeap()->empty_descriptor_array());
2880 return length() < kFirstIndex;
// Stores the live-descriptor count as a Smi in the reserved header slot.
// (The SET_FIELD-style macro name on the elided line is not visible here.)
2884 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2886 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2890 // Perform a binary search in a fixed array. Low and high are entry indices. If
2891 // there are three entries in this array it should be called with low=0 and
// Phase 1: binary search on name hash narrows [low, high] to the first
// entry whose hash is >= the target hash. Phase 2: linear scan over the
// run of equal-hash entries comparing names, since distinct names may
// collide on hash. (Several scaffolding lines — the 'limit' computation
// and the hash-mismatch early-outs — are elided in this sampled view.)
2893 template<SearchMode search_mode, typename T>
2894 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2895 uint32_t hash = name->Hash();
2898 DCHECK(low <= high);
2900 while (low != high) {
2901 int mid = (low + high) / 2;
2902 Name* mid_name = array->GetSortedKey(mid);
2903 uint32_t mid_hash = mid_name->Hash();
2905 if (mid_hash >= hash) {
// Scan forward through entries sharing the target hash.
2912 for (; low <= limit; ++low) {
2913 int sort_index = array->GetSortedKeyIndex(low);
2914 Name* entry = array->GetKey(sort_index);
2915 if (entry->Hash() != hash) break;
2916 if (entry->Equals(name)) {
// In VALID_ENTRIES mode, a match beyond valid_entries counts as not found.
2917 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2920 return T::kNotFound;
2924 return T::kNotFound;
2928 // Perform a linear search in this fixed array. len is the number of entry
2929 // indices that are valid.
// ALL_ENTRIES walks in sorted-key order and can stop early once the entry
// hash exceeds the target (keys are hash-sorted). VALID_ENTRIES walks the
// unsorted prefix of valid descriptors and must check every one.
2930 template<SearchMode search_mode, typename T>
2931 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2932 uint32_t hash = name->Hash();
2933 if (search_mode == ALL_ENTRIES) {
2934 for (int number = 0; number < len; number++) {
2935 int sorted_index = array->GetSortedKeyIndex(number);
2936 Name* entry = array->GetKey(sorted_index);
2937 uint32_t current_hash = entry->Hash();
2938 if (current_hash > hash) break;
2939 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2942 DCHECK(len >= valid_entries);
2943 for (int number = 0; number < valid_entries; number++) {
2944 Name* entry = array->GetKey(number);
2945 uint32_t current_hash = entry->Hash();
2946 if (current_hash == hash && entry->Equals(name)) return number;
2949 return T::kNotFound;
// Dispatcher: linear search for small arrays (cheaper than binary search
// below ~8 entries, or ~24 valid descriptors since each descriptor spans
// three array slots), binary search otherwise.
2953 template<SearchMode search_mode, typename T>
2954 int Search(T* array, Name* name, int valid_entries) {
2955 if (search_mode == VALID_ENTRIES) {
2956 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2958 SLOW_DCHECK(array->IsSortedNoDuplicates());
2961 int nof = array->number_of_entries();
2962 if (nof == 0) return T::kNotFound;
2964 // Fast case: do linear search for small arrays.
2965 const int kMaxElementsForLinearSearch = 8;
2966 if ((search_mode == ALL_ENTRIES &&
2967 nof <= kMaxElementsForLinearSearch) ||
2968 (search_mode == VALID_ENTRIES &&
2969 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2970 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2973 // Slow case: perform binary search.
2974 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
// Searches only the map's valid (own) descriptors.
2978 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2979 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
// Cached variant: consults the per-isolate DescriptorLookupCache first and
// populates it on a miss. (The final 'return number;' is elided in this
// sampled view.)
2983 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2984 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2985 if (number_of_own_descriptors == 0) return kNotFound;
2987 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2988 int number = cache->Lookup(map, name);
2990 if (number == DescriptorLookupCache::kAbsent) {
2991 number = Search(name, number_of_own_descriptors);
2992 cache->Update(map, name, number);
// Details of the most recently added descriptor.
2999 PropertyDetails Map::GetLastDescriptorDetails() {
3000 return instance_descriptors()->GetDetails(LastAdded());
// Own-property lookup on this map's descriptors; fills |result| with the
// descriptor or marks it NotFound.
3004 void Map::LookupDescriptor(JSObject* holder,
3006 LookupResult* result) {
3007 DescriptorArray* descriptors = this->instance_descriptors();
3008 int number = descriptors->SearchWithCache(name, this);
3009 if (number == DescriptorArray::kNotFound) return result->NotFound();
3010 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
// Map-transition lookup (e.g. the map to move to when adding |name|).
3014 void Map::LookupTransition(JSObject* holder,
3016 LookupResult* result) {
3017 int transition_index = this->SearchTransition(name);
3018 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
3019 result->TransitionResult(holder, this->GetTransition(transition_index));
// Canonical empty backing store matching this map's elements kind. The
// chosen empty array is immortal/immovable (not in new space, DCHECK'd),
// so callers may store it without a write barrier. (Return lines for the
// external/typed-array branches are partially elided in this view.)
3023 FixedArrayBase* Map::GetInitialElements() {
3024 if (has_fast_smi_or_object_elements() ||
3025 has_fast_double_elements()) {
3026 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
3027 return GetHeap()->empty_fixed_array();
3028 } else if (has_external_array_elements()) {
3029 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
3030 DCHECK(!GetHeap()->InNewSpace(empty_array));
3032 } else if (has_fixed_typed_array_elements()) {
3033 FixedTypedArrayBase* empty_array =
3034 GetHeap()->EmptyFixedTypedArrayForMap(this);
3035 DCHECK(!GetHeap()->InNewSpace(empty_array));
// Each descriptor occupies a (key, value, details) triple in the backing
// fixed array; ToKeyIndex/ToValueIndex/ToDetailsIndex map descriptor
// numbers to raw slots.
3044 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
3045 DCHECK(descriptor_number < number_of_descriptors());
3046 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
3050 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
3051 return GetKeySlot(descriptor_number);
// End slot is one past the previous descriptor's value slot.
3055 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
3056 return GetValueSlot(descriptor_number - 1) + 1;
3060 Name* DescriptorArray::GetKey(int descriptor_number) {
3061 DCHECK(descriptor_number < number_of_descriptors());
3062 return Name::cast(get(ToKeyIndex(descriptor_number)));
// The hash-sorted ordering is stored as a pointer field inside each
// descriptor's PropertyDetails rather than in a separate array.
3066 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
3067 return GetDetails(descriptor_number).pointer();
3071 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
3072 return GetKey(GetSortedKeyIndex(descriptor_number));
3076 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
3077 PropertyDetails details = GetDetails(descriptor_index);
3078 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
// Rewrites only the representation bits of the details word.
3082 void DescriptorArray::SetRepresentation(int descriptor_index,
3083 Representation representation) {
3084 DCHECK(!representation.IsNone());
3085 PropertyDetails details = GetDetails(descriptor_index);
3086 set(ToDetailsIndex(descriptor_index),
3087 details.CopyWithRepresentation(representation).AsSmi());
3091 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3092 DCHECK(descriptor_number < number_of_descriptors());
3093 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3097 int DescriptorArray::GetValueOffset(int descriptor_number) {
3098 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3102 Object* DescriptorArray::GetValue(int descriptor_number) {
3103 DCHECK(descriptor_number < number_of_descriptors());
3104 return get(ToValueIndex(descriptor_number));
3108 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3109 set(ToValueIndex(descriptor_index), value);
// Details are stored as a Smi-encoded PropertyDetails.
3113 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3114 DCHECK(descriptor_number < number_of_descriptors());
3115 Object* details = get(ToDetailsIndex(descriptor_number));
3116 return PropertyDetails(Smi::cast(details));
3120 PropertyType DescriptorArray::GetType(int descriptor_number) {
3121 return GetDetails(descriptor_number).type();
// FIELD descriptors: value slot meaning depends on the descriptor type.
3125 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3126 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3127 return GetDetails(descriptor_number).field_index();
3131 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3132 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3133 return HeapType::cast(GetValue(descriptor_number));
3137 Object* DescriptorArray::GetConstant(int descriptor_number) {
3138 return GetValue(descriptor_number);
3142 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3143 DCHECK(GetType(descriptor_number) == CALLBACKS);
3144 return GetValue(descriptor_number);
// CALLBACKS descriptors store a Foreign wrapping the accessor struct.
3148 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3149 DCHECK(GetType(descriptor_number) == CALLBACKS);
3150 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3151 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
// Copies one descriptor out into a handle-based Descriptor record.
3155 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3156 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3157 handle(GetValue(descriptor_number), GetIsolate()),
3158 GetDetails(descriptor_number));
// Witness-based store: the WhitenessWitness proves the array is white to
// the incremental marker, permitting the cheaper barrier-skipping writes.
// (The desc->GetKey()/GetValue() argument lines are elided in this view.)
3162 void DescriptorArray::Set(int descriptor_number,
3164 const WhitenessWitness&) {
3166 DCHECK(descriptor_number < number_of_descriptors());
3168 NoIncrementalWriteBarrierSet(this,
3169 ToKeyIndex(descriptor_number),
3171 NoIncrementalWriteBarrierSet(this,
3172 ToValueIndex(descriptor_number),
3174 NoIncrementalWriteBarrierSet(this,
3175 ToDetailsIndex(descriptor_number),
3176 desc->GetDetails().AsSmi());
// Witness-free variant: uses the normal fully-barriered set().
3180 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3182 DCHECK(descriptor_number < number_of_descriptors());
3184 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3185 set(ToValueIndex(descriptor_number), *desc->GetValue());
3186 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
// Appends a descriptor, then insertion-sorts its entry into the
// hash-ordered index by shifting larger-hash keys up one position.
// Witness variant (barrier-skipping stores).
3190 void DescriptorArray::Append(Descriptor* desc,
3191 const WhitenessWitness& witness) {
3192 DisallowHeapAllocation no_gc;
3193 int descriptor_number = number_of_descriptors();
3194 SetNumberOfDescriptors(descriptor_number + 1);
3195 Set(descriptor_number, desc, witness);
3197 uint32_t hash = desc->GetKey()->Hash();
3201 for (insertion = descriptor_number; insertion > 0; --insertion) {
3202 Name* key = GetSortedKey(insertion - 1);
3203 if (key->Hash() <= hash) break;
3204 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3207 SetSortedKey(insertion, descriptor_number);
// Same algorithm with normal barriered stores. (The 'int insertion;'
// declarations are elided from both variants in this sampled view.)
3211 void DescriptorArray::Append(Descriptor* desc) {
3212 DisallowHeapAllocation no_gc;
3213 int descriptor_number = number_of_descriptors();
3214 SetNumberOfDescriptors(descriptor_number + 1);
3215 Set(descriptor_number, desc);
3217 uint32_t hash = desc->GetKey()->Hash();
3221 for (insertion = descriptor_number; insertion > 0; --insertion) {
3222 Name* key = GetSortedKey(insertion - 1);
3223 if (key->Hash() <= hash) break;
3224 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3227 SetSortedKey(insertion, descriptor_number);
// Swaps two positions in the sorted-key index (not the descriptors).
3231 void DescriptorArray::SwapSortedKeys(int first, int second) {
3232 int first_key = GetSortedKeyIndex(first);
3233 SetSortedKey(first, GetSortedKeyIndex(second));
3234 SetSortedKey(second, first_key);
// RAII scope asserting the array is white (unmarked) to the incremental
// marker, and suppressing marking for its lifetime; this is what makes
// NoIncrementalWriteBarrierSet safe.
3238 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3239 : marking_(array->GetHeap()->incremental_marking()) {
3240 marking_->EnterNoMarkingScope();
3241 DCHECK(!marking_->IsMarking() ||
3242 Marking::Color(array) == Marking::WHITE_OBJECT);
3246 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3247 marking_->LeaveNoMarkingScope();
// Capacity = next power of two >= 2 * requested, floored at 32, keeping
// the table at most half full. (The final 'return capacity;' is elided in
// this sampled view.)
3251 template<typename Derived, typename Shape, typename Key>
3252 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3253 const int kMinCapacity = 32;
3254 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
3255 if (capacity < kMinCapacity) {
3256 capacity = kMinCapacity;  // Guarantee min capacity.
3262 template<typename Derived, typename Shape, typename Key>
3263 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3264 return FindEntry(GetIsolate(), key);
3268 // Find entry for key otherwise return kNotFound.
// Open-addressing probe loop: undefined marks an empty slot (miss, stop);
// the-hole marks a deleted slot (keep probing). The loop header and the
// probe counter declaration are elided in this sampled view.
3269 template<typename Derived, typename Shape, typename Key>
3270 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3271 uint32_t capacity = Capacity();
3272 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3274 // EnsureCapacity will guarantee the hash table is never full.
3276 Object* element = KeyAt(entry);
3277 // Empty entry. Uses raw unchecked accessors because it is called by the
3278 // string table during bootstrapping.
3279 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3280 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3281 Shape::IsMatch(key, element)) return entry;
3282 entry = NextProbe(entry, count++, capacity);
// The kMaxNumberKeyIndex slot multiplexes a "requires slow elements" flag
// (low bits, kRequiresSlowElementsMask) with the max numeric key seen
// (shifted by kRequiresSlowElementsTagSize).
3288 bool SeededNumberDictionary::requires_slow_elements() {
3289 Object* max_index_object = get(kMaxNumberKeyIndex);
3290 if (!max_index_object->IsSmi()) return false;
3292 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
// Largest numeric key recorded; only meaningful while fast elements are
// still allowed (DCHECK'd).
3295 uint32_t SeededNumberDictionary::max_number_key() {
3296 DCHECK(!requires_slow_elements());
3297 Object* max_index_object = get(kMaxNumberKeyIndex);
3298 if (!max_index_object->IsSmi()) return 0;
3299 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3300 return value >> kRequiresSlowElementsTagSize;
// Latches the flag; also discards the stored max key.
3303 void SeededNumberDictionary::set_requires_slow_elements() {
3304 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3308 // ------------------------------------
// Expansions of the CAST_ACCESSOR macro (defined near the top of this
// file): each line generates checked type::cast(Object*) and
// type::cast(const Object*) with a SLOW_DCHECK on the dynamic type.
3312 CAST_ACCESSOR(AccessorInfo)
3313 CAST_ACCESSOR(ByteArray)
3316 CAST_ACCESSOR(CodeCacheHashTable)
3317 CAST_ACCESSOR(CompilationCacheTable)
3318 CAST_ACCESSOR(ConsString)
3319 CAST_ACCESSOR(ConstantPoolArray)
3320 CAST_ACCESSOR(DeoptimizationInputData)
3321 CAST_ACCESSOR(DeoptimizationOutputData)
3322 CAST_ACCESSOR(DependentCode)
3323 CAST_ACCESSOR(DescriptorArray)
3324 CAST_ACCESSOR(ExternalArray)
3325 CAST_ACCESSOR(ExternalAsciiString)
3326 CAST_ACCESSOR(ExternalFloat32Array)
3327 CAST_ACCESSOR(ExternalFloat32x4Array)
3328 CAST_ACCESSOR(ExternalFloat64Array)
3329 CAST_ACCESSOR(ExternalFloat64x2Array)
3330 CAST_ACCESSOR(ExternalInt16Array)
3331 CAST_ACCESSOR(ExternalInt32Array)
3332 CAST_ACCESSOR(ExternalInt32x4Array)
3333 CAST_ACCESSOR(ExternalInt8Array)
3334 CAST_ACCESSOR(ExternalString)
3335 CAST_ACCESSOR(ExternalTwoByteString)
3336 CAST_ACCESSOR(ExternalUint16Array)
3337 CAST_ACCESSOR(ExternalUint32Array)
3338 CAST_ACCESSOR(ExternalUint8Array)
3339 CAST_ACCESSOR(ExternalUint8ClampedArray)
3340 CAST_ACCESSOR(FixedArray)
3341 CAST_ACCESSOR(FixedArrayBase)
3342 CAST_ACCESSOR(FixedDoubleArray)
3343 CAST_ACCESSOR(FixedTypedArrayBase)
3344 CAST_ACCESSOR(Foreign)
3345 CAST_ACCESSOR(FreeSpace)
3346 CAST_ACCESSOR(GlobalObject)
3347 CAST_ACCESSOR(HeapObject)
3348 CAST_ACCESSOR(Float32x4)
3349 CAST_ACCESSOR(Float64x2)
3350 CAST_ACCESSOR(Int32x4)
3351 CAST_ACCESSOR(JSArray)
3352 CAST_ACCESSOR(JSArrayBuffer)
3353 CAST_ACCESSOR(JSArrayBufferView)
3354 CAST_ACCESSOR(JSBuiltinsObject)
3355 CAST_ACCESSOR(JSDataView)
3356 CAST_ACCESSOR(JSDate)
3357 CAST_ACCESSOR(JSFunction)
3358 CAST_ACCESSOR(JSFunctionProxy)
3359 CAST_ACCESSOR(JSFunctionResultCache)
3360 CAST_ACCESSOR(JSGeneratorObject)
3361 CAST_ACCESSOR(JSGlobalObject)
3362 CAST_ACCESSOR(JSGlobalProxy)
3363 CAST_ACCESSOR(JSMap)
3364 CAST_ACCESSOR(JSMapIterator)
3365 CAST_ACCESSOR(JSMessageObject)
3366 CAST_ACCESSOR(JSModule)
3367 CAST_ACCESSOR(JSObject)
3368 CAST_ACCESSOR(JSProxy)
3369 CAST_ACCESSOR(JSReceiver)
3370 CAST_ACCESSOR(JSRegExp)
3371 CAST_ACCESSOR(JSSet)
3372 CAST_ACCESSOR(JSSetIterator)
3373 CAST_ACCESSOR(JSTypedArray)
3374 CAST_ACCESSOR(JSValue)
3375 CAST_ACCESSOR(JSWeakMap)
3376 CAST_ACCESSOR(JSWeakSet)
3378 CAST_ACCESSOR(MapCache)
3380 CAST_ACCESSOR(NameDictionary)
3381 CAST_ACCESSOR(NormalizedMapCache)
3382 CAST_ACCESSOR(Object)
3383 CAST_ACCESSOR(ObjectHashTable)
3384 CAST_ACCESSOR(Oddball)
3385 CAST_ACCESSOR(OrderedHashMap)
3386 CAST_ACCESSOR(OrderedHashSet)
3387 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3388 CAST_ACCESSOR(PropertyCell)
3389 CAST_ACCESSOR(ScopeInfo)
3390 CAST_ACCESSOR(SeededNumberDictionary)
3391 CAST_ACCESSOR(SeqOneByteString)
3392 CAST_ACCESSOR(SeqString)
3393 CAST_ACCESSOR(SeqTwoByteString)
3394 CAST_ACCESSOR(SharedFunctionInfo)
3395 CAST_ACCESSOR(SlicedString)
3397 CAST_ACCESSOR(String)
3398 CAST_ACCESSOR(StringTable)
3399 CAST_ACCESSOR(Struct)
3400 CAST_ACCESSOR(Symbol)
3401 CAST_ACCESSOR(UnseededNumberDictionary)
3402 CAST_ACCESSOR(WeakHashTable)
// Checked downcast to a concrete FixedTypedArray instantiation: in slow-DCHECK
// builds, verifies the object is a heap object whose map's instance type is
// exactly Traits::kInstanceType before reinterpreting the pointer.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3414 template <class Traits>
3415 const FixedTypedArray<Traits>*
3416 FixedTypedArray<Traits>::cast(const Object* object) {
3417 SLOW_DCHECK(object->IsHeapObject() &&
3418 HeapObject::cast(object)->map()->instance_type() ==
3419 Traits::kInstanceType);
3420 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
// Stamp out CAST_ACCESSOR definitions for every Struct subtype listed in
// STRUCT_LIST; the helper macro is immediately undefined after expansion.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
// Checked downcasts for the generic HashTable template; the dynamic check is
// the shared IsHashTable() predicate (instance type is not template-specific).
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
// Const overload: note the cast correctly preserves const-ness.
template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
// Smi-encoded int field accessors. The SYNCHRONIZED_/NOBARRIER_ variants emit
// additional getter/setter pairs with acquire-release resp. relaxed memory
// ordering for fields read concurrently (e.g. String::length by sweepers).
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
// Raw hash field accessor (hash value plus flag bits, see kHashFieldOffset).
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // On 64-bit the field occupies a full pointer slot; zero the upper half so
  // the whole word is deterministic.
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
// Name equality. Pointer identity is sufficient for symbols and for pairs of
// internalized strings (both are canonicalized), so those cases short-circuit
// here; only the mixed/non-internalized string case needs a content compare.
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    // NOTE(review): the listing is truncated here — presumably `return false;`
    // since identity already failed for canonicalized names.
  return String::cast(this)->SlowEquals(String::cast(other));
// Handle-based variant with the same canonicalization shortcut.
bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
// Symbol field accessors: description object, flags Smi, and the is_private
// bit decoded out of the flags field.
ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
// String equality: identity fast path, then the internalized-pair shortcut
// (both canonicalized, so identity failure implies inequality — the dropped
// line here is presumably `return false;`), then content comparison.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
  return SlowEquals(other);
// Handle-based variant, same structure.
bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
  return SlowEquals(one, two);
// Returns a flat (non-cons) string with the same contents. Non-cons strings
// are returned unchanged; a cons string that is already flat has its whole
// payload in first() (second() is empty), so first() is returned directly.
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
// Returns the code unit at |index|, dispatching on the string's concrete
// representation (seq/cons/external/sliced crossed with one-/two-byte).
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      // Cons strings need a tree walk; encoding does not matter here.
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    // NOTE(review): the unreachable default case is not visible in this
    // listing.
// Writes a code unit at |index|. Only sequential strings are mutable in
// place, hence the shape DCHECK; dispatch is on the encoding only.
void String::Set(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  DCHECK(StringShape(this).IsSequential());
  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
// A string is flat unless it is a cons string with a non-empty second part
// (flattening moves the whole payload into first() and empties second()).
bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  // Cons and sliced strings store their backing string at the same offset,
  // asserted below, so one field read serves both representations.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
// Walks through sliced-string indirection (accumulating the slice offset)
// until it reaches a flat chunk, then hands the raw characters to |visitor|.
// Returns the cons string if one is encountered (caller must handle it), else
// NULL after visiting.
// NOTE(review): the parameter list (string, offset) and the enclosing
// while-loop header are not visible in this listing.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
    int slice_offset = offset;
    const int length = string->length();
    DCHECK(offset <= length);
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalAsciiString::cast(string)->GetChars() + slice_offset,
      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Resolve one level of slicing and loop again on the parent.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        // Cons strings cannot be visited flat; return them to the caller.
        return ConsString::cast(string);
// Sequential one-byte string: element read/write and raw character access.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  // Value must fit in one byte for this representation.
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
// Sequential two-byte string: same accessors at 16-bit granularity.
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
// Object sizes derived from the length field (instance_type is unused here).
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
// Sliced string: a (parent, offset) view into another string.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  // Slices may only point at flat backing stores, never at other indirect
  // strings.
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
// Cons string: lazy concatenation of first() and second().
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
// Unchecked variant for GC/verification code that may see raw values.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
// "Short" external strings omit the cached data pointer field; the property
// is encoded in the instance type.
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
// External one-byte string: characters live outside the V8 heap, owned by an
// embedder-provided Resource.
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
void ExternalAsciiString::update_data_cache() {
  // Short external strings have no cache slot to update.
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
void ExternalAsciiString::set_resource(
    const ExternalAsciiString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  // Refresh the cached data pointer whenever a real resource is installed.
  if (resource != NULL) update_data_cache();
const uint8_t* ExternalAsciiString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
// External two-byte string: mirrors the one-byte variant above with uint16_t
// characters.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
// Direct pointer into the external buffer starting at |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
  return GetChars() + start;
// Fixed-capacity stack of cons-string frames; depth_ wraps modulo the frame
// count via kDepthMask, so pushes beyond capacity overwrite oldest entries.
int ConsStringIteratorOp::OffsetForDepth(int depth) {
  return depth & kDepthMask;
void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
void ConsStringIteratorOp::PushRight(ConsString* string) {
  // Replaces the current top frame rather than growing the stack.
  frames_[(depth_-1) & kDepthMask] = string;
void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
void ConsStringIteratorOp::Pop() {
  DCHECK(depth_ <= maximum_depth_);
// Forward character stream over an arbitrary (possibly cons) string, driven
// by String::VisitFlat plus a ConsStringIteratorOp for cons segments.
// buffer8_/buffer16_ alias the same cursor; is_one_byte_ selects the width.
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
StringCharacterStream::StringCharacterStream(String* string,
                                             ConsStringIteratorOp* op,
    : is_one_byte_(false),
  Reset(string, offset);
void StringCharacterStream::Reset(String* string, int offset) {
  // VisitFlat fills the buffer via the Visit*String callbacks below; a
  // non-NULL result means we hit a cons string and must iterate segments.
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  op_->Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = op_->Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  String* string = op_->Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
// Visitor callbacks: point the cursor at a flat chunk of characters.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  end_ = chars + length;
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  // end_ is kept as a byte pointer regardless of encoding.
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
// Fixed-array-backed result cache: size and finger index are stored as Smis
// at dedicated indices; entries start at kEntriesIndex.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
  // Overwrite all entry slots with the hole.
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
// Raw byte array element access plus address/pointer conversions.
byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
int ByteArray::get_int(int index) {
  // |index| counts ints here, not bytes.
  DCHECK(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
// Recovers the tagged ByteArray pointer from the address of its first data
// byte (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
// Uint8Clamped external array: data lives off-heap behind external_pointer().
uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
// Boxes the element as a Smi handle (uint8 always fits in a Smi).
Handle<Object> ExternalUint8ClampedArray::get(
    Handle<ExternalUint8ClampedArray> array,
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
void ExternalUint8ClampedArray::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
// Off-heap backing store pointer, stored as an untagged intptr field.
void* ExternalArray::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  // No write barrier needed: the value is not a heap object pointer.
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
// Int8/Uint8 external arrays: scalar access plus Smi-boxing getters (both
// element types always fit in a Smi).
int8_t ExternalInt8Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
void ExternalInt8Array::set(int index, int8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
uint8_t ExternalUint8Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
void ExternalUint8Array::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
// Int16/Uint16 external arrays: same pattern at 16-bit width.
int16_t ExternalInt16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
void ExternalInt16Array::set(int index, int16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
uint16_t ExternalUint16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
void ExternalUint16Array::set(int index, uint16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
// Int32/Uint32 external arrays. 32-bit values may exceed Smi range, so the
// boxing getters go through the factory's NewNumberFrom{Int,Uint} instead of
// constructing Smis directly.
int32_t ExternalInt32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
  return array->GetIsolate()->factory()->
      NewNumberFromInt(array->get_scalar(index));
void ExternalInt32Array::set(int index, int32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
uint32_t ExternalUint32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
  return array->GetIsolate()->factory()->
      NewNumberFromUint(array->get_scalar(index));
void ExternalUint32Array::set(int index, uint32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
// Float32 external array; boxed values are allocated as heap numbers.
float ExternalFloat32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
void ExternalFloat32Array::set(int index, float value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
// SIMD external arrays (SIMD.js extension): each logical element spans 4
// floats, 2 doubles, or 4 int32s laid out consecutively, hence the
// index * lanes addressing below. Boxing goes through the factory's
// NewFloat32x4 / NewFloat64x2 / NewInt32x4 allocators.
float32x4_value_t ExternalFloat32x4Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  float32x4_value_t value;
  value.storage[0] = ptr[index * 4 + 0];
  value.storage[1] = ptr[index * 4 + 1];
  value.storage[2] = ptr[index * 4 + 2];
  value.storage[3] = ptr[index * 4 + 3];
Handle<Object> ExternalFloat32x4Array::get(Handle<ExternalFloat32x4Array> array,
  float32x4_value_t value = array->get_scalar(index);
  return array->GetIsolate()->factory()->NewFloat32x4(value);
void ExternalFloat32x4Array::set(int index, const float32x4_value_t& value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index * 4 + 0] = value.storage[0];
  ptr[index * 4 + 1] = value.storage[1];
  ptr[index * 4 + 2] = value.storage[2];
  ptr[index * 4 + 3] = value.storage[3];
float64x2_value_t ExternalFloat64x2Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  float64x2_value_t value;
  value.storage[0] = ptr[index * 2 + 0];
  value.storage[1] = ptr[index * 2 + 1];
Handle<Object> ExternalFloat64x2Array::get(Handle<ExternalFloat64x2Array> array,
  float64x2_value_t value = array->get_scalar(index);
  return array->GetIsolate()->factory()->NewFloat64x2(value);
void ExternalFloat64x2Array::set(int index, const float64x2_value_t& value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index * 2 + 0] = value.storage[0];
  ptr[index * 2 + 1] = value.storage[1];
int32x4_value_t ExternalInt32x4Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  int32x4_value_t value;
  value.storage[0] = ptr[index * 4 + 0];
  value.storage[1] = ptr[index * 4 + 1];
  value.storage[2] = ptr[index * 4 + 2];
  value.storage[3] = ptr[index * 4 + 3];
Handle<Object> ExternalInt32x4Array::get(Handle<ExternalInt32x4Array> array,
  int32x4_value_t value = array->get_scalar(index);
  return array->GetIsolate()->factory()->NewInt32x4(value);
void ExternalInt32x4Array::set(int index, const int32x4_value_t& value) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index * 4 + 0] = value.storage[0];
  ptr[index * 4 + 1] = value.storage[1];
  ptr[index * 4 + 2] = value.storage[2];
  ptr[index * 4 + 3] = value.storage[3];
// Float64 external array; boxed values are allocated as heap numbers.
double ExternalFloat64Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
void ExternalFloat64Array::set(int index, double value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
// On-heap typed array base: data is stored inline starting at kDataOffset.
void* FixedTypedArrayBase::DataPtr() {
  return FIELD_ADDR(this, kDataOffset);
// Payload size in bytes: element size is looked up from the instance type
// via the TYPED_ARRAYS list, then multiplied by the element count.
int FixedTypedArrayBase::DataSize(InstanceType type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
      element_size = size;                                                    \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  return length() * element_size;
int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
// Total object size = header + payload, rounded up to pointer alignment.
int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
// Default element values used when a stored value cannot be converted:
// zero for all integer traits, NaN for the floating-point traits.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
int8_t Int8ArrayTraits::defaultValue() { return 0; }
uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
int16_t Int16ArrayTraits::defaultValue() { return 0; }
uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
int32_t Int32ArrayTraits::defaultValue() { return 0; }
float Float32ArrayTraits::defaultValue() {
  return static_cast<float>(base::OS::nan_value());
double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
// Generic element read/write for on-heap typed arrays.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
// Float64 specialization: goes through READ_DOUBLE_FIELD, which handles
// platforms where unaligned/non-atomic double access needs special care.
FixedTypedArray<Float64ArrayTraits>::ElementType
    FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  return READ_DOUBLE_FIELD(this, ElementOffset(index));
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
// Float64 specialization of the writer, mirroring the reader above.
void FixedTypedArray<Float64ArrayTraits>::set(
    int index, Float64ArrayTraits::ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
// Conversions from JS numbers to element types. The generic paths are plain
// casts / DoubleToInt32; Uint8Clamped overrides both to clamp into [0, 255]
// (with round-to-nearest via lrint for doubles, per the spec'd clamping).
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
  return static_cast<ElementType>(DoubleToInt32(value));
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
// Float traits keep the value as a floating-point number (no int conversion).
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
// Boxes the element at |index| into a JS value via the traits' ToHandle.
// NOTE(review): the `uint32_t index)` parameter line is not visible in this
// listing.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
// Stores a JS value into the array: Smis go through from_int, heap numbers
// through from_double, and anything else must be undefined (which stores the
// traits' default value). Out-of-bounds indices store nothing. Returns the
// stored value boxed as a JS object.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::SetValue(
    Handle<FixedTypedArray<Traits> > array,
    Handle<Object> value) {
  ElementType cast_value = Traits::defaultValue();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = from_int(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = from_double(double_value);
      // Clamp undefined to the default value. All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    array->set(index, cast_value);
  return Traits::ToHandle(array->GetIsolate(), cast_value);
// Float32x4 store: accepts a Float32x4 value or undefined; undefined stores
// an all-NaN lane vector (the type's default).
Handle<Object> FixedTypedArray<Float32x4ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Float32x4ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  float32x4_value_t cast_value;
  cast_value.storage[0] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[1] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[2] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[3] = static_cast<float>(base::OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat32x4()) {
      cast_value = Handle<Float32x4>::cast(value)->get();
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    array->set(index, cast_value);
  return Float32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
// Float64x2 store: same shape as the Float32x4 variant, two double lanes.
Handle<Object> FixedTypedArray<Float64x2ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Float64x2ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  float64x2_value_t cast_value;
  cast_value.storage[0] = base::OS::nan_value();
  cast_value.storage[1] = base::OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat64x2()) {
      cast_value = Handle<Float64x2>::cast(value)->get();
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    array->set(index, cast_value);
  return Float64x2ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
// Int32x4 store: undefined stores an all-zero lane vector (the type's
// default).
Handle<Object> FixedTypedArray<Int32x4ArrayTraits>::SetValue(
    Handle<FixedTypedArray<Int32x4ArrayTraits> > array,
    uint32_t index, Handle<Object> value) {
  int32x4_value_t cast_value;
  cast_value.storage[0] = 0;
  cast_value.storage[1] = 0;
  cast_value.storage[2] = 0;
  cast_value.storage[3] = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsInt32x4()) {
      cast_value = Handle<Int32x4>::cast(value)->get();
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    array->set(index, cast_value);
  return Int32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
// Boxing of scalar element values into JS objects. Types that always fit in
// a Smi are wrapped directly; 32-bit and floating-point types go through the
// factory (which may allocate a heap number); SIMD types allocate their
// dedicated wrapper objects.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
  return handle(Smi::FromInt(scalar), isolate);
Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
Handle<Object> Int32x4ArrayTraits::ToHandle(
    Isolate* isolate, int32x4_value_t scalar) {
  return isolate->factory()->NewInt32x4(scalar);
Handle<Object> Float32x4ArrayTraits::ToHandle(
    Isolate* isolate, float32x4_value_t scalar) {
  return isolate->factory()->NewFloat32x4(scalar);
Handle<Object> Float64x2ArrayTraits::ToHandle(
    Isolate* isolate, float64x2_value_t scalar) {
  return isolate->factory()->NewFloat64x2(scalar);
Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
// Byte-sized Map field getters.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
// Instance size is stored in pointer-size units and scaled back on read;
// the relaxed (no-barrier) read allows concurrent access during GC.
int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
// Byte offset within the object of in-object property |index|, counted from
// the end of the instance backwards.
int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  return instance_size() + (index * kPointerSize);
// Computes this object's size in bytes. Fixed-size types read the size
// straight off the map; variable-sized types (flagged by the sentinel) are
// dispatched by instance type, ordered so the most frequent cases come first.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  if (instance_type == ASCII_STRING_TYPE ||
      instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  if (instance_type == FREE_SPACE_TYPE) {
    // No-barrier read: free space may be examined concurrently by the GC.
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return reinterpret_cast<ConstantPoolArray*>(this)->size();
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  // Code objects are the only remaining variable-sized type.
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
// --- Map: byte-field setters and bit_field/bit_field2 flags ---
// Instance size must be pointer-aligned; it is stored in words so it fits
// in a single byte.
4625 void Map::set_instance_size(int value) {
4626 DCHECK_EQ(0, value & (kPointerSize - 1));
4627 value >>= kPointerSizeLog2;
4628 DCHECK(0 <= value && value < 256);
4629 NOBARRIER_WRITE_BYTE_FIELD(
4630 this, kInstanceSizeOffset, static_cast<byte>(value));
4634 void Map::set_inobject_properties(int value) {
4635 DCHECK(0 <= value && value < 256);
4636 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4640 void Map::set_pre_allocated_property_fields(int value) {
4641 DCHECK(0 <= value && value < 256);
4642 WRITE_BYTE_FIELD(this,
4643 kPreAllocatedPropertyFieldsOffset,
4644 static_cast<byte>(value));
4648 InstanceType Map::instance_type() {
4649 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4653 void Map::set_instance_type(InstanceType value) {
4654 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4658 int Map::unused_property_fields() {
4659 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
// Clamped to 255 because the count is stored in one byte.
4663 void Map::set_unused_property_fields(int value) {
4664 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
// Raw accessors for the two one-byte flag fields.
4668 byte Map::bit_field() {
4669 return READ_BYTE_FIELD(this, kBitFieldOffset);
4673 void Map::set_bit_field(byte value) {
4674 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4678 byte Map::bit_field2() {
4679 return READ_BYTE_FIELD(this, kBitField2Offset);
4683 void Map::set_bit_field2(byte value) {
4684 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
// Sets or clears the kHasNonInstancePrototype bit in bit_field.
// NOTE(review): the if/else framing lines are elided in this listing.
4688 void Map::set_non_instance_prototype(bool value) {
4690 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4692 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4697 bool Map::has_non_instance_prototype() {
4698 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4702 void Map::set_function_with_prototype(bool value) {
4703 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4707 bool Map::function_with_prototype() {
4708 return FunctionWithPrototype::decode(bit_field());
4712 void Map::set_is_access_check_needed(bool access_check_needed) {
4713 if (access_check_needed) {
4714 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4716 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4721 bool Map::is_access_check_needed() {
4722 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4726 void Map::set_is_extensible(bool value) {
4728 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4730 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4734 bool Map::is_extensible() {
4735 return ((1 << kIsExtensible) & bit_field2()) != 0;
4739 void Map::set_is_prototype_map(bool value) {
4740 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4743 bool Map::is_prototype_map() {
4744 return IsPrototypeMapBits::decode(bit_field2());
// Dictionary-mode maps are also marked unstable in the same write.
4748 void Map::set_dictionary_map(bool value) {
4749 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4750 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4751 set_bit_field3(new_bit_field3);
4755 bool Map::is_dictionary_map() {
4756 return DictionaryMap::decode(bit_field3());
// Raw flags word of a Code object (interleaved here with the Map
// bit_field3 accessors below in the original file order).
4760 Code::Flags Code::flags() {
4761 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
// --- Map: bit_field3 packed flags, via BitField helper classes ---
4765 void Map::set_owns_descriptors(bool owns_descriptors) {
4766 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4770 bool Map::owns_descriptors() {
4771 return OwnsDescriptors::decode(bit_field3());
// One-way setter: the instance-call-handler bit is only ever set to true.
4775 void Map::set_has_instance_call_handler() {
4776 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4780 bool Map::has_instance_call_handler() {
4781 return HasInstanceCallHandler::decode(bit_field3());
// One-way setter: deprecation is irreversible.
4785 void Map::deprecate() {
4786 set_bit_field3(Deprecated::update(bit_field3(), true));
4790 bool Map::is_deprecated() {
4791 return Deprecated::decode(bit_field3());
4795 void Map::set_migration_target(bool value) {
4796 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4800 bool Map::is_migration_target() {
4801 return IsMigrationTarget::decode(bit_field3());
4805 void Map::set_done_inobject_slack_tracking(bool value) {
4806 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
4810 bool Map::done_inobject_slack_tracking() {
4811 return DoneInobjectSlackTracking::decode(bit_field3());
4815 void Map::set_construction_count(int value) {
4816 set_bit_field3(ConstructionCount::update(bit_field3(), value));
4820 int Map::construction_count() {
4821 return ConstructionCount::decode(bit_field3());
4825 void Map::freeze() {
4826 set_bit_field3(IsFrozen::update(bit_field3(), true));
4830 bool Map::is_frozen() {
4831 return IsFrozen::decode(bit_field3());
4835 void Map::mark_unstable() {
4836 set_bit_field3(IsUnstable::update(bit_field3(), true));
// Note the inversion: the stored bit tracks instability.
4840 bool Map::is_stable() {
4841 return !IsUnstable::decode(bit_field3());
// A map has a code cache iff the field is not the empty fixed array.
4845 bool Map::has_code_cache() {
4846 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
// A map can be deprecated if any own descriptor uses a representation
// that could later generalize (None/Smi/Double/HeapObject) or is a
// CONSTANT — i.e. any property whose layout might change.
4850 bool Map::CanBeDeprecated() {
4851 int descriptor = LastAdded();
4852 for (int i = 0; i <= descriptor; i++) {
4853 PropertyDetails details = instance_descriptors()->GetDetails(i);
4854 if (details.representation().IsNone()) return true;
4855 if (details.representation().IsSmi()) return true;
4856 if (details.representation().IsDouble()) return true;
4857 if (details.representation().IsHeapObject()) return true;
4858 if (details.type() == CONSTANT) return true;
// Deoptimizes code that depends on this map's prototype-chain shape.
// NOTE(review): guard lines are elided in this listing excerpt.
4864 void Map::NotifyLeafMapLayoutChange() {
4867 dependent_code()->DeoptimizeDependentCodeGroup(
4869 DependentCode::kPrototypeCheckGroup);
// Map checks can be omitted only for stable maps, and only when the flag
// enabling the optimization is on.
4874 bool Map::CanOmitMapChecks() {
4875 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
// --- DependentCode: a FixedArray whose first kGroupCount slots hold
// per-group entry counts (as Smis) and whose remaining slots, starting at
// kCodesStartIndex, hold the dependent Code objects / compilation infos. ---
4879 int DependentCode::number_of_entries(DependencyGroup group) {
4880 if (length() == 0) return 0;
4881 return Smi::cast(get(group))->value();
4885 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4886 set(group, Smi::FromInt(value));
4890 bool DependentCode::is_code_at(int i) {
4891 return get(kCodesStartIndex + i)->IsCode();
4894 Code* DependentCode::code_at(int i) {
4895 return Code::cast(get(kCodesStartIndex + i));
// Entries that are not Code are Foreigns wrapping a CompilationInfo*.
4899 CompilationInfo* DependentCode::compilation_info_at(int i) {
4900 return reinterpret_cast<CompilationInfo*>(
4901 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4905 void DependentCode::set_object_at(int i, Object* object) {
4906 set(kCodesStartIndex + i, object);
4910 Object* DependentCode::object_at(int i) {
4911 return get(kCodesStartIndex + i);
4915 Object** DependentCode::slot_at(int i) {
4916 return RawFieldOfElementAt(kCodesStartIndex + i);
4920 void DependentCode::clear_at(int i) {
4921 set_undefined(kCodesStartIndex + i);
4925 void DependentCode::copy(int from, int to) {
4926 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
// Makes room for one more entry in `group` by shifting the first element
// of every later group one slot to the right (groups are stored
// contiguously in group order).
4930 void DependentCode::ExtendGroup(DependencyGroup group) {
4931 GroupStartIndexes starts(this);
4932 for (int g = kGroupCount - 1; g > group; g--) {
4933 if (starts.at(g) < starts.at(g + 1)) {
4934 copy(starts.at(g), starts.at(g + 1));
// --- Code: flags word and kind/state queries ---
4940 void Code::set_flags(Code::Flags flags) {
// Guard that every Kind value fits in the KindField bit range.
4941 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4942 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4946 Code::Kind Code::kind() {
4947 return ExtractKindFromFlags(flags());
// True for stubs, handlers, and every inline-cache kind.
4951 bool Code::IsCodeStubOrIC() {
4952 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4953 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4954 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4955 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4956 kind() == TO_BOOLEAN_IC;
4960 InlineCacheState Code::ic_state() {
4961 InlineCacheState result = ExtractICStateFromFlags(flags());
4962 // Only allow uninitialized or debugger states for non-IC code
4963 // objects. This is used in the debugger to determine whether or not
4964 // a call to code object has been replaced with a debug break call.
4965 DCHECK(is_inline_cache_stub() ||
4966 result == UNINITIALIZED ||
4967 result == DEBUG_STUB);
4972 ExtraICState Code::extra_ic_state() {
4973 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4974 return ExtractExtraICStateFromFlags(flags());
4978 Code::StubType Code::type() {
4979 return ExtractTypeFromFlags(flags());
4983 // For initialization.
// Raw writes used only when a Code object is first set up.
4984 void Code::set_raw_kind_specific_flags1(int value) {
4985 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4989 void Code::set_raw_kind_specific_flags2(int value) {
4990 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4994 inline bool Code::is_crankshafted() {
4995 return IsCrankshaftedField::decode(
4996 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
// Crankshafted but not an optimized function => a hydrogen code stub.
5000 inline bool Code::is_hydrogen_stub() {
5001 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
5005 inline void Code::set_is_crankshafted(bool value) {
5006 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5007 int updated = IsCrankshaftedField::update(previous, value);
5008 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// The turbofanned bit is only meaningful for optimized code and stubs.
5012 inline bool Code::is_turbofanned() {
5013 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
5014 return IsTurbofannedField::decode(
5015 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5019 inline void Code::set_is_turbofanned(bool value) {
5020 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
5021 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5022 int updated = IsTurbofannedField::update(previous, value);
5023 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// --- Code: kind-specific flag accessors. Most are valid only for
// FUNCTION (full-codegen) code or crankshafted code, enforced by DCHECKs. ---
5027 bool Code::optimizable() {
5028 DCHECK_EQ(FUNCTION, kind());
5029 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
5033 void Code::set_optimizable(bool value) {
5034 DCHECK_EQ(FUNCTION, kind());
5035 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
// Full-code flags: packed booleans in the kFullCodeFlags byte.
5039 bool Code::has_deoptimization_support() {
5040 DCHECK_EQ(FUNCTION, kind());
5041 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5042 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
5046 void Code::set_has_deoptimization_support(bool value) {
5047 DCHECK_EQ(FUNCTION, kind());
5048 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5049 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
5050 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
5054 bool Code::has_debug_break_slots() {
5055 DCHECK_EQ(FUNCTION, kind());
5056 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5057 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
5061 void Code::set_has_debug_break_slots(bool value) {
5062 DCHECK_EQ(FUNCTION, kind());
5063 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5064 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
5065 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
5069 bool Code::is_compiled_optimizable() {
5070 DCHECK_EQ(FUNCTION, kind());
5071 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5072 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
5076 void Code::set_compiled_optimizable(bool value) {
5077 DCHECK_EQ(FUNCTION, kind());
5078 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5079 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
5080 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
// OSR (on-stack replacement) loop-nesting threshold, stored in
// kind-specific flags word 2.
5084 int Code::allow_osr_at_loop_nesting_level() {
5085 DCHECK_EQ(FUNCTION, kind());
5086 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5087 return AllowOSRAtLoopNestingLevelField::decode(fields);
5091 void Code::set_allow_osr_at_loop_nesting_level(int level) {
5092 DCHECK_EQ(FUNCTION, kind());
5093 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
5094 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5095 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
5096 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Profiler tick count is one byte wide, hence ticks < 256.
5100 int Code::profiler_ticks() {
5101 DCHECK_EQ(FUNCTION, kind());
5102 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
5106 void Code::set_profiler_ticks(int ticks) {
5107 DCHECK_EQ(FUNCTION, kind());
5108 DCHECK(ticks < 256);
5109 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
// For BUILTIN code, flags word 1 is reused as the builtin index.
5113 int Code::builtin_index() {
5114 DCHECK_EQ(BUILTIN, kind());
5115 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
5119 void Code::set_builtin_index(int index) {
5120 DCHECK_EQ(BUILTIN, kind());
5121 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
// Crankshafted-code-only fields: stack slot count and safepoint table
// offset, each a bitfield within the kind-specific flags words.
5125 unsigned Code::stack_slots() {
5126 DCHECK(is_crankshafted());
5127 return StackSlotsField::decode(
5128 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5132 void Code::set_stack_slots(unsigned slots) {
5133 CHECK(slots <= (1 << kStackSlotsBitCount));
5134 DCHECK(is_crankshafted());
5135 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5136 int updated = StackSlotsField::update(previous, slots);
5137 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5141 unsigned Code::safepoint_table_offset() {
5142 DCHECK(is_crankshafted());
5143 return SafepointTableOffsetField::decode(
5144 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5148 void Code::set_safepoint_table_offset(unsigned offset) {
5149 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5150 DCHECK(is_crankshafted());
5151 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5152 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5153 int updated = SafepointTableOffsetField::update(previous, offset);
5154 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Back-edge table offset is stored in pointer-size words; the accessors
// convert to/from bytes with kPointerSizeLog2 shifts.
5158 unsigned Code::back_edge_table_offset() {
5159 DCHECK_EQ(FUNCTION, kind());
5160 return BackEdgeTableOffsetField::decode(
5161 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5165 void Code::set_back_edge_table_offset(unsigned offset) {
5166 DCHECK_EQ(FUNCTION, kind());
5167 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5168 offset = offset >> kPointerSizeLog2;
5169 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5170 int updated = BackEdgeTableOffsetField::update(previous, offset);
5171 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// OSR patching is tracked implicitly: a nonzero nesting level means the
// back edges have been patched.
5175 bool Code::back_edges_patched_for_osr() {
5176 DCHECK_EQ(FUNCTION, kind());
5177 return allow_osr_at_loop_nesting_level() > 0;
// For TO_BOOLEAN_IC code the extra IC state encodes the to-boolean types.
5181 byte Code::to_boolean_state() {
5182 return extra_ic_state();
// --- Code: STUB / OPTIMIZED_FUNCTION flag bits and constant pool ---
5186 bool Code::has_function_cache() {
5187 DCHECK(kind() == STUB);
5188 return HasFunctionCacheField::decode(
5189 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5193 void Code::set_has_function_cache(bool flag) {
5194 DCHECK(kind() == STUB);
5195 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5196 int updated = HasFunctionCacheField::update(previous, flag);
5197 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5201 bool Code::marked_for_deoptimization() {
5202 DCHECK(kind() == OPTIMIZED_FUNCTION);
5203 return MarkedForDeoptimizationField::decode(
5204 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5208 void Code::set_marked_for_deoptimization(bool flag) {
5209 DCHECK(kind() == OPTIMIZED_FUNCTION);
// Marking is only legal when deoptimization is allowed in this isolate.
5210 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5211 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5212 int updated = MarkedForDeoptimizationField::update(previous, flag);
5213 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Weak stubs: stubs whose embedded maps/objects are treated weakly by GC.
// The mark_as_* setters are one-way (true only).
5217 bool Code::is_weak_stub() {
5218 return CanBeWeakStub() && WeakStubField::decode(
5219 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5223 void Code::mark_as_weak_stub() {
5224 DCHECK(CanBeWeakStub());
5225 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5226 int updated = WeakStubField::update(previous, true);
5227 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5231 bool Code::is_invalidated_weak_stub() {
5232 return is_weak_stub() && InvalidatedWeakStubField::decode(
5233 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5237 void Code::mark_as_invalidated_weak_stub() {
5238 DCHECK(is_inline_cache_stub());
5239 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5240 int updated = InvalidatedWeakStubField::update(previous, true);
5241 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Dispatches over the IC kind list via a macro; the CASE lines for the
// switch body are partially elided in this listing.
5245 bool Code::is_inline_cache_stub() {
5246 Kind kind = this->kind();
5248 #define CASE(name) case name: return true;
5251 default: return false;
5256 bool Code::is_keyed_stub() {
5257 return is_keyed_load_stub() || is_keyed_store_stub();
5261 bool Code::is_debug_stub() {
5262 return ic_state() == DEBUG_STUB;
// Constant pool pointer field, with write barrier on update.
5266 ConstantPoolArray* Code::constant_pool() {
5267 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
5271 void Code::set_constant_pool(Object* value) {
5272 DCHECK(value->IsConstantPoolArray());
5273 WRITE_FIELD(this, kConstantPoolOffset, value);
5274 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
// --- Code: flags word construction and field extraction ---
// Packs kind, IC state, stub type, extra IC state, and cache-holder flag
// into a single Flags value using the corresponding BitField encoders.
5278 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5279 ExtraICState extra_ic_state, StubType type,
5280 CacheHolderFlag holder) {
5281 // Compute the bit mask.
5282 unsigned int bits = KindField::encode(kind)
5283 | ICStateField::encode(ic_state)
5284 | TypeField::encode(type)
5285 | ExtraICStateField::encode(extra_ic_state)
5286 | CacheHolderField::encode(holder);
5287 return static_cast<Flags>(bits);
// Convenience wrapper: MONOMORPHIC ic_state.
5291 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5292 ExtraICState extra_ic_state,
5293 CacheHolderFlag holder,
5295 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
// Handlers record the original kind in the extra-IC-state slot.
5299 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5300 CacheHolderFlag holder) {
5301 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
// Field decoders, one per packed component.
5305 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5306 return KindField::decode(flags);
5310 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5311 return ICStateField::decode(flags);
5315 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5316 return ExtraICStateField::decode(flags);
5320 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5321 return TypeField::decode(flags);
5325 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5326 return CacheHolderField::decode(flags);
// Mask-out helpers used when comparing flags ignoring type/holder.
5330 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5331 int bits = flags & ~TypeField::kMask;
5332 return static_cast<Flags>(bits);
5336 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5337 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5338 return static_cast<Flags>(bits);
// Maps an instruction-start address back to its Code object by subtracting
// the header size.
5342 Code* Code::GetCodeFromTargetAddress(Address address) {
5343 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5344 // GetCodeFromTargetAddress might be called when marking objects during mark
5345 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5346 // Code::cast. Code::cast does not work when the object's map is
5348 Code* result = reinterpret_cast<Code*>(code);
// Reads a code entry address from memory and converts it to the owning
// heap object.
5353 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5355 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
// An object embedded in optimized code may be referenced weakly if the
// relevant --weak-embedded-* flags are on: transitionable maps, and
// JSObjects (directly or via a Cell).
5359 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5360 if (!FLAG_collect_maps) return false;
5361 if (object->IsMap()) {
5362 return Map::cast(object)->CanTransition() &&
5363 FLAG_weak_embedded_maps_in_optimized_code;
5365 if (object->IsJSObject() ||
5366 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
5367 return FLAG_weak_embedded_objects_in_optimized_code;
// Small fixed-capacity list of (map-to-find -> replacement) pairs used
// when copying code with embedded objects patched.
// NOTE(review): access-specifier lines are elided in this listing.
5373 class Code::FindAndReplacePattern {
5375 FindAndReplacePattern() : count_(0) { }
5376 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5377 DCHECK(count_ < kMaxCount);
5378 find_[count_] = map_to_find;
5379 replace_[count_] = obj_to_replace;
5383 static const int kMaxCount = 4;
5385 Handle<Map> find_[kMaxCount];
5386 Handle<Object> replace_[kMaxCount];
// In ICs only transitionable maps are embedded weakly, and only when both
// map collection and the weak-maps-in-IC flag are enabled.
5391 bool Code::IsWeakObjectInIC(Object* object) {
5392 return object->IsMap() && Map::cast(object)->CanTransition() &&
5393 FLAG_collect_maps &&
5394 FLAG_weak_embedded_maps_in_ic;
// --- Map: prototype field ---
5398 Object* Map::prototype() const {
5399 return READ_FIELD(this, kPrototypeOffset);
// Prototype must be null or a JSReceiver; write barrier is conditional on
// the caller-supplied mode.
5403 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5404 DCHECK(value->IsNull() || value->IsJSReceiver());
5405 WRITE_FIELD(this, kPrototypeOffset, value);
5406 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5410 // If the descriptor is using the empty transition array, install a new empty
5411 // transition array that will have place for an element transition.
// Ensures the map has a full TransitionArray: allocates one (preserving
// the back pointer) if absent, or widens a simple transition to a full
// array if needed.
5412 static void EnsureHasTransitionArray(Handle<Map> map) {
5413 Handle<TransitionArray> transitions;
5414 if (!map->HasTransitionArray()) {
5415 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
5416 transitions->set_back_pointer_storage(map->GetBackPointer());
5417 } else if (!map->transitions()->IsFullTransitionArray()) {
5418 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5422 map->set_transitions(*transitions);
// Installs a descriptor array and claims all of its descriptors as own.
5426 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
5427 int len = descriptors->number_of_descriptors();
5428 set_instance_descriptors(descriptors);
5429 SetNumberOfOwnDescriptors(len);
5433 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// bit_field3 is a raw uint32; on 64-bit targets the upper half of the
// word is zeroed so the whole pointer-sized slot is deterministic.
5436 void Map::set_bit_field3(uint32_t bits) {
5437 if (kInt32Size != kPointerSize) {
5438 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5440 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5444 uint32_t Map::bit_field3() {
5445 return READ_UINT32_FIELD(this, kBitField3Offset);
// Appends one descriptor to the (already fully-owned) descriptor array
// and bumps the own-descriptor count.
5449 void Map::AppendDescriptor(Descriptor* desc) {
5450 DescriptorArray* descriptors = instance_descriptors();
5451 int number_of_own_descriptors = NumberOfOwnDescriptors();
5452 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5453 descriptors->Append(desc);
5454 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
// The kTransitionsOrBackPointerOffset slot is overloaded: it holds either
// a back-pointer map (or undefined), or a TransitionArray that itself
// stores the back pointer. The queries below disambiguate by object type.
5458 Object* Map::GetBackPointer() {
5459 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
// NOTE(review): tests IsDescriptorArray but casts to TransitionArray —
// presumably TransitionArray is a DescriptorArray-shaped FixedArray here;
// confirm against objects.h.
5460 if (object->IsDescriptorArray()) {
5461 return TransitionArray::cast(object)->back_pointer_storage();
5463 DCHECK(object->IsMap() || object->IsUndefined());
5469 bool Map::HasElementsTransition() {
5470 return HasTransitionArray() && transitions()->HasElementsTransition();
5474 bool Map::HasTransitionArray() const {
5475 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5476 return object->IsTransitionArray();
// Looks up the target of the elements-kind transition by its symbol key.
5480 Map* Map::elements_transition_map() {
5481 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
5482 return transitions()->GetTarget(index);
// More transitions fit as long as a grown transition array would still be
// a regular-sized heap object.
5486 bool Map::CanHaveMoreTransitions() {
5487 if (!HasTransitionArray()) return true;
5488 return FixedArray::SizeFor(transitions()->length() +
5489 TransitionArray::kTransitionSize)
5490 <= Page::kMaxRegularHeapObjectSize;
5494 Map* Map::GetTransition(int transition_index) {
5495 return transitions()->GetTarget(transition_index);
// Returns the transition index for `name`, or kNotFound.
5499 int Map::SearchTransition(Name* name) {
5500 if (HasTransitionArray()) return transitions()->Search(name);
5501 return TransitionArray::kNotFound;
// Falls back to the empty fixed array when no prototype transitions exist.
5505 FixedArray* Map::GetPrototypeTransitions() {
5506 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5507 if (!transitions()->HasPrototypeTransitions()) {
5508 return GetHeap()->empty_fixed_array();
5510 return transitions()->GetPrototypeTransitions();
// Installs a new prototype-transitions array, zapping the old one first
// (transition arrays are unshared — see set_transitions below) and
// preserving the recorded number of proto transitions.
5514 void Map::SetPrototypeTransitions(
5515 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5516 EnsureHasTransitionArray(map);
5517 int old_number_of_transitions = map->NumberOfProtoTransitions();
5519 if (map->HasPrototypeTransitions()) {
5520 DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
5521 map->ZapPrototypeTransitions();
5524 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5525 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5529 bool Map::HasPrototypeTransitions() {
5530 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
5534 TransitionArray* Map::transitions() const {
5535 DCHECK(HasTransitionArray());
5536 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5537 return TransitionArray::cast(object);
// Replaces the transition array. The debug-mode loop checks every old
// target that shares our descriptors still appears in the replacement.
// NOTE(review): some zap/#ifdef framing lines are elided in this listing.
5541 void Map::set_transitions(TransitionArray* transition_array,
5542 WriteBarrierMode mode) {
5543 // Transition arrays are not shared. When one is replaced, it should not
5544 // keep referenced objects alive, so we zap it.
5545 // When there is another reference to the array somewhere (e.g. a handle),
5546 // not zapping turns from a waste of memory into a source of crashes.
5547 if (HasTransitionArray()) {
5549 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
5550 Map* target = transitions()->GetTarget(i);
5551 if (target->instance_descriptors() == instance_descriptors()) {
5552 Name* key = transitions()->GetKey(i);
5553 int new_target_index = transition_array->Search(key);
5554 DCHECK(new_target_index != TransitionArray::kNotFound);
5555 DCHECK(transition_array->GetTarget(new_target_index) == target);
5559 DCHECK(transitions() != transition_array);
5563 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5564 CONDITIONAL_WRITE_BARRIER(
5565 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
// Barrier-free initialization to undefined (the heap value needs no
// write barrier).
5569 void Map::init_back_pointer(Object* undefined) {
5570 DCHECK(undefined->IsUndefined());
5571 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
// Sets or clears the back pointer; enforces the map<->undefined state
// transition and routes the store into the TransitionArray if present.
5575 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5576 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5577 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5578 (value->IsMap() && GetBackPointer()->IsUndefined()));
5579 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5580 if (object->IsTransitionArray()) {
5581 TransitionArray::cast(object)->set_back_pointer_storage(value);
5583 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5584 CONDITIONAL_WRITE_BARRIER(
5585 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
// --- Macro-generated field accessors: ACCESSORS(holder, name, type,
// offset) expands to getter/setter pairs; ACCESSORS_TO_SMI stores the
// value as a Smi; BOOL_ACCESSORS exposes one bit of a Smi flags field. ---
// Map tail fields.
5590 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5591 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5592 ACCESSORS(Map, constructor, Object, kConstructorOffset)
// JSFunction.
5594 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5595 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5596 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
// Global objects and proxies.
5598 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5599 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5600 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5601 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5603 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5604 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
// Accessor/interceptor descriptor structs.
5606 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5607 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5608 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5609 kExpectedReceiverTypeOffset)
5611 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5612 kSerializedDataOffset)
5614 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5617 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5618 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5619 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5621 ACCESSORS(Box, value, Object, kValueOffset)
5623 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5624 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5626 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5627 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5628 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5630 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5631 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5632 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5633 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5634 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5635 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5637 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5638 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
// Template info hierarchy.
5640 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5641 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5642 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5644 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5645 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5646 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5647 kPrototypeTemplateOffset)
5648 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5649 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5650 kNamedPropertyHandlerOffset)
5651 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5652 kIndexedPropertyHandlerOffset)
5653 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5654 kInstanceTemplateOffset)
5655 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5656 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5657 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5658 kInstanceCallHandlerOffset)
5659 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5660 kAccessCheckInfoOffset)
5661 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5663 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5664 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5665 kInternalFieldCountOffset)
5667 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5668 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5670 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
// Allocation sites / mementos (pretenuring bookkeeping).
5672 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5673 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5674 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5675 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5676 kPretenureCreateCountOffset)
5677 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5678 kDependentCodeOffset)
5679 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5680 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
// Script metadata.
5682 ACCESSORS(Script, source, Object, kSourceOffset)
5683 ACCESSORS(Script, name, Object, kNameOffset)
5684 ACCESSORS(Script, id, Smi, kIdOffset)
5685 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5686 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5687 ACCESSORS(Script, context_data, Object, kContextOffset)
5688 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
5689 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5690 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5691 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5692 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5693 kEvalFrominstructionsOffsetOffset)
5694 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5695 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5696 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5697 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5699 Script::CompilationType Script::compilation_type() {
5700 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5701 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5703 void Script::set_compilation_type(CompilationType type) {
5704 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5705 type == COMPILATION_TYPE_EVAL));
5707 Script::CompilationState Script::compilation_state() {
5708 return BooleanBit::get(flags(), kCompilationStateBit) ?
5709 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5711 void Script::set_compilation_state(CompilationState state) {
5712 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5713 state == COMPILATION_STATE_COMPILED));
5717 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5718 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5719 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5720 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5722 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5723 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5724 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5725 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5727 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5728 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5729 kOptimizedCodeMapOffset)
5730 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5731 ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
5732 kFeedbackVectorOffset)
5733 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5734 kInstanceClassNameOffset)
5735 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5736 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5737 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5738 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5741 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5742 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5743 kHiddenPrototypeBit)
5744 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5745 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5746 kNeedsAccessCheckBit)
5747 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5748 kReadOnlyPrototypeBit)
5749 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5750 kRemovePrototypeBit)
5751 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5753 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5755 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5758 BOOL_ACCESSORS(SharedFunctionInfo,
5760 allows_lazy_compilation,
5761 kAllowLazyCompilation)
5762 BOOL_ACCESSORS(SharedFunctionInfo,
5764 allows_lazy_compilation_without_context,
5765 kAllowLazyCompilationWithoutContext)
5766 BOOL_ACCESSORS(SharedFunctionInfo,
5770 BOOL_ACCESSORS(SharedFunctionInfo,
5772 has_duplicate_parameters,
5773 kHasDuplicateParameters)
5776 #if V8_HOST_ARCH_32_BIT
5777 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5778 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5779 kFormalParameterCountOffset)
5780 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5781 kExpectedNofPropertiesOffset)
5782 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5783 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5784 kStartPositionAndTypeOffset)
5785 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5786 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5787 kFunctionTokenPositionOffset)
5788 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5789 kCompilerHintsOffset)
5790 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5791 kOptCountAndBailoutReasonOffset)
5792 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5793 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5794 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
// On 64-bit hosts two of these "pseudo Smi" int fields share one pointer
// slot.  The LO half lives at a pointer-aligned offset and is stored
// shifted left by one with a clear tag bit so the GC never mistakes it for
// a heap pointer; the HI half is a plain int at offset + kIntSize.
// NOTE(review): the `offset` argument of WRITE_INT_FIELD and the closing
// braces were lost in a bad merge and are restored here.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() const {                                      \
    int value = READ_INT_FIELD(this, offset);                     \
    DCHECK(kHeapObjectTag == 1);                                  \
    DCHECK((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    DCHECK(kHeapObjectTag == 1);                                  \
    /* Value must survive the shift: top two bits equal. */       \
    DCHECK((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x0);                          \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)
5820 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5821 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5822 formal_parameter_count,
5823 kFormalParameterCountOffset)
5825 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5826 expected_nof_properties,
5827 kExpectedNofPropertiesOffset)
5828 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5830 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5831 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5832 start_position_and_type,
5833 kStartPositionAndTypeOffset)
5835 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5836 function_token_position,
5837 kFunctionTokenPositionOffset)
5838 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5840 kCompilerHintsOffset)
5842 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5843 opt_count_and_bailout_reason,
5844 kOptCountAndBailoutReasonOffset)
5845 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5847 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5849 kAstNodeCountOffset)
5850 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5852 kProfilerTicksOffset)
5857 BOOL_GETTER(SharedFunctionInfo,
5859 optimization_disabled,
5860 kOptimizationDisabled)
5863 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5864 set_compiler_hints(BooleanBit::set(compiler_hints(),
5865 kOptimizationDisabled,
5867 // If disabling optimizations we reflect that in the code object so
5868 // it will not be counted as optimizable code.
5869 if ((code()->kind() == Code::FUNCTION) && disable) {
5870 code()->set_optimizable(false);
5875 StrictMode SharedFunctionInfo::strict_mode() {
5876 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5881 void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5882 // We only allow mode transitions from sloppy to strict.
5883 DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5884 int hints = compiler_hints();
5885 hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5886 set_compiler_hints(hints);
5890 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5891 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5893 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5894 name_should_print_as_anonymous,
5895 kNameShouldPrintAsAnonymous)
5896 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5897 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5898 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5899 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5900 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5901 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5902 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5904 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5905 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5907 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5909 bool Script::HasValidSource() {
5910 Object* src = this->source();
5911 if (!src->IsString()) return true;
5912 String* src_str = String::cast(src);
5913 if (!StringShape(src_str).IsExternal()) return true;
5914 if (src_str->IsOneByteRepresentation()) {
5915 return ExternalAsciiString::cast(src)->resource() != NULL;
5916 } else if (src_str->IsTwoByteRepresentation()) {
5917 return ExternalTwoByteString::cast(src)->resource() != NULL;
5923 void SharedFunctionInfo::DontAdaptArguments() {
5924 DCHECK(code()->kind() == Code::BUILTIN);
5925 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
5929 int SharedFunctionInfo::start_position() const {
5930 return start_position_and_type() >> kStartPositionShift;
5934 void SharedFunctionInfo::set_start_position(int start_position) {
5935 set_start_position_and_type((start_position << kStartPositionShift)
5936 | (start_position_and_type() & ~kStartPositionMask));
5940 Code* SharedFunctionInfo::code() const {
5941 return Code::cast(READ_FIELD(this, kCodeOffset));
5945 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5946 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5947 WRITE_FIELD(this, kCodeOffset, value);
5948 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5952 void SharedFunctionInfo::ReplaceCode(Code* value) {
5953 // If the GC metadata field is already used then the function was
5954 // enqueued as a code flushing candidate and we remove it now.
5955 if (code()->gc_metadata() != NULL) {
5956 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5957 flusher->EvictCandidate(this);
5960 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5966 ScopeInfo* SharedFunctionInfo::scope_info() const {
5967 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5971 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5972 WriteBarrierMode mode) {
5973 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5974 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5977 reinterpret_cast<Object*>(value),
5982 bool SharedFunctionInfo::is_compiled() {
5984 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
5988 bool SharedFunctionInfo::IsApiFunction() {
5989 return function_data()->IsFunctionTemplateInfo();
5993 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5994 DCHECK(IsApiFunction());
5995 return FunctionTemplateInfo::cast(function_data());
5999 bool SharedFunctionInfo::HasBuiltinFunctionId() {
6000 return function_data()->IsSmi();
6004 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
6005 DCHECK(HasBuiltinFunctionId());
6006 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
6010 int SharedFunctionInfo::ic_age() {
6011 return ICAgeBits::decode(counters());
6015 void SharedFunctionInfo::set_ic_age(int ic_age) {
6016 set_counters(ICAgeBits::update(counters(), ic_age));
6020 int SharedFunctionInfo::deopt_count() {
6021 return DeoptCountBits::decode(counters());
6025 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
6026 set_counters(DeoptCountBits::update(counters(), deopt_count));
6030 void SharedFunctionInfo::increment_deopt_count() {
6031 int value = counters();
6032 int deopt_count = DeoptCountBits::decode(value);
6033 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6034 set_counters(DeoptCountBits::update(value, deopt_count));
6038 int SharedFunctionInfo::opt_reenable_tries() {
6039 return OptReenableTriesBits::decode(counters());
6043 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6044 set_counters(OptReenableTriesBits::update(counters(), tries));
6048 int SharedFunctionInfo::opt_count() {
6049 return OptCountBits::decode(opt_count_and_bailout_reason());
6053 void SharedFunctionInfo::set_opt_count(int opt_count) {
6054 set_opt_count_and_bailout_reason(
6055 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6059 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
6060 BailoutReason reason = static_cast<BailoutReason>(
6061 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6066 bool SharedFunctionInfo::has_deoptimization_support() {
6067 Code* code = this->code();
6068 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6072 void SharedFunctionInfo::TryReenableOptimization() {
6073 int tries = opt_reenable_tries();
6074 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6075 // We reenable optimization whenever the number of tries is a large
6076 // enough power of 2.
6077 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6078 set_optimization_disabled(false);
6081 code()->set_optimizable(true);
6086 bool JSFunction::IsBuiltin() {
6087 return context()->global_object()->IsJSBuiltinsObject();
6091 bool JSFunction::IsFromNativeScript() {
6092 Object* script = shared()->script();
6093 bool native = script->IsScript() &&
6094 Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
6095 DCHECK(!IsBuiltin() || native); // All builtins are also native.
6100 bool JSFunction::IsFromExtensionScript() {
6101 Object* script = shared()->script();
6102 return script->IsScript() &&
6103 Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
6107 bool JSFunction::NeedsArgumentsAdaption() {
6108 return shared()->formal_parameter_count() !=
6109 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
6113 bool JSFunction::IsOptimized() {
6114 return code()->kind() == Code::OPTIMIZED_FUNCTION;
6118 bool JSFunction::IsOptimizable() {
6119 return code()->kind() == Code::FUNCTION && code()->optimizable();
6123 bool JSFunction::IsMarkedForOptimization() {
6124 return code() == GetIsolate()->builtins()->builtin(
6125 Builtins::kCompileOptimized);
6129 bool JSFunction::IsMarkedForConcurrentOptimization() {
6130 return code() == GetIsolate()->builtins()->builtin(
6131 Builtins::kCompileOptimizedConcurrent);
6135 bool JSFunction::IsInOptimizationQueue() {
6136 return code() == GetIsolate()->builtins()->builtin(
6137 Builtins::kInOptimizationQueue);
6141 bool JSFunction::IsInobjectSlackTrackingInProgress() {
6142 return has_initial_map() &&
6143 initial_map()->construction_count() != JSFunction::kNoSlackTracking;
6147 Code* JSFunction::code() {
6149 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6153 void JSFunction::set_code(Code* value) {
6154 DCHECK(!GetHeap()->InNewSpace(value));
6155 Address entry = value->entry();
6156 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6157 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6159 HeapObject::RawField(this, kCodeEntryOffset),
6164 void JSFunction::set_code_no_write_barrier(Code* value) {
6165 DCHECK(!GetHeap()->InNewSpace(value));
6166 Address entry = value->entry();
6167 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6171 void JSFunction::ReplaceCode(Code* code) {
6172 bool was_optimized = IsOptimized();
6173 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6175 if (was_optimized && is_optimized) {
6176 shared()->EvictFromOptimizedCodeMap(this->code(),
6177 "Replacing with another optimized code");
6182 // Add/remove the function from the list of optimized functions for this
6183 // context based on the state change.
6184 if (!was_optimized && is_optimized) {
6185 context()->native_context()->AddOptimizedFunction(this);
6187 if (was_optimized && !is_optimized) {
6188 // TODO(titzer): linear in the number of optimized functions; fix!
6189 context()->native_context()->RemoveOptimizedFunction(this);
6194 Context* JSFunction::context() {
6195 return Context::cast(READ_FIELD(this, kContextOffset));
6199 JSObject* JSFunction::global_proxy() {
6200 return context()->global_proxy();
6204 void JSFunction::set_context(Object* value) {
6205 DCHECK(value->IsUndefined() || value->IsContext());
6206 WRITE_FIELD(this, kContextOffset, value);
6207 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6210 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6211 kPrototypeOrInitialMapOffset)
6214 Map* JSFunction::initial_map() {
6215 return Map::cast(prototype_or_initial_map());
6219 bool JSFunction::has_initial_map() {
6220 return prototype_or_initial_map()->IsMap();
6224 bool JSFunction::has_instance_prototype() {
6225 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6229 bool JSFunction::has_prototype() {
6230 return map()->has_non_instance_prototype() || has_instance_prototype();
6234 Object* JSFunction::instance_prototype() {
6235 DCHECK(has_instance_prototype());
6236 if (has_initial_map()) return initial_map()->prototype();
6237 // When there is no initial map and the prototype is a JSObject, the
6238 // initial map field is used for the prototype field.
6239 return prototype_or_initial_map();
6243 Object* JSFunction::prototype() {
6244 DCHECK(has_prototype());
6245 // If the function's prototype property has been set to a non-JSObject
6246 // value, that value is stored in the constructor field of the map.
6247 if (map()->has_non_instance_prototype()) return map()->constructor();
6248 return instance_prototype();
6252 bool JSFunction::should_have_prototype() {
6253 return map()->function_with_prototype();
6257 bool JSFunction::is_compiled() {
6259 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
6263 FixedArray* JSFunction::literals() {
6264 DCHECK(!shared()->bound());
6265 return literals_or_bindings();
6269 void JSFunction::set_literals(FixedArray* literals) {
6270 DCHECK(!shared()->bound());
6271 set_literals_or_bindings(literals);
6275 FixedArray* JSFunction::function_bindings() {
6276 DCHECK(shared()->bound());
6277 return literals_or_bindings();
6281 void JSFunction::set_function_bindings(FixedArray* bindings) {
6282 DCHECK(shared()->bound());
6283 // Bound function literal may be initialized to the empty fixed array
6284 // before the bindings are set.
6285 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6286 bindings->map() == GetHeap()->fixed_cow_array_map());
6287 set_literals_or_bindings(bindings);
6291 int JSFunction::NumberOfLiterals() {
6292 DCHECK(!shared()->bound());
6293 return literals()->length();
6297 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
6298 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6299 return READ_FIELD(this, OffsetOfFunctionWithId(id));
6303 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
6305 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6306 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
6307 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
6311 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
6312 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6313 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
6317 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
6319 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6320 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
6321 DCHECK(!GetHeap()->InNewSpace(value));
6325 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6326 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6327 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6328 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6331 void JSProxy::InitializeBody(int object_size, Object* value) {
6332 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6333 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6334 WRITE_FIELD(this, offset, value);
6339 ACCESSORS(JSCollection, table, Object, kTableOffset)
6342 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6343 template<class Derived, class TableType> \
6344 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6345 return type::cast(READ_FIELD(this, offset)); \
6347 template<class Derived, class TableType> \
6348 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6349 type* value, WriteBarrierMode mode) { \
6350 WRITE_FIELD(this, offset, value); \
6351 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6354 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6355 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
6356 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)
6358 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6361 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6362 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6365 Address Foreign::foreign_address() {
6366 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6370 void Foreign::set_foreign_address(Address value) {
6371 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6375 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6376 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6377 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6378 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6379 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6380 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
6382 bool JSGeneratorObject::is_suspended() {
6383 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6384 DCHECK_EQ(kGeneratorClosed, 0);
6385 return continuation() > 0;
6388 bool JSGeneratorObject::is_closed() {
6389 return continuation() == kGeneratorClosed;
6392 bool JSGeneratorObject::is_executing() {
6393 return continuation() == kGeneratorExecuting;
6396 ACCESSORS(JSModule, context, Object, kContextOffset)
6397 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6400 ACCESSORS(JSValue, value, Object, kValueOffset)
6403 HeapNumber* HeapNumber::cast(Object* object) {
6404 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6405 return reinterpret_cast<HeapNumber*>(object);
6409 const HeapNumber* HeapNumber::cast(const Object* object) {
6410 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6411 return reinterpret_cast<const HeapNumber*>(object);
6415 ACCESSORS(JSDate, value, Object, kValueOffset)
6416 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6417 ACCESSORS(JSDate, year, Object, kYearOffset)
6418 ACCESSORS(JSDate, month, Object, kMonthOffset)
6419 ACCESSORS(JSDate, day, Object, kDayOffset)
6420 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6421 ACCESSORS(JSDate, hour, Object, kHourOffset)
6422 ACCESSORS(JSDate, min, Object, kMinOffset)
6423 ACCESSORS(JSDate, sec, Object, kSecOffset)
6426 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6427 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6428 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6429 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6430 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6431 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6434 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6435 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6436 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6437 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6438 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6439 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6440 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6443 void Code::WipeOutHeader() {
6444 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6445 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6446 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6447 WRITE_FIELD(this, kConstantPoolOffset, NULL);
6448 // Do not wipe out major/minor keys on a code stub or IC
6449 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6450 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6455 Object* Code::type_feedback_info() {
6456 DCHECK(kind() == FUNCTION);
6457 return raw_type_feedback_info();
6461 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6462 DCHECK(kind() == FUNCTION);
6463 set_raw_type_feedback_info(value, mode);
6464 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6469 uint32_t Code::stub_key() {
6470 DCHECK(IsCodeStubOrIC());
6471 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6472 return static_cast<uint32_t>(smi_key->value());
6476 void Code::set_stub_key(uint32_t key) {
6477 DCHECK(IsCodeStubOrIC());
6478 set_raw_type_feedback_info(Smi::FromInt(key));
6482 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6483 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6486 byte* Code::instruction_start() {
6487 return FIELD_ADDR(this, kHeaderSize);
6491 byte* Code::instruction_end() {
6492 return instruction_start() + instruction_size();
6496 int Code::body_size() {
6497 return RoundUp(instruction_size(), kObjectAlignment);
6501 ByteArray* Code::unchecked_relocation_info() {
6502 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6506 byte* Code::relocation_start() {
6507 return unchecked_relocation_info()->GetDataStartAddress();
6511 int Code::relocation_size() {
6512 return unchecked_relocation_info()->length();
6516 byte* Code::entry() {
6517 return instruction_start();
6521 bool Code::contains(byte* inner_pointer) {
6522 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6526 ACCESSORS(JSArray, length, Object, kLengthOffset)
6529 void* JSArrayBuffer::backing_store() const {
6530 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6531 return reinterpret_cast<void*>(ptr);
6535 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6536 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6537 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6541 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6542 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6545 bool JSArrayBuffer::is_external() {
6546 return BooleanBit::get(flag(), kIsExternalBit);
6550 void JSArrayBuffer::set_is_external(bool value) {
6551 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6555 bool JSArrayBuffer::should_be_freed() {
6556 return BooleanBit::get(flag(), kShouldBeFreed);
6560 void JSArrayBuffer::set_should_be_freed(bool value) {
6561 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6565 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6566 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6569 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6570 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6571 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6572 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6573 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
6575 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6578 JSRegExp::Type JSRegExp::TypeTag() {
6579 Object* data = this->data();
6580 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6581 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6582 return static_cast<JSRegExp::Type>(smi->value());
6586 int JSRegExp::CaptureCount() {
6587 switch (TypeTag()) {
6591 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6599 JSRegExp::Flags JSRegExp::GetFlags() {
6600 DCHECK(this->data()->IsFixedArray());
6601 Object* data = this->data();
6602 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6603 return Flags(smi->value());
6607 String* JSRegExp::Pattern() {
6608 DCHECK(this->data()->IsFixedArray());
6609 Object* data = this->data();
6610 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
6615 Object* JSRegExp::DataAt(int index) {
6616 DCHECK(TypeTag() != NOT_COMPILED);
6617 return FixedArray::cast(data())->get(index);
6621 void JSRegExp::SetDataAt(int index, Object* value) {
6622 DCHECK(TypeTag() != NOT_COMPILED);
6623 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6624 FixedArray::cast(data())->set(index, value);
6628 ElementsKind JSObject::GetElementsKind() {
6629 ElementsKind kind = map()->elements_kind();
6631 FixedArrayBase* fixed_array =
6632 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6634 // If a GC was caused while constructing this object, the elements
6635 // pointer may point to a one pointer filler map.
6636 if (ElementsAreSafeToExamine()) {
6637 Map* map = fixed_array->map();
6638 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6639 (map == GetHeap()->fixed_array_map() ||
6640 map == GetHeap()->fixed_cow_array_map())) ||
6641 (IsFastDoubleElementsKind(kind) &&
6642 (fixed_array->IsFixedDoubleArray() ||
6643 fixed_array == GetHeap()->empty_fixed_array())) ||
6644 (kind == DICTIONARY_ELEMENTS &&
6645 fixed_array->IsFixedArray() &&
6646 fixed_array->IsDictionary()) ||
6647 (kind > DICTIONARY_ELEMENTS));
6648 DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6649 (elements()->IsFixedArray() && elements()->length() >= 2));
6656 ElementsAccessor* JSObject::GetElementsAccessor() {
6657 return ElementsAccessor::ForKind(GetElementsKind());
6661 bool JSObject::HasFastObjectElements() {
6662 return IsFastObjectElementsKind(GetElementsKind());
6666 bool JSObject::HasFastSmiElements() {
6667 return IsFastSmiElementsKind(GetElementsKind());
6671 bool JSObject::HasFastSmiOrObjectElements() {
6672 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6676 bool JSObject::HasFastDoubleElements() {
6677 return IsFastDoubleElementsKind(GetElementsKind());
6681 bool JSObject::HasFastHoleyElements() {
6682 return IsFastHoleyElementsKind(GetElementsKind());
6686 bool JSObject::HasFastElements() {
6687 return IsFastElementsKind(GetElementsKind());
6691 bool JSObject::HasDictionaryElements() {
6692 return GetElementsKind() == DICTIONARY_ELEMENTS;
6696 bool JSObject::HasSloppyArgumentsElements() {
6697 return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
6701 bool JSObject::HasExternalArrayElements() {
6702 HeapObject* array = elements();
6703 DCHECK(array != NULL);
6704 return array->IsExternalArray();
6708 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6709 bool JSObject::HasExternal##Type##Elements() { \
6710 HeapObject* array = elements(); \
6711 DCHECK(array != NULL); \
6712 if (!array->IsHeapObject()) \
6714 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6717 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6719 #undef EXTERNAL_ELEMENTS_CHECK
6722 bool JSObject::HasFixedTypedArrayElements() {
6723 HeapObject* array = elements();
6724 DCHECK(array != NULL);
6725 return array->IsFixedTypedArrayBase();
6729 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6730 bool JSObject::HasFixed##Type##Elements() { \
6731 HeapObject* array = elements(); \
6732 DCHECK(array != NULL); \
6733 if (!array->IsHeapObject()) \
6735 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6738 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6740 #undef FIXED_TYPED_ELEMENTS_CHECK
6743 bool JSObject::HasNamedInterceptor() {
6744 return map()->has_named_interceptor();
6748 bool JSObject::HasIndexedInterceptor() {
6749 return map()->has_indexed_interceptor();
6753 NameDictionary* JSObject::property_dictionary() {
6754 DCHECK(!HasFastProperties());
6755 return NameDictionary::cast(properties());
6759 SeededNumberDictionary* JSObject::element_dictionary() {
6760 DCHECK(HasDictionaryElements());
6761 return SeededNumberDictionary::cast(elements());
6765 bool Name::IsHashFieldComputed(uint32_t field) {
6766 return (field & kHashNotComputedMask) == 0;
6770 bool Name::HasHashCode() {
6771 return IsHashFieldComputed(hash_field());
6775 uint32_t Name::Hash() {
6776 // Fast case: has hash code already been computed?
6777 uint32_t field = hash_field();
6778 if (IsHashFieldComputed(field)) return field >> kHashShift;
6779 // Slow case: compute hash code and set it. Has to be a string.
6780 return String::cast(this)->ComputeAndSetHash();
6784 StringHasher::StringHasher(int length, uint32_t seed)
6786 raw_running_hash_(seed),
6788 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6789 is_first_char_(true) {
6790 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6794 bool StringHasher::has_trivial_hash() {
6795 return length_ > String::kMaxHashCalcLength;
6799 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6801 running_hash += (running_hash << 10);
6802 running_hash ^= (running_hash >> 6);
6803 return running_hash;
6807 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6808 running_hash += (running_hash << 3);
6809 running_hash ^= (running_hash >> 11);
6810 running_hash += (running_hash << 15);
6811 if ((running_hash & String::kHashBitMask) == 0) {
6814 return running_hash;
6818 void StringHasher::AddCharacter(uint16_t c) {
6819 // Use the Jenkins one-at-a-time hash function to update the hash
6820 // for the given character.
6821 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6825 bool StringHasher::UpdateIndex(uint16_t c) {
6826 DCHECK(is_array_index_);
6827 if (c < '0' || c > '9') {
6828 is_array_index_ = false;
6832 if (is_first_char_) {
6833 is_first_char_ = false;
6834 if (c == '0' && length_ > 1) {
6835 is_array_index_ = false;
6839 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6840 is_array_index_ = false;
6843 array_index_ = array_index_ * 10 + d;
6848 template<typename Char>
6849 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6850 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6852 if (is_array_index_) {
6853 for (; i < length; i++) {
6854 AddCharacter(chars[i]);
6855 if (!UpdateIndex(chars[i])) {
6861 for (; i < length; i++) {
6862 DCHECK(!is_array_index_);
6863 AddCharacter(chars[i]);
6868 template <typename schar>
6869 uint32_t StringHasher::HashSequentialString(const schar* chars,
6872 StringHasher hasher(length, seed);
6873 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6874 return hasher.GetHashField();
6878 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6879 IteratingStringHasher hasher(string->length(), seed);
6881 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6882 ConsString* cons_string = String::VisitFlat(&hasher, string);
6883 // The string was flat.
6884 if (cons_string == NULL) return hasher.GetHashField();
6885 // This is a ConsString, iterate across it.
6886 ConsStringIteratorOp op(cons_string);
6888 while (NULL != (string = op.Next(&offset))) {
6889 String::VisitFlat(&hasher, string, offset);
6891 return hasher.GetHashField();
6895 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6897 AddCharacters(chars, length);
6901 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6903 AddCharacters(chars, length);
6907 bool Name::AsArrayIndex(uint32_t* index) {
6908 return IsString() && String::cast(this)->AsArrayIndex(index);
6912 bool String::AsArrayIndex(uint32_t* index) {
6913 uint32_t field = hash_field();
6914 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6917 return SlowAsArrayIndex(index);
6921 void String::SetForwardedInternalizedString(String* canonical) {
6922 DCHECK(IsInternalizedString());
6923 DCHECK(HasHashCode());
6924 if (canonical == this) return; // No need to forward.
6925 DCHECK(SlowEquals(canonical));
6926 DCHECK(canonical->IsInternalizedString());
6927 DCHECK(canonical->HasHashCode());
6928 WRITE_FIELD(this, kHashFieldOffset, canonical);
6929 // Setting the hash field to a tagged value sets the LSB, causing the hash
6930 // code to be interpreted as uninitialized. We use this fact to recognize
6931 // that we have a forwarded string.
6932 DCHECK(!HasHashCode());
6936 String* String::GetForwardedInternalizedString() {
6937 DCHECK(IsInternalizedString());
6938 if (HasHashCode()) return this;
6939 String* canonical = String::cast(READ_FIELD(this, kHashFieldOffset));
6940 DCHECK(canonical->IsInternalizedString());
6941 DCHECK(SlowEquals(canonical));
6942 DCHECK(canonical->HasHashCode());
6947 Object* JSReceiver::GetConstructor() {
6948 return map()->constructor();
6952 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
6953 Handle<Name> name) {
6954 if (object->IsJSProxy()) {
6955 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6956 return JSProxy::HasPropertyWithHandler(proxy, name);
6958 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
6959 if (!result.has_value) return Maybe<bool>();
6960 return maybe(result.value != ABSENT);
6964 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
6965 Handle<Name> name) {
6966 if (object->IsJSProxy()) {
6967 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6968 return JSProxy::HasPropertyWithHandler(proxy, name);
6970 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
6971 if (!result.has_value) return Maybe<bool>();
6972 return maybe(result.value != ABSENT);
6976 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
6977 Handle<JSReceiver> object, Handle<Name> key) {
6979 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6980 return GetElementAttribute(object, index);
6982 LookupIterator it(object, key);
6983 return GetPropertyAttributes(&it);
6987 Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
6988 Handle<JSReceiver> object, uint32_t index) {
6989 if (object->IsJSProxy()) {
6990 return JSProxy::GetElementAttributeWithHandler(
6991 Handle<JSProxy>::cast(object), object, index);
6993 return JSObject::GetElementAttributeWithReceiver(
6994 Handle<JSObject>::cast(object), object, index, true);
6998 bool JSGlobalObject::IsDetached() {
6999 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7003 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
7004 const PrototypeIterator iter(this->GetIsolate(),
7005 const_cast<JSGlobalProxy*>(this));
7006 return iter.GetCurrent() != global;
7010 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
7011 return object->IsJSProxy()
7012 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
7013 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
7017 Object* JSReceiver::GetIdentityHash() {
7019 ? JSProxy::cast(this)->GetIdentityHash()
7020 : JSObject::cast(this)->GetIdentityHash();
7024 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7025 if (object->IsJSProxy()) {
7026 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7027 return JSProxy::HasElementWithHandler(proxy, index);
7029 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
7030 Handle<JSObject>::cast(object), object, index, true);
7031 if (!result.has_value) return Maybe<bool>();
7032 return maybe(result.value != ABSENT);
7036 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
7038 if (object->IsJSProxy()) {
7039 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7040 return JSProxy::HasElementWithHandler(proxy, index);
7042 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
7043 Handle<JSObject>::cast(object), object, index, false);
7044 if (!result.has_value) return Maybe<bool>();
7045 return maybe(result.value != ABSENT);
7049 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
7050 Handle<JSReceiver> object, uint32_t index) {
7051 if (object->IsJSProxy()) {
7052 return JSProxy::GetElementAttributeWithHandler(
7053 Handle<JSProxy>::cast(object), object, index);
7055 return JSObject::GetElementAttributeWithReceiver(
7056 Handle<JSObject>::cast(object), object, index, false);
7060 bool AccessorInfo::all_can_read() {
7061 return BooleanBit::get(flag(), kAllCanReadBit);
7065 void AccessorInfo::set_all_can_read(bool value) {
7066 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7070 bool AccessorInfo::all_can_write() {
7071 return BooleanBit::get(flag(), kAllCanWriteBit);
7075 void AccessorInfo::set_all_can_write(bool value) {
7076 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7080 PropertyAttributes AccessorInfo::property_attributes() {
7081 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
7085 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7086 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
7090 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7091 if (!HasExpectedReceiverType()) return true;
7092 if (!receiver->IsJSObject()) return false;
7093 return FunctionTemplateInfo::cast(expected_receiver_type())
7094 ->IsTemplateFor(JSObject::cast(receiver)->map());
7098 void ExecutableAccessorInfo::clear_setter() {
7099 set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
7103 template<typename Derived, typename Shape, typename Key>
7104 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7106 Handle<Object> value) {
7107 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7111 template<typename Derived, typename Shape, typename Key>
7112 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7114 Handle<Object> value,
7115 PropertyDetails details) {
7116 DCHECK(!key->IsName() ||
7117 details.IsDeleted() ||
7118 details.dictionary_index() > 0);
7119 int index = DerivedHashTable::EntryToIndex(entry);
7120 DisallowHeapAllocation no_gc;
7121 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
7122 FixedArray::set(index, *key, mode);
7123 FixedArray::set(index+1, *value, mode);
7124 FixedArray::set(index+2, details.AsSmi());
7128 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7129 DCHECK(other->IsNumber());
7130 return key == static_cast<uint32_t>(other->Number());
7134 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7135 return ComputeIntegerHash(key, 0);
7139 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7141 DCHECK(other->IsNumber());
7142 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7146 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7147 return ComputeIntegerHash(key, seed);
7151 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7154 DCHECK(other->IsNumber());
7155 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7159 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7160 return isolate->factory()->NewNumberFromUint(key);
7164 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7165 // We know that all entries in a hash table had their hash keys created.
7166 // Use that knowledge to have fast failure.
7167 if (key->Hash() != Name::cast(other)->Hash()) return false;
7168 return key->Equals(Name::cast(other));
7172 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7177 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7178 return Name::cast(other)->Hash();
7182 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7184 DCHECK(key->IsUniqueName());
7189 void NameDictionary::DoGenerateNewEnumerationIndices(
7190 Handle<NameDictionary> dictionary) {
7191 DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7195 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7196 return key->SameValue(other);
7200 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7201 return Smi::cast(key->GetHash())->value();
7205 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7207 return Smi::cast(other->GetHash())->value();
7211 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7212 Handle<Object> key) {
7217 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7218 Handle<ObjectHashTable> table, Handle<Object> key) {
7219 return DerivedHashTable::Shrink(table, key);
7223 template <int entrysize>
7224 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7225 return key->SameValue(other);
7229 template <int entrysize>
7230 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7231 intptr_t hash = reinterpret_cast<intptr_t>(*key);
7232 return (uint32_t)(hash & 0xFFFFFFFF);
7236 template <int entrysize>
7237 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7239 intptr_t hash = reinterpret_cast<intptr_t>(other);
7240 return (uint32_t)(hash & 0xFFFFFFFF);
7244 template <int entrysize>
7245 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7246 Handle<Object> key) {
7251 void Map::ClearCodeCache(Heap* heap) {
7252 // No write barrier is needed since empty_fixed_array is not in new space.
7253 // Please note this function is used during marking:
7254 // - MarkCompactCollector::MarkUnmarkedObject
7255 // - IncrementalMarking::Step
7256 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7257 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7261 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
7262 DCHECK(array->HasFastSmiOrObjectElements());
7263 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
7264 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
7265 if (elts->length() < required_size) {
7266 // Doubling in size would be overkill, but leave some slack to avoid
7267 // constantly growing.
7268 Expand(array, required_size + (required_size >> 3));
7269 // It's a performance benefit to keep a frequently used array in new-space.
7270 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
7271 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
7272 // Expand will allocate a new backing store in new space even if the size
7273 // we asked for isn't larger than what we had before.
7274 Expand(array, required_size);
7279 void JSArray::set_length(Smi* length) {
7280 // Don't need a write barrier for a Smi.
7281 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7285 bool JSArray::AllowsSetElementsLength() {
7286 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7287 DCHECK(result == !HasExternalArrayElements());
7292 void JSArray::SetContent(Handle<JSArray> array,
7293 Handle<FixedArrayBase> storage) {
7294 EnsureCanContainElements(array, storage, storage->length(),
7295 ALLOW_COPIED_DOUBLE_ELEMENTS);
7297 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7298 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7299 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7300 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7301 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7302 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7303 array->set_elements(*storage);
7304 array->set_length(Smi::FromInt(storage->length()));
7308 Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
7309 return isolate->factory()->uninitialized_symbol();
7313 Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
7314 return isolate->factory()->megamorphic_symbol();
7318 Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
7319 ElementsKind elements_kind) {
7320 return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
7324 Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
7325 return heap->uninitialized_symbol();
7329 int TypeFeedbackInfo::ic_total_count() {
7330 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7331 return ICTotalCountField::decode(current);
7335 void TypeFeedbackInfo::set_ic_total_count(int count) {
7336 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7337 value = ICTotalCountField::update(value,
7338 ICTotalCountField::decode(count));
7339 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7343 int TypeFeedbackInfo::ic_with_type_info_count() {
7344 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7345 return ICsWithTypeInfoCountField::decode(current);
7349 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7350 if (delta == 0) return;
7351 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7352 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7353 // We can get negative count here when the type-feedback info is
7354 // shared between two code objects. The can only happen when
7355 // the debugger made a shallow copy of code object (see Heap::CopyCode).
7356 // Since we do not optimize when the debugger is active, we can skip
7357 // this counter update.
7358 if (new_count >= 0) {
7359 new_count &= ICsWithTypeInfoCountField::kMask;
7360 value = ICsWithTypeInfoCountField::update(value, new_count);
7361 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7366 int TypeFeedbackInfo::ic_generic_count() {
7367 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7371 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7372 if (delta == 0) return;
7373 int new_count = ic_generic_count() + delta;
7374 if (new_count >= 0) {
7375 new_count &= ~Smi::kMinValue;
7376 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7381 void TypeFeedbackInfo::initialize_storage() {
7382 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7383 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7384 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7388 void TypeFeedbackInfo::change_own_type_change_checksum() {
7389 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7390 int checksum = OwnTypeChangeChecksum::decode(value);
7391 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7392 value = OwnTypeChangeChecksum::update(value, checksum);
7393 // Ensure packed bit field is in Smi range.
7394 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7395 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7396 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7400 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7401 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7402 int mask = (1 << kTypeChangeChecksumBits) - 1;
7403 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7404 // Ensure packed bit field is in Smi range.
7405 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7406 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7407 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7411 int TypeFeedbackInfo::own_type_change_checksum() {
7412 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7413 return OwnTypeChangeChecksum::decode(value);
7417 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7418 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7419 int mask = (1 << kTypeChangeChecksumBits) - 1;
7420 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7424 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7427 Relocatable::Relocatable(Isolate* isolate) {
7429 prev_ = isolate->relocatable_top();
7430 isolate->set_relocatable_top(this);
7434 Relocatable::~Relocatable() {
7435 DCHECK_EQ(isolate_->relocatable_top(), this);
7436 isolate_->set_relocatable_top(prev_);
7440 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7441 return map->instance_size();
7445 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7446 v->VisitExternalReference(
7447 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7451 template<typename StaticVisitor>
7452 void Foreign::ForeignIterateBody() {
7453 StaticVisitor::VisitExternalReference(
7454 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7458 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
7459 typedef v8::String::ExternalAsciiStringResource Resource;
7460 v->VisitExternalAsciiString(
7461 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7465 template<typename StaticVisitor>
7466 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
7467 typedef v8::String::ExternalAsciiStringResource Resource;
7468 StaticVisitor::VisitExternalAsciiString(
7469 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7473 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7474 typedef v8::String::ExternalStringResource Resource;
7475 v->VisitExternalTwoByteString(
7476 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7480 template<typename StaticVisitor>
7481 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7482 typedef v8::String::ExternalStringResource Resource;
7483 StaticVisitor::VisitExternalTwoByteString(
7484 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7488 template<int start_offset, int end_offset, int size>
7489 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7492 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7493 HeapObject::RawField(obj, end_offset));
7497 template<int start_offset>
7498 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7501 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7502 HeapObject::RawField(obj, object_size));
7506 template<class Derived, class TableType>
7507 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7508 TableType* table(TableType::cast(this->table()));
7509 int index = Smi::cast(this->index())->value();
7510 Object* key = table->KeyAt(index);
7511 DCHECK(!key->IsTheHole());
7516 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7517 array->set(0, CurrentKey());
7521 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7522 array->set(0, CurrentKey());
7523 array->set(1, CurrentValue());
7527 Object* JSMapIterator::CurrentValue() {
7528 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7529 int index = Smi::cast(this->index())->value();
7530 Object* value = table->ValueAt(index);
7531 DCHECK(!value->IsTheHole());
7537 #undef CAST_ACCESSOR
7538 #undef INT_ACCESSORS
7540 #undef ACCESSORS_TO_SMI
7541 #undef SMI_ACCESSORS
7542 #undef SYNCHRONIZED_SMI_ACCESSORS
7543 #undef NOBARRIER_SMI_ACCESSORS
7545 #undef BOOL_ACCESSORS
7547 #undef FIELD_ADDR_CONST
7549 #undef NOBARRIER_READ_FIELD
7551 #undef NOBARRIER_WRITE_FIELD
7552 #undef WRITE_BARRIER
7553 #undef CONDITIONAL_WRITE_BARRIER
7554 #undef READ_DOUBLE_FIELD
7555 #undef WRITE_DOUBLE_FIELD
7556 #undef READ_INT_FIELD
7557 #undef WRITE_INT_FIELD
7558 #undef READ_INTPTR_FIELD
7559 #undef WRITE_INTPTR_FIELD
7560 #undef READ_UINT32_FIELD
7561 #undef WRITE_UINT32_FIELD
7562 #undef READ_SHORT_FIELD
7563 #undef WRITE_SHORT_FIELD
7564 #undef READ_BYTE_FIELD
7565 #undef WRITE_BYTE_FIELD
7566 #undef NOBARRIER_READ_BYTE_FIELD
7567 #undef NOBARRIER_WRITE_BYTE_FIELD
7569 } } // namespace v8::internal
7571 #endif // V8_OBJECTS_INL_H_