1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/contexts.h"
17 #include "src/conversions-inl.h"
18 #include "src/elements.h"
19 #include "src/factory.h"
20 #include "src/field-index-inl.h"
21 #include "src/heap/heap-inl.h"
22 #include "src/heap/heap.h"
23 #include "src/heap/incremental-marking.h"
24 #include "src/heap/objects-visiting.h"
25 #include "src/heap/spaces.h"
26 #include "src/heap/store-buffer.h"
27 #include "src/isolate.h"
28 #include "src/lookup.h"
29 #include "src/objects.h"
30 #include "src/property.h"
31 #include "src/prototype.h"
32 #include "src/transitions-inl.h"
33 #include "src/v8memory.h"
38 PropertyDetails::PropertyDetails(Smi* smi) {
39 value_ = smi->value();
43 Smi* PropertyDetails::AsSmi() const {
44 // Ensure the upper 2 bits have the same value by sign extending it. This is
45 // necessary to be able to use the 31st bit of the property details.
46 int value = value_ << 1;
47 return Smi::FromInt(value >> 1);
51 PropertyDetails PropertyDetails::AsDeleted() const {
52 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
53 return PropertyDetails(smi);
// Generates Object::Is<type>() predicates that compare the heap object's
// instance type against a single expected instance type.
#define TYPE_CHECKER(type, instancetype)                                   \
  bool Object::Is##type() const {                                          \
    return Object::IsHeapObject() &&                                       \
           HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


// Generates checked down-casts; the check only runs in SLOW_DCHECK builds.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }


// Getter/setter pair for a raw (untagged) int field.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// Getter/setter pair for a tagged pointer field, with a conditional write
// barrier on the setter.
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// Smis never need a write barrier.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


// Like SMI_ACCESSORS, but with acquire/release memory ordering.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


// Like SMI_ACCESSORS, but with relaxed (no-barrier) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


// Read-only access to a single bit of a bitfield.
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


// Read/write access to a single bit of a bitfield.
#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
139 bool Object::IsFixedArrayBase() const {
140 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
141 IsFixedTypedArrayBase() || IsExternalArray();
145 // External objects are not extensible, so the map check is enough.
146 bool Object::IsExternal() const {
147 return Object::IsHeapObject() &&
148 HeapObject::cast(this)->map() ==
149 HeapObject::cast(this)->GetHeap()->external_map();
153 bool Object::IsAccessorInfo() const {
154 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
158 bool Object::IsSmi() const {
159 return HAS_SMI_TAG(this);
163 bool Object::IsHeapObject() const {
164 return Internals::HasHeapObjectTag(this);
168 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
169 TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
170 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
173 bool Object::IsString() const {
174 return Object::IsHeapObject()
175 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
179 bool Object::IsName() const {
180 return IsString() || IsSymbol();
184 bool Object::IsUniqueName() const {
185 return IsInternalizedString() || IsSymbol();
189 bool Object::IsSpecObject() const {
190 return Object::IsHeapObject()
191 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
195 bool Object::IsSpecFunction() const {
196 if (!Object::IsHeapObject()) return false;
197 InstanceType type = HeapObject::cast(this)->map()->instance_type();
198 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
202 bool Object::IsTemplateInfo() const {
203 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
207 bool Object::IsInternalizedString() const {
208 if (!this->IsHeapObject()) return false;
209 uint32_t type = HeapObject::cast(this)->map()->instance_type();
210 STATIC_ASSERT(kNotInternalizedTag != 0);
211 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
212 (kStringTag | kInternalizedTag);
216 bool Object::IsConsString() const {
217 if (!IsString()) return false;
218 return StringShape(String::cast(this)).IsCons();
222 bool Object::IsSlicedString() const {
223 if (!IsString()) return false;
224 return StringShape(String::cast(this)).IsSliced();
228 bool Object::IsSeqString() const {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsSequential();
234 bool Object::IsSeqOneByteString() const {
235 if (!IsString()) return false;
236 return StringShape(String::cast(this)).IsSequential() &&
237 String::cast(this)->IsOneByteRepresentation();
241 bool Object::IsSeqTwoByteString() const {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsSequential() &&
244 String::cast(this)->IsTwoByteRepresentation();
248 bool Object::IsExternalString() const {
249 if (!IsString()) return false;
250 return StringShape(String::cast(this)).IsExternal();
254 bool Object::IsExternalAsciiString() const {
255 if (!IsString()) return false;
256 return StringShape(String::cast(this)).IsExternal() &&
257 String::cast(this)->IsOneByteRepresentation();
261 bool Object::IsExternalTwoByteString() const {
262 if (!IsString()) return false;
263 return StringShape(String::cast(this)).IsExternal() &&
264 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
271 IsFixedTypedArrayBase();
275 Handle<Object> Object::NewStorageFor(Isolate* isolate,
276 Handle<Object> object,
277 Representation representation) {
278 if (representation.IsSmi() && object->IsUninitialized()) {
279 return handle(Smi::FromInt(0), isolate);
281 if (!representation.IsDouble()) return object;
283 if (object->IsUninitialized()) {
285 } else if (object->IsMutableHeapNumber()) {
286 value = HeapNumber::cast(*object)->value();
288 value = object->Number();
290 return isolate->factory()->NewHeapNumber(value, MUTABLE);
294 Handle<Object> Object::WrapForRead(Isolate* isolate,
295 Handle<Object> object,
296 Representation representation) {
297 DCHECK(!object->IsUninitialized());
298 if (!representation.IsDouble()) {
299 DCHECK(object->FitsRepresentation(representation));
302 return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
306 StringShape::StringShape(const String* str)
307 : type_(str->map()->instance_type()) {
309 DCHECK((type_ & kIsNotStringMask) == kStringTag);
313 StringShape::StringShape(Map* map)
314 : type_(map->instance_type()) {
316 DCHECK((type_ & kIsNotStringMask) == kStringTag);
320 StringShape::StringShape(InstanceType t)
321 : type_(static_cast<uint32_t>(t)) {
323 DCHECK((type_ & kIsNotStringMask) == kStringTag);
327 bool StringShape::IsInternalized() {
329 STATIC_ASSERT(kNotInternalizedTag != 0);
330 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
331 (kStringTag | kInternalizedTag);
335 bool String::IsOneByteRepresentation() const {
336 uint32_t type = map()->instance_type();
337 return (type & kStringEncodingMask) == kOneByteStringTag;
341 bool String::IsTwoByteRepresentation() const {
342 uint32_t type = map()->instance_type();
343 return (type & kStringEncodingMask) == kTwoByteStringTag;
347 bool String::IsOneByteRepresentationUnderneath() {
348 uint32_t type = map()->instance_type();
349 STATIC_ASSERT(kIsIndirectStringTag != 0);
350 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
352 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
353 case kOneByteStringTag:
355 case kTwoByteStringTag:
357 default: // Cons or sliced string. Need to go deeper.
358 return GetUnderlying()->IsOneByteRepresentation();
363 bool String::IsTwoByteRepresentationUnderneath() {
364 uint32_t type = map()->instance_type();
365 STATIC_ASSERT(kIsIndirectStringTag != 0);
366 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
368 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
369 case kOneByteStringTag:
371 case kTwoByteStringTag:
373 default: // Cons or sliced string. Need to go deeper.
374 return GetUnderlying()->IsTwoByteRepresentation();
379 bool String::HasOnlyOneByteChars() {
380 uint32_t type = map()->instance_type();
381 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
382 IsOneByteRepresentation();
386 bool StringShape::IsCons() {
387 return (type_ & kStringRepresentationMask) == kConsStringTag;
391 bool StringShape::IsSliced() {
392 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
396 bool StringShape::IsIndirect() {
397 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
401 bool StringShape::IsExternal() {
402 return (type_ & kStringRepresentationMask) == kExternalStringTag;
406 bool StringShape::IsSequential() {
407 return (type_ & kStringRepresentationMask) == kSeqStringTag;
411 StringRepresentationTag StringShape::representation_tag() {
412 uint32_t tag = (type_ & kStringRepresentationMask);
413 return static_cast<StringRepresentationTag>(tag);
417 uint32_t StringShape::encoding_tag() {
418 return type_ & kStringEncodingMask;
422 uint32_t StringShape::full_representation_tag() {
423 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
427 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
428 Internals::kFullStringRepresentationMask);
430 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
431 Internals::kStringEncodingMask);
434 bool StringShape::IsSequentialAscii() {
435 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
439 bool StringShape::IsSequentialTwoByte() {
440 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
444 bool StringShape::IsExternalAscii() {
445 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
449 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
450 Internals::kExternalAsciiRepresentationTag);
452 STATIC_ASSERT(v8::String::ASCII_ENCODING == kOneByteStringTag);
455 bool StringShape::IsExternalTwoByte() {
456 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
460 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
461 Internals::kExternalTwoByteRepresentationTag);
463 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
465 uc32 FlatStringReader::Get(int index) {
466 DCHECK(0 <= index && index <= length_);
468 return static_cast<const byte*>(start_)[index];
470 return static_cast<const uc16*>(start_)[index];
475 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
476 return key->AsHandle(isolate);
480 Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
481 return key->AsHandle(isolate);
485 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
487 return key->AsHandle(isolate);
491 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
493 return key->AsHandle(isolate);
496 template <typename Char>
497 class SequentialStringKey : public HashTableKey {
499 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
500 : string_(string), hash_field_(0), seed_(seed) { }
502 virtual uint32_t Hash() V8_OVERRIDE {
503 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
507 uint32_t result = hash_field_ >> String::kHashShift;
508 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
513 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
514 return String::cast(other)->Hash();
517 Vector<const Char> string_;
518 uint32_t hash_field_;
523 class OneByteStringKey : public SequentialStringKey<uint8_t> {
525 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
526 : SequentialStringKey<uint8_t>(str, seed) { }
528 virtual bool IsMatch(Object* string) V8_OVERRIDE {
529 return String::cast(string)->IsOneByteEqualTo(string_);
532 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
537 class SubStringKey : public HashTableKey {
539 SubStringKey(Handle<String> string, int from, int length)
540 : string_(string), from_(from), length_(length) {
541 if (string_->IsSlicedString()) {
542 string_ = Handle<String>(Unslice(*string_, &from_));
544 DCHECK(string_->IsSeqString() || string->IsExternalString());
547 virtual uint32_t Hash() V8_OVERRIDE {
548 DCHECK(length_ >= 0);
549 DCHECK(from_ + length_ <= string_->length());
550 const Char* chars = GetChars() + from_;
551 hash_field_ = StringHasher::HashSequentialString(
552 chars, length_, string_->GetHeap()->HashSeed());
553 uint32_t result = hash_field_ >> String::kHashShift;
554 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
558 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
559 return String::cast(other)->Hash();
562 virtual bool IsMatch(Object* string) V8_OVERRIDE;
563 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
566 const Char* GetChars();
567 String* Unslice(String* string, int* offset) {
568 while (string->IsSlicedString()) {
569 SlicedString* sliced = SlicedString::cast(string);
570 *offset += sliced->offset();
571 string = sliced->parent();
576 Handle<String> string_;
579 uint32_t hash_field_;
583 class TwoByteStringKey : public SequentialStringKey<uc16> {
585 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
586 : SequentialStringKey<uc16>(str, seed) { }
588 virtual bool IsMatch(Object* string) V8_OVERRIDE {
589 return String::cast(string)->IsTwoByteEqualTo(string_);
592 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE;
596 // Utf8StringKey carries a vector of chars as key.
597 class Utf8StringKey : public HashTableKey {
599 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
600 : string_(string), hash_field_(0), seed_(seed) { }
602 virtual bool IsMatch(Object* string) V8_OVERRIDE {
603 return String::cast(string)->IsUtf8EqualTo(string_);
606 virtual uint32_t Hash() V8_OVERRIDE {
607 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
608 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
609 uint32_t result = hash_field_ >> String::kHashShift;
610 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
614 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
615 return String::cast(other)->Hash();
618 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
619 if (hash_field_ == 0) Hash();
620 return isolate->factory()->NewInternalizedStringFromUtf8(
621 string_, chars_, hash_field_);
624 Vector<const char> string_;
625 uint32_t hash_field_;
626 int chars_; // Caches the number of characters when computing the hash code.
631 bool Object::IsNumber() const {
632 return IsSmi() || IsHeapNumber();
636 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
637 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
640 bool Object::IsFiller() const {
641 if (!Object::IsHeapObject()) return false;
642 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
643 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
647 bool Object::IsExternalArray() const {
648 if (!Object::IsHeapObject())
650 InstanceType instance_type =
651 HeapObject::cast(this)->map()->instance_type();
652 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
653 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
657 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
658 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
659 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
661 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
662 #undef TYPED_ARRAY_TYPE_CHECKER
665 bool Object::IsFixedTypedArrayBase() const {
666 if (!Object::IsHeapObject()) return false;
668 InstanceType instance_type =
669 HeapObject::cast(this)->map()->instance_type();
670 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
671 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
675 bool Object::IsJSReceiver() const {
676 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
677 return IsHeapObject() &&
678 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
682 bool Object::IsJSObject() const {
683 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
684 return IsHeapObject() &&
685 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
689 bool Object::IsJSProxy() const {
690 if (!Object::IsHeapObject()) return false;
691 return HeapObject::cast(this)->map()->IsJSProxyMap();
695 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
696 TYPE_CHECKER(JSSet, JS_SET_TYPE)
697 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
698 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
699 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
700 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
701 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
702 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
703 TYPE_CHECKER(Map, MAP_TYPE)
704 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
705 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
706 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
709 bool Object::IsJSWeakCollection() const {
710 return IsJSWeakMap() || IsJSWeakSet();
714 bool Object::IsDescriptorArray() const {
715 return IsFixedArray();
719 bool Object::IsTransitionArray() const {
720 return IsFixedArray();
724 bool Object::IsDeoptimizationInputData() const {
725 // Must be a fixed array.
726 if (!IsFixedArray()) return false;
728 // There's no sure way to detect the difference between a fixed array and
729 // a deoptimization data array. Since this is used for asserts we can
730 // check that the length is zero or else the fixed size plus a multiple of
732 int length = FixedArray::cast(this)->length();
733 if (length == 0) return true;
734 if (length < DeoptimizationInputData::kFirstDeoptEntryIndex) return false;
736 FixedArray* self = FixedArray::cast(const_cast<Object*>(this));
738 Smi::cast(self->get(DeoptimizationInputData::kDeoptEntryCountIndex))
743 DeoptimizationInputData::kReturnAddressPatchEntryCountIndex))
746 return length == DeoptimizationInputData::LengthFor(deopt_count, patch_count);
750 bool Object::IsDeoptimizationOutputData() const {
751 if (!IsFixedArray()) return false;
752 // There's actually no way to see the difference between a fixed array and
753 // a deoptimization data array. Since this is used for asserts we can check
754 // that the length is plausible though.
755 if (FixedArray::cast(this)->length() % 2 != 0) return false;
760 bool Object::IsDependentCode() const {
761 if (!IsFixedArray()) return false;
762 // There's actually no way to see the difference between a fixed array and
763 // a dependent codes array.
768 bool Object::IsContext() const {
769 if (!Object::IsHeapObject()) return false;
770 Map* map = HeapObject::cast(this)->map();
771 Heap* heap = map->GetHeap();
772 return (map == heap->function_context_map() ||
773 map == heap->catch_context_map() ||
774 map == heap->with_context_map() ||
775 map == heap->native_context_map() ||
776 map == heap->block_context_map() ||
777 map == heap->module_context_map() ||
778 map == heap->global_context_map());
782 bool Object::IsNativeContext() const {
783 return Object::IsHeapObject() &&
784 HeapObject::cast(this)->map() ==
785 HeapObject::cast(this)->GetHeap()->native_context_map();
789 bool Object::IsScopeInfo() const {
790 return Object::IsHeapObject() &&
791 HeapObject::cast(this)->map() ==
792 HeapObject::cast(this)->GetHeap()->scope_info_map();
796 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
799 template <> inline bool Is<JSFunction>(Object* obj) {
800 return obj->IsJSFunction();
804 TYPE_CHECKER(Code, CODE_TYPE)
805 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
806 TYPE_CHECKER(Cell, CELL_TYPE)
807 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
808 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
809 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
810 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
811 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
812 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
813 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
816 bool Object::IsStringWrapper() const {
817 return IsJSValue() && JSValue::cast(this)->value()->IsString();
821 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
824 bool Object::IsBoolean() const {
825 return IsOddball() &&
826 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
830 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
831 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
832 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
833 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
835 TYPE_CHECKER(Float32x4, FLOAT32x4_TYPE)
836 TYPE_CHECKER(Float64x2, FLOAT64x2_TYPE)
837 TYPE_CHECKER(Int32x4, INT32x4_TYPE)
839 bool Object::IsJSArrayBufferView() const {
840 return IsJSDataView() || IsJSTypedArray();
844 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
847 template <> inline bool Is<JSArray>(Object* obj) {
848 return obj->IsJSArray();
852 bool Object::IsHashTable() const {
853 return Object::IsHeapObject() &&
854 HeapObject::cast(this)->map() ==
855 HeapObject::cast(this)->GetHeap()->hash_table_map();
859 bool Object::IsWeakHashTable() const {
860 return IsHashTable();
864 bool Object::IsDictionary() const {
865 return IsHashTable() &&
866 this != HeapObject::cast(this)->GetHeap()->string_table();
870 bool Object::IsNameDictionary() const {
871 return IsDictionary();
875 bool Object::IsSeededNumberDictionary() const {
876 return IsDictionary();
880 bool Object::IsUnseededNumberDictionary() const {
881 return IsDictionary();
885 bool Object::IsStringTable() const {
886 return IsHashTable();
890 bool Object::IsJSFunctionResultCache() const {
891 if (!IsFixedArray()) return false;
892 const FixedArray* self = FixedArray::cast(this);
893 int length = self->length();
894 if (length < JSFunctionResultCache::kEntriesIndex) return false;
895 if ((length - JSFunctionResultCache::kEntriesIndex)
896 % JSFunctionResultCache::kEntrySize != 0) {
900 if (FLAG_verify_heap) {
901 // TODO(svenpanne) We use const_cast here and below to break our dependency
902 // cycle between the predicates and the verifiers. This can be removed when
903 // the verifiers are const-correct, too.
904 reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
905 JSFunctionResultCacheVerify();
912 bool Object::IsNormalizedMapCache() const {
913 return NormalizedMapCache::IsNormalizedMapCache(this);
917 int NormalizedMapCache::GetIndex(Handle<Map> map) {
918 return map->Hash() % NormalizedMapCache::kEntries;
922 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
923 if (!obj->IsFixedArray()) return false;
924 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
928 if (FLAG_verify_heap) {
929 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
930 NormalizedMapCacheVerify();
937 bool Object::IsCompilationCacheTable() const {
938 return IsHashTable();
942 bool Object::IsCodeCacheHashTable() const {
943 return IsHashTable();
947 bool Object::IsPolymorphicCodeCacheHashTable() const {
948 return IsHashTable();
952 bool Object::IsMapCache() const {
953 return IsHashTable();
957 bool Object::IsObjectHashTable() const {
958 return IsHashTable();
962 bool Object::IsOrderedHashTable() const {
963 return IsHeapObject() &&
964 HeapObject::cast(this)->map() ==
965 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
969 bool Object::IsOrderedHashSet() const {
970 return IsOrderedHashTable();
974 bool Object::IsOrderedHashMap() const {
975 return IsOrderedHashTable();
979 bool Object::IsPrimitive() const {
980 return IsOddball() || IsNumber() || IsString();
984 bool Object::IsJSGlobalProxy() const {
985 bool result = IsHeapObject() &&
986 (HeapObject::cast(this)->map()->instance_type() ==
987 JS_GLOBAL_PROXY_TYPE);
989 HeapObject::cast(this)->map()->is_access_check_needed());
994 bool Object::IsGlobalObject() const {
995 if (!IsHeapObject()) return false;
997 InstanceType type = HeapObject::cast(this)->map()->instance_type();
998 return type == JS_GLOBAL_OBJECT_TYPE ||
999 type == JS_BUILTINS_OBJECT_TYPE;
1003 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
1004 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
1007 bool Object::IsUndetectableObject() const {
1008 return IsHeapObject()
1009 && HeapObject::cast(this)->map()->is_undetectable();
1013 bool Object::IsAccessCheckNeeded() const {
1014 if (!IsHeapObject()) return false;
1015 if (IsJSGlobalProxy()) {
1016 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
1017 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
1018 return proxy->IsDetachedFrom(global);
1020 return HeapObject::cast(this)->map()->is_access_check_needed();
1024 bool Object::IsStruct() const {
1025 if (!IsHeapObject()) return false;
1026 switch (HeapObject::cast(this)->map()->instance_type()) {
1027 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1028 STRUCT_LIST(MAKE_STRUCT_CASE)
1029 #undef MAKE_STRUCT_CASE
1030 default: return false;
1035 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1036 bool Object::Is##Name() const { \
1037 return Object::IsHeapObject() \
1038 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1040 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1041 #undef MAKE_STRUCT_PREDICATE
1044 bool Object::IsUndefined() const {
1045 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1049 bool Object::IsNull() const {
1050 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1054 bool Object::IsTheHole() const {
1055 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1059 bool Object::IsException() const {
1060 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1064 bool Object::IsUninitialized() const {
1065 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1069 bool Object::IsTrue() const {
1070 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1074 bool Object::IsFalse() const {
1075 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1079 bool Object::IsArgumentsMarker() const {
1080 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1084 double Object::Number() {
1087 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1088 : reinterpret_cast<HeapNumber*>(this)->value();
1092 bool Object::IsNaN() const {
1093 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1097 bool Object::IsMinusZero() const {
1098 return this->IsHeapNumber() &&
1099 i::IsMinusZero(HeapNumber::cast(this)->value());
1103 MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
1104 if (object->IsSmi()) return Handle<Smi>::cast(object);
1105 if (object->IsHeapNumber()) {
1106 double value = Handle<HeapNumber>::cast(object)->value();
1107 int int_value = FastD2I(value);
1108 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1109 return handle(Smi::FromInt(int_value), isolate);
1112 return Handle<Smi>();
1116 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1117 Handle<Object> object) {
1119 isolate, object, handle(isolate->context()->native_context(), isolate));
1123 bool Object::HasSpecificClassOf(String* name) {
1124 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1128 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1129 Handle<Name> name) {
1130 LookupIterator it(object, name);
1131 return GetProperty(&it);
1135 MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1136 Handle<Object> object,
1138 // GetElement can trigger a getter which can cause allocation.
1139 // This was not always the case. This DCHECK is here to catch
1140 // leftover incorrect uses.
1141 DCHECK(AllowHeapAllocation::IsAllowed());
1142 return Object::GetElementWithReceiver(isolate, object, object, index);
1146 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1147 Handle<Name> name) {
1149 Isolate* isolate = name->GetIsolate();
1150 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1151 return GetProperty(object, name);
1155 MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1156 Handle<Object> object,
1158 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1159 DCHECK(!str.is_null());
1161 uint32_t index; // Assert that the name is not an array index.
1162 DCHECK(!str->AsArrayIndex(&index));
1164 return GetProperty(object, str);
1168 MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
1169 Handle<Object> receiver,
1171 return GetPropertyWithHandler(
1172 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1176 MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
1177 Handle<JSReceiver> receiver,
1179 Handle<Object> value,
1180 StrictMode strict_mode) {
1181 Isolate* isolate = proxy->GetIsolate();
1182 Handle<String> name = isolate->factory()->Uint32ToString(index);
1183 return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
1187 Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
1189 Isolate* isolate = proxy->GetIsolate();
1190 Handle<String> name = isolate->factory()->Uint32ToString(index);
1191 return HasPropertyWithHandler(proxy, name);
// Address of a field at |offset| inside tagged heap object |p|.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// Unconditional write barrier: informs incremental marking of the write and
// records old-to-new references in the store buffer.
#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

// As WRITE_BARRIER, but skipped entirely unless mode == UPDATE_WRITE_BARRIER.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }
1241 #ifndef V8_TARGET_ARCH_MIPS
1242 #define READ_DOUBLE_FIELD(p, offset) \
1243 (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
1244 #else // V8_TARGET_ARCH_MIPS
1245 // Prevent gcc from using load-double (mips ldc1) on (possibly)
1246 // non-64-bit aligned HeapNumber::value.
1247 static inline double read_double_field(const void* p, int offset) {
1252 c.u[0] = (*reinterpret_cast<const uint32_t*>(
1253 FIELD_ADDR_CONST(p, offset)));
1254 c.u[1] = (*reinterpret_cast<const uint32_t*>(
1255 FIELD_ADDR_CONST(p, offset + 4)));
1258 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1259 #endif // V8_TARGET_ARCH_MIPS
1261 #ifndef V8_TARGET_ARCH_MIPS
1262 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1263 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1264 #else // V8_TARGET_ARCH_MIPS
1265 // Prevent gcc from using store-double (mips sdc1) on (possibly)
1266 // non-64-bit aligned HeapNumber::value.
1267 static inline void write_double_field(void* p, int offset,
1274 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1275 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1277 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1278 write_double_field(p, offset, value)
1279 #endif // V8_TARGET_ARCH_MIPS
1281 #define READ_FLOAT32x4_FIELD(p, offset) \
1282 (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)))
1284 #define WRITE_FLOAT32x4_FIELD(p, offset, value) \
1285 (*reinterpret_cast<float32x4_value_t*>(FIELD_ADDR(p, offset)) = value)
1287 #define READ_FLOAT64x2_FIELD(p, offset) \
1288 (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)))
1290 #define WRITE_FLOAT64x2_FIELD(p, offset, value) \
1291 (*reinterpret_cast<float64x2_value_t*>(FIELD_ADDR(p, offset)) = value)
1293 #define READ_INT32x4_FIELD(p, offset) \
1294 (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)))
1296 #define WRITE_INT32x4_FIELD(p, offset, value) \
1297 (*reinterpret_cast<int32x4_value_t*>(FIELD_ADDR(p, offset)) = value)
1299 #define READ_FLOAT_FIELD(p, offset) \
1300 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)))
1302 #define WRITE_FLOAT_FIELD(p, offset, value) \
1303 (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)
1305 #define READ_INT_FIELD(p, offset) \
1306 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1308 #define WRITE_INT_FIELD(p, offset, value) \
1309 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1311 #define READ_INTPTR_FIELD(p, offset) \
1312 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1314 #define WRITE_INTPTR_FIELD(p, offset, value) \
1315 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1317 #define READ_UINT32_FIELD(p, offset) \
1318 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1320 #define WRITE_UINT32_FIELD(p, offset, value) \
1321 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1323 #define READ_INT32_FIELD(p, offset) \
1324 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1326 #define WRITE_INT32_FIELD(p, offset, value) \
1327 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1329 #define READ_INT64_FIELD(p, offset) \
1330 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1332 #define WRITE_INT64_FIELD(p, offset, value) \
1333 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1335 #define READ_SHORT_FIELD(p, offset) \
1336 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1338 #define WRITE_SHORT_FIELD(p, offset, value) \
1339 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1341 #define READ_BYTE_FIELD(p, offset) \
1342 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1344 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1345 static_cast<byte>(base::NoBarrier_Load( \
1346 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1348 #define WRITE_BYTE_FIELD(p, offset, value) \
1349 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1351 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1352 base::NoBarrier_Store( \
1353 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1354 static_cast<base::Atomic8>(value));
// Returns the address of the tagged field at |byte_offset| as an
// Object** slot (used by visitors and write barriers).
1356 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1357 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));

// Decodes the integer payload of this small integer (Smi).
1361 int Smi::value() const {
1362 return Internals::SmiValue(this);

// Encodes |value| as a tagged Smi; |value| must be in Smi range.
1366 Smi* Smi::FromInt(int value) {
1367 DCHECK(Smi::IsValid(value));
1368 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));

// Same as FromInt but encodes directly from an intptr_t by shifting in
// the tag bits; the shift amount depends on the pointer-size build.
1372 Smi* Smi::FromIntptr(intptr_t value) {
1373 DCHECK(Smi::IsValid(value));
1374 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1375 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);

// True if |value| fits in the Smi payload; cross-checked against the
// kMinValue/kMaxValue bounds in debug builds.
1379 bool Smi::IsValid(intptr_t value) {
1380 bool result = Internals::IsValidSmi(value);
1381 DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);

// A MapWord is the raw first word of a heap object: normally a Map
// pointer, but during scavenge/compaction it may hold a forwarding
// address instead (distinguished by the Smi tag, see below).
1386 MapWord MapWord::FromMap(const Map* map) {
1387 return MapWord(reinterpret_cast<uintptr_t>(map));

1391 Map* MapWord::ToMap() {
1392 return reinterpret_cast<Map*>(value_);

// A forwarding address is stored untagged, so it looks like a Smi.
1396 bool MapWord::IsForwardingAddress() {
1397 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));

// Encodes the target object's (untagged) address as a forwarding pointer.
1401 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1402 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1403 return MapWord(reinterpret_cast<uintptr_t>(raw));

1407 HeapObject* MapWord::ToForwardingAddress() {
1408 DCHECK(IsForwardingAddress());
1409 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
// Debug-only heap verification helpers for a tagged field / a Smi field.
1414 void HeapObject::VerifyObjectField(int offset) {
1415 VerifyPointer(READ_FIELD(this, offset));

1418 void HeapObject::VerifySmiField(int offset) {
1419 CHECK(READ_FIELD(this, offset)->IsSmi());

// The owning Heap is recovered from the memory chunk this object lives
// in (chunk headers are at aligned boundaries).
1424 Heap* HeapObject::GetHeap() const {
1426 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1427 SLOW_DCHECK(heap != NULL);

1432 Isolate* HeapObject::GetIsolate() const {
1433 return GetHeap()->isolate();

// Returns the object's map. In tracing builds the PathTracer may have
// set a mark bit in the map word, which is masked off first.
1437 Map* HeapObject::map() const {
1439 // Clear mark potentially added by PathTracer.
1440 uintptr_t raw_value =
1441 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1442 return MapWord::FromRawValue(raw_value).ToMap();
1444 return map_word().ToMap();

// Sets the map and informs incremental marking of the store.
1449 void HeapObject::set_map(Map* value) {
1450 set_map_word(MapWord::FromMap(value));
1451 if (value != NULL) {
1452 // TODO(1600) We are passing NULL as a slot because maps can never be on
1453 // evacuation candidate.
1454 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);

// Acquire-ordered map read, for use by concurrent readers.
1459 Map* HeapObject::synchronized_map() {
1460 return synchronized_map_word().ToMap();

// Release-ordered map store (paired with synchronized_map above).
1464 void HeapObject::synchronized_set_map(Map* value) {
1465 synchronized_set_map_word(MapWord::FromMap(value));
1466 if (value != NULL) {
1467 // TODO(1600) We are passing NULL as a slot because maps can never be on
1468 // evacuation candidate.
1469 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);

1474 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1475 synchronized_set_map_word(MapWord::FromMap(value));

1479 // Unsafe accessor omitting write barrier.
1480 void HeapObject::set_map_no_write_barrier(Map* value) {
1481 set_map_word(MapWord::FromMap(value));

// Raw map-word accessors; the plain variants use relaxed atomics, the
// synchronized variants acquire/release ordering.
1485 MapWord HeapObject::map_word() const {
1487 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));

1491 void HeapObject::set_map_word(MapWord map_word) {
1492 NOBARRIER_WRITE_FIELD(
1493 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));

1497 MapWord HeapObject::synchronized_map_word() const {
1499 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));

1503 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1504 RELEASE_WRITE_FIELD(
1505 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));

// Conversions between untagged addresses and tagged HeapObject pointers.
1509 HeapObject* HeapObject::FromAddress(Address address) {
1510 DCHECK_TAG_ALIGNED(address);
1511 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);

1515 Address HeapObject::address() {
1516 return reinterpret_cast<Address>(this) - kHeapObjectTag;

// Object size in bytes, derived from the instance type/map.
1520 int HeapObject::Size() {
1521 return SizeFromMap(map());
// Conservative predicate used by the store buffer / scavenger: can this
// object contain pointers into new space (beyond its map word)?
1525 bool HeapObject::MayContainNewSpacePointers() {
1526 InstanceType type = map()->instance_type();
1527 if (type <= LAST_NAME_TYPE) {
1528 if (type == SYMBOL_TYPE) {
1531 DCHECK(type < FIRST_NONSTRING_TYPE);
1532 // There are four string representations: sequential strings, external
1533 // strings, cons strings, and sliced strings.
1534 // Only the latter two contain non-map-word pointers to heap objects.
1535 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag);
1537 // The ConstantPoolArray contains heap pointers, but not new space pointers.
1538 if (type == CONSTANT_POOL_ARRAY_TYPE) return false;
1539 return (type > LAST_DATA_TYPE);

// Visits all tagged slots in [start, end) byte offsets of this object.
1543 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1544 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1545 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));

// Visits the single tagged slot at byte |offset|.
1549 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1550 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));

// Visits the next-code-link slot, which gets special visitor treatment.
1554 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1555 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
// Accessors for the boxed IEEE-754 double stored in a HeapNumber.
1559 double HeapNumber::value() const {
1560 return READ_DOUBLE_FIELD(this, kValueOffset);

1564 void HeapNumber::set_value(double value) {
1565 WRITE_DOUBLE_FIELD(this, kValueOffset, value);

// Unbiased binary exponent, extracted from the high word of the double.
1569 int HeapNumber::get_exponent() {
1570 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1571 kExponentShift) - kExponentBias;

// Nonzero iff the sign bit is set (value is negative or -0).
1575 int HeapNumber::get_sign() {
1576 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
// SIMD.js wrapper objects: each holds its lane data in a backing fixed
// typed array reachable through the |value| accessor.
1580 ACCESSORS(Float32x4, value, Object, kValueOffset)
1581 ACCESSORS(Float64x2, value, Object, kValueOffset)
1582 ACCESSORS(Int32x4, value, Object, kValueOffset)

// Human-readable type name (body elided in this excerpt).
1585 const char* Float32x4::Name() {

// Runtime function id used to allocate this SIMD type.
1590 int Float32x4::kRuntimeAllocatorId() {
1591 return Runtime::kAllocateFloat32x4;

// Returns lane |index| (0..kLanes-1).
1595 float Float32x4::getAt(int index) {
1596 DCHECK(index >= 0 && index < kLanes);
1597 return get().storage[index];

// Reads/writes the whole lane vector from slot 0 of the backing array.
1601 float32x4_value_t Float32x4::get() {
1602 return FixedFloat32x4Array::cast(value())->get_scalar(0);

1606 void Float32x4::set(float32x4_value_t f32x4) {
1607 FixedFloat32x4Array::cast(value())->set(0, f32x4);

1611 const char* Float64x2::Name() {

1616 int Float64x2::kRuntimeAllocatorId() {
1617 return Runtime::kAllocateFloat64x2;

// Returns lane |index| (0..kLanes-1).
1621 double Float64x2::getAt(int index) {
1622 DCHECK(index >= 0 && index < kLanes);
1623 return get().storage[index];

1626 float64x2_value_t Float64x2::get() {
1627 return FixedFloat64x2Array::cast(value())->get_scalar(0);

1631 void Float64x2::set(float64x2_value_t f64x2) {
1632 FixedFloat64x2Array::cast(value())->set(0, f64x2);

1636 const char* Int32x4::Name() {

1641 int Int32x4::kRuntimeAllocatorId() {
1642 return Runtime::kAllocateInt32x4;
1646 int32_t Int32x4::getAt(int index) {
1647 DCHECK(index >= 0 && index < kLanes);
1648 return get().storage[index];;
// Reads/writes the whole 4-lane integer vector from slot 0 of the
// backing FixedInt32x4Array.
1652 int32x4_value_t Int32x4::get() {
1653 return FixedInt32x4Array::cast(value())->get_scalar(0);

1657 void Int32x4::set(int32x4_value_t i32x4) {
1658 FixedInt32x4Array::cast(value())->set(0, i32x4);
// Backing store for named properties (fast mode: a FixedArray).
1662 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)

// Address of element 0, for raw scans over the array's slots.
1665 Object** FixedArray::GetFirstElementAddress() {
1666 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));

// True if every element is a Smi or the hole sentinel; used when
// deciding whether FAST_SMI elements kinds are still valid.
1670 bool FixedArray::ContainsOnlySmisOrHoles() {
1671 Object* the_hole = GetHeap()->the_hole_value();
1672 Object** current = GetFirstElementAddress();
1673 for (int i = 0; i < length(); ++i) {
1674 Object* candidate = *current++;
1675 if (!candidate->IsSmi() && candidate != the_hole) return false;

// The elements backing store; typed as FixedArrayBase since it may be a
// FixedArray, FixedDoubleArray, or other elements representation.
1681 FixedArrayBase* JSObject::elements() const {
1682 Object* array = READ_FIELD(this, kElementsOffset);
1683 return static_cast<FixedArrayBase*>(array);

// Slow-assert-only validation of the elements backing store via the
// object's ElementsAccessor; a no-op in normal builds.
1687 void JSObject::ValidateElements(Handle<JSObject> object) {
1688 #ifdef ENABLE_SLOW_DCHECKS
1689 if (FLAG_enable_slow_asserts) {
1690 ElementsAccessor* accessor = object->GetElementsAccessor();
1691 accessor->Validate(object);
// Resets an AllocationSite to its freshly-allocated state: zeroed
// transition/pretenuring fields, initial fast elements kind, and empty
// dependent code (empty_fixed_array doubles as the empty DependentCode).
1697 void AllocationSite::Initialize() {
1698 set_transition_info(Smi::FromInt(0));
1699 SetElementsKind(GetInitialFastElementsKind());
1700 set_nested_site(Smi::FromInt(0));
1701 set_pretenure_data(Smi::FromInt(0));
1702 set_pretenure_create_count(Smi::FromInt(0));
1703 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1704 SKIP_WRITE_BARRIER);

// Marks a site dead so it no longer accumulates pretenuring feedback.
1708 void AllocationSite::MarkZombie() {
1709 DCHECK(!IsZombie());
1711 set_pretenure_decision(kZombie);

1715 // Heuristic: We only need to create allocation site info if the boilerplate
1716 // elements kind is the initial elements kind.
1717 AllocationSiteMode AllocationSite::GetMode(
1718 ElementsKind boilerplate_elements_kind) {
1719 if (FLAG_pretenuring_call_new ||
1720 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1721 return TRACK_ALLOCATION_SITE;
1724 return DONT_TRACK_ALLOCATION_SITE;

// Variant for elements-kind transitions: track only transitions that
// generalize from a fast-Smi kind (or whenever call-new pretenuring is on).
1728 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1730 if (FLAG_pretenuring_call_new ||
1731 (IsFastSmiElementsKind(from) &&
1732 IsMoreGeneralElementsKindTransition(from, to))) {
1733 return TRACK_ALLOCATION_SITE;
1736 return DONT_TRACK_ALLOCATION_SITE;

// Which instance types get allocation mementos: only JSArrays unless
// site pretenuring is enabled, in which case plain objects and strings
// are tracked as well.
1740 inline bool AllocationSite::CanTrack(InstanceType type) {
1741 if (FLAG_allocation_site_pretenuring) {
1742 return type == JS_ARRAY_TYPE ||
1743 type == JS_OBJECT_TYPE ||
1744 type < FIRST_NONSTRING_TYPE;
1746 return type == JS_ARRAY_TYPE;

// Maps a site-change reason to the dependent-code group to deoptimize.
// NOTE(review): the switch arms are elided in this excerpt; both visible
// returns cover the tenuring- and transition-changed groups.
1750 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1754 return DependentCode::kAllocationSiteTenuringChangedGroup;
1757 return DependentCode::kAllocationSiteTransitionChangedGroup;
1761 return DependentCode::kAllocationSiteTransitionChangedGroup;
// Stores the found-memento counter into the packed pretenure_data Smi
// via the MementoFoundCountBits bitfield.
1765 inline void AllocationSite::set_memento_found_count(int count) {
1766 int value = pretenure_data()->value();
1767 // Verify that we can count more mementos than we can possibly find in one
1768 // new space collection.
1769 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1770 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1771 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1772 DCHECK(count < MementoFoundCountBits::kMax);
1774 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1775 SKIP_WRITE_BARRIER);

// Bumps the found count; returns true exactly when the count reaches
// the minimum threshold (so the caller can register the site once).
// Zombie sites are ignored.
1778 inline bool AllocationSite::IncrementMementoFoundCount() {
1779 if (IsZombie()) return false;
1781 int value = memento_found_count();
1782 set_memento_found_count(value + 1);
1783 return memento_found_count() == kPretenureMinimumCreated;

// Bumps the created-memento counter (only meaningful with site
// pretenuring enabled).
1787 inline void AllocationSite::IncrementMementoCreateCount() {
1788 DCHECK(FLAG_allocation_site_pretenuring);
1789 int value = memento_create_count();
1790 set_memento_create_count(value + 1);

// State machine for the pretenuring decision. Transitions are only made
// out of kUndecided/kMaybeTenure; a high survival ratio moves toward
// tenure (committing only when the semispace was at maximum size, since
// only then is the feedback trustworthy). Returns whether dependent
// code must be deoptimized (tracked via set_deopt_dependent_code).
1794 inline bool AllocationSite::MakePretenureDecision(
1795 PretenureDecision current_decision,
1797 bool maximum_size_scavenge) {
1798 // Here we just allow state transitions from undecided or maybe tenure
1799 // to don't tenure, maybe tenure, or tenure.
1800 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1801 if (ratio >= kPretenureRatio) {
1802 // We just transition into tenure state when the semi-space was at
1803 // maximum capacity.
1804 if (maximum_size_scavenge) {
1805 set_deopt_dependent_code(true);
1806 set_pretenure_decision(kTenure);
1807 // Currently we just need to deopt when we make a state transition to
1811 set_pretenure_decision(kMaybeTenure);
1813 set_pretenure_decision(kDontTenure);

// Called after a scavenge: computes the survived/created ratio, feeds it
// to MakePretenureDecision, optionally traces, and resets the per-cycle
// counters. Returns whether a deopt is required.
1820 inline bool AllocationSite::DigestPretenuringFeedback(
1821 bool maximum_size_scavenge) {
1823 int create_count = memento_create_count();
1824 int found_count = memento_found_count();
1825 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1827 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1828 static_cast<double>(found_count) / create_count : 0.0;
1829 PretenureDecision current_decision = pretenure_decision();
1831 if (minimum_mementos_created) {
1832 deopt = MakePretenureDecision(
1833 current_decision, ratio, maximum_size_scavenge);
1836 if (FLAG_trace_pretenuring_statistics) {
1838 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1839 static_cast<void*>(this), create_count, found_count, ratio,
1840 PretenureDecisionName(current_decision),
1841 PretenureDecisionName(pretenure_decision()));
1844 // Clear feedback calculation fields until the next gc.
1845 set_memento_found_count(0);
1846 set_memento_create_count(0);
// Widens the elements kind so arbitrary heap objects can be stored:
// transitions to FAST_(HOLEY_)ELEMENTS if the object currently has a
// more specialized fast kind.
1851 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1852 JSObject::ValidateElements(object);
1853 ElementsKind elements_kind = object->map()->elements_kind();
1854 if (!IsFastObjectElementsKind(elements_kind)) {
1855 if (IsFastHoleyElementsKind(elements_kind)) {
1856 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1858 TransitionElementsKind(object, FAST_ELEMENTS);

// Scans |count| candidate values and computes the most specialized
// elements kind that can hold them all (Smi -> double -> object, with a
// holey variant if the hole sentinel appears), then transitions once.
1864 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1867 EnsureElementsMode mode) {
1868 ElementsKind current_kind = object->map()->elements_kind();
1869 ElementsKind target_kind = current_kind;
1871 DisallowHeapAllocation no_allocation;
1872 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1873 bool is_holey = IsFastHoleyElementsKind(current_kind);
1874 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1875 Heap* heap = object->GetHeap();
1876 Object* the_hole = heap->the_hole_value();
1877 for (uint32_t i = 0; i < count; ++i) {
1878 Object* current = *objects++;
1879 if (current == the_hole) {
1881 target_kind = GetHoleyElementsKind(target_kind);
1882 } else if (!current->IsSmi()) {
1883 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1884 if (IsFastSmiElementsKind(target_kind)) {
1886 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1888 target_kind = FAST_DOUBLE_ELEMENTS;
1891 } else if (is_holey) {
1892 target_kind = FAST_HOLEY_ELEMENTS;
1895 target_kind = FAST_ELEMENTS;
1900 if (target_kind != current_kind) {
1901 TransitionElementsKind(object, target_kind);

// Overload taking a whole backing store. Plain FixedArrays delegate to
// the slot-scanning overload above; FixedDoubleArrays (only legal with
// ALLOW_COPIED_DOUBLE_ELEMENTS) force a transition to a double kind,
// holey if any hole is present.
1906 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1907 Handle<FixedArrayBase> elements,
1909 EnsureElementsMode mode) {
1910 Heap* heap = object->GetHeap();
1911 if (elements->map() != heap->fixed_double_array_map()) {
1912 DCHECK(elements->map() == heap->fixed_array_map() ||
1913 elements->map() == heap->fixed_cow_array_map());
1914 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1915 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1918 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1919 EnsureCanContainElements(object, objects, length, mode);
1923 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1924 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1925 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1926 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1927 Handle<FixedDoubleArray> double_array =
1928 Handle<FixedDoubleArray>::cast(elements);
1929 for (uint32_t i = 0; i < length; ++i) {
1930 if (double_array->is_the_hole(i)) {
1931 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1935 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
// Atomically (from the VM's perspective) installs a new map and a new
// elements backing store, asserting that the store's representation
// matches the new map's elements kind.
1940 void JSObject::SetMapAndElements(Handle<JSObject> object,
1941 Handle<Map> new_map,
1942 Handle<FixedArrayBase> value) {
1943 JSObject::MigrateToMap(object, new_map);
1944 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1945 (*value == object->GetHeap()->empty_fixed_array())) ==
1946 (value->map() == object->GetHeap()->fixed_array_map() ||
1947 value->map() == object->GetHeap()->fixed_cow_array_map()));
1948 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1949 (object->map()->has_fast_double_elements() ==
1950 value->IsFixedDoubleArray()));
1951 object->set_elements(*value);

// Raw elements-store write with a mode-dependent write barrier.
1955 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1956 WRITE_FIELD(this, kElementsOffset, value);
1957 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);

// Installs the canonical empty property array; no barrier needed since
// the empty array is asserted to live outside new space.
1961 void JSObject::initialize_properties() {
1962 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1963 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());

// Installs the map's canonical initial elements store.
1967 void JSObject::initialize_elements() {
1968 FixedArrayBase* elements = map()->GetInitialElements();
1969 WRITE_FIELD(this, kElementsOffset, elements);
// If |map| has exactly one simple FIELD transition with no special
// attributes keyed by a string, returns that key; otherwise a null
// handle. Used by the inline transition-following fast path.
1973 Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
1974 DisallowHeapAllocation no_gc;
1975 if (!map->HasTransitionArray()) return Handle<String>::null();
1976 TransitionArray* transitions = map->transitions();
1977 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1978 int transition = TransitionArray::kSimpleTransitionIndex;
1979 PropertyDetails details = transitions->GetTargetDetails(transition);
1980 Name* name = transitions->GetKey(transition);
1981 if (details.type() != FIELD) return Handle<String>::null();
1982 if (details.attributes() != NONE) return Handle<String>::null();
1983 if (!name->IsString()) return Handle<String>::null();
1984 return Handle<String>(String::cast(name));

// Target map of the simple transition validated above; only legal when
// ExpectedTransitionKey returned a non-null key.
1988 Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
1989 DCHECK(!ExpectedTransitionKey(map).is_null());
1990 return Handle<Map>(map->transitions()->GetTarget(
1991 TransitionArray::kSimpleTransitionIndex));

// Searches the full transition array for a plain FIELD transition on
// |key|; returns the target map or a null handle.
1995 Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1996 DisallowHeapAllocation no_allocation;
1997 if (!map->HasTransitionArray()) return Handle<Map>::null();
1998 TransitionArray* transitions = map->transitions();
1999 int transition = transitions->Search(*key);
2000 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
2001 PropertyDetails target_details = transitions->GetTargetDetails(transition);
2002 if (target_details.type() != FIELD) return Handle<Map>::null();
2003 if (target_details.attributes() != NONE) return Handle<Map>::null();
2004 return Handle<Map>(transitions->GetTarget(transition));
// Oddballs (undefined, null, true, false, the-hole, ...): cached string
// and number conversions plus a small-integer kind tag.
2008 ACCESSORS(Oddball, to_string, String, kToStringOffset)
2009 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)

2012 byte Oddball::kind() const {
2013 return Smi::cast(READ_FIELD(this, kKindOffset))->value();

// Kind is stored as a Smi, so no write barrier is required.
2017 void Oddball::set_kind(byte value) {
2018 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));

// Cell: a single mutable value slot.
2022 Object* Cell::value() const {
2023 return READ_FIELD(this, kValueOffset);

2027 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
2028 // The write barrier is not used for global property cells.
2029 DCHECK(!val->IsPropertyCell() && !val->IsCell());
2030 WRITE_FIELD(this, kValueOffset, val);

2033 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

// Raw accessors for the cell's type field (no barrier on the setter).
2035 Object* PropertyCell::type_raw() const {
2036 return READ_FIELD(this, kTypeOffset);

2040 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
2041 WRITE_FIELD(this, kTypeOffset, val);
// Size in bytes of the fixed (non-property) header for this object's
// instance type; internal fields and in-object properties follow it.
// The common JS_OBJECT_TYPE case is checked before the switch for speed.
2045 int JSObject::GetHeaderSize() {
2046 InstanceType type = map()->instance_type();
2047 // Check for the most common kind of JavaScript object before
2048 // falling into the generic switch. This speeds up the internal
2049 // field operations considerably on average.
2050 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2052 case JS_GENERATOR_OBJECT_TYPE:
2053 return JSGeneratorObject::kSize;
2054 case JS_MODULE_TYPE:
2055 return JSModule::kSize;
2056 case JS_GLOBAL_PROXY_TYPE:
2057 return JSGlobalProxy::kSize;
2058 case JS_GLOBAL_OBJECT_TYPE:
2059 return JSGlobalObject::kSize;
2060 case JS_BUILTINS_OBJECT_TYPE:
2061 return JSBuiltinsObject::kSize;
2062 case JS_FUNCTION_TYPE:
2063 return JSFunction::kSize;
2065 return JSValue::kSize;
2067 return JSDate::kSize;
2069 return JSArray::kSize;
2070 case JS_ARRAY_BUFFER_TYPE:
2071 return JSArrayBuffer::kSize;
2072 case JS_TYPED_ARRAY_TYPE:
2073 return JSTypedArray::kSize;
2074 case JS_DATA_VIEW_TYPE:
2075 return JSDataView::kSize;
2076 case FLOAT32x4_TYPE:
2077 return Float32x4::kSize;
2078 case FLOAT64x2_TYPE:
2079 return Float64x2::kSize;
2081 return Int32x4::kSize;
2083 return JSSet::kSize;
2085 return JSMap::kSize;
2086 case JS_SET_ITERATOR_TYPE:
2087 return JSSetIterator::kSize;
2088 case JS_MAP_ITERATOR_TYPE:
2089 return JSMapIterator::kSize;
2090 case JS_WEAK_MAP_TYPE:
2091 return JSWeakMap::kSize;
2092 case JS_WEAK_SET_TYPE:
2093 return JSWeakSet::kSize;
2094 case JS_REGEXP_TYPE:
2095 return JSRegExp::kSize;
2096 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2097 return JSObject::kHeaderSize;
2098 case JS_MESSAGE_OBJECT_TYPE:
2099 return JSMessageObject::kSize;
2101 // TODO(jkummerow): Re-enable this. Blink currently hits this
2102 // from its CustomElementConstructorBuilder.
// Number of internal (embedder) fields: total object size minus the
// header, in pointer slots, minus the in-object property slots.
2109 int JSObject::GetInternalFieldCount() {
2110 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2111 // Make sure to adjust for the number of in-object properties. These
2112 // properties do contribute to the size, but are not internal fields.
2113 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2114 map()->inobject_properties();

// Byte offset of internal field |index| (fields sit right after the header).
2118 int JSObject::GetInternalFieldOffset(int index) {
2119 DCHECK(index < GetInternalFieldCount() && index >= 0);
2120 return GetHeaderSize() + (kPointerSize * index);

2124 Object* JSObject::GetInternalField(int index) {
2125 DCHECK(index < GetInternalFieldCount() && index >= 0);
2126 // Internal objects do follow immediately after the header, whereas in-object
2127 // properties are at the end of the object. Therefore there is no need
2128 // to adjust the index here.
2129 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));

2133 void JSObject::SetInternalField(int index, Object* value) {
2134 DCHECK(index < GetInternalFieldCount() && index >= 0);
2135 // Internal objects do follow immediately after the header, whereas in-object
2136 // properties are at the end of the object. Therefore there is no need
2137 // to adjust the index here.
2138 int offset = GetHeaderSize() + (kPointerSize * index);
2139 WRITE_FIELD(this, offset, value);
2140 WRITE_BARRIER(GetHeap(), this, offset, value);

// Smi overload: no write barrier needed since Smis are not heap objects.
2144 void JSObject::SetInternalField(int index, Smi* value) {
2145 DCHECK(index < GetInternalFieldCount() && index >= 0);
2146 // Internal objects do follow immediately after the header, whereas in-object
2147 // properties are at the end of the object. Therefore there is no need
2148 // to adjust the index here.
2149 int offset = GetHeaderSize() + (kPointerSize * index);
2150 WRITE_FIELD(this, offset, value);

2154 // Access fast-case object properties at index. The use of these routines
2155 // is needed to correctly distinguish between properties stored in-object and
2156 // properties stored in the properties array.
2157 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2158 if (index.is_inobject()) {
2159 return READ_FIELD(this, index.offset());
2161 return properties()->get(index.outobject_array_index());

// Store counterpart of RawFastPropertyAt; the properties-array branch
// relies on FixedArray::set for its own write barrier.
2166 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2167 if (index.is_inobject()) {
2168 int offset = index.offset();
2169 WRITE_FIELD(this, offset, value);
2170 WRITE_BARRIER(GetHeap(), this, offset, value);
2172 properties()->set(index.outobject_array_index(), value);

// In-object property access by property index (offset computed by Map).
2177 int JSObject::GetInObjectPropertyOffset(int index) {
2178 return map()->GetInObjectPropertyOffset(index);

2182 Object* JSObject::InObjectPropertyAt(int index) {
2183 int offset = GetInObjectPropertyOffset(index);
2184 return READ_FIELD(this, offset);

2188 Object* JSObject::InObjectPropertyAtPut(int index,
2190 WriteBarrierMode mode) {
2191 // Adjust for the number of properties stored in the object.
2192 int offset = GetInObjectPropertyOffset(index);
2193 WRITE_FIELD(this, offset, value);
2194 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Fills a freshly allocated object's body: the pre-allocated property
// slots get |pre_allocated_value|, the remainder gets |filler_value|.
// Both values must be write-barrier-free (Smis or old-space objects, as
// the DCHECKs assert), so raw WRITE_FIELD is safe.
2200 void JSObject::InitializeBody(Map* map,
2201 Object* pre_allocated_value,
2202 Object* filler_value) {
2203 DCHECK(!filler_value->IsHeapObject() ||
2204 !GetHeap()->InNewSpace(filler_value));
2205 DCHECK(!pre_allocated_value->IsHeapObject() ||
2206 !GetHeap()->InNewSpace(pre_allocated_value));
2207 int size = map->instance_size();
2208 int offset = kHeaderSize;
2209 if (filler_value != pre_allocated_value) {
2210 int pre_allocated = map->pre_allocated_property_fields();
2211 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2212 for (int i = 0; i < pre_allocated; i++) {
2213 WRITE_FIELD(this, offset, pre_allocated_value);
2214 offset += kPointerSize;
2217 while (offset < size) {
2218 WRITE_FIELD(this, offset, filler_value);
2219 offset += kPointerSize;

// Fast-mode objects keep properties in an array; dictionary-mode ones
// in a NameDictionary. The map flag must agree with the backing store.
2224 bool JSObject::HasFastProperties() {
2225 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2226 return !properties()->IsDictionary();

// Heuristic for when to give up on fast properties and normalize to
// dictionary mode: too many out-of-object fields relative to a limit
// that is looser for definitely-named (non-keyed) stores.
2230 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2231 if (unused_property_fields() != 0) return false;
2232 if (is_prototype_map()) return false;
2233 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2234 int limit = Max(minimum, inobject_properties());
2235 int external = NumberOfFields() - inobject_properties();
2236 return external > limit;

// Initializes every slot of a Struct body to undefined.
2240 void Struct::InitializeBody(int object_size) {
2241 Object* value = GetHeap()->undefined_value();
2242 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2243 WRITE_FIELD(this, offset, value);
// Converts this object to an array index if it is a non-negative Smi or
// a HeapNumber that round-trips exactly through uint32_t; returns false
// otherwise.
2248 bool Object::ToArrayIndex(uint32_t* index) {
2250 int value = Smi::cast(this)->value();
2251 if (value < 0) return false;
2255 if (IsHeapNumber()) {
2256 double value = HeapNumber::cast(this)->value();
2257 uint32_t uint_value = static_cast<uint32_t>(value);
2258 if (value == static_cast<double>(uint_value)) {
2259 *index = uint_value;

// True if this is a String wrapper (JSValue) whose wrapped string has a
// character at position |index|.
2267 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2268 if (!this->IsJSValue()) return false;
2270 JSValue* js_value = JSValue::cast(this);
2271 if (!js_value->value()->IsString()) return false;
2273 String* str = String::cast(js_value->value());
2274 if (index >= static_cast<uint32_t>(str->length())) return false;

// Extra-checks builds verify that values returned from API callbacks
// are of a type the VM can handle; otherwise this is a no-op.
2280 void Object::VerifyApiCallResultType() {
2281 #if ENABLE_EXTRA_CHECKS
2291 FATAL("API call returned invalid object");
2293 #endif // ENABLE_EXTRA_CHECKS
// Raw element read; bounds are checked only in slow-DCHECK builds.
2297 Object* FixedArray::get(int index) {
2298 SLOW_DCHECK(index >= 0 && index < this->length());
2299 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
// Handle-returning convenience wrapper around the raw get() above.
2303 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2304 return handle(array->get(index), array->GetIsolate());
// True when the slot holds the one-of-a-kind hole sentinel.
2308 bool FixedArray::is_the_hole(int index) {
2309 return get(index) == GetHeap()->the_hole_value();
// Smi store. No write barrier: a Smi is a tagged immediate, not a heap
// pointer, so the GC never needs to be told about it. COW arrays must not
// be mutated in place, hence the map check.
2313 void FixedArray::set(int index, Smi* value) {
2314 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2315 DCHECK(index >= 0 && index < this->length());
2316 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2317 int offset = kHeaderSize + index * kPointerSize;
2318 WRITE_FIELD(this, offset, value);
// General object store: same checks as above, plus an unconditional write
// barrier because value may be a heap pointer.
2322 void FixedArray::set(int index, Object* value) {
2323 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2324 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2325 DCHECK(index >= 0 && index < this->length());
2326 int offset = kHeaderSize + index * kPointerSize;
2327 WRITE_FIELD(this, offset, value);
2328 WRITE_BARRIER(GetHeap(), this, offset, value);
// FixedDoubleArray stores unboxed doubles and reserves one specific NaN bit
// pattern (kHoleNanInt64) as the "hole" marker. Hole checks must therefore
// compare bit patterns, not use ==, since NaN != NaN.
2332 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
2333 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
// The hole marker reinterpreted as a double, for storing into slots.
2337 inline double FixedDoubleArray::hole_nan_as_double() {
2338 return BitCast<double, uint64_t>(kHoleNanInt64);
// The canonical NaN used for ordinary (non-hole) NaN values; the DCHECKs
// guarantee it can never be confused with the hole bit pattern.
2342 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
2343 DCHECK(BitCast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
2344 DCHECK((BitCast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
2345 return base::OS::nan_value();
// Reads element index as a raw double; callers must have ruled out holes
// (asserted below).
2349 double FixedDoubleArray::get_scalar(int index) {
2350 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2351 map() != GetHeap()->fixed_array_map());
2352 DCHECK(index >= 0 && index < this->length());
2353 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2354 DCHECK(!is_the_hole_nan(result));
// Reads the raw 64-bit representation; unlike get_scalar this is safe on
// hole slots.
2358 int64_t FixedDoubleArray::get_representation(int index) {
2359 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2360 map() != GetHeap()->fixed_array_map());
2361 DCHECK(index >= 0 && index < this->length());
2362 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
// Handle-returning read: maps a hole slot to the_hole_value, otherwise
// boxes the double into a new Number.
2366 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2368 if (array->is_the_hole(index)) {
2369 return array->GetIsolate()->factory()->the_hole_value();
2371 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
// Stores a double; incoming NaNs are canonicalized so a user-supplied NaN
// can never alias the hole marker.
2376 void FixedDoubleArray::set(int index, double value) {
2377 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2378 map() != GetHeap()->fixed_array_map());
2379 int offset = kHeaderSize + index * kDoubleSize;
2380 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
2381 WRITE_DOUBLE_FIELD(this, offset, value);
// Marks a slot as a hole by writing the reserved NaN bit pattern.
2385 void FixedDoubleArray::set_the_hole(int index) {
2386 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2387 map() != GetHeap()->fixed_array_map());
2388 int offset = kHeaderSize + index * kDoubleSize;
2389 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
// Bit-pattern hole test for a slot.
2393 bool FixedDoubleArray::is_the_hole(int index) {
2394 int offset = kHeaderSize + index * kDoubleSize;
2395 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
// Address of the first element, for bulk memcpy-style access.
2399 double* FixedDoubleArray::data_start() {
2400 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
// Fills [from, to) with holes (loop body elided in this excerpt;
// presumably calls set_the_hole(i) — confirm against full source).
2404 void FixedDoubleArray::FillWithHoles(int from, int to) {
2405 for (int i = from; i < to; i++) {
// --- ConstantPoolArray::NumberOfEntries -------------------------------------
// A small per-type counter bundle (INT64/CODE_PTR/HEAP_PTR/INT32) used to
// describe one layout section of a constant pool.
2411 void ConstantPoolArray::NumberOfEntries::increment(Type type) {
2412 DCHECK(type < NUMBER_OF_TYPES);
2413 element_counts_[type]++;
// Element-wise comparison with another counter bundle. NOTE(review): the
// return type is int but the visible returns are boolean — matches the
// declaration elsewhere, so left untouched; verify against the header.
2417 int ConstantPoolArray::NumberOfEntries::equals(
2418 const ConstantPoolArray::NumberOfEntries& other) const {
2419 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2420 if (element_counts_[i] != other.element_counts_[i]) return false;
// True when no entries of any type are recorded.
2426 bool ConstantPoolArray::NumberOfEntries::is_empty() const {
2427 return total_count() == 0;
// Count of entries of a single type.
2431 int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
2432 DCHECK(type < NUMBER_OF_TYPES);
2433 return element_counts_[type];
// Index at which entries of the given type begin: the sum of the counts of
// all preceding types.
2437 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2439 DCHECK(type < NUMBER_OF_TYPES);
2440 for (int i = 0; i < type; i++) {
2441 base += element_counts_[i];
// Total entries across all types.
2447 int ConstantPoolArray::NumberOfEntries::total_count() const {
2449 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2450 count += element_counts_[i];
// Checks every per-type count lies in [min, max]. Same int-vs-bool return
// oddity as equals() above; left as declared.
2456 int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2457 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2458 if (element_counts_[i] < min || element_counts_[i] > max) {
// --- ConstantPoolArray::Iterator --------------------------------------------
// Yields successive entry indices of one type, transparently stepping from
// the small section into the extended section.
2466 int ConstantPoolArray::Iterator::next_index() {
2467 DCHECK(!is_finished());
2468 int ret = next_index_++;
// Finished once we have walked past the last index of this type in the
// final section.
2474 bool ConstantPoolArray::Iterator::is_finished() {
2475 return next_index_ > array_->last_index(type_, final_section_);
// If we ran off the current section but an extended section remains, jump
// to the first index of this type in the extended section.
2479 void ConstantPoolArray::Iterator::update_section() {
2480 if (next_index_ > array_->last_index(type_, current_section_) &&
2481 current_section_ != final_section_) {
2482 DCHECK(final_section_ == EXTENDED_SECTION);
2483 current_section_ = EXTENDED_SECTION;
2484 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
// --- ConstantPoolArray layout queries ---------------------------------------
// Whether this pool has the extended (two-section) layout, encoded as a bit
// in the first small-layout word.
2489 bool ConstantPoolArray::is_extended_layout() {
2490 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2491 return IsExtendedField::decode(small_layout_1);
// The last section present in this pool.
2495 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2496 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
// First entry index belonging to the extended section (== total count of
// the small section).
2500 int ConstantPoolArray::first_extended_section_index() {
2501 DCHECK(is_extended_layout());
2502 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2503 return TotalCountField::decode(small_layout_2);
// Byte offset of the extended section header, 64-bit aligned so that int64
// entries that follow stay aligned.
2507 int ConstantPoolArray::get_extended_section_header_offset() {
2508 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
// Weak-object state is packed into the second small-layout word.
2512 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2513 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2514 return WeakObjectStateField::decode(small_layout_2);
2518 void ConstantPoolArray::set_weak_object_state(
2519 ConstantPoolArray::WeakObjectState state) {
2520 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2521 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2522 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
// First entry index of the given type within the given section: the section
// base plus the counts of all earlier types in that section.
2526 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2528 if (section == EXTENDED_SECTION) {
2529 DCHECK(is_extended_layout());
2530 index += first_extended_section_index();
2533 for (Type type_iter = FIRST_TYPE; type_iter < type;
2534 type_iter = next_type(type_iter)) {
2535 index += number_of_entries(type_iter, section);
// Last (inclusive) entry index of the given type in the given section.
2542 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2543 return first_index(type, section) + number_of_entries(type, section) - 1;
// Per-type entry count. Small-section counts are bit-packed into the two
// layout words; extended-section counts are full ints stored in the
// extended header. (The switch labels are elided in this excerpt.)
2547 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2548 if (section == SMALL_SECTION) {
2549 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2550 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2553 return Int64CountField::decode(small_layout_1);
2555 return CodePtrCountField::decode(small_layout_1);
2557 return HeapPtrCountField::decode(small_layout_1);
2559 return Int32CountField::decode(small_layout_2);
2565 DCHECK(section == EXTENDED_SECTION && is_extended_layout());
2566 int offset = get_extended_section_header_offset();
2569 offset += kExtendedInt64CountOffset;
2572 offset += kExtendedCodePtrCountOffset;
2575 offset += kExtendedHeapPtrCountOffset;
2578 offset += kExtendedInt32CountOffset;
2583 return READ_INT_FIELD(this, offset);
// True when a byte offset falls inside the range used by entries of the
// given type, in either section.
2588 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2589 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2590 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2591 (is_extended_layout() &&
2592 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2593 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
// Maps an entry index back to its type by scanning the per-type ranges of
// the section the index falls in.
2597 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2598 LayoutSection section;
2599 if (is_extended_layout() && index >= first_extended_section_index()) {
2600 section = EXTENDED_SECTION;
2602 section = SMALL_SECTION;
2605 Type type = FIRST_TYPE;
2606 while (index > last_index(type, section)) {
2607 type = next_type(type);
2609 DCHECK(type <= LAST_TYPE);
// --- Typed entry accessors. Each asserts the index really holds an entry
// of the requested type. ------------------------------------------------------
2614 int64_t ConstantPoolArray::get_int64_entry(int index) {
2615 DCHECK(map() == GetHeap()->constant_pool_array_map());
2616 DCHECK(get_type(index) == INT64);
2617 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
// INT64 slots double as raw double storage (same 8-byte size, asserted).
2621 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2622 STATIC_ASSERT(kDoubleSize == kInt64Size);
2623 DCHECK(map() == GetHeap()->constant_pool_array_map());
2624 DCHECK(get_type(index) == INT64);
2625 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2629 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2630 DCHECK(map() == GetHeap()->constant_pool_array_map());
2631 DCHECK(get_type(index) == CODE_PTR);
2632 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2636 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2637 DCHECK(map() == GetHeap()->constant_pool_array_map());
2638 DCHECK(get_type(index) == HEAP_PTR);
2639 return READ_FIELD(this, OffsetOfElementAt(index));
2643 int32_t ConstantPoolArray::get_int32_entry(int index) {
2644 DCHECK(map() == GetHeap()->constant_pool_array_map());
2645 DCHECK(get_type(index) == INT32);
2646 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2650 void ConstantPoolArray::set(int index, int64_t value) {
2651 DCHECK(map() == GetHeap()->constant_pool_array_map());
2652 DCHECK(get_type(index) == INT64);
2653 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2657 void ConstantPoolArray::set(int index, double value) {
2658 STATIC_ASSERT(kDoubleSize == kInt64Size);
2659 DCHECK(map() == GetHeap()->constant_pool_array_map());
2660 DCHECK(get_type(index) == INT64);
2661 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
// Code pointers are raw addresses, not tagged objects, so no write barrier.
2665 void ConstantPoolArray::set(int index, Address value) {
2666 DCHECK(map() == GetHeap()->constant_pool_array_map());
2667 DCHECK(get_type(index) == CODE_PTR);
2668 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
// Heap pointers must be old-space (asserted) and still get a write barrier.
2672 void ConstantPoolArray::set(int index, Object* value) {
2673 DCHECK(map() == GetHeap()->constant_pool_array_map());
2674 DCHECK(!GetHeap()->InNewSpace(value));
2675 DCHECK(get_type(index) == HEAP_PTR);
2676 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2677 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2681 void ConstantPoolArray::set(int index, int32_t value) {
2682 DCHECK(map() == GetHeap()->constant_pool_array_map());
2683 DCHECK(get_type(index) == INT32);
2684 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
// --- Offset-based setters, used when the caller already resolved the byte
// offset; each asserts the offset belongs to the matching type range. --------
2688 void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
2689 DCHECK(map() == GetHeap()->constant_pool_array_map());
2690 DCHECK(offset_is_type(offset, INT32));
2691 WRITE_INT32_FIELD(this, offset, value);
2695 void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
2696 DCHECK(map() == GetHeap()->constant_pool_array_map());
2697 DCHECK(offset_is_type(offset, INT64));
2698 WRITE_INT64_FIELD(this, offset, value);
2702 void ConstantPoolArray::set_at_offset(int offset, double value) {
2703 DCHECK(map() == GetHeap()->constant_pool_array_map());
2704 DCHECK(offset_is_type(offset, INT64));
2705 WRITE_DOUBLE_FIELD(this, offset, value);
2709 void ConstantPoolArray::set_at_offset(int offset, Address value) {
2710 DCHECK(map() == GetHeap()->constant_pool_array_map());
2711 DCHECK(offset_is_type(offset, CODE_PTR));
2712 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
2713 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
2717 void ConstantPoolArray::set_at_offset(int offset, Object* value) {
2718 DCHECK(map() == GetHeap()->constant_pool_array_map());
2719 DCHECK(!GetHeap()->InNewSpace(value));
2720 DCHECK(offset_is_type(offset, HEAP_PTR));
2721 WRITE_FIELD(this, offset, value);
2722 WRITE_BARRIER(GetHeap(), this, offset, value);
// Initializes the small-section layout words from the given counts, with
// the extended bit cleared, and zeroes any header padding word.
2726 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2727 uint32_t small_layout_1 =
2728 Int64CountField::encode(small.count_of(INT64)) |
2729 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2730 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2731 IsExtendedField::encode(false);
2732 uint32_t small_layout_2 =
2733 Int32CountField::encode(small.count_of(INT32)) |
2734 TotalCountField::encode(small.total_count()) |
2735 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2736 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2737 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2738 if (kHeaderSize != kFirstEntryOffset) {
2739 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
2740 WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
// Two-section initialization: sets up the small section, flips the extended
// bit, then writes the four per-type counts into the extended header.
2745 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2746 const NumberOfEntries& extended) {
2747 // Initialize small layout fields first.
2750 // Set is_extended_layout field.
2751 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2752 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2753 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2755 // Initialize the extended layout fields.
2756 int extended_header_offset = get_extended_section_header_offset();
2757 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2758 extended.count_of(INT64));
2759 WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2760 extended.count_of(CODE_PTR));
2761 WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2762 extended.count_of(HEAP_PTR));
2763 WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2764 extended.count_of(INT32));
// Total object size in bytes, covering both sections when extended.
2768 int ConstantPoolArray::size() {
2769 NumberOfEntries small(this, SMALL_SECTION);
2770 if (!is_extended_layout()) {
2771 return SizeFor(small);
2773 NumberOfEntries extended(this, EXTENDED_SECTION);
2774 return SizeForExtended(small, extended);
// Total entry count: small-section total plus, if present, the four
// extended-section counts.
2779 int ConstantPoolArray::length() {
2780 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2781 int length = TotalCountField::decode(small_layout_2);
2782 if (is_extended_layout()) {
2783 length += number_of_entries(INT64, EXTENDED_SECTION) +
2784 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2785 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2786 number_of_entries(INT32, EXTENDED_SECTION);
// Decides whether stores into this object may skip the write barrier.
// Skipping is only safe for new-space objects while incremental marking is
// off; the DisallowHeapAllocation witness guarantees the object cannot be
// promoted (moved) while the returned mode is in use.
2792 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2793 const DisallowHeapAllocation& promise) {
2794 Heap* heap = GetHeap();
2795 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2796 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2797 return UPDATE_WRITE_BARRIER;
// Store with a caller-chosen write-barrier mode (see GetWriteBarrierMode).
2801 void FixedArray::set(int index,
2803 WriteBarrierMode mode) {
2804 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2805 DCHECK(index >= 0 && index < this->length());
2806 int offset = kHeaderSize + index * kPointerSize;
2807 WRITE_FIELD(this, offset, value);
2808 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Store that bypasses the incremental-marking barrier but still records
// old-to-new pointers in the store buffer.
2812 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2815 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2816 DCHECK(index >= 0 && index < array->length());
2817 int offset = kHeaderSize + index * kPointerSize;
2818 WRITE_FIELD(array, offset, value);
2819 Heap* heap = array->GetHeap();
2820 if (heap->InNewSpace(value)) {
2821 heap->RecordWrite(array->address(), offset);
// Barrier-free store; only legal when the value is not in new space
// (asserted), so no old-to-new pointer can be created.
2826 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2829 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2830 DCHECK(index >= 0 && index < array->length());
2831 DCHECK(!array->GetHeap()->InNewSpace(value));
2832 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
// The three sentinel setters below skip the barrier because the sentinels
// are immortal old-space objects (asserted via !InNewSpace).
2836 void FixedArray::set_undefined(int index) {
2837 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2838 DCHECK(index >= 0 && index < this->length());
2839 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2841 kHeaderSize + index * kPointerSize,
2842 GetHeap()->undefined_value());
2846 void FixedArray::set_null(int index) {
2847 DCHECK(index >= 0 && index < this->length());
2848 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2850 kHeaderSize + index * kPointerSize,
2851 GetHeap()->null_value());
2855 void FixedArray::set_the_hole(int index) {
2856 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2857 DCHECK(index >= 0 && index < this->length());
2858 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2860 kHeaderSize + index * kPointerSize,
2861 GetHeap()->the_hole_value());
// Fills [from, to) with holes (loop body elided in this excerpt).
2865 void FixedArray::FillWithHoles(int from, int to) {
2866 for (int i = from; i < to; i++) {
// Raw slot pointer to the first element, for bulk copying/iteration.
2872 Object** FixedArray::data_start() {
2873 return HeapObject::RawField(this, kHeaderSize);
// An empty descriptor array is shorter than kFirstIndex; only the canonical
// empty_descriptor_array is allowed to be in that state.
2877 bool DescriptorArray::IsEmpty() {
2878 DCHECK(length() >= kFirstIndex ||
2879 this == GetHeap()->empty_descriptor_array());
2880 return length() < kFirstIndex;
// Stores the descriptor count as a Smi in the dedicated length slot.
2884 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2886 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2890 // Perform a binary search in a fixed array. Low and high are entry indices. If
2891 // there are three entries in this array it should be called with low=0 and
// Keys are ordered by name hash (via the sorted-key index table); after the
// binary search narrows to the first entry with a matching hash, a linear
// scan resolves hash collisions by exact name equality. In VALID_ENTRIES
// mode only indices below valid_entries count as hits.
2893 template<SearchMode search_mode, typename T>
2894 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2895 uint32_t hash = name->Hash();
2898 DCHECK(low <= high);
2900 while (low != high) {
2901 int mid = (low + high) / 2;
2902 Name* mid_name = array->GetSortedKey(mid);
2903 uint32_t mid_hash = mid_name->Hash();
// Shrink towards the first entry whose hash is >= the target hash.
2905 if (mid_hash >= hash) {
// Collision scan: walk forward while hashes still match.
2912 for (; low <= limit; ++low) {
2913 int sort_index = array->GetSortedKeyIndex(low);
2914 Name* entry = array->GetKey(sort_index);
2915 if (entry->Hash() != hash) break;
2916 if (entry->Equals(name)) {
2917 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2920 return T::kNotFound;
2924 return T::kNotFound;
2928 // Perform a linear search in this fixed array. len is the number of entry
2929 // indices that are valid.
// ALL_ENTRIES walks in hash order (via the sorted index) and can stop early
// once hashes exceed the target; VALID_ENTRIES walks the unsorted prefix.
2930 template<SearchMode search_mode, typename T>
2931 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2932 uint32_t hash = name->Hash();
2933 if (search_mode == ALL_ENTRIES) {
2934 for (int number = 0; number < len; number++) {
2935 int sorted_index = array->GetSortedKeyIndex(number);
2936 Name* entry = array->GetKey(sorted_index);
2937 uint32_t current_hash = entry->Hash();
2938 if (current_hash > hash) break;
2939 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2942 DCHECK(len >= valid_entries);
2943 for (int number = 0; number < valid_entries; number++) {
2944 Name* entry = array->GetKey(number);
2945 uint32_t current_hash = entry->Hash();
2946 if (current_hash == hash && entry->Equals(name)) return number;
2949 return T::kNotFound;
// Dispatcher: linear search for small arrays (cheaper than binary search
// below the cutoff), binary search otherwise.
2953 template<SearchMode search_mode, typename T>
2954 int Search(T* array, Name* name, int valid_entries) {
2955 if (search_mode == VALID_ENTRIES) {
2956 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2958 SLOW_DCHECK(array->IsSortedNoDuplicates());
2961 int nof = array->number_of_entries();
2962 if (nof == 0) return T::kNotFound;
2964 // Fast case: do linear search for small arrays.
2965 const int kMaxElementsForLinearSearch = 8;
2966 if ((search_mode == ALL_ENTRIES &&
2967 nof <= kMaxElementsForLinearSearch) ||
2968 (search_mode == VALID_ENTRIES &&
2969 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2970 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2973 // Slow case: perform binary search.
2974 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
// Finds the descriptor for name among the first valid_descriptors entries.
2978 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2979 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
// Cached variant: consults the per-isolate DescriptorLookupCache keyed on
// (map, name) before falling back to a real search, and fills the cache on
// a miss.
2983 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2984 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2985 if (number_of_own_descriptors == 0) return kNotFound;
2987 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2988 int number = cache->Lookup(map, name);
2990 if (number == DescriptorLookupCache::kAbsent) {
2991 number = Search(name, number_of_own_descriptors);
2992 cache->Update(map, name, number);
// Details of the most recently added descriptor on this map.
2999 PropertyDetails Map::GetLastDescriptorDetails() {
3000 return instance_descriptors()->GetDetails(LastAdded());
// Looks up name in this map's own descriptors (cache-assisted) and records
// the outcome in result.
3004 void Map::LookupDescriptor(JSObject* holder,
3006 LookupResult* result) {
3007 DescriptorArray* descriptors = this->instance_descriptors();
3008 int number = descriptors->SearchWithCache(name, this);
3009 if (number == DescriptorArray::kNotFound) return result->NotFound();
3010 result->DescriptorResult(holder, descriptors->GetDetails(number), number);
// Looks up a map transition keyed by name and records it in result.
3014 void Map::LookupTransition(JSObject* holder,
3016 LookupResult* result) {
3017 int transition_index = this->SearchTransition(name);
3018 if (transition_index == TransitionArray::kNotFound) return result->NotFound();
3019 result->TransitionResult(holder, this->GetTransition(transition_index));
// Canonical empty backing store for a freshly allocated object with this
// map's elements kind. All returned stores are immortal old-space objects
// (asserted), so callers may install them without write barriers.
3023 FixedArrayBase* Map::GetInitialElements() {
3024 if (has_fast_smi_or_object_elements() ||
3025 has_fast_double_elements()) {
3026 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
3027 return GetHeap()->empty_fixed_array();
3028 } else if (has_external_array_elements()) {
3029 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
3030 DCHECK(!GetHeap()->InNewSpace(empty_array));
3032 } else if (has_fixed_typed_array_elements()) {
3033 FixedTypedArrayBase* empty_array =
3034 GetHeap()->EmptyFixedTypedArrayForMap(this);
3035 DCHECK(!GetHeap()->InNewSpace(empty_array));
3037 } else if (has_dictionary_elements()) {
3038 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_slow_element_dictionary()));
3039 return GetHeap()->empty_slow_element_dictionary();
// --- DescriptorArray element accessors. Each descriptor occupies a
// (key, value, details) triple; ToKeyIndex/ToValueIndex/ToDetailsIndex map a
// descriptor number to the backing FixedArray slot. -------------------------
3047 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
3048 DCHECK(descriptor_number < number_of_descriptors());
3049 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
// First slot of a descriptor's triple (== its key slot).
3053 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
3054 return GetKeySlot(descriptor_number);
// One-past-the-last slot of the previous descriptor's triple.
3058 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
3059 return GetValueSlot(descriptor_number - 1) + 1;
3063 Name* DescriptorArray::GetKey(int descriptor_number) {
3064 DCHECK(descriptor_number < number_of_descriptors());
3065 return Name::cast(get(ToKeyIndex(descriptor_number)));
// The hash-sorted ordering is stored as a pointer field inside each
// descriptor's PropertyDetails.
3069 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
3070 return GetDetails(descriptor_number).pointer();
3074 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
3075 return GetKey(GetSortedKeyIndex(descriptor_number));
3079 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
3080 PropertyDetails details = GetDetails(descriptor_index);
3081 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
// Rewrites only the representation bits of a descriptor's details.
3085 void DescriptorArray::SetRepresentation(int descriptor_index,
3086 Representation representation) {
3087 DCHECK(!representation.IsNone());
3088 PropertyDetails details = GetDetails(descriptor_index);
3089 set(ToDetailsIndex(descriptor_index),
3090 details.CopyWithRepresentation(representation).AsSmi());
3094 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3095 DCHECK(descriptor_number < number_of_descriptors());
3096 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3100 int DescriptorArray::GetValueOffset(int descriptor_number) {
3101 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3105 Object* DescriptorArray::GetValue(int descriptor_number) {
3106 DCHECK(descriptor_number < number_of_descriptors());
3107 return get(ToValueIndex(descriptor_number));
3111 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3112 set(ToValueIndex(descriptor_index), value);
// Details are stored Smi-encoded; decode on read.
3116 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3117 DCHECK(descriptor_number < number_of_descriptors());
3118 Object* details = get(ToDetailsIndex(descriptor_number));
3119 return PropertyDetails(Smi::cast(details));
3123 PropertyType DescriptorArray::GetType(int descriptor_number) {
3124 return GetDetails(descriptor_number).type();
// FIELD descriptors: the in-object/property-backing-store field index.
3128 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3129 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3130 return GetDetails(descriptor_number).field_index();
// FIELD descriptors store their HeapType as the value.
3134 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3135 DCHECK(GetDetails(descriptor_number).type() == FIELD);
3136 return HeapType::cast(GetValue(descriptor_number));
3140 Object* DescriptorArray::GetConstant(int descriptor_number) {
3141 return GetValue(descriptor_number);
3145 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3146 DCHECK(GetType(descriptor_number) == CALLBACKS);
3147 return GetValue(descriptor_number);
// CALLBACKS descriptors wrap a native AccessorDescriptor in a Foreign.
3151 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3152 DCHECK(GetType(descriptor_number) == CALLBACKS);
3153 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3154 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
// Copies one descriptor's triple out into a handle-based Descriptor.
3158 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3159 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3160 handle(GetValue(descriptor_number), GetIsolate()),
3161 GetDetails(descriptor_number));
// Witness-based Set: the WhitenessWitness proves the array is white to the
// incremental marker, so the cheaper no-incremental-barrier stores suffice.
3165 void DescriptorArray::Set(int descriptor_number,
3167 const WhitenessWitness&) {
3169 DCHECK(descriptor_number < number_of_descriptors());
3171 NoIncrementalWriteBarrierSet(this,
3172 ToKeyIndex(descriptor_number),
3174 NoIncrementalWriteBarrierSet(this,
3175 ToValueIndex(descriptor_number),
3177 NoIncrementalWriteBarrierSet(this,
3178 ToDetailsIndex(descriptor_number),
3179 desc->GetDetails().AsSmi());
// Plain Set: full write barriers via FixedArray::set.
3183 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3185 DCHECK(descriptor_number < number_of_descriptors());
3187 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3188 set(ToValueIndex(descriptor_number), *desc->GetValue());
3189 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
// Appends a descriptor and re-establishes the hash-sorted key order with a
// single insertion-sort pass from the end (witness variant).
3193 void DescriptorArray::Append(Descriptor* desc,
3194 const WhitenessWitness& witness) {
3195 DisallowHeapAllocation no_gc;
3196 int descriptor_number = number_of_descriptors();
3197 SetNumberOfDescriptors(descriptor_number + 1);
3198 Set(descriptor_number, desc, witness);
3200 uint32_t hash = desc->GetKey()->Hash();
3204 for (insertion = descriptor_number; insertion > 0; --insertion) {
3205 Name* key = GetSortedKey(insertion - 1);
3206 if (key->Hash() <= hash) break;
3207 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3210 SetSortedKey(insertion, descriptor_number);
// Same as above without the witness (uses fully barriered Set).
3214 void DescriptorArray::Append(Descriptor* desc) {
3215 DisallowHeapAllocation no_gc;
3216 int descriptor_number = number_of_descriptors();
3217 SetNumberOfDescriptors(descriptor_number + 1);
3218 Set(descriptor_number, desc);
3220 uint32_t hash = desc->GetKey()->Hash();
3224 for (insertion = descriptor_number; insertion > 0; --insertion) {
3225 Name* key = GetSortedKey(insertion - 1);
3226 if (key->Hash() <= hash) break;
3227 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3230 SetSortedKey(insertion, descriptor_number);
// Swaps two entries of the sorted-key index table.
3234 void DescriptorArray::SwapSortedKeys(int first, int second) {
3235 int first_key = GetSortedKeyIndex(first);
3236 SetSortedKey(first, GetSortedKeyIndex(second));
3237 SetSortedKey(second, first_key);
// RAII witness that the descriptor array is white to the incremental marker
// for its lifetime: entering a no-marking scope on construction (asserting
// whiteness) and leaving it on destruction. Holders may use the cheaper
// no-incremental-barrier stores.
3241 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3242 : marking_(array->GetHeap()->incremental_marking()) {
3243 marking_->EnterNoMarkingScope();
3244 DCHECK(!marking_->IsMarking() ||
3245 Marking::Color(array) == Marking::WHITE_OBJECT);
3249 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3250 marking_->LeaveNoMarkingScope();
// Capacity for a table expected to hold at_least_space_for entries: doubled
// for load factor, rounded up to a power of two (required by the probing
// scheme), and clamped to a minimum of 32.
3254 template<typename Derived, typename Shape, typename Key>
3255 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3256 const int kMinCapacity = 32;
3257 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
3258 if (capacity < kMinCapacity) {
3259 capacity = kMinCapacity;  // Guarantee min capacity.
3265 template<typename Derived, typename Shape, typename Key>
3266 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3267 return FindEntry(GetIsolate(), key);
3271 // Find entry for key otherwise return kNotFound.
// Open addressing: probe from the key's hash; undefined marks a never-used
// slot (search over), the hole marks a deleted slot (keep probing).
3272 template<typename Derived, typename Shape, typename Key>
3273 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3274 uint32_t capacity = Capacity();
3275 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3277 // EnsureCapacity will guarantee the hash table is never full.
3279 Object* element = KeyAt(entry);
3280 // Empty entry. Uses raw unchecked accessors because it is called by the
3281 // string table during bootstrapping.
3282 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3283 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3284 Shape::IsMatch(key, element)) return entry;
3285 entry = NextProbe(entry, count++, capacity);
// The max-number-key slot packs a flag bit (kRequiresSlowElementsMask) with
// the largest numeric key seen, shifted by kRequiresSlowElementsTagSize.
3291 bool SeededNumberDictionary::requires_slow_elements() {
3292 Object* max_index_object = get(kMaxNumberKeyIndex);
3293 if (!max_index_object->IsSmi()) return false;
3295 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
// Largest numeric key recorded; only meaningful while the slow-elements
// flag is clear (asserted).
3298 uint32_t SeededNumberDictionary::max_number_key() {
3299 DCHECK(!requires_slow_elements());
3300 Object* max_index_object = get(kMaxNumberKeyIndex);
3301 if (!max_index_object->IsSmi()) return 0;
3302 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3303 return value >> kRequiresSlowElementsTagSize;
// Forces the slow-elements path; overwrites (discards) the max-key value.
3306 void SeededNumberDictionary::set_requires_slow_elements() {
3307 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3311 // ------------------------------------
3315 CAST_ACCESSOR(AccessorInfo)
3316 CAST_ACCESSOR(ByteArray)
3319 CAST_ACCESSOR(CodeCacheHashTable)
3320 CAST_ACCESSOR(CompilationCacheTable)
3321 CAST_ACCESSOR(ConsString)
3322 CAST_ACCESSOR(ConstantPoolArray)
3323 CAST_ACCESSOR(DeoptimizationInputData)
3324 CAST_ACCESSOR(DeoptimizationOutputData)
3325 CAST_ACCESSOR(DependentCode)
3326 CAST_ACCESSOR(DescriptorArray)
3327 CAST_ACCESSOR(ExternalArray)
3328 CAST_ACCESSOR(ExternalAsciiString)
3329 CAST_ACCESSOR(ExternalFloat32Array)
3330 CAST_ACCESSOR(ExternalFloat32x4Array)
3331 CAST_ACCESSOR(ExternalFloat64Array)
3332 CAST_ACCESSOR(ExternalFloat64x2Array)
3333 CAST_ACCESSOR(ExternalInt16Array)
3334 CAST_ACCESSOR(ExternalInt32Array)
3335 CAST_ACCESSOR(ExternalInt32x4Array)
// Concrete cast helpers. Each CAST_ACCESSOR(T) (see the macro near the top
// of this file) expands into checked T::cast(Object*) and
// T::cast(const Object*) wrappers that SLOW_DCHECK object->Is##T() and then
// reinterpret_cast to the target type.
3336 CAST_ACCESSOR(ExternalInt8Array)
3337 CAST_ACCESSOR(ExternalString)
3338 CAST_ACCESSOR(ExternalTwoByteString)
3339 CAST_ACCESSOR(ExternalUint16Array)
3340 CAST_ACCESSOR(ExternalUint32Array)
3341 CAST_ACCESSOR(ExternalUint8Array)
3342 CAST_ACCESSOR(ExternalUint8ClampedArray)
3343 CAST_ACCESSOR(FixedArray)
3344 CAST_ACCESSOR(FixedArrayBase)
3345 CAST_ACCESSOR(FixedDoubleArray)
3346 CAST_ACCESSOR(FixedTypedArrayBase)
3347 CAST_ACCESSOR(Foreign)
3348 CAST_ACCESSOR(FreeSpace)
3349 CAST_ACCESSOR(GlobalObject)
3350 CAST_ACCESSOR(HeapObject)
// SIMD wrapper objects (Float32x4/Float64x2/Int32x4 extensions).
3351 CAST_ACCESSOR(Float32x4)
3352 CAST_ACCESSOR(Float64x2)
3353 CAST_ACCESSOR(Int32x4)
3354 CAST_ACCESSOR(JSArray)
3355 CAST_ACCESSOR(JSArrayBuffer)
3356 CAST_ACCESSOR(JSArrayBufferView)
3357 CAST_ACCESSOR(JSBuiltinsObject)
3358 CAST_ACCESSOR(JSDataView)
3359 CAST_ACCESSOR(JSDate)
3360 CAST_ACCESSOR(JSFunction)
3361 CAST_ACCESSOR(JSFunctionProxy)
3362 CAST_ACCESSOR(JSFunctionResultCache)
3363 CAST_ACCESSOR(JSGeneratorObject)
3364 CAST_ACCESSOR(JSGlobalObject)
3365 CAST_ACCESSOR(JSGlobalProxy)
3366 CAST_ACCESSOR(JSMap)
3367 CAST_ACCESSOR(JSMapIterator)
3368 CAST_ACCESSOR(JSMessageObject)
3369 CAST_ACCESSOR(JSModule)
3370 CAST_ACCESSOR(JSObject)
3371 CAST_ACCESSOR(JSProxy)
3372 CAST_ACCESSOR(JSReceiver)
3373 CAST_ACCESSOR(JSRegExp)
3374 CAST_ACCESSOR(JSSet)
3375 CAST_ACCESSOR(JSSetIterator)
3376 CAST_ACCESSOR(JSTypedArray)
3377 CAST_ACCESSOR(JSValue)
3378 CAST_ACCESSOR(JSWeakMap)
3379 CAST_ACCESSOR(JSWeakSet)
3381 CAST_ACCESSOR(MapCache)
3383 CAST_ACCESSOR(NameDictionary)
3384 CAST_ACCESSOR(NormalizedMapCache)
3385 CAST_ACCESSOR(Object)
3386 CAST_ACCESSOR(ObjectHashTable)
3387 CAST_ACCESSOR(Oddball)
3388 CAST_ACCESSOR(OrderedHashMap)
3389 CAST_ACCESSOR(OrderedHashSet)
3390 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3391 CAST_ACCESSOR(PropertyCell)
3392 CAST_ACCESSOR(ScopeInfo)
3393 CAST_ACCESSOR(SeededNumberDictionary)
3394 CAST_ACCESSOR(SeqOneByteString)
3395 CAST_ACCESSOR(SeqString)
3396 CAST_ACCESSOR(SeqTwoByteString)
3397 CAST_ACCESSOR(SharedFunctionInfo)
3398 CAST_ACCESSOR(SlicedString)
3400 CAST_ACCESSOR(String)
3401 CAST_ACCESSOR(StringTable)
3402 CAST_ACCESSOR(Struct)
3403 CAST_ACCESSOR(Symbol)
3404 CAST_ACCESSOR(UnseededNumberDictionary)
3405 CAST_ACCESSOR(WeakHashTable)
// Checked downcast to FixedTypedArray<Traits>: in debug builds verifies that
// the heap object's instance type matches Traits::kInstanceType before
// reinterpreting the pointer.
// NOTE(review): the function's closing brace is elided in this listing.
3408 template <class Traits>
3409 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3410 SLOW_DCHECK(object->IsHeapObject() &&
3411 HeapObject::cast(object)->map()->instance_type() ==
3412 Traits::kInstanceType);
3413 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3417 template <class Traits>
3418 const FixedTypedArray<Traits>*
3419 FixedTypedArray<Traits>::cast(const Object* object) {
3420 SLOW_DCHECK(object->IsHeapObject() &&
3421 HeapObject::cast(object)->map()->instance_type() ==
3422 Traits::kInstanceType);
3423 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
// Stamp out CAST_ACCESSOR definitions for every Struct subclass declared in
// STRUCT_LIST.
3427 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3428 STRUCT_LIST(MAKE_STRUCT_CAST)
3429 #undef MAKE_STRUCT_CAST
// Checked downcasts for HashTable specializations; the cheap IsHashTable()
// predicate is the only check (the Derived/Shape/Key parameters cannot be
// verified at runtime).
// NOTE(review): the closing braces of both overloads are elided in this
// listing.
3432 template <typename Derived, typename Shape, typename Key>
3433 HashTable<Derived, Shape, Key>*
3434 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3435 SLOW_DCHECK(obj->IsHashTable());
3436 return reinterpret_cast<HashTable*>(obj);
3440 template <typename Derived, typename Shape, typename Key>
3441 const HashTable<Derived, Shape, Key>*
3442 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3443 SLOW_DCHECK(obj->IsHashTable());
3444 return reinterpret_cast<const HashTable*>(obj);
// Smi-backed length/size fields. The SYNCHRONIZED_/NOBARRIER_ variants
// presumably emit atomic loads/stores (src/base/atomicops.h is included
// above) -- confirm against the macro definitions earlier in this file.
3448 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3449 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3451 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3452 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3454 SMI_ACCESSORS(String, length, kLengthOffset)
3455 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
// Raw 32-bit hash field (hash plus flag bits) of a Name.
3458 uint32_t Name::hash_field() {
3459 return READ_UINT32_FIELD(this, kHashFieldOffset);
3463 void Name::set_hash_field(uint32_t value) {
3464 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3465 #if V8_HOST_ARCH_64_BIT
// On 64-bit hosts also zero the 4 bytes after the hash field so the whole
// pointer-sized word holds a deterministic value.
3466 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
// NOTE(review): the matching #endif and closing brace are elided in this
// listing.
// Name equality: identical pointers are equal; two distinct internalized
// strings, or any comparison involving a Symbol, are identity-based and
// therefore unequal (that branch's "return false;" is elided in this
// listing). Everything else needs a character-wise SlowEquals.
3471 bool Name::Equals(Name* other) {
3472 if (other == this) return true;
3473 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3474 this->IsSymbol() || other->IsSymbol()) {
3477 return String::cast(this)->SlowEquals(String::cast(other));
// Handle-based variant of the above, with the same fast paths.
3481 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3482 if (one.is_identical_to(two)) return true;
3483 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3484 one->IsSymbol() || two->IsSymbol()) {
3487 return String::SlowEquals(Handle<String>::cast(one),
3488 Handle<String>::cast(two));
// Symbol layout: description object, flags Smi, and the is_private bit
// carved out of flags.
3492 ACCESSORS(Symbol, name, Object, kNameOffset)
3493 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3494 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
// String equality: pointer identity first; two distinct internalized
// strings are unequal (branch body elided in this listing); otherwise
// compare characters via SlowEquals.
3497 bool String::Equals(String* other) {
3498 if (other == this) return true;
3499 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3502 return SlowEquals(other);
3506 bool String::Equals(Handle<String> one, Handle<String> two) {
3507 if (one.is_identical_to(two)) return true;
3508 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3511 return SlowEquals(one, two);
3515 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3516 if (!string->IsConsString()) return string;
3517 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3518 if (cons->IsFlat()) return handle(cons->first());
3519 return SlowFlatten(cons, pretenure);
// Reads the character at |index|, dispatching on the string's concrete
// representation (sequential / cons / external / sliced, one- or two-byte).
3523 uint16_t String::Get(int index) {
3524 DCHECK(index >= 0 && index < length());
3525 switch (StringShape(this).full_representation_tag()) {
3526 case kSeqStringTag | kOneByteStringTag:
3527 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3528 case kSeqStringTag | kTwoByteStringTag:
3529 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3530 case kConsStringTag | kOneByteStringTag:
3531 case kConsStringTag | kTwoByteStringTag:
3532 return ConsString::cast(this)->ConsStringGet(index);
3533 case kExternalStringTag | kOneByteStringTag:
3534 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
3535 case kExternalStringTag | kTwoByteStringTag:
3536 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3537 case kSlicedStringTag | kOneByteStringTag:
3538 case kSlicedStringTag | kTwoByteStringTag:
3539 return SlicedString::cast(this)->SlicedStringGet(index);
// NOTE(review): the default (unreachable) arm and closing braces are
// elided in this listing.
// Writes |value| at |index|. Only valid for sequential strings; one-byte
// stores go through SeqOneByteStringSet, which narrows to a byte.
3549 void String::Set(int index, uint16_t value) {
3550 DCHECK(index >= 0 && index < length());
3551 DCHECK(StringShape(this).IsSequential());
3553 return this->IsOneByteRepresentation()
3554 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3555 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3559 bool String::IsFlat() {
3560 if (!StringShape(this).IsCons()) return true;
3561 return ConsString::cast(this)->second()->length() == 0;
// Returns the string wrapped by an indirect (cons or sliced) string. Both
// layouts store the target at the same offset, which the static assert
// below pins down.
3565 String* String::GetUnderlying() {
3566 // Giving direct access to underlying string only makes sense if the
3567 // wrapping string is already flattened.
3568 DCHECK(this->IsFlat());
3569 DCHECK(StringShape(this).IsIndirect());
3570 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3571 const int kUnderlyingOffset = SlicedString::kParentOffset;
3572 return String::cast(READ_FIELD(this, kUnderlyingOffset));
// Walks the representation chain starting at |string| and hands the flat
// character payload to |visitor| (VisitOneByteString/VisitTwoByteString).
// Sliced strings fold their offset into |slice_offset| and continue with
// the parent; an unflattened ConsString is returned to the caller, which
// must iterate it itself (NULL is presumably returned otherwise).
// NOTE(review): several continuation lines of the visitor calls (length
// arguments, returns, breaks) and the default arm are elided in this
// listing.
3576 template<class Visitor>
3577 ConsString* String::VisitFlat(Visitor* visitor,
3580 int slice_offset = offset;
3581 const int length = string->length();
3582 DCHECK(offset <= length);
3584 int32_t type = string->map()->instance_type();
3585 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3586 case kSeqStringTag | kOneByteStringTag:
3587 visitor->VisitOneByteString(
3588 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3592 case kSeqStringTag | kTwoByteStringTag:
3593 visitor->VisitTwoByteString(
3594 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3598 case kExternalStringTag | kOneByteStringTag:
3599 visitor->VisitOneByteString(
3600 ExternalAsciiString::cast(string)->GetChars() + slice_offset,
3604 case kExternalStringTag | kTwoByteStringTag:
3605 visitor->VisitTwoByteString(
3606 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3610 case kSlicedStringTag | kOneByteStringTag:
3611 case kSlicedStringTag | kTwoByteStringTag: {
3612 SlicedString* slicedString = SlicedString::cast(string);
3613 slice_offset += slicedString->offset();
3614 string = slicedString->parent();
3618 case kConsStringTag | kOneByteStringTag:
3619 case kConsStringTag | kTwoByteStringTag:
3620 return ConsString::cast(string);
// Element access for sequential one-byte strings: bytes stored immediately
// after the header.
3630 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3631 DCHECK(index >= 0 && index < length());
3632 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
// Narrowing store; the DCHECK guarantees |value| fits in one byte.
3636 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3637 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3638 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3639 static_cast<byte>(value));
// Raw character storage starts right after the object header.
3643 Address SeqOneByteString::GetCharsAddress() {
3644 return FIELD_ADDR(this, kHeaderSize);
3648 uint8_t* SeqOneByteString::GetChars() {
3649 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3653 Address SeqTwoByteString::GetCharsAddress() {
3654 return FIELD_ADDR(this, kHeaderSize);
3658 uc16* SeqTwoByteString::GetChars() {
3659 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
// Element access for sequential two-byte strings (UTF-16 code units).
3663 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3664 DCHECK(index >= 0 && index < length());
3665 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3669 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3670 DCHECK(index >= 0 && index < length());
3671 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
// Object sizes derived from the current length; the InstanceType argument
// is unused here.
3675 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3676 return SizeFor(length());
3680 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3681 return SizeFor(length());
// SlicedString: view into a parent string at a given offset. The parent
// must itself be flat (sequential or external) -- no nested slices/cons.
3685 String* SlicedString::parent() {
3686 return String::cast(READ_FIELD(this, kParentOffset));
3690 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3691 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3692 WRITE_FIELD(this, kParentOffset, parent);
3693 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3697 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
// ConsString halves. The unchecked_* readers skip the String::cast check
// for callers (e.g. the GC) that may see untyped values.
3700 String* ConsString::first() {
3701 return String::cast(READ_FIELD(this, kFirstOffset));
3705 Object* ConsString::unchecked_first() {
3706 return READ_FIELD(this, kFirstOffset);
3710 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3711 WRITE_FIELD(this, kFirstOffset, value);
3712 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3716 String* ConsString::second() {
3717 return String::cast(READ_FIELD(this, kSecondOffset));
3721 Object* ConsString::unchecked_second() {
3722 return READ_FIELD(this, kSecondOffset);
3726 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3727 WRITE_FIELD(this, kSecondOffset, value);
3728 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3732 bool ExternalString::is_short() {
3733 InstanceType type = map()->instance_type();
3734 return (type & kShortExternalStringMask) == kShortExternalStringTag;
// Embedder-supplied backing resource of a one-byte external string.
3738 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
3739 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
// Refreshes the cached raw data pointer from the resource. Short external
// strings have no cache field, so they are skipped.
3743 void ExternalAsciiString::update_data_cache() {
3744 if (is_short()) return;
3745 const char** data_field =
3746 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3747 *data_field = resource()->data();
// Installs the backing resource (pointer-size aligned) and refreshes the
// data cache when non-NULL.
3751 void ExternalAsciiString::set_resource(
3752 const ExternalAsciiString::Resource* resource) {
3753 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3754 *reinterpret_cast<const Resource**>(
3755 FIELD_ADDR(this, kResourceOffset)) = resource;
3756 if (resource != NULL) update_data_cache();
3760 const uint8_t* ExternalAsciiString::GetChars() {
3761 return reinterpret_cast<const uint8_t*>(resource()->data());
3765 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
3766 DCHECK(index >= 0 && index < length());
3767 return GetChars()[index];
// Two-byte external string: same resource/cache scheme as the one-byte
// variant above, with uint16_t data.
3771 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3772 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3776 void ExternalTwoByteString::update_data_cache() {
3777 if (is_short()) return;
3778 const uint16_t** data_field =
3779 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3780 *data_field = resource()->data();
3784 void ExternalTwoByteString::set_resource(
3785 const ExternalTwoByteString::Resource* resource) {
3786 *reinterpret_cast<const Resource**>(
3787 FIELD_ADDR(this, kResourceOffset)) = resource;
3788 if (resource != NULL) update_data_cache();
// Raw character access for two-byte external strings, reading straight from
// the embedder resource.
3792 const uint16_t* ExternalTwoByteString::GetChars() {
3793 return resource()->data();
3797 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3798 DCHECK(index >= 0 && index < length());
3799 return GetChars()[index];
// Pointer to the character data beginning at |start| (parameter line
// elided in this listing).
3803 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3805 return GetChars() + start;
// ConsStringIteratorOp keeps an explicit stack of cons-string frames in a
// fixed ring buffer of 2^k entries; kDepthMask wraps indices into it.
3809 int ConsStringIteratorOp::OffsetForDepth(int depth) {
3810 return depth & kDepthMask;
3814 void ConsStringIteratorOp::PushLeft(ConsString* string) {
3815 frames_[depth_++ & kDepthMask] = string;
// Replaces the current top frame (depth unchanged) when descending right.
3819 void ConsStringIteratorOp::PushRight(ConsString* string) {
3821 frames_[(depth_-1) & kDepthMask] = string;
3825 void ConsStringIteratorOp::AdjustMaximumDepth() {
3826 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
// NOTE(review): Pop's decrement and preceding DCHECK lines are elided in
// this listing.
3830 void ConsStringIteratorOp::Pop() {
3832 DCHECK(depth_ <= maximum_depth_);
// Returns the next character, advancing to the next string segment when
// the current buffer is exhausted. buffer8_/buffer16_ alias the same
// cursor; is_one_byte_ selects the width.
3837 uint16_t StringCharacterStream::GetNext() {
3838 DCHECK(buffer8_ != NULL && end_ != NULL);
3839 // Advance cursor if needed.
3840 if (buffer8_ == end_) HasMore();
3841 DCHECK(buffer8_ < end_);
3842 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
// NOTE(review): the offset parameter and op_ initializer lines of this
// constructor are elided in this listing.
3846 StringCharacterStream::StringCharacterStream(String* string,
3847 ConsStringIteratorOp* op,
3849 : is_one_byte_(false),
3851 Reset(string, offset);
// (Re)positions the stream: visit the flat prefix of |string|; if a cons
// string remains, seed the iterator op and visit its first segment.
3855 void StringCharacterStream::Reset(String* string, int offset) {
3858 ConsString* cons_string = String::VisitFlat(this, string, offset);
3859 op_->Reset(cons_string, offset);
3860 if (cons_string != NULL) {
3861 string = op_->Next(&offset);
3862 if (string != NULL) String::VisitFlat(this, string, offset);
// True when more characters are available, pulling the next segment from
// the cons iterator if the current buffer is exhausted.
// NOTE(review): the local offset declaration and trailing "return true;"
// are elided in this listing.
3867 bool StringCharacterStream::HasMore() {
3868 if (buffer8_ != end_) return true;
3870 String* string = op_->Next(&offset);
3871 DCHECK_EQ(offset, 0);
3872 if (string == NULL) return false;
3873 String::VisitFlat(this, string);
3874 DCHECK(buffer8_ != end_);
// VisitFlat callbacks: record segment bounds and the element width.
// NOTE(review): the buffer8_/buffer16_ assignment lines are elided in this
// listing.
3879 void StringCharacterStream::VisitOneByteString(
3880 const uint8_t* chars, int length) {
3881 is_one_byte_ = true;
3883 end_ = chars + length;
3887 void StringCharacterStream::VisitTwoByteString(
3888 const uint16_t* chars, int length) {
3889 is_one_byte_ = false;
3891 end_ = reinterpret_cast<const uint8_t*>(chars + length);
// JSFunctionResultCache is a FixedArray with bookkeeping slots
// (kCacheSizeIndex, kFingerIndex) before the entries at kEntriesIndex.
3895 void JSFunctionResultCache::MakeZeroSize() {
3896 set_finger_index(kEntriesIndex);
3897 set_size(kEntriesIndex);
// Clears all entries to the hole; the bookkeeping slots stay intact.
3901 void JSFunctionResultCache::Clear() {
3902 int cache_size = size();
3903 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3904 MemsetPointer(entries_start,
3905 GetHeap()->the_hole_value(),
3906 cache_size - kEntriesIndex);
3911 int JSFunctionResultCache::size() {
3912 return Smi::cast(get(kCacheSizeIndex))->value();
3916 void JSFunctionResultCache::set_size(int size) {
3917 set(kCacheSizeIndex, Smi::FromInt(size));
3921 int JSFunctionResultCache::finger_index() {
3922 return Smi::cast(get(kFingerIndex))->value();
3926 void JSFunctionResultCache::set_finger_index(int finger_index) {
3927 set(kFingerIndex, Smi::FromInt(finger_index));
// ByteArray: raw byte storage after the header, with an int-granular
// reader for deserialization-style access.
3931 byte ByteArray::get(int index) {
3932 DCHECK(index >= 0 && index < this->length());
3933 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3937 void ByteArray::set(int index, byte value) {
3938 DCHECK(index >= 0 && index < this->length());
3939 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3943 int ByteArray::get_int(int index) {
3944 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3945 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
// Recovers the tagged ByteArray pointer from the address of its first data
// byte, and vice versa.
3949 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3950 DCHECK_TAG_ALIGNED(address);
3951 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3955 Address ByteArray::GetDataStartAddress() {
3956 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
// Uint8Clamped external array: data lives outside the heap, reached via
// external_pointer().
3960 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
3961 return reinterpret_cast<uint8_t*>(external_pointer());
// NOTE(review): the "return ptr[index];" line of get_scalar is elided in
// this listing.
3965 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
3966 DCHECK((index >= 0) && (index < this->length()));
3967 uint8_t* ptr = external_uint8_clamped_pointer();
// Boxes the element as a Smi handle.
3972 Handle<Object> ExternalUint8ClampedArray::get(
3973 Handle<ExternalUint8ClampedArray> array,
3975 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
3976 array->GetIsolate());
// NOTE(review): the element store line of set() is elided in this listing.
3980 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
3981 DCHECK((index >= 0) && (index < this->length()));
3982 uint8_t* ptr = external_uint8_clamped_pointer();
// Raw off-heap backing-store pointer, stored as an intptr field.
3987 void* ExternalArray::external_pointer() const {
3988 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3989 return reinterpret_cast<void*>(ptr);
3993 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
3994 intptr_t ptr = reinterpret_cast<intptr_t>(value);
3995 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
// Element accessors for each external (off-heap) typed array flavor. The
// pattern is uniform: get_scalar/set index the raw backing store, and
// get() boxes the element -- as a Smi where it always fits, otherwise via
// factory Number/SIMD constructors.
// NOTE(review): the "return ptr[index];" / "ptr[index] = value;" lines of
// the scalar accessors, and the trailing "return value;" of the SIMD
// get_scalar functions, are elided in this listing.
3999 int8_t ExternalInt8Array::get_scalar(int index) {
4000 DCHECK((index >= 0) && (index < this->length()));
4001 int8_t* ptr = static_cast<int8_t*>(external_pointer());
4006 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
4008 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4009 array->GetIsolate());
4013 void ExternalInt8Array::set(int index, int8_t value) {
4014 DCHECK((index >= 0) && (index < this->length()));
4015 int8_t* ptr = static_cast<int8_t*>(external_pointer());
4020 uint8_t ExternalUint8Array::get_scalar(int index) {
4021 DCHECK((index >= 0) && (index < this->length()));
4022 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
4027 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
4029 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4030 array->GetIsolate());
4034 void ExternalUint8Array::set(int index, uint8_t value) {
4035 DCHECK((index >= 0) && (index < this->length()));
4036 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
4041 int16_t ExternalInt16Array::get_scalar(int index) {
4042 DCHECK((index >= 0) && (index < this->length()));
4043 int16_t* ptr = static_cast<int16_t*>(external_pointer());
4048 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
4050 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4051 array->GetIsolate());
4055 void ExternalInt16Array::set(int index, int16_t value) {
4056 DCHECK((index >= 0) && (index < this->length()));
4057 int16_t* ptr = static_cast<int16_t*>(external_pointer());
4062 uint16_t ExternalUint16Array::get_scalar(int index) {
4063 DCHECK((index >= 0) && (index < this->length()));
4064 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
4069 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
4071 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4072 array->GetIsolate());
4076 void ExternalUint16Array::set(int index, uint16_t value) {
4077 DCHECK((index >= 0) && (index < this->length()));
4078 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
// 32-bit elements may exceed Smi range, so boxing goes through the factory.
4083 int32_t ExternalInt32Array::get_scalar(int index) {
4084 DCHECK((index >= 0) && (index < this->length()));
4085 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4090 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
4092 return array->GetIsolate()->factory()->
4093 NewNumberFromInt(array->get_scalar(index));
4097 void ExternalInt32Array::set(int index, int32_t value) {
4098 DCHECK((index >= 0) && (index < this->length()));
4099 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4104 uint32_t ExternalUint32Array::get_scalar(int index) {
4105 DCHECK((index >= 0) && (index < this->length()));
4106 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4111 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
4113 return array->GetIsolate()->factory()->
4114 NewNumberFromUint(array->get_scalar(index));
4118 void ExternalUint32Array::set(int index, uint32_t value) {
4119 DCHECK((index >= 0) && (index < this->length()));
4120 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4125 float ExternalFloat32Array::get_scalar(int index) {
4126 DCHECK((index >= 0) && (index < this->length()));
4127 float* ptr = static_cast<float*>(external_pointer());
4132 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
4134 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4138 void ExternalFloat32Array::set(int index, float value) {
4139 DCHECK((index >= 0) && (index < this->length()));
4140 float* ptr = static_cast<float*>(external_pointer());
// SIMD flavors: each logical element occupies 4 (x4 types) or 2 (x2 type)
// consecutive lanes of the scalar backing store.
4145 float32x4_value_t ExternalFloat32x4Array::get_scalar(int index) {
4146 DCHECK((index >= 0) && (index < this->length()));
4147 float* ptr = static_cast<float*>(external_pointer());
4148 float32x4_value_t value;
4149 value.storage[0] = ptr[index * 4 + 0];
4150 value.storage[1] = ptr[index * 4 + 1];
4151 value.storage[2] = ptr[index * 4 + 2];
4152 value.storage[3] = ptr[index * 4 + 3];
4157 Handle<Object> ExternalFloat32x4Array::get(Handle<ExternalFloat32x4Array> array,
4159 float32x4_value_t value = array->get_scalar(index);
4160 return array->GetIsolate()->factory()->NewFloat32x4(value);
4164 void ExternalFloat32x4Array::set(int index, const float32x4_value_t& value) {
4165 DCHECK((index >= 0) && (index < this->length()));
4166 float* ptr = static_cast<float*>(external_pointer());
4167 ptr[index * 4 + 0] = value.storage[0];
4168 ptr[index * 4 + 1] = value.storage[1];
4169 ptr[index * 4 + 2] = value.storage[2];
4170 ptr[index * 4 + 3] = value.storage[3];
4174 float64x2_value_t ExternalFloat64x2Array::get_scalar(int index) {
4175 DCHECK((index >= 0) && (index < this->length()));
4176 double* ptr = static_cast<double*>(external_pointer());
4177 float64x2_value_t value;
4178 value.storage[0] = ptr[index * 2 + 0];
4179 value.storage[1] = ptr[index * 2 + 1];
4184 Handle<Object> ExternalFloat64x2Array::get(Handle<ExternalFloat64x2Array> array,
4186 float64x2_value_t value = array->get_scalar(index);
4187 return array->GetIsolate()->factory()->NewFloat64x2(value);
4191 void ExternalFloat64x2Array::set(int index, const float64x2_value_t& value) {
4192 DCHECK((index >= 0) && (index < this->length()));
4193 double* ptr = static_cast<double*>(external_pointer());
4194 ptr[index * 2 + 0] = value.storage[0];
4195 ptr[index * 2 + 1] = value.storage[1];
4199 int32x4_value_t ExternalInt32x4Array::get_scalar(int index) {
4200 DCHECK((index >= 0) && (index < this->length()));
4201 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4202 int32x4_value_t value;
4203 value.storage[0] = ptr[index * 4 + 0];
4204 value.storage[1] = ptr[index * 4 + 1];
4205 value.storage[2] = ptr[index * 4 + 2];
4206 value.storage[3] = ptr[index * 4 + 3];
4211 Handle<Object> ExternalInt32x4Array::get(Handle<ExternalInt32x4Array> array,
4213 int32x4_value_t value = array->get_scalar(index);
4214 return array->GetIsolate()->factory()->NewInt32x4(value);
4218 void ExternalInt32x4Array::set(int index, const int32x4_value_t& value) {
4219 DCHECK((index >= 0) && (index < this->length()));
4220 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4221 ptr[index * 4 + 0] = value.storage[0];
4222 ptr[index * 4 + 1] = value.storage[1];
4223 ptr[index * 4 + 2] = value.storage[2];
4224 ptr[index * 4 + 3] = value.storage[3];
4228 double ExternalFloat64Array::get_scalar(int index) {
4229 DCHECK((index >= 0) && (index < this->length()));
4230 double* ptr = static_cast<double*>(external_pointer());
4235 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
4237 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4241 void ExternalFloat64Array::set(int index, double value) {
4242 DCHECK((index >= 0) && (index < this->length()));
4243 double* ptr = static_cast<double*>(external_pointer());
// On-heap typed arrays: element data is embedded in the object starting at
// kDataOffset.
4248 void* FixedTypedArrayBase::DataPtr() {
4249 return FIELD_ADDR(this, kDataOffset);
// Byte size of the element payload for the given instance type; the switch
// over TYPED_ARRAYS derives the per-element size.
// NOTE(review): the switch header, element_size declaration and default
// arm are elided in this listing.
4253 int FixedTypedArrayBase::DataSize(InstanceType type) {
4256 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4257 case FIXED_##TYPE##_ARRAY_TYPE: \
4258 element_size = size; \
4261 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4262 #undef TYPED_ARRAY_CASE
4267 return length() * element_size;
4271 int FixedTypedArrayBase::DataSize() {
4272 return DataSize(map()->instance_type());
// Total object size = header + payload, pointer-aligned.
4276 int FixedTypedArrayBase::size() {
4277 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4281 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4282 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
// Default element values used when a store cannot be converted: zero for
// integral types, NaN for floating-point types.
4286 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4289 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4292 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4295 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4298 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4301 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4304 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4307 float Float32ArrayTraits::defaultValue() {
4308 return static_cast<float>(base::OS::nan_value());
4312 double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
// Generic element read for on-heap typed arrays.
// NOTE(review): the "return ptr[index];" / "ptr[index] = value;" lines of
// the generic get_scalar/set are elided in this listing.
4315 template <class Traits>
4316 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4317 DCHECK((index >= 0) && (index < this->length()));
4318 ElementType* ptr = reinterpret_cast<ElementType*>(
4319 FIELD_ADDR(this, kDataOffset));
// Float64 specialization goes through READ_DOUBLE_FIELD, presumably to
// honor platforms where doubles need special (aligned/split) access --
// confirm against the macro definition.
4325 FixedTypedArray<Float64ArrayTraits>::ElementType
4326 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
4327 DCHECK((index >= 0) && (index < this->length()));
4328 return READ_DOUBLE_FIELD(this, ElementOffset(index));
4332 template <class Traits>
4333 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4334 DCHECK((index >= 0) && (index < this->length()));
4335 ElementType* ptr = reinterpret_cast<ElementType*>(
4336 FIELD_ADDR(this, kDataOffset));
4342 void FixedTypedArray<Float64ArrayTraits>::set(
4343 int index, Float64ArrayTraits::ElementType value) {
4344 DCHECK((index >= 0) && (index < this->length()));
4345 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
// Conversions used when storing JS numbers: plain static_cast for most
// types; Uint8Clamped clamps to [0, 255] (with rounding for doubles, per
// the Typed Array spec's clamped conversion).
4349 template <class Traits>
4350 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4351 return static_cast<ElementType>(value);
4356 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4357 if (value < 0) return 0;
4358 if (value > 0xFF) return 0xFF;
4359 return static_cast<uint8_t>(value);
4363 template <class Traits>
4364 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4366 return static_cast<ElementType>(DoubleToInt32(value));
4371 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4372 if (value < 0) return 0;
4373 if (value > 0xFF) return 0xFF;
4374 return static_cast<uint8_t>(lrint(value));
4379 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4380 return static_cast<float>(value);
// Float64 from_double is the identity (its "return value;" is elided in
// this listing).
4385 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
// Boxes the element via the traits' ToHandle (Smi, Number, or SIMD value).
4390 template <class Traits>
4391 Handle<Object> FixedTypedArray<Traits>::get(
4392 Handle<FixedTypedArray<Traits> > array,
4394 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
// Stores a JS value at |index| after converting it: Smis via from_int,
// heap numbers via from_double; undefined is clamped to the traits'
// default. Out-of-bounds indices are ignored; the converted value is
// returned (boxed) either way.
4398 template <class Traits>
4399 Handle<Object> FixedTypedArray<Traits>::SetValue(
4400 Handle<FixedTypedArray<Traits> > array,
4402 Handle<Object> value) {
4403 ElementType cast_value = Traits::defaultValue();
4404 if (index < static_cast<uint32_t>(array->length())) {
4405 if (value->IsSmi()) {
4406 int int_value = Handle<Smi>::cast(value)->value();
4407 cast_value = from_int(int_value);
4408 } else if (value->IsHeapNumber()) {
4409 double double_value = Handle<HeapNumber>::cast(value)->value();
4410 cast_value = from_double(double_value);
4412 // Clamp undefined to the default value. All other types have been
4413 // converted to a number type further up in the call chain.
4414 DCHECK(value->IsUndefined());
4416 array->set(index, cast_value);
4418 return Traits::ToHandle(array->GetIsolate(), cast_value);
// SIMD specializations of SetValue: only the matching SIMD wrapper type is
// accepted; undefined stores the default lanes (NaN / zero).
4422 Handle<Object> FixedTypedArray<Float32x4ArrayTraits>::SetValue(
4423 Handle<FixedTypedArray<Float32x4ArrayTraits> > array,
4424 uint32_t index, Handle<Object> value) {
4425 float32x4_value_t cast_value;
4426 cast_value.storage[0] = static_cast<float>(base::OS::nan_value());
4427 cast_value.storage[1] = static_cast<float>(base::OS::nan_value());
4428 cast_value.storage[2] = static_cast<float>(base::OS::nan_value());
4429 cast_value.storage[3] = static_cast<float>(base::OS::nan_value());
4430 if (index < static_cast<uint32_t>(array->length())) {
4431 if (value->IsFloat32x4()) {
4432 cast_value = Handle<Float32x4>::cast(value)->get();
4434 // Clamp undefined to NaN (default). All other types have been
4435 // converted to a number type further up in the call chain.
4436 DCHECK(value->IsUndefined());
4438 array->set(index, cast_value);
4440 return Float32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
4445 Handle<Object> FixedTypedArray<Float64x2ArrayTraits>::SetValue(
4446 Handle<FixedTypedArray<Float64x2ArrayTraits> > array,
4447 uint32_t index, Handle<Object> value) {
4448 float64x2_value_t cast_value;
4449 cast_value.storage[0] = base::OS::nan_value();
4450 cast_value.storage[1] = base::OS::nan_value();
4451 if (index < static_cast<uint32_t>(array->length())) {
4452 if (value->IsFloat64x2()) {
4453 cast_value = Handle<Float64x2>::cast(value)->get();
4455 // Clamp undefined to NaN (default). All other types have been
4456 // converted to a number type further up in the call chain.
4457 DCHECK(value->IsUndefined());
4459 array->set(index, cast_value);
4461 return Float64x2ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
4466 Handle<Object> FixedTypedArray<Int32x4ArrayTraits>::SetValue(
4467 Handle<FixedTypedArray<Int32x4ArrayTraits> > array,
4468 uint32_t index, Handle<Object> value) {
4469 int32x4_value_t cast_value;
4470 cast_value.storage[0] = 0;
4471 cast_value.storage[1] = 0;
4472 cast_value.storage[2] = 0;
4473 cast_value.storage[3] = 0;
4474 if (index < static_cast<uint32_t>(array->length())) {
4475 if (value->IsInt32x4()) {
4476 cast_value = Handle<Int32x4>::cast(value)->get();
4478 // Clamp undefined to zero (default). All other types have been
4479 // converted to a number type further up in the call chain.
4480 DCHECK(value->IsUndefined());
4482 array->set(index, cast_value);
4484 return Int32x4ArrayTraits::ToHandle(array->GetIsolate(), cast_value);
// Boxing helpers: element types that always fit a Smi use Smi::FromInt;
// 32-bit and floating types go through the factory; SIMD types allocate
// their wrapper objects.
4488 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4489 return handle(Smi::FromInt(scalar), isolate);
4493 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4495 return handle(Smi::FromInt(scalar), isolate);
4499 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4500 return handle(Smi::FromInt(scalar), isolate);
4504 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4505 return handle(Smi::FromInt(scalar), isolate);
4509 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4510 return handle(Smi::FromInt(scalar), isolate);
4514 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4515 return isolate->factory()->NewNumberFromUint(scalar);
4519 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4520 return isolate->factory()->NewNumberFromInt(scalar);
4524 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4525 return isolate->factory()->NewNumber(scalar);
4529 Handle<Object> Int32x4ArrayTraits::ToHandle(
4530 Isolate* isolate, int32x4_value_t scalar) {
4531 return isolate->factory()->NewInt32x4(scalar);
4535 Handle<Object> Float32x4ArrayTraits::ToHandle(
4536 Isolate* isolate, float32x4_value_t scalar) {
4537 return isolate->factory()->NewFloat32x4(scalar);
4541 Handle<Object> Float64x2ArrayTraits::ToHandle(
4542 Isolate* isolate, float64x2_value_t scalar) {
4543 return isolate->factory()->NewFloat64x2(scalar);
4547 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4548 return isolate->factory()->NewNumber(scalar);
// GC visitor id, stored as a single byte on the map.
4552 int Map::visitor_id() {
4553 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4557 void Map::set_visitor_id(int id) {
4558 DCHECK(0 <= id && id < 256);
4559 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
// Instance size is stored compressed in words (one byte) and scaled back
// to bytes here; the no-barrier read pairs with set_instance_size below.
4563 int Map::instance_size() {
4564 return NOBARRIER_READ_BYTE_FIELD(
4565 this, kInstanceSizeOffset) << kPointerSizeLog2;
4569 int Map::inobject_properties() {
4570 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4574 int Map::pre_allocated_property_fields() {
4575 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
// Byte offset of in-object property |index|, counted back from the end of
// the instance (indices are negative after the adjustment).
// NOTE(review): a DCHECK line between the adjustment and the return is
// elided in this listing.
4579 int Map::GetInObjectPropertyOffset(int index) {
4580 // Adjust for the number of properties stored in the object.
4581 index -= inobject_properties();
4583 return instance_size() + (index * kPointerSize);
// Computes the allocation size of this object given its map.  Fixed-size
// instance types store the size directly in the map; kVariableSizeSentinel
// means the size depends on the object's own length field, dispatched below
// with the most frequent variable-sized types tested first.
4587 int HeapObject::SizeFromMap(Map* map) {
4588 int instance_size = map->instance_size();
4589 if (instance_size != kVariableSizeSentinel) return instance_size;
4590 // Only inline the most frequent cases.
4591 InstanceType instance_type = map->instance_type();
4592 if (instance_type == FIXED_ARRAY_TYPE) {
4593 return FixedArray::BodyDescriptor::SizeOf(map, this);
// Sequential one-byte strings: size derived from the character count.
4595 if (instance_type == ASCII_STRING_TYPE ||
4596 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
4597 return SeqOneByteString::SizeFor(
4598 reinterpret_cast<SeqOneByteString*>(this)->length());
4600 if (instance_type == BYTE_ARRAY_TYPE) {
4601 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
// Free-space fillers read their size without a barrier (GC may be running).
4603 if (instance_type == FREE_SPACE_TYPE) {
4604 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
// Sequential two-byte strings.
4606 if (instance_type == STRING_TYPE ||
4607 instance_type == INTERNALIZED_STRING_TYPE) {
4608 return SeqTwoByteString::SizeFor(
4609 reinterpret_cast<SeqTwoByteString*>(this)->length());
4611 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4612 return FixedDoubleArray::SizeFor(
4613 reinterpret_cast<FixedDoubleArray*>(this)->length());
4615 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4616 return reinterpret_cast<ConstantPoolArray*>(this)->size();
4618 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4619 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4620 return reinterpret_cast<FixedTypedArrayBase*>(
4621 this)->TypedArraySize(instance_type);
// Only Code objects remain as a variable-sized type at this point.
4623 DCHECK(instance_type == CODE_TYPE);
4624 return reinterpret_cast<Code*>(this)->CodeSize();
// Stores the instance size compressed in pointer-size units (inverse of
// instance_size()); the value must be pointer-aligned and fit in one byte.
4628 void Map::set_instance_size(int value) {
4629 DCHECK_EQ(0, value & (kPointerSize - 1));
4630 value >>= kPointerSizeLog2;
4631 DCHECK(0 <= value && value < 256);
4632 NOBARRIER_WRITE_BYTE_FIELD(
4633 this, kInstanceSizeOffset, static_cast<byte>(value));
4637 void Map::set_inobject_properties(int value) {
4638 DCHECK(0 <= value && value < 256);
4639 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4643 void Map::set_pre_allocated_property_fields(int value) {
4644 DCHECK(0 <= value && value < 256);
4645 WRITE_BYTE_FIELD(this,
4646 kPreAllocatedPropertyFieldsOffset,
4647 static_cast<byte>(value));
// The instance type byte classifies what kind of heap object instances of
// this map are (string kinds, JSObject kinds, fixed arrays, ...).
4651 InstanceType Map::instance_type() {
4652 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4656 void Map::set_instance_type(InstanceType value) {
4657 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4661 int Map::unused_property_fields() {
4662 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
// Saturates at 255 since the count is stored in a single byte.
4666 void Map::set_unused_property_fields(int value) {
4667 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
// Raw accessors for the two one-byte flag words; individual flag accessors
// below decode/update bits within these.
4671 byte Map::bit_field() {
4672 return READ_BYTE_FIELD(this, kBitFieldOffset);
4676 void Map::set_bit_field(byte value) {
4677 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4681 byte Map::bit_field2() {
4682 return READ_BYTE_FIELD(this, kBitField2Offset);
4686 void Map::set_bit_field2(byte value) {
4687 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
// Single-bit flag accessors over bit_field/bit_field2.  Setters read the
// current flag byte, OR in or mask out the bit, and write it back.
4691 void Map::set_non_instance_prototype(bool value) {
4693 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4695 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4700 bool Map::has_non_instance_prototype() {
4701 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
// These two use a BitField helper class instead of manual masking.
4705 void Map::set_function_with_prototype(bool value) {
4706 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4710 bool Map::function_with_prototype() {
4711 return FunctionWithPrototype::decode(bit_field());
4715 void Map::set_is_access_check_needed(bool access_check_needed) {
4716 if (access_check_needed) {
4717 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4719 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4724 bool Map::is_access_check_needed() {
4725 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
// Extensibility and prototype-map flags live in bit_field2.
4729 void Map::set_is_extensible(bool value) {
4731 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4733 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4737 bool Map::is_extensible() {
4738 return ((1 << kIsExtensible) & bit_field2()) != 0;
4742 void Map::set_is_prototype_map(bool value) {
4743 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4746 bool Map::is_prototype_map() {
4747 return IsPrototypeMapBits::decode(bit_field2());
// Flag accessors over the 32-bit bit_field3 word, all via BitField helpers.
// Marking a map as a dictionary map also marks it unstable, since
// dictionary-mode objects do not keep a stable layout.
4751 void Map::set_dictionary_map(bool value) {
4752 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4753 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4754 set_bit_field3(new_bit_field3);
4758 bool Map::is_dictionary_map() {
4759 return DictionaryMap::decode(bit_field3());
// (Unrelated to Map: raw accessor for a Code object's flag word; the rest of
// the Code flag accessors appear further below.)
4763 Code::Flags Code::flags() {
4764 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4768 void Map::set_owns_descriptors(bool owns_descriptors) {
4769 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4773 bool Map::owns_descriptors() {
4774 return OwnsDescriptors::decode(bit_field3());
// One-way setter: the flag is only ever raised here.
4778 void Map::set_has_instance_call_handler() {
4779 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4783 bool Map::has_instance_call_handler() {
4784 return HasInstanceCallHandler::decode(bit_field3());
// One-way setter: deprecation is not reversible through this interface.
4788 void Map::deprecate() {
4789 set_bit_field3(Deprecated::update(bit_field3(), true));
4793 bool Map::is_deprecated() {
4794 return Deprecated::decode(bit_field3());
4798 void Map::set_migration_target(bool value) {
4799 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4803 bool Map::is_migration_target() {
4804 return IsMigrationTarget::decode(bit_field3());
// In-object slack tracking state for newly constructed objects.
4808 void Map::set_done_inobject_slack_tracking(bool value) {
4809 set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
4813 bool Map::done_inobject_slack_tracking() {
4814 return DoneInobjectSlackTracking::decode(bit_field3());
4818 void Map::set_construction_count(int value) {
4819 set_bit_field3(ConstructionCount::update(bit_field3(), value));
4823 int Map::construction_count() {
4824 return ConstructionCount::decode(bit_field3());
4828 void Map::freeze() {
4829 set_bit_field3(IsFrozen::update(bit_field3(), true));
4833 bool Map::is_frozen() {
4834 return IsFrozen::decode(bit_field3());
// Stability is stored inverted: the bit records "unstable".
4838 void Map::mark_unstable() {
4839 set_bit_field3(IsUnstable::update(bit_field3(), true));
4843 bool Map::is_stable() {
4844 return !IsUnstable::decode(bit_field3());
// The code cache slot holds the empty fixed array when no cache exists.
4848 bool Map::has_code_cache() {
4849 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
// A map can be deprecated (replaced by a newer layout) if any of its own
// descriptors uses a representation that could be generalized, or holds a
// CONSTANT that could be invalidated.
4853 bool Map::CanBeDeprecated() {
4854 int descriptor = LastAdded();
4855 for (int i = 0; i <= descriptor; i++) {
4856 PropertyDetails details = instance_descriptors()->GetDetails(i);
4857 if (details.representation().IsNone()) return true;
4858 if (details.representation().IsSmi()) return true;
4859 if (details.representation().IsDouble()) return true;
4860 if (details.representation().IsHeapObject()) return true;
4861 if (details.type() == CONSTANT) return true;
// Deoptimizes code that depended on this map's prototype-chain shape.
4867 void Map::NotifyLeafMapLayoutChange() {
4870 dependent_code()->DeoptimizeDependentCodeGroup(
4872 DependentCode::kPrototypeCheckGroup);
// Map checks can only be omitted for stable maps, and only when the flag
// enabling the optimization is on.
4877 bool Map::CanOmitMapChecks() {
4878 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
// DependentCode is a fixed array: a small header of per-group entry counts
// (indexed by DependencyGroup) followed, from kCodesStartIndex, by the
// dependent Code objects (or CompilationInfo foreigns) grouped by group.
4882 int DependentCode::number_of_entries(DependencyGroup group) {
4883 if (length() == 0) return 0;
4884 return Smi::cast(get(group))->value();
4888 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4889 set(group, Smi::FromInt(value));
// Entries are either compiled Code or a Foreign wrapping a CompilationInfo
// (for code still being compiled); is_code_at distinguishes the two.
4893 bool DependentCode::is_code_at(int i) {
4894 return get(kCodesStartIndex + i)->IsCode();
4897 Code* DependentCode::code_at(int i) {
4898 return Code::cast(get(kCodesStartIndex + i));
4902 CompilationInfo* DependentCode::compilation_info_at(int i) {
4903 return reinterpret_cast<CompilationInfo*>(
4904 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
4908 void DependentCode::set_object_at(int i, Object* object) {
4909 set(kCodesStartIndex + i, object);
4913 Object* DependentCode::object_at(int i) {
4914 return get(kCodesStartIndex + i);
4918 Object** DependentCode::slot_at(int i) {
4919 return RawFieldOfElementAt(kCodesStartIndex + i);
4923 void DependentCode::clear_at(int i) {
4924 set_undefined(kCodesStartIndex + i);
4928 void DependentCode::copy(int from, int to) {
4929 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
// Makes room for one more entry in |group| by shifting the first element of
// each later group to that group's end (working from the last group down).
4933 void DependentCode::ExtendGroup(DependencyGroup group) {
4934 GroupStartIndexes starts(this);
4935 for (int g = kGroupCount - 1; g > group; g--) {
4936 if (starts.at(g) < starts.at(g + 1)) {
4937 copy(starts.at(g), starts.at(g + 1));
// The Code flags word packs kind, IC state, stub type, extra IC state, and
// cache-holder into one int; the BitField classes below decode it.
4943 void Code::set_flags(Code::Flags flags) {
4944 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4945 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4949 Code::Kind Code::kind() {
4950 return ExtractKindFromFlags(flags());
// True for any stub, handler, or inline-cache kind.
4954 bool Code::IsCodeStubOrIC() {
4955 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4956 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4957 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4958 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4959 kind() == TO_BOOLEAN_IC;
4963 InlineCacheState Code::ic_state() {
4964 InlineCacheState result = ExtractICStateFromFlags(flags());
4965 // Only allow uninitialized or debugger states for non-IC code
4966 // objects. This is used in the debugger to determine whether or not
4967 // a call to code object has been replaced with a debug break call.
4968 DCHECK(is_inline_cache_stub() ||
4969 result == UNINITIALIZED ||
4970 result == DEBUG_STUB);
// Extra IC state is only meaningful for IC stubs (or debug stubs).
4975 ExtraICState Code::extra_ic_state() {
4976 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4977 return ExtractExtraICStateFromFlags(flags());
4981 Code::StubType Code::type() {
4982 return ExtractTypeFromFlags(flags());
// Raw writers used only when initializing a fresh Code object; individual
// bit accessors below read-modify-write the same words.
4986 // For initialization.
4987 void Code::set_raw_kind_specific_flags1(int value) {
4988 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4992 void Code::set_raw_kind_specific_flags2(int value) {
4993 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
// "Crankshafted" marks code produced by an optimizing backend (including
// hydrogen code stubs), as opposed to full-codegen output.
4997 inline bool Code::is_crankshafted() {
4998 return IsCrankshaftedField::decode(
4999 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
// Hydrogen stubs are crankshafted code that is not an optimized function.
5003 inline bool Code::is_hydrogen_stub() {
5004 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
5008 inline void Code::set_is_crankshafted(bool value) {
5009 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5010 int updated = IsCrankshaftedField::update(previous, value);
5011 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// The turbofanned bit is only valid for optimized functions and stubs.
5015 inline bool Code::is_turbofanned() {
5016 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
5017 return IsTurbofannedField::decode(
5018 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5022 inline void Code::set_is_turbofanned(bool value) {
5023 DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
5024 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5025 int updated = IsTurbofannedField::update(previous, value);
5026 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Accessors valid only for FUNCTION (full-codegen) code objects; each one
// asserts the kind before touching kind-specific storage.
5030 bool Code::optimizable() {
5031 DCHECK_EQ(FUNCTION, kind());
5032 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
5036 void Code::set_optimizable(bool value) {
5037 DCHECK_EQ(FUNCTION, kind());
5038 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
// The full-code flags byte packs several booleans; each pair below does a
// read-modify-write through its BitField helper.
5042 bool Code::has_deoptimization_support() {
5043 DCHECK_EQ(FUNCTION, kind());
5044 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5045 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
5049 void Code::set_has_deoptimization_support(bool value) {
5050 DCHECK_EQ(FUNCTION, kind());
5051 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5052 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
5053 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
5057 bool Code::has_debug_break_slots() {
5058 DCHECK_EQ(FUNCTION, kind());
5059 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5060 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
5064 void Code::set_has_debug_break_slots(bool value) {
5065 DCHECK_EQ(FUNCTION, kind());
5066 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5067 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
5068 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
5072 bool Code::is_compiled_optimizable() {
5073 DCHECK_EQ(FUNCTION, kind());
5074 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5075 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
5079 void Code::set_compiled_optimizable(bool value) {
5080 DCHECK_EQ(FUNCTION, kind());
5081 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
5082 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
5083 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
// OSR loop nesting level, stored in kind-specific flags word 2.
5087 int Code::allow_osr_at_loop_nesting_level() {
5088 DCHECK_EQ(FUNCTION, kind());
5089 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5090 return AllowOSRAtLoopNestingLevelField::decode(fields);
5094 void Code::set_allow_osr_at_loop_nesting_level(int level) {
5095 DCHECK_EQ(FUNCTION, kind());
5096 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
5097 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5098 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
5099 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Profiler tick count, capped to one byte (DCHECK below).
5103 int Code::profiler_ticks() {
5104 DCHECK_EQ(FUNCTION, kind());
5105 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
5109 void Code::set_profiler_ticks(int ticks) {
5110 DCHECK_EQ(FUNCTION, kind());
5111 DCHECK(ticks < 256);
5112 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
// BUILTIN code reuses kind-specific flags word 1 as its builtin index.
5116 int Code::builtin_index() {
5117 DCHECK_EQ(BUILTIN, kind());
5118 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
5122 void Code::set_builtin_index(int index) {
5123 DCHECK_EQ(BUILTIN, kind());
5124 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
// Stack-slot count for crankshafted code, packed in kind-specific flags 1.
5128 unsigned Code::stack_slots() {
5129 DCHECK(is_crankshafted());
5130 return StackSlotsField::decode(
5131 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5135 void Code::set_stack_slots(unsigned slots) {
// Hard CHECK (not DCHECK): an out-of-range value would silently corrupt
// neighboring bit fields.
5136 CHECK(slots <= (1 << kStackSlotsBitCount));
5137 DCHECK(is_crankshafted());
5138 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5139 int updated = StackSlotsField::update(previous, slots);
5140 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Offset of the safepoint table within crankshafted code.
5144 unsigned Code::safepoint_table_offset() {
5145 DCHECK(is_crankshafted());
5146 return SafepointTableOffsetField::decode(
5147 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5151 void Code::set_safepoint_table_offset(unsigned offset) {
5152 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5153 DCHECK(is_crankshafted());
5154 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5155 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5156 int updated = SafepointTableOffsetField::update(previous, offset);
5157 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// Back-edge table offset for FUNCTION code, stored compressed in
// pointer-size units (hence the shift on read and the inverse on write).
5161 unsigned Code::back_edge_table_offset() {
5162 DCHECK_EQ(FUNCTION, kind());
5163 return BackEdgeTableOffsetField::decode(
5164 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5168 void Code::set_back_edge_table_offset(unsigned offset) {
5169 DCHECK_EQ(FUNCTION, kind());
5170 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5171 offset = offset >> kPointerSizeLog2;
5172 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5173 int updated = BackEdgeTableOffsetField::update(previous, offset);
5174 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
// OSR patching is signalled by a non-zero allowed loop nesting level.
5178 bool Code::back_edges_patched_for_osr() {
5179 DCHECK_EQ(FUNCTION, kind());
5180 return allow_osr_at_loop_nesting_level() > 0;
// ToBoolean ICs encode their observed-type state in the extra IC state.
5184 byte Code::to_boolean_state() {
5185 return extra_ic_state();
5189 bool Code::has_function_cache() {
5190 DCHECK(kind() == STUB);
5191 return HasFunctionCacheField::decode(
5192 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5196 void Code::set_has_function_cache(bool flag) {
5197 DCHECK(kind() == STUB);
5198 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5199 int updated = HasFunctionCacheField::update(previous, flag);
5200 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Deoptimization marking is only meaningful for optimized functions, and
// only allowed when deoptimization is permitted in this isolate.
5204 bool Code::marked_for_deoptimization() {
5205 DCHECK(kind() == OPTIMIZED_FUNCTION);
5206 return MarkedForDeoptimizationField::decode(
5207 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5211 void Code::set_marked_for_deoptimization(bool flag) {
5212 DCHECK(kind() == OPTIMIZED_FUNCTION);
5213 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5214 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5215 int updated = MarkedForDeoptimizationField::update(previous, flag);
5216 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Weak stubs embed maps/objects weakly; the bit is only consulted for code
// kinds that can be weak at all.
5220 bool Code::is_weak_stub() {
5221 return CanBeWeakStub() && WeakStubField::decode(
5222 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5226 void Code::mark_as_weak_stub() {
5227 DCHECK(CanBeWeakStub());
5228 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5229 int updated = WeakStubField::update(previous, true);
5230 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5234 bool Code::is_invalidated_weak_stub() {
5235 return is_weak_stub() && InvalidatedWeakStubField::decode(
5236 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
// NOTE(review): this asserts is_inline_cache_stub() while mark_as_weak_stub
// above asserts CanBeWeakStub() -- confirm the narrower precondition here
// is intentional.
5240 void Code::mark_as_invalidated_weak_stub() {
5241 DCHECK(is_inline_cache_stub());
5242 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5243 int updated = InvalidatedWeakStubField::update(previous, true);
5244 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
// Dispatches over the IC kind list via the IC_KIND_LIST-style CASE macro.
5248 bool Code::is_inline_cache_stub() {
5249 Kind kind = this->kind();
5251 #define CASE(name) case name: return true;
5254 default: return false;
5259 bool Code::is_keyed_stub() {
5260 return is_keyed_load_stub() || is_keyed_store_stub();
5264 bool Code::is_debug_stub() {
5265 return ic_state() == DEBUG_STUB;
// The constant pool slot always holds a ConstantPoolArray (asserted on
// write); the write barrier keeps the GC informed of the reference.
5269 ConstantPoolArray* Code::constant_pool() {
5270 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
5274 void Code::set_constant_pool(Object* value) {
5275 DCHECK(value->IsConstantPoolArray());
5276 WRITE_FIELD(this, kConstantPoolOffset, value);
5277 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
// Packs the individual components into a single Flags word.
5281 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5282 ExtraICState extra_ic_state, StubType type,
5283 CacheHolderFlag holder) {
5284 // Compute the bit mask.
5285 unsigned int bits = KindField::encode(kind)
5286 | ICStateField::encode(ic_state)
5287 | TypeField::encode(type)
5288 | ExtraICStateField::encode(extra_ic_state)
5289 | CacheHolderField::encode(holder);
5290 return static_cast<Flags>(bits);
// Convenience wrapper: monomorphic ICs always use MONOMORPHIC state.
5294 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5295 ExtraICState extra_ic_state,
5296 CacheHolderFlag holder,
5298 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
// Handlers store the originating IC kind in the extra-IC-state slot.
5302 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5303 CacheHolderFlag holder) {
5304 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
// Static decoders for the packed Flags word (inverse of ComputeFlags).
5308 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5309 return KindField::decode(flags);
5313 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5314 return ICStateField::decode(flags);
5318 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5319 return ExtraICStateField::decode(flags);
5323 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5324 return TypeField::decode(flags);
5328 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5329 return CacheHolderField::decode(flags);
// Masking helpers used when comparing flags modulo type / cache holder.
5333 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5334 int bits = flags & ~TypeField::kMask;
5335 return static_cast<Flags>(bits);
5339 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5340 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5341 return static_cast<Flags>(bits);
// Maps an instruction-start address back to its enclosing Code object by
// subtracting the header size.
5345 Code* Code::GetCodeFromTargetAddress(Address address) {
5346 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5347 // GetCodeFromTargetAddress might be called when marking objects during mark
5348 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5349 // Code::cast. Code::cast does not work when the object's map is
5351 Code* result = reinterpret_cast<Code*>(code);
// Same translation, but starting from a memory slot that stores a code
// entry address.
5356 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5358 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
// Decides whether |object| may be embedded weakly in optimized code:
// transitionable maps, JSObjects, and cells holding JSObjects qualify,
// each gated by its own flag (and FLAG_collect_maps overall).
5362 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5363 if (!FLAG_collect_maps) return false;
5364 if (object->IsMap()) {
5365 return Map::cast(object)->CanTransition() &&
5366 FLAG_weak_embedded_maps_in_optimized_code;
5368 if (object->IsJSObject() ||
5369 (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
5370 return FLAG_weak_embedded_objects_in_optimized_code;
// Small fixed-capacity map->object substitution table used by
// Code::FindAndReplace; at most kMaxCount pairs (DCHECK-enforced).
5376 class Code::FindAndReplacePattern {
5378 FindAndReplacePattern() : count_(0) { }
5379 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5380 DCHECK(count_ < kMaxCount);
5381 find_[count_] = map_to_find;
5382 replace_[count_] = obj_to_replace;
5386 static const int kMaxCount = 4;
// Parallel arrays: find_[i] is replaced by replace_[i].
5388 Handle<Map> find_[kMaxCount];
5389 Handle<Object> replace_[kMaxCount];
// Only transitionable maps may be held weakly from ICs, and only when both
// map collection and weak-maps-in-IC flags are enabled.
5394 bool Code::IsWeakObjectInIC(Object* object) {
5395 return object->IsMap() && Map::cast(object)->CanTransition() &&
5396 FLAG_collect_maps &&
5397 FLAG_weak_embedded_maps_in_ic;
// The prototype slot holds either null or a JSReceiver (asserted on write).
5401 Object* Map::prototype() const {
5402 return READ_FIELD(this, kPrototypeOffset);
5406 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5407 DCHECK(value->IsNull() || value->IsJSReceiver());
5408 WRITE_FIELD(this, kPrototypeOffset, value);
5409 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5413 // If the descriptor is using the empty transition array, install a new empty
5414 // transition array that will have place for an element transition.
5415 static void EnsureHasTransitionArray(Handle<Map> map) {
5416 Handle<TransitionArray> transitions;
// Either allocate a fresh (empty) full transition array that preserves the
// current back pointer, or widen a simple transition to a full array.
5417 if (!map->HasTransitionArray()) {
5418 transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
5419 transitions->set_back_pointer_storage(map->GetBackPointer());
5420 } else if (!map->transitions()->IsFullTransitionArray()) {
5421 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5425 map->set_transitions(*transitions);
// Installs a descriptor array and adopts all of its descriptors as own.
5429 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
5430 int len = descriptors->number_of_descriptors();
5431 set_instance_descriptors(descriptors);
5432 SetNumberOfOwnDescriptors(len);
5436 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// bit_field3 is a raw uint32; on 64-bit targets the upper half of the slot
// is explicitly zeroed so the whole word is deterministic.
5439 void Map::set_bit_field3(uint32_t bits) {
5440 if (kInt32Size != kPointerSize) {
5441 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5443 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5447 uint32_t Map::bit_field3() {
5448 return READ_UINT32_FIELD(this, kBitField3Offset);
// Appends a descriptor; only valid when this map owns its descriptor array
// outright (own-descriptor count must match the array's count).
5452 void Map::AppendDescriptor(Descriptor* desc) {
5453 DescriptorArray* descriptors = instance_descriptors();
5454 int number_of_own_descriptors = NumberOfOwnDescriptors();
5455 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5456 descriptors->Append(desc);
5457 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
// The kTransitionsOrBackPointerOffset slot is overloaded: it holds either a
// transition array (whose back-pointer storage holds the real back pointer)
// or the back pointer itself (a Map, or undefined for root maps).
5461 Object* Map::GetBackPointer() {
5462 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
// NOTE(review): the guard tests IsDescriptorArray() but the cast below is
// to TransitionArray, while HasTransitionArray() tests IsTransitionArray();
// verify this type check is intentional.
5463 if (object->IsDescriptorArray()) {
5464 return TransitionArray::cast(object)->back_pointer_storage();
5466 DCHECK(object->IsMap() || object->IsUndefined());
5472 bool Map::HasElementsTransition() {
5473 return HasTransitionArray() && transitions()->HasElementsTransition();
5477 bool Map::HasTransitionArray() const {
5478 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5479 return object->IsTransitionArray();
// Looks up the elements-kind transition via the dedicated symbol key.
5483 Map* Map::elements_transition_map() {
5484 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
5485 return transitions()->GetTarget(index);
// A transition can be added as long as the grown array still fits in a
// regular (non-large-object) page.
5489 bool Map::CanHaveMoreTransitions() {
5490 if (!HasTransitionArray()) return true;
5491 return FixedArray::SizeFor(transitions()->length() +
5492 TransitionArray::kTransitionSize)
5493 <= Page::kMaxRegularHeapObjectSize;
5497 Map* Map::GetTransition(int transition_index) {
5498 return transitions()->GetTarget(transition_index);
// Returns kNotFound when there is no transition array at all.
5502 int Map::SearchTransition(Name* name) {
5503 if (HasTransitionArray()) return transitions()->Search(name);
5504 return TransitionArray::kNotFound;
// Falls back to the empty fixed array when no prototype transitions exist.
5508 FixedArray* Map::GetPrototypeTransitions() {
5509 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5510 if (!transitions()->HasPrototypeTransitions()) {
5511 return GetHeap()->empty_fixed_array();
5513 return transitions()->GetPrototypeTransitions();
// Replaces the prototype-transition cache, preserving the recorded number
// of proto transitions; the old cache is zapped before being dropped.
5517 void Map::SetPrototypeTransitions(
5518 Handle<Map> map, Handle<FixedArray> proto_transitions) {
5519 EnsureHasTransitionArray(map);
5520 int old_number_of_transitions = map->NumberOfProtoTransitions();
5522 if (map->HasPrototypeTransitions()) {
5523 DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
5524 map->ZapPrototypeTransitions();
5527 map->transitions()->SetPrototypeTransitions(*proto_transitions);
5528 map->SetNumberOfProtoTransitions(old_number_of_transitions);
5532 bool Map::HasPrototypeTransitions() {
5533 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
// Asserts a transition array is actually present before casting.
5537 TransitionArray* Map::transitions() const {
5538 DCHECK(HasTransitionArray());
5539 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5540 return TransitionArray::cast(object);
5544 void Map::set_transitions(TransitionArray* transition_array,
5545 WriteBarrierMode mode) {
5546 // Transition arrays are not shared. When one is replaced, it should not
5547 // keep referenced objects alive, so we zap it.
5548 // When there is another reference to the array somewhere (e.g. a handle),
5549 // not zapping turns from a waste of memory into a source of crashes.
5550 if (HasTransitionArray()) {
// Debug-only consistency check: every transition target that shares our
// descriptor array must survive, unchanged, in the replacement array.
5552 for (int i = 0; i < transitions()->number_of_transitions(); i++) {
5553 Map* target = transitions()->GetTarget(i);
5554 if (target->instance_descriptors() == instance_descriptors()) {
5555 Name* key = transitions()->GetKey(i);
5556 int new_target_index = transition_array->Search(key);
5557 DCHECK(new_target_index != TransitionArray::kNotFound);
5558 DCHECK(transition_array->GetTarget(new_target_index) == target);
5562 DCHECK(transitions() != transition_array);
5566 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
5567 CONDITIONAL_WRITE_BARRIER(
5568 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
// Root maps start with undefined in the overloaded transitions/back-pointer
// slot; no write barrier needed since undefined is immortal.
5572 void Map::init_back_pointer(Object* undefined) {
5573 DCHECK(undefined->IsUndefined());
5574 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
// Sets (or clears) the back pointer.  The DCHECK enforces a one-way toggle:
// a map either gains a back pointer (was undefined) or loses one (becomes
// undefined while a Map was stored).  If a transition array occupies the
// slot, the back pointer lives inside it instead.
5578 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5579 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5580 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5581 (value->IsMap() && GetBackPointer()->IsUndefined()));
5582 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5583 if (object->IsTransitionArray()) {
5584 TransitionArray::cast(object)->set_back_pointer_storage(value);
5586 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
5587 CONDITIONAL_WRITE_BARRIER(
5588 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
// Macro-generated getter/setter pairs (see the ACCESSORS family of macros):
// ACCESSORS defines tagged-field accessors with write barriers,
// ACCESSORS_TO_SMI stores Smi-encoded integers, and BOOL_ACCESSORS exposes
// a single bit of a Smi flags field.
5593 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5594 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5595 ACCESSORS(Map, constructor, Object, kConstructorOffset)
5597 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5598 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5599 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5601 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5602 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5603 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
5604 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5606 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5607 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5609 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5610 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5611 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5612 kExpectedReceiverTypeOffset)
5614 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
5615 kSerializedDataOffset)
5617 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
5620 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5621 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5622 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5624 ACCESSORS(Box, value, Object, kValueOffset)
5626 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5627 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5629 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5630 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5631 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5633 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5634 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5635 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5636 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5637 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5638 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5640 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5641 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5643 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5644 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5645 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5647 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5648 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5649 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5650 kPrototypeTemplateOffset)
5651 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5652 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5653 kNamedPropertyHandlerOffset)
5654 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5655 kIndexedPropertyHandlerOffset)
5656 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5657 kInstanceTemplateOffset)
5658 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5659 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5660 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5661 kInstanceCallHandlerOffset)
5662 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5663 kAccessCheckInfoOffset)
5664 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5666 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5667 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5668 kInternalFieldCountOffset)
5670 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
5671 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
5673 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5675 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5676 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5677 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5678 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5679 kPretenureCreateCountOffset)
5680 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5681 kDependentCodeOffset)
5682 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5683 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5685 ACCESSORS(Script, source, Object, kSourceOffset)
5686 ACCESSORS(Script, name, Object, kNameOffset)
5687 ACCESSORS(Script, id, Smi, kIdOffset)
5688 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5689 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5690 ACCESSORS(Script, context_data, Object, kContextOffset)
5691 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
5692 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5693 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5694 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5695 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5696 kEvalFrominstructionsOffsetOffset)
5697 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5698 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5699 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5700 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
// Compilation type/state are packed as single bits inside Script::flags().
// The getters decode a boolean bit into the corresponding enum value; the
// setters re-encode it.
// NOTE(review): the closing braces of these four functions were dropped by the
// extraction (inner line numbers 5705/5709/5713/5717 are absent); the code as
// shown is incomplete but otherwise byte-identical to the original.
5702 Script::CompilationType Script::compilation_type() {
5703 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5704 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5706 void Script::set_compilation_type(CompilationType type) {
5707 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5708 type == COMPILATION_TYPE_EVAL));
5710 Script::CompilationState Script::compilation_state() {
5711 return BooleanBit::get(flags(), kCompilationStateBit) ?
5712 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5714 void Script::set_compilation_state(CompilationState state) {
5715 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5716 state == COMPILATION_STATE_COMPILED));
// Accessors for debugger support objects (DebugInfo, BreakPointInfo) and the
// bulk of SharedFunctionInfo's heap-object fields.
5720 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5721 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5722 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5723 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5725 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5726 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5727 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5728 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5730 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5731 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5732 kOptimizedCodeMapOffset)
5733 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5734 ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
5735 kFeedbackVectorOffset)
5736 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5737 kInstanceClassNameOffset)
5738 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5739 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5740 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5741 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
// Bit accessors decoding individual flags out of FunctionTemplateInfo::flag
// and SharedFunctionInfo::start_position_and_type / compiler_hints.
// NOTE(review): several BOOL_ACCESSORS invocations below are missing their
// trailing argument lines (extraction gaps, e.g. after 5754/5756/5758); the
// original file supplies the omitted bit-name arguments.
5744 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5745 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5746 kHiddenPrototypeBit)
5747 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5748 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5749 kNeedsAccessCheckBit)
5750 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5751 kReadOnlyPrototypeBit)
5752 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5753 kRemovePrototypeBit)
5754 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5756 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5758 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5761 BOOL_ACCESSORS(SharedFunctionInfo,
5763 allows_lazy_compilation,
5764 kAllowLazyCompilation)
5765 BOOL_ACCESSORS(SharedFunctionInfo,
5767 allows_lazy_compilation_without_context,
5768 kAllowLazyCompilationWithoutContext)
5769 BOOL_ACCESSORS(SharedFunctionInfo,
5773 BOOL_ACCESSORS(SharedFunctionInfo,
5775 has_duplicate_parameters,
5776 kHasDuplicateParameters)
// On 32-bit hosts the int-sized SharedFunctionInfo counters fit in a Smi, so
// plain SMI_ACCESSORS are used. (The 64-bit branch uses the PSEUDO_SMI
// accessors defined further below; the #else/#endif lines fall outside this
// excerpt.)
5779 #if V8_HOST_ARCH_32_BIT
5780 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5781 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
5782 kFormalParameterCountOffset)
5783 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5784 kExpectedNofPropertiesOffset)
5785 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5786 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5787 kStartPositionAndTypeOffset)
5788 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5789 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5790 kFunctionTokenPositionOffset)
5791 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5792 kCompilerHintsOffset)
5793 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5794 kOptCountAndBailoutReasonOffset)
5795 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5796 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5797 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
// On 64-bit hosts, pairs of int fields share one pointer-sized slot. The LO
// accessor stores a Smi-like value (shifted left by one, tag bit clear) in the
// pointer-aligned half so the GC sees a valid Smi; the HI accessor is a plain
// untagged int in the other half (never scanned as a pointer because the LO
// half carries the Smi tag for the whole word).
// NOTE(review): closing braces (original lines 5808/5816) were dropped by the
// extraction; bodies are otherwise byte-identical.
5801 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5802 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5803 int holder::name() const { \
5804 int value = READ_INT_FIELD(this, offset); \
5805 DCHECK(kHeapObjectTag == 1); \
5806 DCHECK((value & kHeapObjectTag) == 0); \
5807 return value >> 1; \
5809 void holder::set_##name(int value) { \
5810 DCHECK(kHeapObjectTag == 1); \
5811 DCHECK((value & 0xC0000000) == 0xC0000000 || \
5812 (value & 0xC0000000) == 0x0); \
5813 WRITE_INT_FIELD(this, \
5815 (value << 1) & ~kHeapObjectTag); \
5818 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5819 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5820 INT_ACCESSORS(holder, name, offset)
// 64-bit layout of SharedFunctionInfo's int fields: each LO/HI pair packs two
// ints into one pointer-sized word (see PSEUDO_SMI_ACCESSORS_* above).
// NOTE(review): some invocations are missing a field-name argument line
// (extraction gaps after 5841/5850/5853); the original supplies them.
5823 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5824 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5825 formal_parameter_count,
5826 kFormalParameterCountOffset)
5828 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5829 expected_nof_properties,
5830 kExpectedNofPropertiesOffset)
5831 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5833 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5834 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5835 start_position_and_type,
5836 kStartPositionAndTypeOffset)
5838 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5839 function_token_position,
5840 kFunctionTokenPositionOffset)
5841 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5843 kCompilerHintsOffset)
5845 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5846 opt_count_and_bailout_reason,
5847 kOptCountAndBailoutReasonOffset)
5848 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5850 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5852 kAstNodeCountOffset)
5853 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5855 kProfilerTicksOffset)
// Flags packed into SharedFunctionInfo::compiler_hints.
5860 BOOL_GETTER(SharedFunctionInfo,
5862 optimization_disabled,
5863 kOptimizationDisabled)
// Setter also mirrors the flag into the unoptimized code object so the code
// is no longer counted as optimizable.
// NOTE(review): closing braces and part of the BooleanBit::set call (original
// 5869, 5874-5876) were dropped by the extraction.
5866 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5867 set_compiler_hints(BooleanBit::set(compiler_hints(),
5868 kOptimizationDisabled,
5870 // If disabling optimizations we reflect that in the code object so
5871 // it will not be counted as optimizable code.
5872 if ((code()->kind() == Code::FUNCTION) && disable) {
5873 code()->set_optimizable(false);
// Strict-mode bit: functions may only transition sloppy -> strict, never back.
5878 StrictMode SharedFunctionInfo::strict_mode() {
5879 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5884 void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5885 // We only allow mode transitions from sloppy to strict.
5886 DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5887 int hints = compiler_hints();
5888 hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5889 set_compiler_hints(hints);
// Remaining single-bit compiler hints.
5893 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5894 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5896 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5897 name_should_print_as_anonymous,
5898 kNameShouldPrintAsAnonymous)
5899 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5900 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5901 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5902 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5903 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5904 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5905 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5907 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5908 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5910 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
// Returns false only when the script's source is an externalized string whose
// backing resource has been released (e.g. after the embedder disposed it);
// non-string and non-external sources are always considered valid.
// NOTE(review): the trailing branch and closing brace (original 5921-5924)
// were dropped by the extraction.
5912 bool Script::HasValidSource() {
5913 Object* src = this->source();
5914 if (!src->IsString()) return true;
5915 String* src_str = String::cast(src);
5916 if (!StringShape(src_str).IsExternal()) return true;
5917 if (src_str->IsOneByteRepresentation()) {
5918 return ExternalAsciiString::cast(src)->resource() != NULL;
5919 } else if (src_str->IsTwoByteRepresentation()) {
5920 return ExternalTwoByteString::cast(src)->resource() != NULL;
// Marks a builtin as accepting any number of arguments without going through
// the arguments adaptor frame.
5926 void SharedFunctionInfo::DontAdaptArguments() {
5927 DCHECK(code()->kind() == Code::BUILTIN);
5928 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
// start_position shares its int field with type bits; shift/mask accordingly.
5932 int SharedFunctionInfo::start_position() const {
5933 return start_position_and_type() >> kStartPositionShift;
5937 void SharedFunctionInfo::set_start_position(int start_position) {
5938 set_start_position_and_type((start_position << kStartPositionShift)
5939 | (start_position_and_type() & ~kStartPositionMask))
// SharedFunctionInfo's code pointer plus the packed counter fields
// (ic_age / deopt_count / opt_reenable_tries in `counters`, and
// opt_count / bailout reason in `opt_count_and_bailout_reason`).
// NOTE(review): closing braces and a few statement lines are missing
// throughout this span (extraction gaps); code is otherwise byte-identical.
5943 Code* SharedFunctionInfo::code() const {
5944 return Code::cast(READ_FIELD(this, kCodeOffset));
// Optimized code must not be installed here; it goes through ReplaceCode.
5948 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5949 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5950 WRITE_FIELD(this, kCodeOffset, value);
5951 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5955 void SharedFunctionInfo::ReplaceCode(Code* value) {
5956 // If the GC metadata field is already used then the function was
5957 // enqueued as a code flushing candidate and we remove it now.
5958 if (code()->gc_metadata() != NULL) {
5959 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5960 flusher->EvictCandidate(this);
5963 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5969 ScopeInfo* SharedFunctionInfo::scope_info() const {
5970 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5974 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5975 WriteBarrierMode mode) {
5976 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5977 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5980 reinterpret_cast<Object*>(value),
// Compiled iff code() is not the lazy-compile builtin (comparison line is
// part of the dropped text).
5985 bool SharedFunctionInfo::is_compiled() {
5987 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
// function_data holds either a FunctionTemplateInfo (API function) or a Smi
// (builtin function id).
5991 bool SharedFunctionInfo::IsApiFunction() {
5992 return function_data()->IsFunctionTemplateInfo();
5996 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5997 DCHECK(IsApiFunction());
5998 return FunctionTemplateInfo::cast(function_data());
6002 bool SharedFunctionInfo::HasBuiltinFunctionId() {
6003 return function_data()->IsSmi();
6007 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
6008 DCHECK(HasBuiltinFunctionId());
6009 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
6013 int SharedFunctionInfo::ic_age() {
6014 return ICAgeBits::decode(counters());
6018 void SharedFunctionInfo::set_ic_age(int ic_age) {
6019 set_counters(ICAgeBits::update(counters(), ic_age));
6023 int SharedFunctionInfo::deopt_count() {
6024 return DeoptCountBits::decode(counters());
6028 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
6029 set_counters(DeoptCountBits::update(counters(), deopt_count));
// Saturating-wrap increment within the bit-field's range.
6033 void SharedFunctionInfo::increment_deopt_count() {
6034 int value = counters();
6035 int deopt_count = DeoptCountBits::decode(value);
6036 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
6037 set_counters(DeoptCountBits::update(value, deopt_count));
6041 int SharedFunctionInfo::opt_reenable_tries() {
6042 return OptReenableTriesBits::decode(counters());
6046 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6047 set_counters(OptReenableTriesBits::update(counters(), tries));
6051 int SharedFunctionInfo::opt_count() {
6052 return OptCountBits::decode(opt_count_and_bailout_reason());
6056 void SharedFunctionInfo::set_opt_count(int opt_count) {
6057 set_opt_count_and_bailout_reason(
6058 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6062 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
6063 BailoutReason reason = static_cast<BailoutReason>(
6064 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6069 bool SharedFunctionInfo::has_deoptimization_support() {
6070 Code* code = this->code();
6071 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
// Re-enables optimization when the try counter reaches a power of two >= 16,
// giving exponential back-off for functions that keep deoptimizing.
6075 void SharedFunctionInfo::TryReenableOptimization() {
6076 int tries = opt_reenable_tries();
6077 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6078 // We reenable optimization whenever the number of tries is a large
6079 // enough power of 2.
6080 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6081 set_optimization_disabled(false);
6084 code()->set_optimizable(true);
// JSFunction predicates plus the code-entry accessors. JSFunction stores a
// direct pointer to the code's entry address (kCodeEntryOffset) rather than a
// tagged Code pointer, hence the intptr read/write and the special
// RecordWriteOfCodeEntry barrier.
// NOTE(review): closing braces and some return lines are missing throughout
// (extraction gaps); code otherwise byte-identical.
6089 bool JSFunction::IsBuiltin() {
6090 return context()->global_object()->IsJSBuiltinsObject();
6094 bool JSFunction::IsFromNativeScript() {
6095 Object* script = shared()->script();
6096 bool native = script->IsScript() &&
6097 Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
6098 DCHECK(!IsBuiltin() || native); // All builtins are also native.
6103 bool JSFunction::IsFromExtensionScript() {
6104 Object* script = shared()->script();
6105 return script->IsScript() &&
6106 Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
// Needs adaption unless the shared info opted out via the sentinel
// (see SharedFunctionInfo::DontAdaptArguments).
6110 bool JSFunction::NeedsArgumentsAdaption() {
6111 return shared()->formal_parameter_count() !=
6112 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
6116 bool JSFunction::IsOptimized() {
6117 return code()->kind() == Code::OPTIMIZED_FUNCTION;
6121 bool JSFunction::IsOptimizable() {
6122 return code()->kind() == Code::FUNCTION && code()->optimizable();
// "Marked" states are represented by pointing code() at specific builtins.
6126 bool JSFunction::IsMarkedForOptimization() {
6127 return code() == GetIsolate()->builtins()->builtin(
6128 Builtins::kCompileOptimized);
6132 bool JSFunction::IsMarkedForConcurrentOptimization() {
6133 return code() == GetIsolate()->builtins()->builtin(
6134 Builtins::kCompileOptimizedConcurrent);
6138 bool JSFunction::IsInOptimizationQueue() {
6139 return code() == GetIsolate()->builtins()->builtin(
6140 Builtins::kInOptimizationQueue);
6144 bool JSFunction::IsInobjectSlackTrackingInProgress() {
6145 return has_initial_map() &&
6146 initial_map()->construction_count() != JSFunction::kNoSlackTracking;
6150 Code* JSFunction::code() {
6152 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6156 void JSFunction::set_code(Code* value) {
6157 DCHECK(!GetHeap()->InNewSpace(value));
6158 Address entry = value->entry();
6159 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6160 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6162 HeapObject::RawField(this, kCodeEntryOffset),
6167 void JSFunction::set_code_no_write_barrier(Code* value) {
6168 DCHECK(!GetHeap()->InNewSpace(value));
6169 Address entry = value->entry();
6170 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
// Swaps in new code and keeps the native context's optimized-function list
// consistent with the optimized/unoptimized state transition.
6174 void JSFunction::ReplaceCode(Code* code) {
6175 bool was_optimized = IsOptimized();
6176 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6178 if (was_optimized && is_optimized) {
6179 shared()->EvictFromOptimizedCodeMap(this->code(),
6180 "Replacing with another optimized code");
6185 // Add/remove the function from the list of optimized functions for this
6186 // context based on the state change.
6187 if (!was_optimized && is_optimized) {
6188 context()->native_context()->AddOptimizedFunction(this);
6190 if (was_optimized && !is_optimized) {
6191 // TODO(titzer): linear in the number of optimized functions; fix!
6192 context()->native_context()->RemoveOptimizedFunction(this);
// JSFunction context, prototype/initial-map, and literals/bindings accessors.
// prototype_or_initial_map is overloaded: a Map once the initial map exists,
// otherwise the prototype object (or the hole). literals_or_bindings is
// likewise overloaded based on shared()->bound().
// NOTE(review): closing braces are missing throughout (extraction gaps).
6197 Context* JSFunction::context() {
6198 return Context::cast(READ_FIELD(this, kContextOffset));
6202 JSObject* JSFunction::global_proxy() {
6203 return context()->global_proxy();
6207 void JSFunction::set_context(Object* value) {
6208 DCHECK(value->IsUndefined() || value->IsContext());
6209 WRITE_FIELD(this, kContextOffset, value);
6210 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6213 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6214 kPrototypeOrInitialMapOffset)
6217 Map* JSFunction::initial_map() {
6218 return Map::cast(prototype_or_initial_map());
6222 bool JSFunction::has_initial_map() {
6223 return prototype_or_initial_map()->IsMap();
6227 bool JSFunction::has_instance_prototype() {
6228 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6232 bool JSFunction::has_prototype() {
6233 return map()->has_non_instance_prototype() || has_instance_prototype();
6237 Object* JSFunction::instance_prototype() {
6238 DCHECK(has_instance_prototype());
6239 if (has_initial_map()) return initial_map()->prototype();
6240 // When there is no initial map and the prototype is a JSObject, the
6241 // initial map field is used for the prototype field.
6242 return prototype_or_initial_map();
6246 Object* JSFunction::prototype() {
6247 DCHECK(has_prototype());
6248 // If the function's prototype property has been set to a non-JSObject
6249 // value, that value is stored in the constructor field of the map.
6250 if (map()->has_non_instance_prototype()) return map()->constructor();
6251 return instance_prototype();
6255 bool JSFunction::should_have_prototype() {
6256 return map()->function_with_prototype();
// Compiled iff code() differs from the lazy-compile builtin (comparison line
// is part of the dropped text).
6260 bool JSFunction::is_compiled() {
6262 GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
6266 FixedArray* JSFunction::literals() {
6267 DCHECK(!shared()->bound());
6268 return literals_or_bindings();
6272 void JSFunction::set_literals(FixedArray* literals) {
6273 DCHECK(!shared()->bound());
6274 set_literals_or_bindings(literals);
6278 FixedArray* JSFunction::function_bindings() {
6279 DCHECK(shared()->bound());
6280 return literals_or_bindings();
6284 void JSFunction::set_function_bindings(FixedArray* bindings) {
6285 DCHECK(shared()->bound());
6286 // Bound function literal may be initialized to the empty fixed array
6287 // before the bindings are set.
6288 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6289 bindings->map() == GetHeap()->fixed_cow_array_map());
6290 set_literals_or_bindings(bindings);
6294 int JSFunction::NumberOfLiterals() {
6295 DCHECK(!shared()->bound());
6296 return literals()->length();
// JSBuiltinsObject keeps per-id slots for JavaScript builtins (function and
// code), addressed by computed offsets rather than named fields.
// NOTE(review): closing braces are missing throughout (extraction gaps).
6300 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
6301 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6302 return READ_FIELD(this, OffsetOfFunctionWithId(id));
6306 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
6308 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6309 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
6310 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
6314 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
6315 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6316 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
6320 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
6322 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6323 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
6324 DCHECK(!GetHeap()->InNewSpace(value));
// Proxy trap/handler fields.
6328 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6329 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6330 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6331 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
// Fills every field of a freshly allocated proxy with `value`; skipping the
// write barrier is safe only because the value is asserted not to be in new
// space.
6334 void JSProxy::InitializeBody(int object_size, Object* value) {
6335 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6336 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6337 WRITE_FIELD(this, offset, value);
6342 ACCESSORS(JSCollection, table, Object, kTableOffset)
// Accessors for OrderedHashTableIterator's table/index/kind fields, templated
// over the concrete iterator/table types.
6345 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6346 template<class Derived, class TableType> \
6347 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6348 return type::cast(READ_FIELD(this, offset)); \
6350 template<class Derived, class TableType> \
6351 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6352 type* value, WriteBarrierMode mode) { \
6353 WRITE_FIELD(this, offset, value); \
6354 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6357 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6358 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Smi, kIndexOffset)
6359 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Smi, kKindOffset)
6361 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6364 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6365 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
// Foreign wraps a raw external address stored as an intptr field.
// NOTE(review): closing braces are missing throughout (extraction gaps).
6368 Address Foreign::foreign_address() {
6369 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6373 void Foreign::set_foreign_address(Address value) {
6374 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
// Generator object state: continuation encodes suspended (>0), closed, or
// executing via the sentinel constants asserted below.
6378 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6379 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6380 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6381 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6382 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6383 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
6385 bool JSGeneratorObject::is_suspended() {
6386 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6387 DCHECK_EQ(kGeneratorClosed, 0);
6388 return continuation() > 0;
6391 bool JSGeneratorObject::is_closed() {
6392 return continuation() == kGeneratorClosed;
6395 bool JSGeneratorObject::is_executing() {
6396 return continuation() == kGeneratorExecuting;
6399 ACCESSORS(JSModule, context, Object, kContextOffset)
6400 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6403 ACCESSORS(JSValue, value, Object, kValueOffset)
// HeapNumber::cast is hand-written (not CAST_ACCESSOR) because it must also
// accept MutableHeapNumber instances.
6406 HeapNumber* HeapNumber::cast(Object* object) {
6407 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6408 return reinterpret_cast<HeapNumber*>(object);
6412 const HeapNumber* HeapNumber::cast(const Object* object) {
6413 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6414 return reinterpret_cast<const HeapNumber*>(object);
// JSDate caches broken-down time components alongside the raw value.
6418 ACCESSORS(JSDate, value, Object, kValueOffset)
6419 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6420 ACCESSORS(JSDate, year, Object, kYearOffset)
6421 ACCESSORS(JSDate, month, Object, kMonthOffset)
6422 ACCESSORS(JSDate, day, Object, kDayOffset)
6423 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6424 ACCESSORS(JSDate, hour, Object, kHourOffset)
6425 ACCESSORS(JSDate, min, Object, kMinOffset)
6426 ACCESSORS(JSDate, sec, Object, kSecOffset)
6429 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6430 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6431 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6432 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6433 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6434 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
// Code object header fields.
6437 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6438 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6439 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6440 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6441 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6442 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6443 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
// Code object helpers. The type-feedback slot is overloaded: a Smi stub key
// for code stubs/ICs, a TypeFeedbackInfo object for full-codegen FUNCTION
// code — hence the kind() DCHECKs below.
// NOTE(review): closing braces are missing throughout (extraction gaps).
6446 void Code::WipeOutHeader() {
6447 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6448 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6449 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6450 WRITE_FIELD(this, kConstantPoolOffset, NULL);
6451 // Do not wipe out major/minor keys on a code stub or IC
6452 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6453 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6458 Object* Code::type_feedback_info() {
6459 DCHECK(kind() == FUNCTION);
6460 return raw_type_feedback_info();
6464 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6465 DCHECK(kind() == FUNCTION);
6466 set_raw_type_feedback_info(value, mode);
6467 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6472 uint32_t Code::stub_key() {
6473 DCHECK(IsCodeStubOrIC());
6474 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6475 return static_cast<uint32_t>(smi_key->value());
6479 void Code::set_stub_key(uint32_t key) {
6480 DCHECK(IsCodeStubOrIC());
6481 set_raw_type_feedback_info(Smi::FromInt(key));
6485 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6486 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
// Instructions start immediately after the Code header.
6489 byte* Code::instruction_start() {
6490 return FIELD_ADDR(this, kHeaderSize);
6494 byte* Code::instruction_end() {
6495 return instruction_start() + instruction_size();
6499 int Code::body_size() {
6500 return RoundUp(instruction_size(), kObjectAlignment);
6504 ByteArray* Code::unchecked_relocation_info() {
6505 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6509 byte* Code::relocation_start() {
6510 return unchecked_relocation_info()->GetDataStartAddress();
6514 int Code::relocation_size() {
6515 return unchecked_relocation_info()->length();
6519 byte* Code::entry() {
6520 return instruction_start();
6524 bool Code::contains(byte* inner_pointer) {
6525 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
// JSArray / JSArrayBuffer / typed-array views / JSRegExp accessors.
// NOTE(review): closing braces and some return lines are missing throughout
// (extraction gaps); code otherwise byte-identical.
6529 ACCESSORS(JSArray, length, Object, kLengthOffset)
// The backing store is an untagged external pointer, stored as intptr.
6532 void* JSArrayBuffer::backing_store() const {
6533 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6534 return reinterpret_cast<void*>(ptr);
6538 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6539 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6540 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6544 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6545 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
// Boolean bits packed into the buffer's flag Smi.
6548 bool JSArrayBuffer::is_external() {
6549 return BooleanBit::get(flag(), kIsExternalBit);
6553 void JSArrayBuffer::set_is_external(bool value) {
6554 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6558 bool JSArrayBuffer::should_be_freed() {
6559 return BooleanBit::get(flag(), kShouldBeFreed);
6563 void JSArrayBuffer::set_should_be_freed(bool value) {
6564 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6568 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6569 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6572 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6573 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6574 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6575 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6576 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
// JSRegExp::data is either undefined (not compiled) or a FixedArray whose
// indexed slots hold the tag, source, flags, and implementation data.
6578 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6581 JSRegExp::Type JSRegExp::TypeTag() {
6582 Object* data = this->data();
6583 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6584 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6585 return static_cast<JSRegExp::Type>(smi->value());
6589 int JSRegExp::CaptureCount() {
6590 switch (TypeTag()) {
6594 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6602 JSRegExp::Flags JSRegExp::GetFlags() {
6603 DCHECK(this->data()->IsFixedArray());
6604 Object* data = this->data();
6605 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6606 return Flags(smi->value());
6610 String* JSRegExp::Pattern() {
6611 DCHECK(this->data()->IsFixedArray());
6612 Object* data = this->data();
6613 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
// Only implementation slots (>= kDataIndex) may be written via SetDataAt.
6618 Object* JSRegExp::DataAt(int index) {
6619 DCHECK(TypeTag() != NOT_COMPILED);
6620 return FixedArray::cast(data())->get(index);
6624 void JSRegExp::SetDataAt(int index, Object* value) {
6625 DCHECK(TypeTag() != NOT_COMPILED);
6626 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6627 FixedArray::cast(data())->set(index, value);
// Returns the elements kind recorded in the map. In debug builds it also
// cross-checks that the actual elements backing store is consistent with that
// kind (the DCHECKs below are the debug-only body; the surrounding #ifdef and
// the final return were dropped by the extraction).
6631 ElementsKind JSObject::GetElementsKind() {
6632 ElementsKind kind = map()->elements_kind();
6634 FixedArrayBase* fixed_array =
6635 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6637 // If a GC was caused while constructing this object, the elements
6638 // pointer may point to a one pointer filler map.
6639 if (ElementsAreSafeToExamine()) {
6640 Map* map = fixed_array->map();
6641 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6642 (map == GetHeap()->fixed_array_map() ||
6643 map == GetHeap()->fixed_cow_array_map())) ||
6644 (IsFastDoubleElementsKind(kind) &&
6645 (fixed_array->IsFixedDoubleArray() ||
6646 fixed_array == GetHeap()->empty_fixed_array())) ||
6647 (kind == DICTIONARY_ELEMENTS &&
6648 fixed_array->IsFixedArray() &&
6649 fixed_array->IsDictionary()) ||
6650 (kind > DICTIONARY_ELEMENTS));
6651 DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6652 (elements()->IsFixedArray() && elements()->length() >= 2));
// Elements-kind predicates, all thin wrappers over GetElementsKind(), plus
// the macro-generated per-type checks for external and fixed typed arrays.
// NOTE(review): closing braces and some `return false;` lines are missing
// (extraction gaps); code otherwise byte-identical.
6659 ElementsAccessor* JSObject::GetElementsAccessor() {
6660 return ElementsAccessor::ForKind(GetElementsKind());
6664 bool JSObject::HasFastObjectElements() {
6665 return IsFastObjectElementsKind(GetElementsKind());
6669 bool JSObject::HasFastSmiElements() {
6670 return IsFastSmiElementsKind(GetElementsKind());
6674 bool JSObject::HasFastSmiOrObjectElements() {
6675 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6679 bool JSObject::HasFastDoubleElements() {
6680 return IsFastDoubleElementsKind(GetElementsKind());
6684 bool JSObject::HasFastHoleyElements() {
6685 return IsFastHoleyElementsKind(GetElementsKind());
6689 bool JSObject::HasFastElements() {
6690 return IsFastElementsKind(GetElementsKind());
6694 bool JSObject::HasDictionaryElements() {
6695 return GetElementsKind() == DICTIONARY_ELEMENTS;
6699 bool JSObject::HasSloppyArgumentsElements() {
6700 return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
6704 bool JSObject::HasExternalArrayElements() {
6705 HeapObject* array = elements();
6706 DCHECK(array != NULL);
6707 return array->IsExternalArray();
// One HasExternal<Type>Elements predicate per typed-array element type,
// expanded via TYPED_ARRAYS.
6711 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6712 bool JSObject::HasExternal##Type##Elements() { \
6713 HeapObject* array = elements(); \
6714 DCHECK(array != NULL); \
6715 if (!array->IsHeapObject()) \
6717 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6720 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6722 #undef EXTERNAL_ELEMENTS_CHECK
6725 bool JSObject::HasFixedTypedArrayElements() {
6726 HeapObject* array = elements();
6727 DCHECK(array != NULL);
6728 return array->IsFixedTypedArrayBase();
// Same pattern for on-heap fixed typed arrays.
6732 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6733 bool JSObject::HasFixed##Type##Elements() { \
6734 HeapObject* array = elements(); \
6735 DCHECK(array != NULL); \
6736 if (!array->IsHeapObject()) \
6738 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6741 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6743 #undef FIXED_TYPED_ELEMENTS_CHECK
6746 bool JSObject::HasNamedInterceptor() {
6747 return map()->has_named_interceptor();
6751 bool JSObject::HasIndexedInterceptor() {
6752 return map()->has_indexed_interceptor();
6756 NameDictionary* JSObject::property_dictionary() {
6757 DCHECK(!HasFastProperties());
6758 return NameDictionary::cast(properties());
6762 SeededNumberDictionary* JSObject::element_dictionary() {
6763 DCHECK(HasDictionaryElements());
6764 return SeededNumberDictionary::cast(elements());
6768 bool Name::IsHashFieldComputed(uint32_t field) {
6769 return (field & kHashNotComputedMask) == 0;
6773 bool Name::HasHashCode() {
6774 return IsHashFieldComputed(hash_field());
6778 uint32_t Name::Hash() {
6779 // Fast case: has hash code already been computed?
6780 uint32_t field = hash_field();
6781 if (IsHashFieldComputed(field)) return field >> kHashShift;
6782 // Slow case: compute hash code and set it. Has to be a string.
6783 return String::cast(this)->ComputeAndSetHash();
6787 StringHasher::StringHasher(int length, uint32_t seed)
6789 raw_running_hash_(seed),
6791 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6792 is_first_char_(true) {
6793 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6797 bool StringHasher::has_trivial_hash() {
6798 return length_ > String::kMaxHashCalcLength;
6802 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6804 running_hash += (running_hash << 10);
6805 running_hash ^= (running_hash >> 6);
6806 return running_hash;
6810 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6811 running_hash += (running_hash << 3);
6812 running_hash ^= (running_hash >> 11);
6813 running_hash += (running_hash << 15);
6814 if ((running_hash & String::kHashBitMask) == 0) {
6817 return running_hash;
6821 void StringHasher::AddCharacter(uint16_t c) {
6822 // Use the Jenkins one-at-a-time hash function to update the hash
6823 // for the given character.
6824 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6828 bool StringHasher::UpdateIndex(uint16_t c) {
6829 DCHECK(is_array_index_);
6830 if (c < '0' || c > '9') {
6831 is_array_index_ = false;
6835 if (is_first_char_) {
6836 is_first_char_ = false;
6837 if (c == '0' && length_ > 1) {
6838 is_array_index_ = false;
6842 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6843 is_array_index_ = false;
6846 array_index_ = array_index_ * 10 + d;
6851 template<typename Char>
6852 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6853 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6855 if (is_array_index_) {
6856 for (; i < length; i++) {
6857 AddCharacter(chars[i]);
6858 if (!UpdateIndex(chars[i])) {
6864 for (; i < length; i++) {
6865 DCHECK(!is_array_index_);
6866 AddCharacter(chars[i]);
6871 template <typename schar>
6872 uint32_t StringHasher::HashSequentialString(const schar* chars,
6875 StringHasher hasher(length, seed);
6876 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6877 return hasher.GetHashField();
6881 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6882 IteratingStringHasher hasher(string->length(), seed);
6884 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6885 ConsString* cons_string = String::VisitFlat(&hasher, string);
6886 // The string was flat.
6887 if (cons_string == NULL) return hasher.GetHashField();
6888 // This is a ConsString, iterate across it.
6889 ConsStringIteratorOp op(cons_string);
6891 while (NULL != (string = op.Next(&offset))) {
6892 String::VisitFlat(&hasher, string, offset);
6894 return hasher.GetHashField();
6898 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6900 AddCharacters(chars, length);
6904 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6906 AddCharacters(chars, length);
6910 bool Name::AsArrayIndex(uint32_t* index) {
6911 return IsString() && String::cast(this)->AsArrayIndex(index);
6915 bool String::AsArrayIndex(uint32_t* index) {
6916 uint32_t field = hash_field();
6917 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6920 return SlowAsArrayIndex(index);
6924 void String::SetForwardedInternalizedString(String* canonical) {
6925 DCHECK(IsInternalizedString());
6926 DCHECK(HasHashCode());
6927 if (canonical == this) return; // No need to forward.
6928 DCHECK(SlowEquals(canonical));
6929 DCHECK(canonical->IsInternalizedString());
6930 DCHECK(canonical->HasHashCode());
6931 WRITE_FIELD(this, kHashFieldOffset, canonical);
6932 // Setting the hash field to a tagged value sets the LSB, causing the hash
6933 // code to be interpreted as uninitialized. We use this fact to recognize
6934 // that we have a forwarded string.
6935 DCHECK(!HasHashCode());
6939 String* String::GetForwardedInternalizedString() {
6940 DCHECK(IsInternalizedString());
6941 if (HasHashCode()) return this;
6942 String* canonical = String::cast(READ_FIELD(this, kHashFieldOffset));
6943 DCHECK(canonical->IsInternalizedString());
6944 DCHECK(SlowEquals(canonical));
6945 DCHECK(canonical->HasHashCode());
6950 Object* JSReceiver::GetConstructor() {
6951 return map()->constructor();
6955 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
6956 Handle<Name> name) {
6957 if (object->IsJSProxy()) {
6958 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6959 return JSProxy::HasPropertyWithHandler(proxy, name);
6961 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
6962 if (!result.has_value) return Maybe<bool>();
6963 return maybe(result.value != ABSENT);
6967 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
6968 Handle<Name> name) {
6969 if (object->IsJSProxy()) {
6970 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6971 return JSProxy::HasPropertyWithHandler(proxy, name);
6973 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
6974 if (!result.has_value) return Maybe<bool>();
6975 return maybe(result.value != ABSENT);
6979 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
6980 Handle<JSReceiver> object, Handle<Name> key) {
6982 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6983 return GetElementAttribute(object, index);
6985 LookupIterator it(object, key);
6986 return GetPropertyAttributes(&it);
6990 Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
6991 Handle<JSReceiver> object, uint32_t index) {
6992 if (object->IsJSProxy()) {
6993 return JSProxy::GetElementAttributeWithHandler(
6994 Handle<JSProxy>::cast(object), object, index);
6996 return JSObject::GetElementAttributeWithReceiver(
6997 Handle<JSObject>::cast(object), object, index, true);
7001 bool JSGlobalObject::IsDetached() {
7002 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7006 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
7007 const PrototypeIterator iter(this->GetIsolate(),
7008 const_cast<JSGlobalProxy*>(this));
7009 return iter.GetCurrent() != global;
7013 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
7014 return object->IsJSProxy()
7015 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
7016 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
7020 Object* JSReceiver::GetIdentityHash() {
7022 ? JSProxy::cast(this)->GetIdentityHash()
7023 : JSObject::cast(this)->GetIdentityHash();
7027 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7028 if (object->IsJSProxy()) {
7029 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7030 return JSProxy::HasElementWithHandler(proxy, index);
7032 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
7033 Handle<JSObject>::cast(object), object, index, true);
7034 if (!result.has_value) return Maybe<bool>();
7035 return maybe(result.value != ABSENT);
7039 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
7041 if (object->IsJSProxy()) {
7042 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
7043 return JSProxy::HasElementWithHandler(proxy, index);
7045 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
7046 Handle<JSObject>::cast(object), object, index, false);
7047 if (!result.has_value) return Maybe<bool>();
7048 return maybe(result.value != ABSENT);
7052 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
7053 Handle<JSReceiver> object, uint32_t index) {
7054 if (object->IsJSProxy()) {
7055 return JSProxy::GetElementAttributeWithHandler(
7056 Handle<JSProxy>::cast(object), object, index);
7058 return JSObject::GetElementAttributeWithReceiver(
7059 Handle<JSObject>::cast(object), object, index, false);
7063 bool AccessorInfo::all_can_read() {
7064 return BooleanBit::get(flag(), kAllCanReadBit);
7068 void AccessorInfo::set_all_can_read(bool value) {
7069 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7073 bool AccessorInfo::all_can_write() {
7074 return BooleanBit::get(flag(), kAllCanWriteBit);
7078 void AccessorInfo::set_all_can_write(bool value) {
7079 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7083 PropertyAttributes AccessorInfo::property_attributes() {
7084 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
7088 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7089 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
7093 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7094 if (!HasExpectedReceiverType()) return true;
7095 if (!receiver->IsJSObject()) return false;
7096 return FunctionTemplateInfo::cast(expected_receiver_type())
7097 ->IsTemplateFor(JSObject::cast(receiver)->map());
7101 void ExecutableAccessorInfo::clear_setter() {
7102 set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
7106 template<typename Derived, typename Shape, typename Key>
7107 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7109 Handle<Object> value) {
7110 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7114 template<typename Derived, typename Shape, typename Key>
7115 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7117 Handle<Object> value,
7118 PropertyDetails details) {
7119 DCHECK(!key->IsName() ||
7120 details.IsDeleted() ||
7121 details.dictionary_index() > 0);
7122 int index = DerivedHashTable::EntryToIndex(entry);
7123 DisallowHeapAllocation no_gc;
7124 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
7125 FixedArray::set(index, *key, mode);
7126 FixedArray::set(index+1, *value, mode);
7127 FixedArray::set(index+2, details.AsSmi());
7131 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7132 DCHECK(other->IsNumber());
7133 return key == static_cast<uint32_t>(other->Number());
7137 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7138 return ComputeIntegerHash(key, 0);
7142 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7144 DCHECK(other->IsNumber());
7145 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7149 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7150 return ComputeIntegerHash(key, seed);
7154 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7157 DCHECK(other->IsNumber());
7158 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7162 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7163 return isolate->factory()->NewNumberFromUint(key);
7167 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7168 // We know that all entries in a hash table had their hash keys created.
7169 // Use that knowledge to have fast failure.
7170 if (key->Hash() != Name::cast(other)->Hash()) return false;
7171 return key->Equals(Name::cast(other));
7175 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7180 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7181 return Name::cast(other)->Hash();
7185 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7187 DCHECK(key->IsUniqueName());
7192 void NameDictionary::DoGenerateNewEnumerationIndices(
7193 Handle<NameDictionary> dictionary) {
7194 DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7198 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7199 return key->SameValue(other);
7203 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7204 return Smi::cast(key->GetHash())->value();
7208 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7210 return Smi::cast(other->GetHash())->value();
7214 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7215 Handle<Object> key) {
7220 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7221 Handle<ObjectHashTable> table, Handle<Object> key) {
7222 return DerivedHashTable::Shrink(table, key);
7226 template <int entrysize>
7227 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7228 return key->SameValue(other);
7232 template <int entrysize>
7233 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7234 intptr_t hash = reinterpret_cast<intptr_t>(*key);
7235 return (uint32_t)(hash & 0xFFFFFFFF);
7239 template <int entrysize>
7240 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7242 intptr_t hash = reinterpret_cast<intptr_t>(other);
7243 return (uint32_t)(hash & 0xFFFFFFFF);
7247 template <int entrysize>
7248 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7249 Handle<Object> key) {
7254 void Map::ClearCodeCache(Heap* heap) {
7255 // No write barrier is needed since empty_fixed_array is not in new space.
7256 // Please note this function is used during marking:
7257 // - MarkCompactCollector::MarkUnmarkedObject
7258 // - IncrementalMarking::Step
7259 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7260 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7264 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
7265 DCHECK(array->HasFastSmiOrObjectElements());
7266 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
7267 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
7268 if (elts->length() < required_size) {
7269 // Doubling in size would be overkill, but leave some slack to avoid
7270 // constantly growing.
7271 Expand(array, required_size + (required_size >> 3));
7272 // It's a performance benefit to keep a frequently used array in new-space.
7273 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
7274 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
7275 // Expand will allocate a new backing store in new space even if the size
7276 // we asked for isn't larger than what we had before.
7277 Expand(array, required_size);
7282 void JSArray::set_length(Smi* length) {
7283 // Don't need a write barrier for a Smi.
7284 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7288 bool JSArray::AllowsSetElementsLength() {
7289 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7290 DCHECK(result == !HasExternalArrayElements());
7295 void JSArray::SetContent(Handle<JSArray> array,
7296 Handle<FixedArrayBase> storage) {
7297 EnsureCanContainElements(array, storage, storage->length(),
7298 ALLOW_COPIED_DOUBLE_ELEMENTS);
7300 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7301 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7302 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7303 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7304 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7305 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7306 array->set_elements(*storage);
7307 array->set_length(Smi::FromInt(storage->length()));
7311 Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
7312 return isolate->factory()->uninitialized_symbol();
7316 Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
7317 return isolate->factory()->megamorphic_symbol();
7321 Handle<Object> TypeFeedbackInfo::MonomorphicArraySentinel(Isolate* isolate,
7322 ElementsKind elements_kind) {
7323 return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
7327 Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) {
7328 return heap->uninitialized_symbol();
7332 int TypeFeedbackInfo::ic_total_count() {
7333 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7334 return ICTotalCountField::decode(current);
7338 void TypeFeedbackInfo::set_ic_total_count(int count) {
7339 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7340 value = ICTotalCountField::update(value,
7341 ICTotalCountField::decode(count));
7342 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7346 int TypeFeedbackInfo::ic_with_type_info_count() {
7347 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7348 return ICsWithTypeInfoCountField::decode(current);
7352 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7353 if (delta == 0) return;
7354 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7355 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7356 // We can get negative count here when the type-feedback info is
7357 // shared between two code objects. The can only happen when
7358 // the debugger made a shallow copy of code object (see Heap::CopyCode).
7359 // Since we do not optimize when the debugger is active, we can skip
7360 // this counter update.
7361 if (new_count >= 0) {
7362 new_count &= ICsWithTypeInfoCountField::kMask;
7363 value = ICsWithTypeInfoCountField::update(value, new_count);
7364 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7369 int TypeFeedbackInfo::ic_generic_count() {
7370 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7374 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7375 if (delta == 0) return;
7376 int new_count = ic_generic_count() + delta;
7377 if (new_count >= 0) {
7378 new_count &= ~Smi::kMinValue;
7379 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7384 void TypeFeedbackInfo::initialize_storage() {
7385 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7386 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7387 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7391 void TypeFeedbackInfo::change_own_type_change_checksum() {
7392 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7393 int checksum = OwnTypeChangeChecksum::decode(value);
7394 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7395 value = OwnTypeChangeChecksum::update(value, checksum);
7396 // Ensure packed bit field is in Smi range.
7397 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7398 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7399 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7403 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7404 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7405 int mask = (1 << kTypeChangeChecksumBits) - 1;
7406 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7407 // Ensure packed bit field is in Smi range.
7408 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7409 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7410 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7414 int TypeFeedbackInfo::own_type_change_checksum() {
7415 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7416 return OwnTypeChangeChecksum::decode(value);
7420 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7421 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7422 int mask = (1 << kTypeChangeChecksumBits) - 1;
7423 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
// Smi-backed getter/setter pair for AliasedArgumentsEntry's context slot.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7430 Relocatable::Relocatable(Isolate* isolate) {
7432 prev_ = isolate->relocatable_top();
7433 isolate->set_relocatable_top(this);
7437 Relocatable::~Relocatable() {
7438 DCHECK_EQ(isolate_->relocatable_top(), this);
7439 isolate_->set_relocatable_top(prev_);
7443 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7444 return map->instance_size();
7448 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7449 v->VisitExternalReference(
7450 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7454 template<typename StaticVisitor>
7455 void Foreign::ForeignIterateBody() {
7456 StaticVisitor::VisitExternalReference(
7457 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7461 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
7462 typedef v8::String::ExternalAsciiStringResource Resource;
7463 v->VisitExternalAsciiString(
7464 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7468 template<typename StaticVisitor>
7469 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
7470 typedef v8::String::ExternalAsciiStringResource Resource;
7471 StaticVisitor::VisitExternalAsciiString(
7472 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7476 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7477 typedef v8::String::ExternalStringResource Resource;
7478 v->VisitExternalTwoByteString(
7479 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7483 template<typename StaticVisitor>
7484 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7485 typedef v8::String::ExternalStringResource Resource;
7486 StaticVisitor::VisitExternalTwoByteString(
7487 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7491 template<int start_offset, int end_offset, int size>
7492 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7495 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7496 HeapObject::RawField(obj, end_offset));
7500 template<int start_offset>
7501 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7504 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7505 HeapObject::RawField(obj, object_size));
7509 template<class Derived, class TableType>
7510 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7511 TableType* table(TableType::cast(this->table()));
7512 int index = Smi::cast(this->index())->value();
7513 Object* key = table->KeyAt(index);
7514 DCHECK(!key->IsTheHole());
7519 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7520 array->set(0, CurrentKey());
7524 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7525 array->set(0, CurrentKey());
7526 array->set(1, CurrentValue());
7530 Object* JSMapIterator::CurrentValue() {
7531 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7532 int index = Smi::cast(this->index())->value();
7533 Object* value = table->ValueAt(index);
7534 DCHECK(!value->IsTheHole());
7540 #undef CAST_ACCESSOR
7541 #undef INT_ACCESSORS
7543 #undef ACCESSORS_TO_SMI
7544 #undef SMI_ACCESSORS
7545 #undef SYNCHRONIZED_SMI_ACCESSORS
7546 #undef NOBARRIER_SMI_ACCESSORS
7548 #undef BOOL_ACCESSORS
7550 #undef FIELD_ADDR_CONST
7552 #undef NOBARRIER_READ_FIELD
7554 #undef NOBARRIER_WRITE_FIELD
7555 #undef WRITE_BARRIER
7556 #undef CONDITIONAL_WRITE_BARRIER
7557 #undef READ_DOUBLE_FIELD
7558 #undef WRITE_DOUBLE_FIELD
7559 #undef READ_INT_FIELD
7560 #undef WRITE_INT_FIELD
7561 #undef READ_INTPTR_FIELD
7562 #undef WRITE_INTPTR_FIELD
7563 #undef READ_UINT32_FIELD
7564 #undef WRITE_UINT32_FIELD
7565 #undef READ_SHORT_FIELD
7566 #undef WRITE_SHORT_FIELD
7567 #undef READ_BYTE_FIELD
7568 #undef WRITE_BYTE_FIELD
7569 #undef NOBARRIER_READ_BYTE_FIELD
7570 #undef NOBARRIER_WRITE_BYTE_FIELD
7572 } } // namespace v8::internal
7574 #endif // V8_OBJECTS_INL_H_