1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts.h"
18 #include "src/conversions-inl.h"
19 #include "src/elements.h"
20 #include "src/factory.h"
21 #include "src/field-index-inl.h"
22 #include "src/heap/heap-inl.h"
23 #include "src/heap/heap.h"
24 #include "src/heap/incremental-marking.h"
25 #include "src/heap/objects-visiting.h"
26 #include "src/heap/spaces.h"
27 #include "src/heap/store-buffer.h"
28 #include "src/isolate.h"
29 #include "src/layout-descriptor-inl.h"
30 #include "src/lookup.h"
31 #include "src/objects.h"
32 #include "src/property.h"
33 #include "src/prototype.h"
34 #include "src/transitions-inl.h"
35 #include "src/type-feedback-vector-inl.h"
36 #include "src/v8memory.h"
41 PropertyDetails::PropertyDetails(Smi* smi) {
42 value_ = smi->value();
46 Smi* PropertyDetails::AsSmi() const {
47 // Ensure the upper 2 bits have the same value by sign extending it. This is
48 // necessary to be able to use the 31st bit of the property details.
49 int value = value_ << 1;
50 return Smi::FromInt(value >> 1);
51 }
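// Worked example: on a 32-bit target a Smi payload has only 31 bits, so bit
// 31 of value_ cannot be stored directly. For value_ = 0x40000000 (bit 30
// set), value_ << 1 gives 0x80000000 and the arithmetic shift back yields
// 0xC0000000: bits 31 and 30 now agree, the low 31 bits are unchanged, and
// the value round-trips through Smi::FromInt()/Smi::value() intact.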
54 int PropertyDetails::field_width_in_words() const {
55 DCHECK(location() == kField);
56 if (!FLAG_unbox_double_fields) return 1;
57 if (kDoubleSize == kPointerSize) return 1;
58 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
62 #define TYPE_CHECKER(type, instancetype) \
63 bool Object::Is##type() const { \
64 return Object::IsHeapObject() && \
65 HeapObject::cast(this)->map()->instance_type() == instancetype; \
69 #define CAST_ACCESSOR(type) \
70 type* type::cast(Object* object) { \
71 SLOW_DCHECK(object->Is##type()); \
72 return reinterpret_cast<type*>(object); \
74 const type* type::cast(const Object* object) { \
75 SLOW_DCHECK(object->Is##type()); \
76 return reinterpret_cast<const type*>(object); \
80 #define INT_ACCESSORS(holder, name, offset) \
81 int holder::name() const { return READ_INT_FIELD(this, offset); } \
82 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
85 #define ACCESSORS(holder, name, type, offset) \
86 type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
87 void holder::set_##name(type* value, WriteBarrierMode mode) { \
88 WRITE_FIELD(this, offset, value); \
89 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
90 }
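// Illustrative expansion: ACCESSORS(JSObject, properties, FixedArray,
// kPropertiesOffset), used further down in this file, defines roughly:
//
//   FixedArray* JSObject::properties() const {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kPropertiesOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset, value,
//                               mode);
//   }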
93 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
94 #define ACCESSORS_TO_SMI(holder, name, offset) \
95 Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
96 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
97 WRITE_FIELD(this, offset, value); \
101 // Getter that returns a Smi as an int and writes an int as a Smi.
102 #define SMI_ACCESSORS(holder, name, offset) \
103 int holder::name() const { \
104 Object* value = READ_FIELD(this, offset); \
105 return Smi::cast(value)->value(); \
107 void holder::set_##name(int value) { \
108 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
111 #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \
112 int holder::synchronized_##name() const { \
113 Object* value = ACQUIRE_READ_FIELD(this, offset); \
114 return Smi::cast(value)->value(); \
116 void holder::synchronized_set_##name(int value) { \
117 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
120 #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \
121 int holder::nobarrier_##name() const { \
122 Object* value = NOBARRIER_READ_FIELD(this, offset); \
123 return Smi::cast(value)->value(); \
125 void holder::nobarrier_set_##name(int value) { \
126 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
129 #define BOOL_GETTER(holder, field, name, offset) \
130 bool holder::name() const { \
131 return BooleanBit::get(field(), offset); \
135 #define BOOL_ACCESSORS(holder, field, name, offset) \
136 bool holder::name() const { \
137 return BooleanBit::get(field(), offset); \
139 void holder::set_##name(bool value) { \
140 set_##field(BooleanBit::set(field(), offset, value)); \
144 bool Object::IsFixedArrayBase() const {
145 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
146 IsFixedTypedArrayBase() || IsExternalArray();
150 // External objects are not extensible, so the map check is enough.
151 bool Object::IsExternal() const {
152 return Object::IsHeapObject() &&
153 HeapObject::cast(this)->map() ==
154 HeapObject::cast(this)->GetHeap()->external_map();
158 bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }
161 bool Object::IsSmi() const {
162 return HAS_SMI_TAG(this);
166 bool Object::IsHeapObject() const {
167 return Internals::HasHeapObjectTag(this);
171 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
172 TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
173 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
176 bool Object::IsString() const {
177 return Object::IsHeapObject()
178 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
182 bool Object::IsName() const {
183 return IsString() || IsSymbol();
187 bool Object::IsUniqueName() const {
188 return IsInternalizedString() || IsSymbol();
192 bool Object::IsSpecObject() const {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
198 bool Object::IsSpecFunction() const {
199 if (!Object::IsHeapObject()) return false;
200 InstanceType type = HeapObject::cast(this)->map()->instance_type();
201 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
205 bool Object::IsTemplateInfo() const {
206 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
210 bool Object::IsInternalizedString() const {
211 if (!this->IsHeapObject()) return false;
212 uint32_t type = HeapObject::cast(this)->map()->instance_type();
213 STATIC_ASSERT(kNotInternalizedTag != 0);
214 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
215 (kStringTag | kInternalizedTag);
219 bool Object::IsConsString() const {
220 if (!IsString()) return false;
221 return StringShape(String::cast(this)).IsCons();
225 bool Object::IsSlicedString() const {
226 if (!IsString()) return false;
227 return StringShape(String::cast(this)).IsSliced();
231 bool Object::IsSeqString() const {
232 if (!IsString()) return false;
233 return StringShape(String::cast(this)).IsSequential();
237 bool Object::IsSeqOneByteString() const {
238 if (!IsString()) return false;
239 return StringShape(String::cast(this)).IsSequential() &&
240 String::cast(this)->IsOneByteRepresentation();
244 bool Object::IsSeqTwoByteString() const {
245 if (!IsString()) return false;
246 return StringShape(String::cast(this)).IsSequential() &&
247 String::cast(this)->IsTwoByteRepresentation();
251 bool Object::IsExternalString() const {
252 if (!IsString()) return false;
253 return StringShape(String::cast(this)).IsExternal();
257 bool Object::IsExternalOneByteString() const {
258 if (!IsString()) return false;
259 return StringShape(String::cast(this)).IsExternal() &&
260 String::cast(this)->IsOneByteRepresentation();
264 bool Object::IsExternalTwoByteString() const {
265 if (!IsString()) return false;
266 return StringShape(String::cast(this)).IsExternal() &&
267 String::cast(this)->IsTwoByteRepresentation();
271 bool Object::HasValidElements() {
272 // Dictionary is covered under FixedArray.
273 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
274 IsFixedTypedArrayBase();
278 Handle<Object> Object::NewStorageFor(Isolate* isolate,
279 Handle<Object> object,
280 Representation representation) {
281 if (representation.IsSmi() && object->IsUninitialized()) {
282 return handle(Smi::FromInt(0), isolate);
283 }
284 if (!representation.IsDouble()) return object;
285 double value;
286 if (object->IsUninitialized()) {
287 value = 0;
288 } else if (object->IsMutableHeapNumber()) {
289 value = HeapNumber::cast(*object)->value();
290 } else {
291 value = object->Number();
292 }
293 return isolate->factory()->NewHeapNumber(value, MUTABLE);
294 }
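// Usage sketch (illustrative, not a call site taken from this file): when a
// field is tracked with Representation::Double(), its value is kept boxed in
// a MUTABLE HeapNumber, so a plain Smi must first be converted:
//
//   Handle<Object> storage = Object::NewStorageFor(
//       isolate, handle(Smi::FromInt(1), isolate), Representation::Double());
//   // storage now refers to a mutable HeapNumber holding 1.0.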
297 Handle<Object> Object::WrapForRead(Isolate* isolate,
298 Handle<Object> object,
299 Representation representation) {
300 DCHECK(!object->IsUninitialized());
301 if (!representation.IsDouble()) {
302 DCHECK(object->FitsRepresentation(representation));
303 return object;
304 }
305 return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
306 }
309 StringShape::StringShape(const String* str)
310 : type_(str->map()->instance_type()) {
312 DCHECK((type_ & kIsNotStringMask) == kStringTag);
316 StringShape::StringShape(Map* map)
317 : type_(map->instance_type()) {
319 DCHECK((type_ & kIsNotStringMask) == kStringTag);
323 StringShape::StringShape(InstanceType t)
324 : type_(static_cast<uint32_t>(t)) {
326 DCHECK((type_ & kIsNotStringMask) == kStringTag);
330 bool StringShape::IsInternalized() {
332 STATIC_ASSERT(kNotInternalizedTag != 0);
333 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
334 (kStringTag | kInternalizedTag);
338 bool String::IsOneByteRepresentation() const {
339 uint32_t type = map()->instance_type();
340 return (type & kStringEncodingMask) == kOneByteStringTag;
344 bool String::IsTwoByteRepresentation() const {
345 uint32_t type = map()->instance_type();
346 return (type & kStringEncodingMask) == kTwoByteStringTag;
350 bool String::IsOneByteRepresentationUnderneath() {
351 uint32_t type = map()->instance_type();
352 STATIC_ASSERT(kIsIndirectStringTag != 0);
353 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
355 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
356 case kOneByteStringTag:
357 return true;
358 case kTwoByteStringTag:
359 return false;
360 default: // Cons or sliced string. Need to go deeper.
361 return GetUnderlying()->IsOneByteRepresentation();
362 }
363 }
366 bool String::IsTwoByteRepresentationUnderneath() {
367 uint32_t type = map()->instance_type();
368 STATIC_ASSERT(kIsIndirectStringTag != 0);
369 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
371 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
372 case kOneByteStringTag:
373 return false;
374 case kTwoByteStringTag:
375 return true;
376 default: // Cons or sliced string. Need to go deeper.
377 return GetUnderlying()->IsTwoByteRepresentation();
378 }
379 }
382 bool String::HasOnlyOneByteChars() {
383 uint32_t type = map()->instance_type();
384 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
385 IsOneByteRepresentation();
389 bool StringShape::IsCons() {
390 return (type_ & kStringRepresentationMask) == kConsStringTag;
394 bool StringShape::IsSliced() {
395 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
399 bool StringShape::IsIndirect() {
400 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
404 bool StringShape::IsExternal() {
405 return (type_ & kStringRepresentationMask) == kExternalStringTag;
409 bool StringShape::IsSequential() {
410 return (type_ & kStringRepresentationMask) == kSeqStringTag;
414 StringRepresentationTag StringShape::representation_tag() {
415 uint32_t tag = (type_ & kStringRepresentationMask);
416 return static_cast<StringRepresentationTag>(tag);
420 uint32_t StringShape::encoding_tag() {
421 return type_ & kStringEncodingMask;
425 uint32_t StringShape::full_representation_tag() {
426 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
430 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
431 Internals::kFullStringRepresentationMask);
433 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
434 Internals::kStringEncodingMask);
437 bool StringShape::IsSequentialOneByte() {
438 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
442 bool StringShape::IsSequentialTwoByte() {
443 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
447 bool StringShape::IsExternalOneByte() {
448 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
452 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
453 Internals::kExternalOneByteRepresentationTag);
455 STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);
458 bool StringShape::IsExternalTwoByte() {
459 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
463 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
464 Internals::kExternalTwoByteRepresentationTag);
466 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
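// Usage sketch (illustrative): StringShape caches the instance type once so
// repeated predicate checks avoid reloading the map, e.g.:
//
//   StringShape shape(string);
//   if (shape.IsSequentialOneByte()) {
//     const uint8_t* chars = SeqOneByteString::cast(string)->GetChars();
//     // ... scan chars without further map loads ...
//   }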
469 uc32 FlatStringReader::Get(int index) {
470 if (is_one_byte_) {
471 return Get<uint8_t>(index);
472 } else {
473 return Get<uc16>(index);
474 }
475 }
478 template <typename Char>
479 Char FlatStringReader::Get(int index) {
480 DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
481 DCHECK(0 <= index && index <= length_);
482 if (sizeof(Char) == 1) {
483 return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
484 } else {
485 return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
486 }
487 }
490 Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
491 return key->AsHandle(isolate);
495 Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
497 return key->AsHandle(isolate);
501 Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
503 return key->AsHandle(isolate);
506 template <typename Char>
507 class SequentialStringKey : public HashTableKey {
509 explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
510 : string_(string), hash_field_(0), seed_(seed) { }
512 uint32_t Hash() OVERRIDE {
513 hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
514 string_.length(),
515 seed_);
517 uint32_t result = hash_field_ >> String::kHashShift;
518 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
519 return result;
520 }
523 uint32_t HashForObject(Object* other) OVERRIDE {
524 return String::cast(other)->Hash();
527 Vector<const Char> string_;
528 uint32_t hash_field_;
533 class OneByteStringKey : public SequentialStringKey<uint8_t> {
535 OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
536 : SequentialStringKey<uint8_t>(str, seed) { }
538 bool IsMatch(Object* string) OVERRIDE {
539 return String::cast(string)->IsOneByteEqualTo(string_);
542 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
546 class SeqOneByteSubStringKey : public HashTableKey {
548 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
549 : string_(string), from_(from), length_(length) {
550 DCHECK(string_->IsSeqOneByteString());
553 uint32_t Hash() OVERRIDE {
554 DCHECK(length_ >= 0);
555 DCHECK(from_ + length_ <= string_->length());
556 const uint8_t* chars = string_->GetChars() + from_;
557 hash_field_ = StringHasher::HashSequentialString(
558 chars, length_, string_->GetHeap()->HashSeed());
559 uint32_t result = hash_field_ >> String::kHashShift;
560 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
561 return result;
562 }
564 uint32_t HashForObject(Object* other) OVERRIDE {
565 return String::cast(other)->Hash();
568 bool IsMatch(Object* string) OVERRIDE;
569 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
572 Handle<SeqOneByteString> string_;
575 uint32_t hash_field_;
579 class TwoByteStringKey : public SequentialStringKey<uc16> {
581 explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
582 : SequentialStringKey<uc16>(str, seed) { }
584 bool IsMatch(Object* string) OVERRIDE {
585 return String::cast(string)->IsTwoByteEqualTo(string_);
588 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
592 // Utf8StringKey carries a vector of chars as key.
593 class Utf8StringKey : public HashTableKey {
595 explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
596 : string_(string), hash_field_(0), seed_(seed) { }
598 bool IsMatch(Object* string) OVERRIDE {
599 return String::cast(string)->IsUtf8EqualTo(string_);
602 uint32_t Hash() OVERRIDE {
603 if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
604 hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
605 uint32_t result = hash_field_ >> String::kHashShift;
606 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
607 return result;
608 }
610 uint32_t HashForObject(Object* other) OVERRIDE {
611 return String::cast(other)->Hash();
614 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
615 if (hash_field_ == 0) Hash();
616 return isolate->factory()->NewInternalizedStringFromUtf8(
617 string_, chars_, hash_field_);
620 Vector<const char> string_;
621 uint32_t hash_field_;
622 int chars_; // Caches the number of characters when computing the hash code.
623 };
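// Usage sketch (illustrative; the lookup protocol is only roughly indicated):
// these key classes let the string table probe for a string without
// allocating it first, e.g.:
//
//   Vector<const uint8_t> chars = ...;
//   OneByteStringKey key(chars, isolate->heap()->HashSeed());
//   // The table compares candidates via key.Hash() and key.IsMatch(), and
//   // only calls key.AsHandle(isolate) if no internalized string matches.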
627 bool Object::IsNumber() const {
628 return IsSmi() || IsHeapNumber();
632 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
633 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
636 bool Object::IsFiller() const {
637 if (!Object::IsHeapObject()) return false;
638 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
639 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
643 bool Object::IsExternalArray() const {
644 if (!Object::IsHeapObject())
645 return false;
646 InstanceType instance_type =
647 HeapObject::cast(this)->map()->instance_type();
648 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
649 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
653 #define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
654 TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
655 TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
657 TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
658 #undef TYPED_ARRAY_TYPE_CHECKER
661 bool Object::IsFixedTypedArrayBase() const {
662 if (!Object::IsHeapObject()) return false;
664 InstanceType instance_type =
665 HeapObject::cast(this)->map()->instance_type();
666 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
667 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
671 bool Object::IsJSReceiver() const {
672 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
673 return IsHeapObject() &&
674 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
678 bool Object::IsJSObject() const {
679 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
680 return IsHeapObject() &&
681 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
685 bool Object::IsJSProxy() const {
686 if (!Object::IsHeapObject()) return false;
687 return HeapObject::cast(this)->map()->IsJSProxyMap();
691 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
692 TYPE_CHECKER(JSSet, JS_SET_TYPE)
693 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
694 TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
695 TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
696 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
697 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
698 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
699 TYPE_CHECKER(Map, MAP_TYPE)
700 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
701 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
702 TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
703 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
706 bool Object::IsJSWeakCollection() const {
707 return IsJSWeakMap() || IsJSWeakSet();
711 bool Object::IsDescriptorArray() const {
712 return IsFixedArray();
716 bool Object::IsArrayList() const { return IsFixedArray(); }
719 bool Object::IsLayoutDescriptor() const {
720 return IsSmi() || IsFixedTypedArrayBase();
724 bool Object::IsTransitionArray() const {
725 return IsFixedArray();
729 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
732 bool Object::IsDeoptimizationInputData() const {
733 // Must be a fixed array.
734 if (!IsFixedArray()) return false;
736 // There's no sure way to detect the difference between a fixed array and
737 // a deoptimization data array. Since this is used for asserts we can
738 // check that the length is zero or else the fixed size plus a multiple of
740 int length = FixedArray::cast(this)->length();
741 if (length == 0) return true;
743 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
744 return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
748 bool Object::IsDeoptimizationOutputData() const {
749 if (!IsFixedArray()) return false;
750 // There's actually no way to see the difference between a fixed array and
751 // a deoptimization data array. Since this is used for asserts we can check
752 // that the length is plausible though.
753 if (FixedArray::cast(this)->length() % 2 != 0) return false;
754 return true;
755 }
758 bool Object::IsHandlerTable() const {
759 if (!IsFixedArray()) return false;
760 // There's actually no way to see the difference between a fixed array and
761 // a handler table array.
762 return true;
763 }
766 bool Object::IsDependentCode() const {
767 if (!IsFixedArray()) return false;
768 // There's actually no way to see the difference between a fixed array and
769 // a dependent codes array.
770 return true;
771 }
774 bool Object::IsContext() const {
775 if (!Object::IsHeapObject()) return false;
776 Map* map = HeapObject::cast(this)->map();
777 Heap* heap = map->GetHeap();
778 return (map == heap->function_context_map() ||
779 map == heap->catch_context_map() ||
780 map == heap->with_context_map() ||
781 map == heap->native_context_map() ||
782 map == heap->block_context_map() ||
783 map == heap->module_context_map() ||
784 map == heap->script_context_map());
788 bool Object::IsNativeContext() const {
789 return Object::IsHeapObject() &&
790 HeapObject::cast(this)->map() ==
791 HeapObject::cast(this)->GetHeap()->native_context_map();
795 bool Object::IsScriptContextTable() const {
796 if (!Object::IsHeapObject()) return false;
797 Map* map = HeapObject::cast(this)->map();
798 Heap* heap = map->GetHeap();
799 return map == heap->script_context_table_map();
803 bool Object::IsScopeInfo() const {
804 return Object::IsHeapObject() &&
805 HeapObject::cast(this)->map() ==
806 HeapObject::cast(this)->GetHeap()->scope_info_map();
810 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
813 template <> inline bool Is<JSFunction>(Object* obj) {
814 return obj->IsJSFunction();
818 TYPE_CHECKER(Code, CODE_TYPE)
819 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
820 TYPE_CHECKER(Cell, CELL_TYPE)
821 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
822 TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
823 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
824 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
825 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
826 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
827 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
828 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
831 bool Object::IsStringWrapper() const {
832 return IsJSValue() && JSValue::cast(this)->value()->IsString();
836 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
839 bool Object::IsBoolean() const {
840 return IsOddball() &&
841 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
845 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
846 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
847 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
848 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
851 bool Object::IsJSArrayBufferView() const {
852 return IsJSDataView() || IsJSTypedArray();
856 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
859 template <> inline bool Is<JSArray>(Object* obj) {
860 return obj->IsJSArray();
864 bool Object::IsHashTable() const {
865 return Object::IsHeapObject() &&
866 HeapObject::cast(this)->map() ==
867 HeapObject::cast(this)->GetHeap()->hash_table_map();
871 bool Object::IsWeakHashTable() const {
872 return IsHashTable();
876 bool Object::IsDictionary() const {
877 return IsHashTable() &&
878 this != HeapObject::cast(this)->GetHeap()->string_table();
882 bool Object::IsNameDictionary() const {
883 return IsDictionary();
887 bool Object::IsSeededNumberDictionary() const {
888 return IsDictionary();
892 bool Object::IsUnseededNumberDictionary() const {
893 return IsDictionary();
897 bool Object::IsStringTable() const {
898 return IsHashTable();
902 bool Object::IsJSFunctionResultCache() const {
903 if (!IsFixedArray()) return false;
904 const FixedArray* self = FixedArray::cast(this);
905 int length = self->length();
906 if (length < JSFunctionResultCache::kEntriesIndex) return false;
907 if ((length - JSFunctionResultCache::kEntriesIndex)
908 % JSFunctionResultCache::kEntrySize != 0) {
909 return false;
910 }
911 #ifdef VERIFY_HEAP
912 if (FLAG_verify_heap) {
913 // TODO(svenpanne) We use const_cast here and below to break our dependency
914 // cycle between the predicates and the verifiers. This can be removed when
915 // the verifiers are const-correct, too.
916 reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
917 JSFunctionResultCacheVerify();
918 }
919 #endif
920 return true;
921 }
924 bool Object::IsNormalizedMapCache() const {
925 return NormalizedMapCache::IsNormalizedMapCache(this);
929 int NormalizedMapCache::GetIndex(Handle<Map> map) {
930 return map->Hash() % NormalizedMapCache::kEntries;
934 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
935 if (!obj->IsFixedArray()) return false;
936 if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
937 return false;
938 }
939 #ifdef VERIFY_HEAP
940 if (FLAG_verify_heap) {
941 reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
942 NormalizedMapCacheVerify();
943 }
944 #endif
945 return true;
946 }
949 bool Object::IsCompilationCacheTable() const {
950 return IsHashTable();
954 bool Object::IsCodeCacheHashTable() const {
955 return IsHashTable();
959 bool Object::IsPolymorphicCodeCacheHashTable() const {
960 return IsHashTable();
964 bool Object::IsMapCache() const {
965 return IsHashTable();
969 bool Object::IsObjectHashTable() const {
970 return IsHashTable();
974 bool Object::IsOrderedHashTable() const {
975 return IsHeapObject() &&
976 HeapObject::cast(this)->map() ==
977 HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
981 bool Object::IsOrderedHashSet() const {
982 return IsOrderedHashTable();
986 bool Object::IsOrderedHashMap() const {
987 return IsOrderedHashTable();
991 bool Object::IsPrimitive() const {
992 return IsOddball() || IsNumber() || IsString();
996 bool Object::IsJSGlobalProxy() const {
997 bool result = IsHeapObject() &&
998 (HeapObject::cast(this)->map()->instance_type() ==
999 JS_GLOBAL_PROXY_TYPE);
1000 DCHECK(!result ||
1001 HeapObject::cast(this)->map()->is_access_check_needed());
1002 return result;
1003 }
1006 bool Object::IsGlobalObject() const {
1007 if (!IsHeapObject()) return false;
1009 InstanceType type = HeapObject::cast(this)->map()->instance_type();
1010 return type == JS_GLOBAL_OBJECT_TYPE ||
1011 type == JS_BUILTINS_OBJECT_TYPE;
1015 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
1016 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
1019 bool Object::IsUndetectableObject() const {
1020 return IsHeapObject()
1021 && HeapObject::cast(this)->map()->is_undetectable();
1025 bool Object::IsAccessCheckNeeded() const {
1026 if (!IsHeapObject()) return false;
1027 if (IsJSGlobalProxy()) {
1028 const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
1029 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
1030 return proxy->IsDetachedFrom(global);
1032 return HeapObject::cast(this)->map()->is_access_check_needed();
1036 bool Object::IsStruct() const {
1037 if (!IsHeapObject()) return false;
1038 switch (HeapObject::cast(this)->map()->instance_type()) {
1039 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1040 STRUCT_LIST(MAKE_STRUCT_CASE)
1041 #undef MAKE_STRUCT_CASE
1042 default: return false;
1047 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1048 bool Object::Is##Name() const { \
1049 return Object::IsHeapObject() \
1050 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1052 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
1053 #undef MAKE_STRUCT_PREDICATE
1056 bool Object::IsUndefined() const {
1057 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
1061 bool Object::IsNull() const {
1062 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
1066 bool Object::IsTheHole() const {
1067 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
1071 bool Object::IsException() const {
1072 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
1076 bool Object::IsUninitialized() const {
1077 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
1081 bool Object::IsTrue() const {
1082 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
1086 bool Object::IsFalse() const {
1087 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
1091 bool Object::IsArgumentsMarker() const {
1092 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
1096 double Object::Number() {
1097 DCHECK(IsNumber());
1098 return IsSmi()
1099 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
1100 : reinterpret_cast<HeapNumber*>(this)->value();
1101 }
1104 bool Object::IsNaN() const {
1105 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1109 bool Object::IsMinusZero() const {
1110 return this->IsHeapNumber() &&
1111 i::IsMinusZero(HeapNumber::cast(this)->value());
1115 MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
1116 if (object->IsSmi()) return Handle<Smi>::cast(object);
1117 if (object->IsHeapNumber()) {
1118 double value = Handle<HeapNumber>::cast(object)->value();
1119 int int_value = FastD2I(value);
1120 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1121 return handle(Smi::FromInt(int_value), isolate);
1124 return Handle<Smi>();
1128 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1129 Handle<Object> object) {
1130 return ToObject(
1131 isolate, object, handle(isolate->context()->native_context(), isolate));
1132 }
1135 bool Object::HasSpecificClassOf(String* name) {
1136 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1140 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
1141 Handle<Name> name) {
1142 LookupIterator it(object, name);
1143 return GetProperty(&it);
1147 MaybeHandle<Object> Object::GetElement(Isolate* isolate,
1148 Handle<Object> object,
1149 uint32_t index) {
1150 // GetElement can trigger a getter which can cause allocation.
1151 // This was not always the case. This DCHECK is here to catch
1152 // leftover incorrect uses.
1153 DCHECK(AllowHeapAllocation::IsAllowed());
1154 return Object::GetElementWithReceiver(isolate, object, object, index);
1158 Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
1159 Isolate* isolate, Handle<Object> receiver) {
1160 PrototypeIterator iter(isolate, receiver);
1161 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
1162 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
1163 return PrototypeIterator::GetCurrent(iter);
1164 }
1165 iter.Advance();
1166 }
1167 return PrototypeIterator::GetCurrent(iter);
1168 }
1171 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
1172 Handle<Name> name) {
1173 uint32_t index;
1174 Isolate* isolate = name->GetIsolate();
1175 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
1176 return GetProperty(object, name);
1180 MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
1181 Handle<Object> object,
1182 const char* name) {
1183 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
1184 DCHECK(!str.is_null());
1186 uint32_t index; // Assert that the name is not an array index.
1187 DCHECK(!str->AsArrayIndex(&index));
1189 return GetProperty(object, str);
1193 MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
1194 Handle<Object> receiver,
1195 uint32_t index) {
1196 return GetPropertyWithHandler(
1197 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1201 MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
1202 Handle<JSReceiver> receiver,
1203 uint32_t index,
1204 Handle<Object> value,
1205 LanguageMode language_mode) {
1206 Isolate* isolate = proxy->GetIsolate();
1207 Handle<String> name = isolate->factory()->Uint32ToString(index);
1208 return SetPropertyWithHandler(proxy, receiver, name, value, language_mode);
1212 Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
1213 uint32_t index) {
1214 Isolate* isolate = proxy->GetIsolate();
1215 Handle<String> name = isolate->factory()->Uint32ToString(index);
1216 return HasPropertyWithHandler(proxy, name);
1220 #define FIELD_ADDR(p, offset) \
1221 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
1223 #define FIELD_ADDR_CONST(p, offset) \
1224 (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
1226 #define READ_FIELD(p, offset) \
1227 (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
1229 #define ACQUIRE_READ_FIELD(p, offset) \
1230 reinterpret_cast<Object*>(base::Acquire_Load( \
1231 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1233 #define NOBARRIER_READ_FIELD(p, offset) \
1234 reinterpret_cast<Object*>(base::NoBarrier_Load( \
1235 reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
1237 #define WRITE_FIELD(p, offset, value) \
1238 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
1240 #define RELEASE_WRITE_FIELD(p, offset, value) \
1241 base::Release_Store( \
1242 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1243 reinterpret_cast<base::AtomicWord>(value));
1245 #define NOBARRIER_WRITE_FIELD(p, offset, value) \
1246 base::NoBarrier_Store( \
1247 reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
1248 reinterpret_cast<base::AtomicWord>(value));
1250 #define WRITE_BARRIER(heap, object, offset, value) \
1251 heap->incremental_marking()->RecordWrite( \
1252 object, HeapObject::RawField(object, offset), value); \
1253 if (heap->InNewSpace(value)) { \
1254 heap->RecordWrite(object->address(), offset); \
1257 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1258 if (mode == UPDATE_WRITE_BARRIER) { \
1259 heap->incremental_marking()->RecordWrite( \
1260 object, HeapObject::RawField(object, offset), value); \
1261 if (heap->InNewSpace(value)) { \
1262 heap->RecordWrite(object->address(), offset); \
1263 } \
1264 }
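// Note: the barrier records old-to-new pointers for the store buffer and
// informs incremental marking. Stores whose value can never need either,
// typically Smis, pass SKIP_WRITE_BARRIER, e.g. (kSomeOffset is an
// illustrative name only):
//
//   WRITE_FIELD(this, kSomeOffset, Smi::FromInt(0));
//   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSomeOffset, Smi::FromInt(0),
//                             SKIP_WRITE_BARRIER);  // guard is false, no-op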
1266 #ifndef V8_TARGET_ARCH_MIPS
1267 #define READ_DOUBLE_FIELD(p, offset) \
1268 (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
1269 #else // V8_TARGET_ARCH_MIPS
1270 // Prevent gcc from using load-double (mips ldc1) on (possibly)
1271 // non-64-bit aligned HeapNumber::value.
1272 static inline double read_double_field(const void* p, int offset) {
1273 union conversion {
1274 double d;
1275 uint32_t u[2];
1276 } c;
1277 c.u[0] = (*reinterpret_cast<const uint32_t*>(
1278 FIELD_ADDR_CONST(p, offset)));
1279 c.u[1] = (*reinterpret_cast<const uint32_t*>(
1280 FIELD_ADDR_CONST(p, offset + 4)));
1281 return c.d;
1282 }
1283 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1284 #endif // V8_TARGET_ARCH_MIPS
1286 #ifndef V8_TARGET_ARCH_MIPS
1287 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1288 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1289 #else // V8_TARGET_ARCH_MIPS
1290 // Prevent gcc from using store-double (mips sdc1) on (possibly)
1291 // non-64-bit aligned HeapNumber::value.
1292 static inline void write_double_field(void* p, int offset,
1293 double value) {
1294 union conversion {
1295 double d;
1296 uint32_t u[2];
1297 } c;
1298 c.d = value;
1299 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
1300 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
1301 }
1302 #define WRITE_DOUBLE_FIELD(p, offset, value) \
1303 write_double_field(p, offset, value)
1304 #endif // V8_TARGET_ARCH_MIPS
1307 #define READ_INT_FIELD(p, offset) \
1308 (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
1310 #define WRITE_INT_FIELD(p, offset, value) \
1311 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1313 #define READ_INTPTR_FIELD(p, offset) \
1314 (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
1316 #define WRITE_INTPTR_FIELD(p, offset, value) \
1317 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1319 #define READ_UINT32_FIELD(p, offset) \
1320 (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
1322 #define WRITE_UINT32_FIELD(p, offset, value) \
1323 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1325 #define READ_INT32_FIELD(p, offset) \
1326 (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
1328 #define WRITE_INT32_FIELD(p, offset, value) \
1329 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1331 #define READ_UINT64_FIELD(p, offset) \
1332 (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
1334 #define WRITE_UINT64_FIELD(p, offset, value) \
1335 (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
1337 #define READ_INT64_FIELD(p, offset) \
1338 (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
1340 #define WRITE_INT64_FIELD(p, offset, value) \
1341 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1343 #define READ_SHORT_FIELD(p, offset) \
1344 (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
1346 #define WRITE_SHORT_FIELD(p, offset, value) \
1347 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1349 #define READ_BYTE_FIELD(p, offset) \
1350 (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
1352 #define NOBARRIER_READ_BYTE_FIELD(p, offset) \
1353 static_cast<byte>(base::NoBarrier_Load( \
1354 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
1356 #define WRITE_BYTE_FIELD(p, offset, value) \
1357 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1359 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1360 base::NoBarrier_Store( \
1361 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1362 static_cast<base::Atomic8>(value));
1364 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1365 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1369 int Smi::value() const {
1370 return Internals::SmiValue(this);
1374 Smi* Smi::FromInt(int value) {
1375 DCHECK(Smi::IsValid(value));
1376 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1380 Smi* Smi::FromIntptr(intptr_t value) {
1381 DCHECK(Smi::IsValid(value));
1382 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1383 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1387 bool Smi::IsValid(intptr_t value) {
1388 bool result = Internals::IsValidSmi(value);
1389 DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
1390 return result;
1391 }
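// Worked example: with the 32-bit configuration (kSmiTag == 0,
// kSmiTagSize == 1, kSmiShiftSize == 0), Smi::FromInt(42) produces the tagged
// word (42 << 1) | 0 == 84, and Smi::value() recovers 42 with an arithmetic
// shift. On 64-bit targets kSmiShiftSize == 31, so the payload occupies the
// upper 32 bits of the word instead.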
1394 MapWord MapWord::FromMap(const Map* map) {
1395 return MapWord(reinterpret_cast<uintptr_t>(map));
1399 Map* MapWord::ToMap() {
1400 return reinterpret_cast<Map*>(value_);
1404 bool MapWord::IsForwardingAddress() {
1405 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1409 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1410 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1411 return MapWord(reinterpret_cast<uintptr_t>(raw));
1415 HeapObject* MapWord::ToForwardingAddress() {
1416 DCHECK(IsForwardingAddress());
1417 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1422 void HeapObject::VerifyObjectField(int offset) {
1423 VerifyPointer(READ_FIELD(this, offset));
1426 void HeapObject::VerifySmiField(int offset) {
1427 CHECK(READ_FIELD(this, offset)->IsSmi());
1432 Heap* HeapObject::GetHeap() const {
1433 Heap* heap =
1434 MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
1435 SLOW_DCHECK(heap != NULL);
1436 return heap;
1437 }
1440 Isolate* HeapObject::GetIsolate() const {
1441 return GetHeap()->isolate();
1445 Map* HeapObject::map() const {
1446 #ifdef DEBUG
1447 // Clear mark potentially added by PathTracer.
1448 uintptr_t raw_value =
1449 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1450 return MapWord::FromRawValue(raw_value).ToMap();
1451 #else
1452 return map_word().ToMap();
1453 #endif
1454 }
1457 void HeapObject::set_map(Map* value) {
1458 set_map_word(MapWord::FromMap(value));
1459 if (value != NULL) {
1460 // TODO(1600) We are passing NULL as a slot because maps can never be on
1461 // an evacuation candidate.
1462 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1467 Map* HeapObject::synchronized_map() {
1468 return synchronized_map_word().ToMap();
1472 void HeapObject::synchronized_set_map(Map* value) {
1473 synchronized_set_map_word(MapWord::FromMap(value));
1474 if (value != NULL) {
1475 // TODO(1600) We are passing NULL as a slot because maps can never be on
1476 // an evacuation candidate.
1477 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1482 void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
1483 synchronized_set_map_word(MapWord::FromMap(value));
1487 // Unsafe accessor omitting write barrier.
1488 void HeapObject::set_map_no_write_barrier(Map* value) {
1489 set_map_word(MapWord::FromMap(value));
1493 MapWord HeapObject::map_word() const {
1495 reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
1499 void HeapObject::set_map_word(MapWord map_word) {
1500 NOBARRIER_WRITE_FIELD(
1501 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1505 MapWord HeapObject::synchronized_map_word() const {
1507 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1511 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1512 RELEASE_WRITE_FIELD(
1513 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1517 HeapObject* HeapObject::FromAddress(Address address) {
1518 DCHECK_TAG_ALIGNED(address);
1519 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1523 Address HeapObject::address() {
1524 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1528 int HeapObject::Size() {
1529 return SizeFromMap(map());
1533 bool HeapObject::MayContainRawValues() {
1534 InstanceType type = map()->instance_type();
1535 if (type <= LAST_NAME_TYPE) {
1536 if (type == SYMBOL_TYPE) {
1539 DCHECK(type < FIRST_NONSTRING_TYPE);
1540 // There are four string representations: sequential strings, external
1541 // strings, cons strings, and sliced strings.
1542 // Only the former two contain raw values and no heap pointers (besides the
1544 return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
1546 // The ConstantPoolArray contains heap pointers, but also raw values.
1547 if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
1548 return (type <= LAST_DATA_TYPE);
1552 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1553 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1554 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1558 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1559 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1563 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
1564 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1568 double HeapNumber::value() const {
1569 return READ_DOUBLE_FIELD(this, kValueOffset);
1573 void HeapNumber::set_value(double value) {
1574 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1578 int HeapNumber::get_exponent() {
1579 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1580 kExponentShift) - kExponentBias;
1584 int HeapNumber::get_sign() {
1585 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1589 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1592 Object** FixedArray::GetFirstElementAddress() {
1593 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1597 bool FixedArray::ContainsOnlySmisOrHoles() {
1598 Object* the_hole = GetHeap()->the_hole_value();
1599 Object** current = GetFirstElementAddress();
1600 for (int i = 0; i < length(); ++i) {
1601 Object* candidate = *current++;
1602 if (!candidate->IsSmi() && candidate != the_hole) return false;
1603 }
1604 return true;
1605 }
1608 FixedArrayBase* JSObject::elements() const {
1609 Object* array = READ_FIELD(this, kElementsOffset);
1610 return static_cast<FixedArrayBase*>(array);
1614 void JSObject::ValidateElements(Handle<JSObject> object) {
1615 #ifdef ENABLE_SLOW_DCHECKS
1616 if (FLAG_enable_slow_asserts) {
1617 ElementsAccessor* accessor = object->GetElementsAccessor();
1618 accessor->Validate(object);
1624 void AllocationSite::Initialize() {
1625 set_transition_info(Smi::FromInt(0));
1626 SetElementsKind(GetInitialFastElementsKind());
1627 set_nested_site(Smi::FromInt(0));
1628 set_pretenure_data(Smi::FromInt(0));
1629 set_pretenure_create_count(Smi::FromInt(0));
1630 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1631 SKIP_WRITE_BARRIER);
1635 void AllocationSite::MarkZombie() {
1636 DCHECK(!IsZombie());
1638 set_pretenure_decision(kZombie);
1642 // Heuristic: We only need to create allocation site info if the boilerplate
1643 // elements kind is the initial elements kind.
1644 AllocationSiteMode AllocationSite::GetMode(
1645 ElementsKind boilerplate_elements_kind) {
1646 if (FLAG_pretenuring_call_new ||
1647 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1648 return TRACK_ALLOCATION_SITE;
1651 return DONT_TRACK_ALLOCATION_SITE;
1655 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1656 ElementsKind to) {
1657 if (FLAG_pretenuring_call_new ||
1658 (IsFastSmiElementsKind(from) &&
1659 IsMoreGeneralElementsKindTransition(from, to))) {
1660 return TRACK_ALLOCATION_SITE;
1663 return DONT_TRACK_ALLOCATION_SITE;
1667 inline bool AllocationSite::CanTrack(InstanceType type) {
1668 if (FLAG_allocation_site_pretenuring) {
1669 return type == JS_ARRAY_TYPE ||
1670 type == JS_OBJECT_TYPE ||
1671 type < FIRST_NONSTRING_TYPE;
1673 return type == JS_ARRAY_TYPE;
1677 inline void AllocationSite::set_memento_found_count(int count) {
1678 int value = pretenure_data()->value();
1679 // Verify that we can count more mementos than we can possibly find in one
1680 // new space collection.
1681 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1682 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1683 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1684 DCHECK(count < MementoFoundCountBits::kMax);
1685 set_pretenure_data(
1686 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1687 SKIP_WRITE_BARRIER);
1688 }
1690 inline bool AllocationSite::IncrementMementoFoundCount() {
1691 if (IsZombie()) return false;
1693 int value = memento_found_count();
1694 set_memento_found_count(value + 1);
1695 return memento_found_count() == kPretenureMinimumCreated;
1699 inline void AllocationSite::IncrementMementoCreateCount() {
1700 DCHECK(FLAG_allocation_site_pretenuring);
1701 int value = memento_create_count();
1702 set_memento_create_count(value + 1);
1706 inline bool AllocationSite::MakePretenureDecision(
1707 PretenureDecision current_decision,
1708 double ratio,
1709 bool maximum_size_scavenge) {
1710 // Here we just allow state transitions from undecided or maybe tenure
1711 // to don't tenure, maybe tenure, or tenure.
1712 if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
1713 if (ratio >= kPretenureRatio) {
1714 // We just transition into tenure state when the semi-space was at
1715 // maximum capacity.
1716 if (maximum_size_scavenge) {
1717 set_deopt_dependent_code(true);
1718 set_pretenure_decision(kTenure);
1719 // Currently we just need to deopt when we make a state transition to
1720 // tenure.
1721 return true;
1722 }
1723 set_pretenure_decision(kMaybeTenure);
1724 } else {
1725 set_pretenure_decision(kDontTenure);
1726 }
1727 }
1728 return false;
1729 }
1732 inline bool AllocationSite::DigestPretenuringFeedback(
1733 bool maximum_size_scavenge) {
1734 bool deopt = false;
1735 int create_count = memento_create_count();
1736 int found_count = memento_found_count();
1737 bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1738 double ratio =
1739 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1740 static_cast<double>(found_count) / create_count : 0.0;
1741 PretenureDecision current_decision = pretenure_decision();
1743 if (minimum_mementos_created) {
1744 deopt = MakePretenureDecision(
1745 current_decision, ratio, maximum_size_scavenge);
1748 if (FLAG_trace_pretenuring_statistics) {
1749 PrintF(
1750 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1751 static_cast<void*>(this), create_count, found_count, ratio,
1752 PretenureDecisionName(current_decision),
1753 PretenureDecisionName(pretenure_decision()));
1756 // Clear feedback calculation fields until the next gc.
1757 set_memento_found_count(0);
1758 set_memento_create_count(0);
1759 return deopt;
1760 }
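// Worked example (assuming the default tuning of kPretenureMinimumCreated ==
// 100 and kPretenureRatio == 0.85): a site that created 120 mementos of which
// 110 were found during scavenging has ratio 110.0 / 120 ~= 0.92 >= 0.85, so
// MakePretenureDecision() moves it towards kTenure, immediately and with a
// deopt request if the semi-space was at maximum capacity.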
1763 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1764 JSObject::ValidateElements(object);
1765 ElementsKind elements_kind = object->map()->elements_kind();
1766 if (!IsFastObjectElementsKind(elements_kind)) {
1767 if (IsFastHoleyElementsKind(elements_kind)) {
1768 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1770 TransitionElementsKind(object, FAST_ELEMENTS);
1776 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1777 Object** objects,
1778 uint32_t count,
1779 EnsureElementsMode mode) {
1780 ElementsKind current_kind = object->map()->elements_kind();
1781 ElementsKind target_kind = current_kind;
1783 DisallowHeapAllocation no_allocation;
1784 DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1785 bool is_holey = IsFastHoleyElementsKind(current_kind);
1786 if (current_kind == FAST_HOLEY_ELEMENTS) return;
1787 Heap* heap = object->GetHeap();
1788 Object* the_hole = heap->the_hole_value();
1789 for (uint32_t i = 0; i < count; ++i) {
1790 Object* current = *objects++;
1791 if (current == the_hole) {
1793 target_kind = GetHoleyElementsKind(target_kind);
1794 } else if (!current->IsSmi()) {
1795 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1796 if (IsFastSmiElementsKind(target_kind)) {
1798 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1800 target_kind = FAST_DOUBLE_ELEMENTS;
1803 } else if (is_holey) {
1804 target_kind = FAST_HOLEY_ELEMENTS;
1807 target_kind = FAST_ELEMENTS;
1812 if (target_kind != current_kind) {
1813 TransitionElementsKind(object, target_kind);
1818 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
1819 Handle<FixedArrayBase> elements,
1820 uint32_t length,
1821 EnsureElementsMode mode) {
1822 Heap* heap = object->GetHeap();
1823 if (elements->map() != heap->fixed_double_array_map()) {
1824 DCHECK(elements->map() == heap->fixed_array_map() ||
1825 elements->map() == heap->fixed_cow_array_map());
1826 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1827 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1828 }
1829 Object** objects =
1830 Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
1831 EnsureCanContainElements(object, objects, length, mode);
1832 return;
1833 }
1835 DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1836 if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1837 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1838 } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
1839 Handle<FixedDoubleArray> double_array =
1840 Handle<FixedDoubleArray>::cast(elements);
1841 for (uint32_t i = 0; i < length; ++i) {
1842 if (double_array->is_the_hole(i)) {
1843 TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
1844 return;
1845 }
1846 }
1847 TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
1852 void JSObject::SetMapAndElements(Handle<JSObject> object,
1853 Handle<Map> new_map,
1854 Handle<FixedArrayBase> value) {
1855 JSObject::MigrateToMap(object, new_map);
1856 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1857 (*value == object->GetHeap()->empty_fixed_array())) ==
1858 (value->map() == object->GetHeap()->fixed_array_map() ||
1859 value->map() == object->GetHeap()->fixed_cow_array_map()));
1860 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1861 (object->map()->has_fast_double_elements() ==
1862 value->IsFixedDoubleArray()));
1863 object->set_elements(*value);
1867 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1868 WRITE_FIELD(this, kElementsOffset, value);
1869 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1873 void JSObject::initialize_properties() {
1874 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1875 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1879 void JSObject::initialize_elements() {
1880 FixedArrayBase* elements = map()->GetInitialElements();
1881 WRITE_FIELD(this, kElementsOffset, elements);
1885 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1886 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1889 byte Oddball::kind() const {
1890 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1894 void Oddball::set_kind(byte value) {
1895 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1899 Object* Cell::value() const {
1900 return READ_FIELD(this, kValueOffset);
1904 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
1905 // The write barrier is not used for global property cells.
1906 DCHECK(!val->IsPropertyCell() && !val->IsCell());
1907 WRITE_FIELD(this, kValueOffset, val);
1910 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1911 ACCESSORS(PropertyCell, value, Object, kValueOffset)
1913 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
1916 void WeakCell::clear() {
1917 DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
1918 WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
1922 void WeakCell::initialize(HeapObject* val) {
1923 WRITE_FIELD(this, kValueOffset, val);
1924 WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
1928 bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
1931 Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
1934 void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
1935 WRITE_FIELD(this, kNextOffset, val);
1936 if (mode == UPDATE_WRITE_BARRIER) {
1937 WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
1942 int JSObject::GetHeaderSize() {
1943 InstanceType type = map()->instance_type();
1944 // Check for the most common kind of JavaScript object before
1945 // falling into the generic switch. This speeds up the internal
1946 // field operations considerably on average.
1947 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1948 switch (type) {
1949 case JS_GENERATOR_OBJECT_TYPE:
1950 return JSGeneratorObject::kSize;
1951 case JS_MODULE_TYPE:
1952 return JSModule::kSize;
1953 case JS_GLOBAL_PROXY_TYPE:
1954 return JSGlobalProxy::kSize;
1955 case JS_GLOBAL_OBJECT_TYPE:
1956 return JSGlobalObject::kSize;
1957 case JS_BUILTINS_OBJECT_TYPE:
1958 return JSBuiltinsObject::kSize;
1959 case JS_FUNCTION_TYPE:
1960 return JSFunction::kSize;
1961 case JS_VALUE_TYPE:
1962 return JSValue::kSize;
1963 case JS_DATE_TYPE:
1964 return JSDate::kSize;
1965 case JS_ARRAY_TYPE:
1966 return JSArray::kSize;
1967 case JS_ARRAY_BUFFER_TYPE:
1968 return JSArrayBuffer::kSize;
1969 case JS_TYPED_ARRAY_TYPE:
1970 return JSTypedArray::kSize;
1971 case JS_DATA_VIEW_TYPE:
1972 return JSDataView::kSize;
1973 case JS_SET_TYPE:
1974 return JSSet::kSize;
1975 case JS_MAP_TYPE:
1976 return JSMap::kSize;
1977 case JS_SET_ITERATOR_TYPE:
1978 return JSSetIterator::kSize;
1979 case JS_MAP_ITERATOR_TYPE:
1980 return JSMapIterator::kSize;
1981 case JS_WEAK_MAP_TYPE:
1982 return JSWeakMap::kSize;
1983 case JS_WEAK_SET_TYPE:
1984 return JSWeakSet::kSize;
1985 case JS_REGEXP_TYPE:
1986 return JSRegExp::kSize;
1987 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1988 return JSObject::kHeaderSize;
1989 case JS_MESSAGE_OBJECT_TYPE:
1990 return JSMessageObject::kSize;
1991 default:
1992 // TODO(jkummerow): Re-enable this. Blink currently hits this
1993 // from its CustomElementConstructorBuilder.
1994 // UNREACHABLE();
1995 return JSObject::kHeaderSize;
1996 }
1997 }
2000 int JSObject::GetInternalFieldCount() {
2001 DCHECK(1 << kPointerSizeLog2 == kPointerSize);
2002 // Make sure to adjust for the number of in-object properties. These
2003 // properties do contribute to the size, but are not internal fields.
2004 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
2005 map()->inobject_properties();
2009 int JSObject::GetInternalFieldOffset(int index) {
2010 DCHECK(index < GetInternalFieldCount() && index >= 0);
2011 return GetHeaderSize() + (kPointerSize * index);
2015 Object* JSObject::GetInternalField(int index) {
2016 DCHECK(index < GetInternalFieldCount() && index >= 0);
2017 // Internal objects do follow immediately after the header, whereas in-object
2018 // properties are at the end of the object. Therefore there is no need
2019 // to adjust the index here.
2020 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
2024 void JSObject::SetInternalField(int index, Object* value) {
2025 DCHECK(index < GetInternalFieldCount() && index >= 0);
2026 // Internal objects do follow immediately after the header, whereas in-object
2027 // properties are at the end of the object. Therefore there is no need
2028 // to adjust the index here.
2029 int offset = GetHeaderSize() + (kPointerSize * index);
2030 WRITE_FIELD(this, offset, value);
2031 WRITE_BARRIER(GetHeap(), this, offset, value);
2035 void JSObject::SetInternalField(int index, Smi* value) {
2036 DCHECK(index < GetInternalFieldCount() && index >= 0);
2037 // Internal objects do follow immediately after the header, whereas in-object
2038 // properties are at the end of the object. Therefore there is no need
2039 // to adjust the index here.
2040 int offset = GetHeaderSize() + (kPointerSize * index);
2041 WRITE_FIELD(this, offset, value);
2042 }
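// Worked example: for a plain JS_OBJECT_TYPE object GetHeaderSize() is
// JSObject::kHeaderSize, so internal field 0 lives at kHeaderSize and field 1
// at kHeaderSize + kPointerSize. In-object properties come after the
// internal fields, which is why GetInternalFieldCount() subtracts
// map()->inobject_properties() from the object's pointer-sized slots.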
2045 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2046 if (!FLAG_unbox_double_fields) return false;
2047 return map()->IsUnboxedDoubleField(index);
2051 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2052 if (!FLAG_unbox_double_fields) return false;
2053 if (index.is_hidden_field() || !index.is_inobject()) return false;
2054 return !layout_descriptor()->IsTagged(index.property_index());
2058 // Access fast-case object properties at index. The use of these routines
2059 // is needed to correctly distinguish between properties stored in-object and
2060 // properties stored in the properties array.
2061 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2062 DCHECK(!IsUnboxedDoubleField(index));
2063 if (index.is_inobject()) {
2064 return READ_FIELD(this, index.offset());
2066 return properties()->get(index.outobject_array_index());
2071 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2072 DCHECK(IsUnboxedDoubleField(index));
2073 return READ_DOUBLE_FIELD(this, index.offset());
2077 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2078 if (index.is_inobject()) {
2079 int offset = index.offset();
2080 WRITE_FIELD(this, offset, value);
2081 WRITE_BARRIER(GetHeap(), this, offset, value);
2083 properties()->set(index.outobject_array_index(), value);
2088 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
2089 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2093 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2094 if (IsUnboxedDoubleField(index)) {
2095 DCHECK(value->IsMutableHeapNumber());
2096 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
2098 RawFastPropertyAtPut(index, value);
2103 void JSObject::WriteToField(int descriptor, Object* value) {
2104 DisallowHeapAllocation no_gc;
2106 DescriptorArray* desc = map()->instance_descriptors();
2107 PropertyDetails details = desc->GetDetails(descriptor);
2109 DCHECK(details.type() == DATA);
2111 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
2112 if (details.representation().IsDouble()) {
2113 // Nothing more to be done.
2114 if (value->IsUninitialized()) return;
2115 if (IsUnboxedDoubleField(index)) {
2116 RawFastDoublePropertyAtPut(index, value->Number());
2118 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
2119 DCHECK(box->IsMutableHeapNumber());
2120 box->set_value(value->Number());
2123 RawFastPropertyAtPut(index, value);
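// For double representation the field slot itself is stable: the value is
// either stored unboxed as a raw double, or the slot holds a mutable
// HeapNumber box whose payload is updated in place, so no pointer in the
// object changes on this path and no write barrier is needed.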
2128 int JSObject::GetInObjectPropertyOffset(int index) {
2129 return map()->GetInObjectPropertyOffset(index);
2133 Object* JSObject::InObjectPropertyAt(int index) {
2134 int offset = GetInObjectPropertyOffset(index);
2135 return READ_FIELD(this, offset);
2139 Object* JSObject::InObjectPropertyAtPut(int index,
2141 WriteBarrierMode mode) {
2142 // Adjust for the number of properties stored in the object.
2143 int offset = GetInObjectPropertyOffset(index);
2144 WRITE_FIELD(this, offset, value);
2145 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2151 void JSObject::InitializeBody(Map* map,
2152 Object* pre_allocated_value,
2153 Object* filler_value) {
2154 DCHECK(!filler_value->IsHeapObject() ||
2155 !GetHeap()->InNewSpace(filler_value));
2156 DCHECK(!pre_allocated_value->IsHeapObject() ||
2157 !GetHeap()->InNewSpace(pre_allocated_value));
2158 int size = map->instance_size();
2159 int offset = kHeaderSize;
2160 if (filler_value != pre_allocated_value) {
2161 int pre_allocated = map->pre_allocated_property_fields();
2162 DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
2163 for (int i = 0; i < pre_allocated; i++) {
2164 WRITE_FIELD(this, offset, pre_allocated_value);
2165 offset += kPointerSize;
2168 while (offset < size) {
2169 WRITE_FIELD(this, offset, filler_value);
2170 offset += kPointerSize;
2175 bool JSObject::HasFastProperties() {
2176 DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
2177 return !properties()->IsDictionary();
2181 MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
2183 Handle<Object> value,
2184 LanguageMode language_mode) {
2185 return JSObject::SetOwnElement(object, index, value, NONE, language_mode);
2189 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2190 if (unused_property_fields() != 0) return false;
2191 if (is_prototype_map()) return false;
2192 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2193 int limit = Max(minimum, inobject_properties());
2194 int external = NumberOfFields() - inobject_properties();
2195 return external > limit;
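// Reading of the limit above: "external" counts fields that must live in the
// out-of-object properties backing store. A store that is certainly not
// keyed tolerates up to Max(128, inobject_properties()) such fields before
// the map counts as having too many fast properties; any other store only
// tolerates Max(12, inobject_properties()).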
2199 void Struct::InitializeBody(int object_size) {
2200 Object* value = GetHeap()->undefined_value();
2201 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
2202 WRITE_FIELD(this, offset, value);
2207 bool Object::ToArrayIndex(uint32_t* index) {
2209 int value = Smi::cast(this)->value();
2210 if (value < 0) return false;
2214 if (IsHeapNumber()) {
2215 double value = HeapNumber::cast(this)->value();
2216 uint32_t uint_value = static_cast<uint32_t>(value);
2217 if (value == static_cast<double>(uint_value)) {
2218 *index = uint_value;
2226 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2227 if (!this->IsJSValue()) return false;
2229 JSValue* js_value = JSValue::cast(this);
2230 if (!js_value->value()->IsString()) return false;
2232 String* str = String::cast(js_value->value());
2233 if (index >= static_cast<uint32_t>(str->length())) return false;
2239 void Object::VerifyApiCallResultType() {
2250 FATAL("API call returned invalid object");
2256 Object* FixedArray::get(int index) const {
2257 SLOW_DCHECK(index >= 0 && index < this->length());
2258 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
2262 Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
2263 return handle(array->get(index), array->GetIsolate());
2267 bool FixedArray::is_the_hole(int index) {
2268 return get(index) == GetHeap()->the_hole_value();
2272 void FixedArray::set(int index, Smi* value) {
2273 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2274 DCHECK(index >= 0 && index < this->length());
2275 DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
2276 int offset = kHeaderSize + index * kPointerSize;
2277 WRITE_FIELD(this, offset, value);
2281 void FixedArray::set(int index, Object* value) {
2282 DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
2283 DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
2284 DCHECK(index >= 0 && index < this->length());
2285 int offset = kHeaderSize + index * kPointerSize;
2286 WRITE_FIELD(this, offset, value);
2287 WRITE_BARRIER(GetHeap(), this, offset, value);
2291 double FixedDoubleArray::get_scalar(int index) {
2292 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2293 map() != GetHeap()->fixed_array_map());
2294 DCHECK(index >= 0 && index < this->length());
2295 DCHECK(!is_the_hole(index));
2296 return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
2300 uint64_t FixedDoubleArray::get_representation(int index) {
2301 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2302 map() != GetHeap()->fixed_array_map());
2303 DCHECK(index >= 0 && index < this->length());
2304 int offset = kHeaderSize + index * kDoubleSize;
2305 return READ_UINT64_FIELD(this, offset);
2309 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2311 if (array->is_the_hole(index)) {
2312 return array->GetIsolate()->factory()->the_hole_value();
2314 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2319 void FixedDoubleArray::set(int index, double value) {
2320 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2321 map() != GetHeap()->fixed_array_map());
2322 int offset = kHeaderSize + index * kDoubleSize;
2323 if (std::isnan(value)) {
2324 WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
2326 WRITE_DOUBLE_FIELD(this, offset, value);
2328 DCHECK(!is_the_hole(index));
2332 void FixedDoubleArray::set_the_hole(int index) {
2333 DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
2334 map() != GetHeap()->fixed_array_map());
2335 int offset = kHeaderSize + index * kDoubleSize;
2336 WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
2340 bool FixedDoubleArray::is_the_hole(int index) {
2341 return get_representation(index) == kHoleNanInt64;
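// Holes in a FixedDoubleArray are encoded as the dedicated NaN bit pattern
// kHoleNanInt64 and tested on the raw uint64 representation; set() above
// canonicalizes incoming NaNs to a quiet NaN so that a genuine NaN value can
// never be confused with the hole.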
2345 double* FixedDoubleArray::data_start() {
2346 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
2350 void FixedDoubleArray::FillWithHoles(int from, int to) {
2351 for (int i = from; i < to; i++) {
2357 Object* WeakFixedArray::Get(int index) const {
2358 Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
2359 if (raw->IsSmi()) return raw;
2360 DCHECK(raw->IsWeakCell());
2361 return WeakCell::cast(raw)->value();
2365 bool WeakFixedArray::IsEmptySlot(int index) const {
2366 DCHECK(index < Length());
2367 return Get(index)->IsSmi();
2371 void WeakFixedArray::clear(int index) {
2372 FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
2376 int WeakFixedArray::Length() const {
2377 return FixedArray::cast(this)->length() - kFirstIndex;
2381 int WeakFixedArray::last_used_index() const {
2382 return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
2386 void WeakFixedArray::set_last_used_index(int index) {
2387 FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
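// A WeakFixedArray is a plain FixedArray whose first kFirstIndex slots hold
// bookkeeping (including the last used index) and whose payload slots hold
// WeakCells, or Smi 0 once cleared. Hence Get() unwraps the cell and
// IsEmptySlot() just checks for a Smi.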
2391 int ArrayList::Length() {
2392 if (FixedArray::cast(this)->length() == 0) return 0;
2393 return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
2397 void ArrayList::SetLength(int length) {
2398 return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2402 Object* ArrayList::Get(int index) {
2403 return FixedArray::cast(this)->get(kFirstIndex + index);
2407 Object** ArrayList::Slot(int index) {
2408 return data_start() + kFirstIndex + index;
2412 void ArrayList::Set(int index, Object* obj) {
2413 FixedArray::cast(this)->set(kFirstIndex + index, obj);
2417 void ArrayList::Clear(int index, Object* undefined) {
2418 DCHECK(undefined->IsUndefined());
2419 FixedArray::cast(this)
2420 ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
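// ArrayList layout: slot kLengthIndex stores the logical length as a Smi and
// payload entries start at kFirstIndex, so the backing FixedArray may be
// longer than Length(), and Clear() above only resets a single payload slot.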
2424 void ConstantPoolArray::NumberOfEntries::increment(Type type) {
2425 DCHECK(type < NUMBER_OF_TYPES);
2426 element_counts_[type]++;
2430 bool ConstantPoolArray::NumberOfEntries::equals(
2431 const ConstantPoolArray::NumberOfEntries& other) const {
2432 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2433 if (element_counts_[i] != other.element_counts_[i]) return false;
2439 bool ConstantPoolArray::NumberOfEntries::is_empty() const {
2440 return total_count() == 0;
2444 int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
2445 DCHECK(type < NUMBER_OF_TYPES);
2446 return element_counts_[type];
2450 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2452 DCHECK(type < NUMBER_OF_TYPES);
2453 for (int i = 0; i < type; i++) {
2454 base += element_counts_[i];
2460 int ConstantPoolArray::NumberOfEntries::total_count() const {
2462 for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2463 count += element_counts_[i];
2469 bool ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2470 for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2471 if (element_counts_[i] < min || element_counts_[i] > max) {
2479 int ConstantPoolArray::Iterator::next_index() {
2480 DCHECK(!is_finished());
2481 int ret = next_index_++;
2487 bool ConstantPoolArray::Iterator::is_finished() {
2488 return next_index_ > array_->last_index(type_, final_section_);
2492 void ConstantPoolArray::Iterator::update_section() {
2493 if (next_index_ > array_->last_index(type_, current_section_) &&
2494 current_section_ != final_section_) {
2495 DCHECK(final_section_ == EXTENDED_SECTION);
2496 current_section_ = EXTENDED_SECTION;
2497 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
2502 bool ConstantPoolArray::is_extended_layout() {
2503 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2504 return IsExtendedField::decode(small_layout_1);
2508 ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
2509 return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
2513 int ConstantPoolArray::first_extended_section_index() {
2514 DCHECK(is_extended_layout());
2515 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2516 return TotalCountField::decode(small_layout_2);
2520 int ConstantPoolArray::get_extended_section_header_offset() {
2521 return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
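// ConstantPoolArray layout sketch: entries are grouped by type (INT64,
// CODE_PTR, HEAP_PTR, INT32) in a "small" section whose per-type counts are
// packed into two 32-bit layout fields. An optional extended section follows;
// its per-type counts live in a header placed at the next kInt64Size-aligned
// offset after the small section, which is what the RoundUp above computes.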
2525 ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
2526 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2527 return WeakObjectStateField::decode(small_layout_2);
2531 void ConstantPoolArray::set_weak_object_state(
2532 ConstantPoolArray::WeakObjectState state) {
2533 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2534 small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2535 WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2539 int ConstantPoolArray::first_index(Type type, LayoutSection section) {
2541 if (section == EXTENDED_SECTION) {
2542 DCHECK(is_extended_layout());
2543 index += first_extended_section_index();
2546 for (Type type_iter = FIRST_TYPE; type_iter < type;
2547 type_iter = next_type(type_iter)) {
2548 index += number_of_entries(type_iter, section);
2555 int ConstantPoolArray::last_index(Type type, LayoutSection section) {
2556 return first_index(type, section) + number_of_entries(type, section) - 1;
2560 int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
2561 if (section == SMALL_SECTION) {
2562 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2563 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2566 return Int64CountField::decode(small_layout_1);
2568 return CodePtrCountField::decode(small_layout_1);
2570 return HeapPtrCountField::decode(small_layout_1);
2572 return Int32CountField::decode(small_layout_2);
2578 DCHECK(section == EXTENDED_SECTION && is_extended_layout());
2579 int offset = get_extended_section_header_offset();
2582 offset += kExtendedInt64CountOffset;
2585 offset += kExtendedCodePtrCountOffset;
2588 offset += kExtendedHeapPtrCountOffset;
2591 offset += kExtendedInt32CountOffset;
2596 return READ_INT_FIELD(this, offset);
2601 bool ConstantPoolArray::offset_is_type(int offset, Type type) {
2602 return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
2603 offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
2604 (is_extended_layout() &&
2605 offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
2606 offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
2610 ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
2611 LayoutSection section;
2612 if (is_extended_layout() && index >= first_extended_section_index()) {
2613 section = EXTENDED_SECTION;
2615 section = SMALL_SECTION;
2618 Type type = FIRST_TYPE;
2619 while (index > last_index(type, section)) {
2620 type = next_type(type);
2622 DCHECK(type <= LAST_TYPE);
2627 int64_t ConstantPoolArray::get_int64_entry(int index) {
2628 DCHECK(map() == GetHeap()->constant_pool_array_map());
2629 DCHECK(get_type(index) == INT64);
2630 return READ_INT64_FIELD(this, OffsetOfElementAt(index));
2634 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2635 STATIC_ASSERT(kDoubleSize == kInt64Size);
2636 DCHECK(map() == GetHeap()->constant_pool_array_map());
2637 DCHECK(get_type(index) == INT64);
2638 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
2642 Address ConstantPoolArray::get_code_ptr_entry(int index) {
2643 DCHECK(map() == GetHeap()->constant_pool_array_map());
2644 DCHECK(get_type(index) == CODE_PTR);
2645 return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
2649 Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
2650 DCHECK(map() == GetHeap()->constant_pool_array_map());
2651 DCHECK(get_type(index) == HEAP_PTR);
2652 return READ_FIELD(this, OffsetOfElementAt(index));
2656 int32_t ConstantPoolArray::get_int32_entry(int index) {
2657 DCHECK(map() == GetHeap()->constant_pool_array_map());
2658 DCHECK(get_type(index) == INT32);
2659 return READ_INT32_FIELD(this, OffsetOfElementAt(index));
2663 void ConstantPoolArray::set(int index, int64_t value) {
2664 DCHECK(map() == GetHeap()->constant_pool_array_map());
2665 DCHECK(get_type(index) == INT64);
2666 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
2670 void ConstantPoolArray::set(int index, double value) {
2671 STATIC_ASSERT(kDoubleSize == kInt64Size);
2672 DCHECK(map() == GetHeap()->constant_pool_array_map());
2673 DCHECK(get_type(index) == INT64);
2674 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
2678 void ConstantPoolArray::set(int index, Address value) {
2679 DCHECK(map() == GetHeap()->constant_pool_array_map());
2680 DCHECK(get_type(index) == CODE_PTR);
2681 WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
2685 void ConstantPoolArray::set(int index, Object* value) {
2686 DCHECK(map() == GetHeap()->constant_pool_array_map());
2687 DCHECK(!GetHeap()->InNewSpace(value));
2688 DCHECK(get_type(index) == HEAP_PTR);
2689 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2690 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
2694 void ConstantPoolArray::set(int index, int32_t value) {
2695 DCHECK(map() == GetHeap()->constant_pool_array_map());
2696 DCHECK(get_type(index) == INT32);
2697 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
2701 void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
2702 DCHECK(map() == GetHeap()->constant_pool_array_map());
2703 DCHECK(offset_is_type(offset, INT32));
2704 WRITE_INT32_FIELD(this, offset, value);
2708 void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
2709 DCHECK(map() == GetHeap()->constant_pool_array_map());
2710 DCHECK(offset_is_type(offset, INT64));
2711 WRITE_INT64_FIELD(this, offset, value);
2715 void ConstantPoolArray::set_at_offset(int offset, double value) {
2716 DCHECK(map() == GetHeap()->constant_pool_array_map());
2717 DCHECK(offset_is_type(offset, INT64));
2718 WRITE_DOUBLE_FIELD(this, offset, value);
2722 void ConstantPoolArray::set_at_offset(int offset, Address value) {
2723 DCHECK(map() == GetHeap()->constant_pool_array_map());
2724 DCHECK(offset_is_type(offset, CODE_PTR));
2725 WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
2726 WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
2730 void ConstantPoolArray::set_at_offset(int offset, Object* value) {
2731 DCHECK(map() == GetHeap()->constant_pool_array_map());
2732 DCHECK(!GetHeap()->InNewSpace(value));
2733 DCHECK(offset_is_type(offset, HEAP_PTR));
2734 WRITE_FIELD(this, offset, value);
2735 WRITE_BARRIER(GetHeap(), this, offset, value);
2739 void ConstantPoolArray::Init(const NumberOfEntries& small) {
2740 uint32_t small_layout_1 =
2741 Int64CountField::encode(small.count_of(INT64)) |
2742 CodePtrCountField::encode(small.count_of(CODE_PTR)) |
2743 HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
2744 IsExtendedField::encode(false);
2745 uint32_t small_layout_2 =
2746 Int32CountField::encode(small.count_of(INT32)) |
2747 TotalCountField::encode(small.total_count()) |
2748 WeakObjectStateField::encode(NO_WEAK_OBJECTS);
2749 WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2750 WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2751 if (kHeaderSize != kFirstEntryOffset) {
2752 DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
2753 WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
2758 void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
2759 const NumberOfEntries& extended) {
2760 // Initialize small layout fields first.
2763 // Set is_extended_layout field.
2764 uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
2765 small_layout_1 = IsExtendedField::update(small_layout_1, true);
2766 WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
2768 // Initialize the extended layout fields.
2769 int extended_header_offset = get_extended_section_header_offset();
2770 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
2771 extended.count_of(INT64));
2772 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
2773 extended.count_of(CODE_PTR));
2774 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
2775 extended.count_of(HEAP_PTR));
2776 WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
2777 extended.count_of(INT32));
2781 int ConstantPoolArray::size() {
2782 NumberOfEntries small(this, SMALL_SECTION);
2783 if (!is_extended_layout()) {
2784 return SizeFor(small);
2786 NumberOfEntries extended(this, EXTENDED_SECTION);
2787 return SizeForExtended(small, extended);
2792 int ConstantPoolArray::length() {
2793 uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2794 int length = TotalCountField::decode(small_layout_2);
2795 if (is_extended_layout()) {
2796 length += number_of_entries(INT64, EXTENDED_SECTION) +
2797 number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2798 number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2799 number_of_entries(INT32, EXTENDED_SECTION);
2805 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2806 const DisallowHeapAllocation& promise) {
2807 Heap* heap = GetHeap();
2808 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2809 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2810 return UPDATE_WRITE_BARRIER;
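// Rationale: a store into a new-space object needs no remembered-set entry,
// so the barrier can be skipped there, but while incremental marking is
// active the marking barrier must still run, hence that check comes first.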
2814 bool HeapObject::NeedsToEnsureDoubleAlignment() {
2815 #ifndef V8_HOST_ARCH_64_BIT
2816 return (IsFixedFloat64Array() || IsFixedDoubleArray() ||
2817 IsConstantPoolArray()) &&
2818 FixedArrayBase::cast(this)->length() != 0;
2821 #endif // V8_HOST_ARCH_64_BIT
2825 void FixedArray::set(int index,
2827 WriteBarrierMode mode) {
2828 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2829 DCHECK(index >= 0 && index < this->length());
2830 int offset = kHeaderSize + index * kPointerSize;
2831 WRITE_FIELD(this, offset, value);
2832 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
2836 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2839 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2840 DCHECK(index >= 0 && index < array->length());
2841 int offset = kHeaderSize + index * kPointerSize;
2842 WRITE_FIELD(array, offset, value);
2843 Heap* heap = array->GetHeap();
2844 if (heap->InNewSpace(value)) {
2845 heap->RecordWrite(array->address(), offset);
2850 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2853 DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
2854 DCHECK(index >= 0 && index < array->length());
2855 DCHECK(!array->GetHeap()->InNewSpace(value));
2856 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
2860 void FixedArray::set_undefined(int index) {
2861 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2862 DCHECK(index >= 0 && index < this->length());
2863 DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2865 kHeaderSize + index * kPointerSize,
2866 GetHeap()->undefined_value());
2870 void FixedArray::set_null(int index) {
2871 DCHECK(index >= 0 && index < this->length());
2872 DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2874 kHeaderSize + index * kPointerSize,
2875 GetHeap()->null_value());
2879 void FixedArray::set_the_hole(int index) {
2880 DCHECK(map() != GetHeap()->fixed_cow_array_map());
2881 DCHECK(index >= 0 && index < this->length());
2882 DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2884 kHeaderSize + index * kPointerSize,
2885 GetHeap()->the_hole_value());
2889 void FixedArray::FillWithHoles(int from, int to) {
2890 for (int i = from; i < to; i++) {
2896 Object** FixedArray::data_start() {
2897 return HeapObject::RawField(this, kHeaderSize);
2901 bool DescriptorArray::IsEmpty() {
2902 DCHECK(length() >= kFirstIndex ||
2903 this == GetHeap()->empty_descriptor_array());
2904 return length() < kFirstIndex;
2908 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2910 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2914 // Perform a binary search in a fixed array. Low and high are entry indices. If
2915 // there are three entries in this array it should be called with low=0 and
2916 // high=2.
2917 template <SearchMode search_mode, typename T>
2918 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2919 int* out_insertion_index) {
2920 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2921 uint32_t hash = name->Hash();
2924 DCHECK(low <= high);
2926 while (low != high) {
2927 int mid = (low + high) / 2;
2928 Name* mid_name = array->GetSortedKey(mid);
2929 uint32_t mid_hash = mid_name->Hash();
2931 if (mid_hash >= hash) {
2938 for (; low <= limit; ++low) {
2939 int sort_index = array->GetSortedKeyIndex(low);
2940 Name* entry = array->GetKey(sort_index);
2941 uint32_t current_hash = entry->Hash();
2942 if (current_hash != hash) {
2943 if (out_insertion_index != NULL) {
2944 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2946 return T::kNotFound;
2948 if (entry->Equals(name)) {
2949 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2952 return T::kNotFound;
2956 if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
2957 return T::kNotFound;
2961 // Perform a linear search in this fixed array. len is the number of entry
2962 // indices that are valid.
2963 template <SearchMode search_mode, typename T>
2964 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2965 int* out_insertion_index) {
2966 uint32_t hash = name->Hash();
2967 if (search_mode == ALL_ENTRIES) {
2968 for (int number = 0; number < len; number++) {
2969 int sorted_index = array->GetSortedKeyIndex(number);
2970 Name* entry = array->GetKey(sorted_index);
2971 uint32_t current_hash = entry->Hash();
2972 if (current_hash > hash) {
2973 if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
2974 return T::kNotFound;
2976 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2978 if (out_insertion_index != NULL) *out_insertion_index = len;
2979 return T::kNotFound;
2981 DCHECK(len >= valid_entries);
2982 DCHECK_NULL(out_insertion_index); // Not supported here.
2983 for (int number = 0; number < valid_entries; number++) {
2984 Name* entry = array->GetKey(number);
2985 uint32_t current_hash = entry->Hash();
2986 if (current_hash == hash && entry->Equals(name)) return number;
2988 return T::kNotFound;
2993 template <SearchMode search_mode, typename T>
2994 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2995 if (search_mode == VALID_ENTRIES) {
2996 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2998 SLOW_DCHECK(array->IsSortedNoDuplicates());
3001 int nof = array->number_of_entries();
3003 if (out_insertion_index != NULL) *out_insertion_index = 0;
3004 return T::kNotFound;
3007 // Fast case: do linear search for small arrays.
3008 const int kMaxElementsForLinearSearch = 8;
3009 if ((search_mode == ALL_ENTRIES &&
3010 nof <= kMaxElementsForLinearSearch) ||
3011 (search_mode == VALID_ENTRIES &&
3012 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
3013 return LinearSearch<search_mode>(array, name, nof, valid_entries,
3014 out_insertion_index);
3017 // Slow case: perform binary search.
3018 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
3019 out_insertion_index);
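// Dispatch illustration: when searching all entries, arrays with at most 8
// entries are scanned linearly; when searching only valid entries, the linear
// path is taken for up to 24 valid entries. Anything larger falls through to
// BinarySearch.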
3023 int DescriptorArray::Search(Name* name, int valid_descriptors) {
3024 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
3028 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
3029 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
3030 if (number_of_own_descriptors == 0) return kNotFound;
3032 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
3033 int number = cache->Lookup(map, name);
3035 if (number == DescriptorLookupCache::kAbsent) {
3036 number = Search(name, number_of_own_descriptors);
3037 cache->Update(map, name, number);
3044 PropertyDetails Map::GetLastDescriptorDetails() {
3045 return instance_descriptors()->GetDetails(LastAdded());
3049 FixedArrayBase* Map::GetInitialElements() {
3050 if (has_fast_smi_or_object_elements() ||
3051 has_fast_double_elements()) {
3052 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
3053 return GetHeap()->empty_fixed_array();
3054 } else if (has_external_array_elements()) {
3055 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
3056 DCHECK(!GetHeap()->InNewSpace(empty_array));
3058 } else if (has_fixed_typed_array_elements()) {
3059 FixedTypedArrayBase* empty_array =
3060 GetHeap()->EmptyFixedTypedArrayForMap(this);
3061 DCHECK(!GetHeap()->InNewSpace(empty_array));
3070 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
3071 DCHECK(descriptor_number < number_of_descriptors());
3072 return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
3076 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
3077 return GetKeySlot(descriptor_number);
3081 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
3082 return GetValueSlot(descriptor_number - 1) + 1;
3086 Name* DescriptorArray::GetKey(int descriptor_number) {
3087 DCHECK(descriptor_number < number_of_descriptors());
3088 return Name::cast(get(ToKeyIndex(descriptor_number)));
3092 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
3093 return GetDetails(descriptor_number).pointer();
3097 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
3098 return GetKey(GetSortedKeyIndex(descriptor_number));
3102 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
3103 PropertyDetails details = GetDetails(descriptor_index);
3104 set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
3108 void DescriptorArray::SetRepresentation(int descriptor_index,
3109 Representation representation) {
3110 DCHECK(!representation.IsNone());
3111 PropertyDetails details = GetDetails(descriptor_index);
3112 set(ToDetailsIndex(descriptor_index),
3113 details.CopyWithRepresentation(representation).AsSmi());
3117 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
3118 DCHECK(descriptor_number < number_of_descriptors());
3119 return RawFieldOfElementAt(ToValueIndex(descriptor_number));
3123 int DescriptorArray::GetValueOffset(int descriptor_number) {
3124 return OffsetOfElementAt(ToValueIndex(descriptor_number));
3128 Object* DescriptorArray::GetValue(int descriptor_number) {
3129 DCHECK(descriptor_number < number_of_descriptors());
3130 return get(ToValueIndex(descriptor_number));
3134 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
3135 set(ToValueIndex(descriptor_index), value);
3139 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
3140 DCHECK(descriptor_number < number_of_descriptors());
3141 Object* details = get(ToDetailsIndex(descriptor_number));
3142 return PropertyDetails(Smi::cast(details));
3146 PropertyType DescriptorArray::GetType(int descriptor_number) {
3147 return GetDetails(descriptor_number).type();
3151 int DescriptorArray::GetFieldIndex(int descriptor_number) {
3152 DCHECK(GetDetails(descriptor_number).location() == kField);
3153 return GetDetails(descriptor_number).field_index();
3157 HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
3158 DCHECK(GetDetails(descriptor_number).location() == kField);
3159 Object* value = GetValue(descriptor_number);
3160 if (value->IsWeakCell()) {
3161 if (WeakCell::cast(value)->cleared()) return HeapType::None();
3162 value = WeakCell::cast(value)->value();
3164 return HeapType::cast(value);
3168 Object* DescriptorArray::GetConstant(int descriptor_number) {
3169 return GetValue(descriptor_number);
3173 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
3174 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3175 return GetValue(descriptor_number);
3179 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
3180 DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
3181 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
3182 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
3186 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3187 desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3188 handle(GetValue(descriptor_number), GetIsolate()),
3189 GetDetails(descriptor_number));
3193 void DescriptorArray::Set(int descriptor_number,
3195 const WhitenessWitness&) {
3197 DCHECK(descriptor_number < number_of_descriptors());
3199 NoIncrementalWriteBarrierSet(this,
3200 ToKeyIndex(descriptor_number),
3202 NoIncrementalWriteBarrierSet(this,
3203 ToValueIndex(descriptor_number),
3205 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
3206 desc->GetDetails().AsSmi());
3210 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3212 DCHECK(descriptor_number < number_of_descriptors());
3214 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3215 set(ToValueIndex(descriptor_number), *desc->GetValue());
3216 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3220 void DescriptorArray::Append(Descriptor* desc) {
3221 DisallowHeapAllocation no_gc;
3222 int descriptor_number = number_of_descriptors();
3223 SetNumberOfDescriptors(descriptor_number + 1);
3224 Set(descriptor_number, desc);
3226 uint32_t hash = desc->GetKey()->Hash();
3230 for (insertion = descriptor_number; insertion > 0; --insertion) {
3231 Name* key = GetSortedKey(insertion - 1);
3232 if (key->Hash() <= hash) break;
3233 SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
3236 SetSortedKey(insertion, descriptor_number);
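// Append() writes the new descriptor into the next free slot and then does a
// single insertion-sort step on the side table of sorted key indices, so
// descriptor slots stay in insertion order while hash-based lookups keep
// seeing a hash-sorted view.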
3240 void DescriptorArray::SwapSortedKeys(int first, int second) {
3241 int first_key = GetSortedKeyIndex(first);
3242 SetSortedKey(first, GetSortedKeyIndex(second));
3243 SetSortedKey(second, first_key);
3247 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
3248 : marking_(array->GetHeap()->incremental_marking()) {
3249 marking_->EnterNoMarkingScope();
3250 DCHECK(!marking_->IsMarking() ||
3251 Marking::Color(array) == Marking::WHITE_OBJECT);
3255 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3256 marking_->LeaveNoMarkingScope();
3260 template<typename Derived, typename Shape, typename Key>
3261 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3262 const int kMinCapacity = 32;
3263 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3264 if (capacity < kMinCapacity) {
3265 capacity = kMinCapacity; // Guarantee min capacity.
3271 template<typename Derived, typename Shape, typename Key>
3272 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3273 return FindEntry(GetIsolate(), key);
3277 // Find entry for key otherwise return kNotFound.
3278 template<typename Derived, typename Shape, typename Key>
3279 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3280 uint32_t capacity = Capacity();
3281 uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3283 // EnsureCapacity will guarantee the hash table is never full.
3285 Object* element = KeyAt(entry);
3286 // Empty entry. Uses raw unchecked accessors because it is called by the
3287 // string table during bootstrapping.
3288 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3289 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3290 Shape::IsMatch(key, element)) return entry;
3291 entry = NextProbe(entry, count++, capacity);
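// The probing above is open addressing: an undefined key marks a slot that
// was never used, so the probe sequence can stop, while the-hole marks a
// deleted entry and is skipped so that entries inserted after it stay
// reachable.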
3297 bool SeededNumberDictionary::requires_slow_elements() {
3298 Object* max_index_object = get(kMaxNumberKeyIndex);
3299 if (!max_index_object->IsSmi()) return false;
3301 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3304 uint32_t SeededNumberDictionary::max_number_key() {
3305 DCHECK(!requires_slow_elements());
3306 Object* max_index_object = get(kMaxNumberKeyIndex);
3307 if (!max_index_object->IsSmi()) return 0;
3308 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3309 return value >> kRequiresSlowElementsTagSize;
3312 void SeededNumberDictionary::set_requires_slow_elements() {
3313 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3317 // ------------------------------------
3318 // Cast operations
3321 CAST_ACCESSOR(AccessorInfo)
3322 CAST_ACCESSOR(ArrayList)
3323 CAST_ACCESSOR(ByteArray)
3326 CAST_ACCESSOR(CodeCacheHashTable)
3327 CAST_ACCESSOR(CompilationCacheTable)
3328 CAST_ACCESSOR(ConsString)
3329 CAST_ACCESSOR(ConstantPoolArray)
3330 CAST_ACCESSOR(DeoptimizationInputData)
3331 CAST_ACCESSOR(DeoptimizationOutputData)
3332 CAST_ACCESSOR(DependentCode)
3333 CAST_ACCESSOR(DescriptorArray)
3334 CAST_ACCESSOR(ExternalArray)
3335 CAST_ACCESSOR(ExternalOneByteString)
3336 CAST_ACCESSOR(ExternalFloat32Array)
3337 CAST_ACCESSOR(ExternalFloat64Array)
3338 CAST_ACCESSOR(ExternalInt16Array)
3339 CAST_ACCESSOR(ExternalInt32Array)
3340 CAST_ACCESSOR(ExternalInt8Array)
3341 CAST_ACCESSOR(ExternalString)
3342 CAST_ACCESSOR(ExternalTwoByteString)
3343 CAST_ACCESSOR(ExternalUint16Array)
3344 CAST_ACCESSOR(ExternalUint32Array)
3345 CAST_ACCESSOR(ExternalUint8Array)
3346 CAST_ACCESSOR(ExternalUint8ClampedArray)
3347 CAST_ACCESSOR(FixedArray)
3348 CAST_ACCESSOR(FixedArrayBase)
3349 CAST_ACCESSOR(FixedDoubleArray)
3350 CAST_ACCESSOR(FixedTypedArrayBase)
3351 CAST_ACCESSOR(Foreign)
3352 CAST_ACCESSOR(GlobalObject)
3353 CAST_ACCESSOR(HandlerTable)
3354 CAST_ACCESSOR(HeapObject)
3355 CAST_ACCESSOR(JSArray)
3356 CAST_ACCESSOR(JSArrayBuffer)
3357 CAST_ACCESSOR(JSArrayBufferView)
3358 CAST_ACCESSOR(JSBuiltinsObject)
3359 CAST_ACCESSOR(JSDataView)
3360 CAST_ACCESSOR(JSDate)
3361 CAST_ACCESSOR(JSFunction)
3362 CAST_ACCESSOR(JSFunctionProxy)
3363 CAST_ACCESSOR(JSFunctionResultCache)
3364 CAST_ACCESSOR(JSGeneratorObject)
3365 CAST_ACCESSOR(JSGlobalObject)
3366 CAST_ACCESSOR(JSGlobalProxy)
3367 CAST_ACCESSOR(JSMap)
3368 CAST_ACCESSOR(JSMapIterator)
3369 CAST_ACCESSOR(JSMessageObject)
3370 CAST_ACCESSOR(JSModule)
3371 CAST_ACCESSOR(JSObject)
3372 CAST_ACCESSOR(JSProxy)
3373 CAST_ACCESSOR(JSReceiver)
3374 CAST_ACCESSOR(JSRegExp)
3375 CAST_ACCESSOR(JSSet)
3376 CAST_ACCESSOR(JSSetIterator)
3377 CAST_ACCESSOR(JSTypedArray)
3378 CAST_ACCESSOR(JSValue)
3379 CAST_ACCESSOR(JSWeakMap)
3380 CAST_ACCESSOR(JSWeakSet)
3381 CAST_ACCESSOR(LayoutDescriptor)
3384 CAST_ACCESSOR(NameDictionary)
3385 CAST_ACCESSOR(NormalizedMapCache)
3386 CAST_ACCESSOR(Object)
3387 CAST_ACCESSOR(ObjectHashTable)
3388 CAST_ACCESSOR(Oddball)
3389 CAST_ACCESSOR(OrderedHashMap)
3390 CAST_ACCESSOR(OrderedHashSet)
3391 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
3392 CAST_ACCESSOR(PropertyCell)
3393 CAST_ACCESSOR(ScopeInfo)
3394 CAST_ACCESSOR(SeededNumberDictionary)
3395 CAST_ACCESSOR(SeqOneByteString)
3396 CAST_ACCESSOR(SeqString)
3397 CAST_ACCESSOR(SeqTwoByteString)
3398 CAST_ACCESSOR(SharedFunctionInfo)
3399 CAST_ACCESSOR(SlicedString)
3401 CAST_ACCESSOR(String)
3402 CAST_ACCESSOR(StringTable)
3403 CAST_ACCESSOR(Struct)
3404 CAST_ACCESSOR(Symbol)
3405 CAST_ACCESSOR(UnseededNumberDictionary)
3406 CAST_ACCESSOR(WeakCell)
3407 CAST_ACCESSOR(WeakFixedArray)
3408 CAST_ACCESSOR(WeakHashTable)
3412 template <class Traits>
3413 STATIC_CONST_MEMBER_DEFINITION const InstanceType
3414 FixedTypedArray<Traits>::kInstanceType;
3417 template <class Traits>
3418 FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
3419 SLOW_DCHECK(object->IsHeapObject() &&
3420 HeapObject::cast(object)->map()->instance_type() ==
3421 Traits::kInstanceType);
3422 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3426 template <class Traits>
3427 const FixedTypedArray<Traits>*
3428 FixedTypedArray<Traits>::cast(const Object* object) {
3429 SLOW_DCHECK(object->IsHeapObject() &&
3430 HeapObject::cast(object)->map()->instance_type() ==
3431 Traits::kInstanceType);
3432 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3436 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3437 STRUCT_LIST(MAKE_STRUCT_CAST)
3438 #undef MAKE_STRUCT_CAST
3441 template <typename Derived, typename Shape, typename Key>
3442 HashTable<Derived, Shape, Key>*
3443 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3444 SLOW_DCHECK(obj->IsHashTable());
3445 return reinterpret_cast<HashTable*>(obj);
3449 template <typename Derived, typename Shape, typename Key>
3450 const HashTable<Derived, Shape, Key>*
3451 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
3452 SLOW_DCHECK(obj->IsHashTable());
3453 return reinterpret_cast<const HashTable*>(obj);
3457 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3458 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3460 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3461 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3463 SMI_ACCESSORS(String, length, kLengthOffset)
3464 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3467 FreeSpace* FreeSpace::next() {
3468 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3469 (!GetHeap()->deserialization_complete() && map() == NULL));
3470 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3471 return reinterpret_cast<FreeSpace*>(
3472 Memory::Address_at(address() + kNextOffset));
3476 FreeSpace** FreeSpace::next_address() {
3477 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3478 (!GetHeap()->deserialization_complete() && map() == NULL));
3479 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3480 return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
3484 void FreeSpace::set_next(FreeSpace* next) {
3485 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3486 (!GetHeap()->deserialization_complete() && map() == NULL));
3487 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3488 base::NoBarrier_Store(
3489 reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
3490 reinterpret_cast<base::AtomicWord>(next));
3494 FreeSpace* FreeSpace::cast(HeapObject* o) {
3495 SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
3496 return reinterpret_cast<FreeSpace*>(o);
3500 uint32_t Name::hash_field() {
3501 return READ_UINT32_FIELD(this, kHashFieldOffset);
3505 void Name::set_hash_field(uint32_t value) {
3506 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
3507 #if V8_HOST_ARCH_64_BIT
3508 #if V8_TARGET_LITTLE_ENDIAN
3509 WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
3511 WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
3517 bool Name::Equals(Name* other) {
3518 if (other == this) return true;
3519 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3520 this->IsSymbol() || other->IsSymbol()) {
3523 return String::cast(this)->SlowEquals(String::cast(other));
3527 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3528 if (one.is_identical_to(two)) return true;
3529 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3530 one->IsSymbol() || two->IsSymbol()) {
3533 return String::SlowEquals(Handle<String>::cast(one),
3534 Handle<String>::cast(two));
3538 ACCESSORS(Symbol, name, Object, kNameOffset)
3539 ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
3540 BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
3541 BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
3544 bool String::Equals(String* other) {
3545 if (other == this) return true;
3546 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3549 return SlowEquals(other);
3553 bool String::Equals(Handle<String> one, Handle<String> two) {
3554 if (one.is_identical_to(two)) return true;
3555 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3558 return SlowEquals(one, two);
3562 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3563 if (!string->IsConsString()) return string;
3564 Handle<ConsString> cons = Handle<ConsString>::cast(string);
3565 if (cons->IsFlat()) return handle(cons->first());
3566 return SlowFlatten(cons, pretenure);
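// Flatten() is a no-op for anything that is not an unflattened ConsString;
// otherwise SlowFlatten() copies the rope into a sequential string so that
// subsequent character access does not have to walk the cons tree.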
3570 uint16_t String::Get(int index) {
3571 DCHECK(index >= 0 && index < length());
3572 switch (StringShape(this).full_representation_tag()) {
3573 case kSeqStringTag | kOneByteStringTag:
3574 return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
3575 case kSeqStringTag | kTwoByteStringTag:
3576 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
3577 case kConsStringTag | kOneByteStringTag:
3578 case kConsStringTag | kTwoByteStringTag:
3579 return ConsString::cast(this)->ConsStringGet(index);
3580 case kExternalStringTag | kOneByteStringTag:
3581 return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
3582 case kExternalStringTag | kTwoByteStringTag:
3583 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
3584 case kSlicedStringTag | kOneByteStringTag:
3585 case kSlicedStringTag | kTwoByteStringTag:
3586 return SlicedString::cast(this)->SlicedStringGet(index);
3596 void String::Set(int index, uint16_t value) {
3597 DCHECK(index >= 0 && index < length());
3598 DCHECK(StringShape(this).IsSequential());
3600 return this->IsOneByteRepresentation()
3601 ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3602 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3606 bool String::IsFlat() {
3607 if (!StringShape(this).IsCons()) return true;
3608 return ConsString::cast(this)->second()->length() == 0;
3612 String* String::GetUnderlying() {
3613 // Giving direct access to the underlying string only makes sense if the
3614 // wrapping string is already flattened.
3615 DCHECK(this->IsFlat());
3616 DCHECK(StringShape(this).IsIndirect());
3617 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
3618 const int kUnderlyingOffset = SlicedString::kParentOffset;
3619 return String::cast(READ_FIELD(this, kUnderlyingOffset));
3623 template<class Visitor>
3624 ConsString* String::VisitFlat(Visitor* visitor,
3627 int slice_offset = offset;
3628 const int length = string->length();
3629 DCHECK(offset <= length);
3631 int32_t type = string->map()->instance_type();
3632 switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
3633 case kSeqStringTag | kOneByteStringTag:
3634 visitor->VisitOneByteString(
3635 SeqOneByteString::cast(string)->GetChars() + slice_offset,
3639 case kSeqStringTag | kTwoByteStringTag:
3640 visitor->VisitTwoByteString(
3641 SeqTwoByteString::cast(string)->GetChars() + slice_offset,
3645 case kExternalStringTag | kOneByteStringTag:
3646 visitor->VisitOneByteString(
3647 ExternalOneByteString::cast(string)->GetChars() + slice_offset,
3651 case kExternalStringTag | kTwoByteStringTag:
3652 visitor->VisitTwoByteString(
3653 ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
3657 case kSlicedStringTag | kOneByteStringTag:
3658 case kSlicedStringTag | kTwoByteStringTag: {
3659 SlicedString* slicedString = SlicedString::cast(string);
3660 slice_offset += slicedString->offset();
3661 string = slicedString->parent();
3665 case kConsStringTag | kOneByteStringTag:
3666 case kConsStringTag | kTwoByteStringTag:
3667 return ConsString::cast(string);
3678 inline Vector<const uint8_t> String::GetCharVector() {
3679 String::FlatContent flat = GetFlatContent();
3680 DCHECK(flat.IsOneByte());
3681 return flat.ToOneByteVector();
3686 inline Vector<const uc16> String::GetCharVector() {
3687 String::FlatContent flat = GetFlatContent();
3688 DCHECK(flat.IsTwoByte());
3689 return flat.ToUC16Vector();
3693 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
3694 DCHECK(index >= 0 && index < length());
3695 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3699 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
3700 DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
3701 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
3702 static_cast<byte>(value));
3706 Address SeqOneByteString::GetCharsAddress() {
3707 return FIELD_ADDR(this, kHeaderSize);
3711 uint8_t* SeqOneByteString::GetChars() {
3712 return reinterpret_cast<uint8_t*>(GetCharsAddress());
3716 Address SeqTwoByteString::GetCharsAddress() {
3717 return FIELD_ADDR(this, kHeaderSize);
3721 uc16* SeqTwoByteString::GetChars() {
3722 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
3726 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
3727 DCHECK(index >= 0 && index < length());
3728 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
3732 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
3733 DCHECK(index >= 0 && index < length());
3734 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
3738 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
3739 return SizeFor(length());
3743 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
3744 return SizeFor(length());
3748 String* SlicedString::parent() {
3749 return String::cast(READ_FIELD(this, kParentOffset));
3753 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
3754 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3755 WRITE_FIELD(this, kParentOffset, parent);
3756 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
3760 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3763 String* ConsString::first() {
3764 return String::cast(READ_FIELD(this, kFirstOffset));
3768 Object* ConsString::unchecked_first() {
3769 return READ_FIELD(this, kFirstOffset);
3773 void ConsString::set_first(String* value, WriteBarrierMode mode) {
3774 WRITE_FIELD(this, kFirstOffset, value);
3775 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
3779 String* ConsString::second() {
3780 return String::cast(READ_FIELD(this, kSecondOffset));
3784 Object* ConsString::unchecked_second() {
3785 return READ_FIELD(this, kSecondOffset);
3789 void ConsString::set_second(String* value, WriteBarrierMode mode) {
3790 WRITE_FIELD(this, kSecondOffset, value);
3791 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
3795 bool ExternalString::is_short() {
3796 InstanceType type = map()->instance_type();
3797 return (type & kShortExternalStringMask) == kShortExternalStringTag;
3801 const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
3802 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3806 void ExternalOneByteString::update_data_cache() {
3807 if (is_short()) return;
3808 const char** data_field =
3809 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
3810 *data_field = resource()->data();
3814 void ExternalOneByteString::set_resource(
3815 const ExternalOneByteString::Resource* resource) {
3816 DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
3817 *reinterpret_cast<const Resource**>(
3818 FIELD_ADDR(this, kResourceOffset)) = resource;
3819 if (resource != NULL) update_data_cache();
3823 const uint8_t* ExternalOneByteString::GetChars() {
3824 return reinterpret_cast<const uint8_t*>(resource()->data());
3828 uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
3829 DCHECK(index >= 0 && index < length());
3830 return GetChars()[index];
3834 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
3835 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
3839 void ExternalTwoByteString::update_data_cache() {
3840 if (is_short()) return;
3841 const uint16_t** data_field =
3842 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
3843 *data_field = resource()->data();
3847 void ExternalTwoByteString::set_resource(
3848 const ExternalTwoByteString::Resource* resource) {
3849 *reinterpret_cast<const Resource**>(
3850 FIELD_ADDR(this, kResourceOffset)) = resource;
3851 if (resource != NULL) update_data_cache();
3855 const uint16_t* ExternalTwoByteString::GetChars() {
3856 return resource()->data();
3860 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
3861 DCHECK(index >= 0 && index < length());
3862 return GetChars()[index];
3866 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
3868 return GetChars() + start;
3872 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3875 void ConsStringIterator::PushLeft(ConsString* string) {
3876 frames_[depth_++ & kDepthMask] = string;
3880 void ConsStringIterator::PushRight(ConsString* string) {
3882 frames_[(depth_-1) & kDepthMask] = string;
3886 void ConsStringIterator::AdjustMaximumDepth() {
3887 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3891 void ConsStringIterator::Pop() {
3893 DCHECK(depth_ <= maximum_depth_);
3898 uint16_t StringCharacterStream::GetNext() {
3899 DCHECK(buffer8_ != NULL && end_ != NULL);
3900 // Advance cursor if needed.
3901 if (buffer8_ == end_) HasMore();
3902 DCHECK(buffer8_ < end_);
3903 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3907 StringCharacterStream::StringCharacterStream(String* string, int offset)
3908 : is_one_byte_(false) {
3909 Reset(string, offset);
3913 void StringCharacterStream::Reset(String* string, int offset) {
3916 ConsString* cons_string = String::VisitFlat(this, string, offset);
3917 iter_.Reset(cons_string, offset);
3918 if (cons_string != NULL) {
3919 string = iter_.Next(&offset);
3920 if (string != NULL) String::VisitFlat(this, string, offset);
3925 bool StringCharacterStream::HasMore() {
3926 if (buffer8_ != end_) return true;
3928 String* string = iter_.Next(&offset);
3929 DCHECK_EQ(offset, 0);
3930 if (string == NULL) return false;
3931 String::VisitFlat(this, string);
3932 DCHECK(buffer8_ != end_);
3937 void StringCharacterStream::VisitOneByteString(
3938 const uint8_t* chars, int length) {
3939 is_one_byte_ = true;
3941 end_ = chars + length;
3945 void StringCharacterStream::VisitTwoByteString(
3946 const uint16_t* chars, int length) {
3947 is_one_byte_ = false;
3949 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3953 void JSFunctionResultCache::MakeZeroSize() {
3954 set_finger_index(kEntriesIndex);
3955 set_size(kEntriesIndex);
3959 void JSFunctionResultCache::Clear() {
3960 int cache_size = size();
3961 Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
3962 MemsetPointer(entries_start,
3963 GetHeap()->the_hole_value(),
3964 cache_size - kEntriesIndex);
3969 int JSFunctionResultCache::size() {
3970 return Smi::cast(get(kCacheSizeIndex))->value();
3974 void JSFunctionResultCache::set_size(int size) {
3975 set(kCacheSizeIndex, Smi::FromInt(size));
3979 int JSFunctionResultCache::finger_index() {
3980 return Smi::cast(get(kFingerIndex))->value();
3984 void JSFunctionResultCache::set_finger_index(int finger_index) {
3985 set(kFingerIndex, Smi::FromInt(finger_index));
3989 byte ByteArray::get(int index) {
3990 DCHECK(index >= 0 && index < this->length());
3991 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3995 void ByteArray::set(int index, byte value) {
3996 DCHECK(index >= 0 && index < this->length());
3997 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4001 int ByteArray::get_int(int index) {
4002 DCHECK(index >= 0 && (index * kIntSize) < this->length());
4003 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
4007 ByteArray* ByteArray::FromDataStartAddress(Address address) {
4008 DCHECK_TAG_ALIGNED(address);
4009 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
4013 Address ByteArray::GetDataStartAddress() {
4014 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4018 uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
4019 return reinterpret_cast<uint8_t*>(external_pointer());
4023 uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
4024 DCHECK((index >= 0) && (index < this->length()));
4025 uint8_t* ptr = external_uint8_clamped_pointer();
4030 Handle<Object> ExternalUint8ClampedArray::get(
4031 Handle<ExternalUint8ClampedArray> array,
4033 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4034 array->GetIsolate());
4038 void ExternalUint8ClampedArray::set(int index, uint8_t value) {
4039 DCHECK((index >= 0) && (index < this->length()));
4040 uint8_t* ptr = external_uint8_clamped_pointer();
4045 void* ExternalArray::external_pointer() const {
4046 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
4047 return reinterpret_cast<void*>(ptr);
4051 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
4052 intptr_t ptr = reinterpret_cast<intptr_t>(value);
4053 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
4057 int8_t ExternalInt8Array::get_scalar(int index) {
4058 DCHECK((index >= 0) && (index < this->length()));
4059 int8_t* ptr = static_cast<int8_t*>(external_pointer());
4064 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
4066 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4067 array->GetIsolate());
4071 void ExternalInt8Array::set(int index, int8_t value) {
4072 DCHECK((index >= 0) && (index < this->length()));
4073 int8_t* ptr = static_cast<int8_t*>(external_pointer());
4078 uint8_t ExternalUint8Array::get_scalar(int index) {
4079 DCHECK((index >= 0) && (index < this->length()));
4080 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
4085 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
4087 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4088 array->GetIsolate());
4092 void ExternalUint8Array::set(int index, uint8_t value) {
4093 DCHECK((index >= 0) && (index < this->length()));
4094 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
4099 int16_t ExternalInt16Array::get_scalar(int index) {
4100 DCHECK((index >= 0) && (index < this->length()));
4101 int16_t* ptr = static_cast<int16_t*>(external_pointer());
4106 Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
4108 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4109 array->GetIsolate());
4113 void ExternalInt16Array::set(int index, int16_t value) {
4114 DCHECK((index >= 0) && (index < this->length()));
4115 int16_t* ptr = static_cast<int16_t*>(external_pointer());
4120 uint16_t ExternalUint16Array::get_scalar(int index) {
4121 DCHECK((index >= 0) && (index < this->length()));
4122 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
4127 Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
4129 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4130 array->GetIsolate());
4134 void ExternalUint16Array::set(int index, uint16_t value) {
4135 DCHECK((index >= 0) && (index < this->length()));
4136 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
4141 int32_t ExternalInt32Array::get_scalar(int index) {
4142 DCHECK((index >= 0) && (index < this->length()));
4143 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4148 Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
4150 return array->GetIsolate()->factory()->
4151 NewNumberFromInt(array->get_scalar(index));
4155 void ExternalInt32Array::set(int index, int32_t value) {
4156 DCHECK((index >= 0) && (index < this->length()));
4157 int32_t* ptr = static_cast<int32_t*>(external_pointer());
4162 uint32_t ExternalUint32Array::get_scalar(int index) {
4163 DCHECK((index >= 0) && (index < this->length()));
4164 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4169 Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
4171 return array->GetIsolate()->factory()->
4172 NewNumberFromUint(array->get_scalar(index));
4176 void ExternalUint32Array::set(int index, uint32_t value) {
4177 DCHECK((index >= 0) && (index < this->length()));
4178 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
4183 float ExternalFloat32Array::get_scalar(int index) {
4184 DCHECK((index >= 0) && (index < this->length()));
4185 float* ptr = static_cast<float*>(external_pointer());
4190 Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
4192 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4196 void ExternalFloat32Array::set(int index, float value) {
4197 DCHECK((index >= 0) && (index < this->length()));
4198 float* ptr = static_cast<float*>(external_pointer());
4203 double ExternalFloat64Array::get_scalar(int index) {
4204 DCHECK((index >= 0) && (index < this->length()));
4205 double* ptr = static_cast<double*>(external_pointer());
4210 Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
4212 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
4216 void ExternalFloat64Array::set(int index, double value) {
4217 DCHECK((index >= 0) && (index < this->length()));
4218 double* ptr = static_cast<double*>(external_pointer());
4223 void* FixedTypedArrayBase::DataPtr() {
4224 return FIELD_ADDR(this, kDataOffset);
4228 int FixedTypedArrayBase::ElementSize(InstanceType type) {
4231 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4232 case FIXED_##TYPE##_ARRAY_TYPE: \
4233 element_size = size; \
4236 TYPED_ARRAYS(TYPED_ARRAY_CASE)
4237 #undef TYPED_ARRAY_CASE
4242 return element_size;
4246 int FixedTypedArrayBase::DataSize(InstanceType type) {
4247 return length() * ElementSize(type);
4251 int FixedTypedArrayBase::DataSize() {
4252 return DataSize(map()->instance_type());
4256 int FixedTypedArrayBase::size() {
4257 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
4261 int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
4262 return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
4266 int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
4267 return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
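// The size helpers above all reduce to "header plus length * element size,
// rounded up to object alignment". A worked sketch of that formula, assuming
// OBJECT_POINTER_ALIGN rounds up to a power-of-two object alignment
// (typically kPointerSize); e.g. a ten-element uint8 array occupies
// align(kDataOffset + 10 * 1) bytes.
inline int TypedArraySizeSketch(int data_offset, int length, int element_size,
                                int alignment) {
  int unaligned = data_offset + length * element_size;
  return (unaligned + alignment - 1) & ~(alignment - 1);
}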
4271 uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
4274 uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
4277 int8_t Int8ArrayTraits::defaultValue() { return 0; }
4280 uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
4283 int16_t Int16ArrayTraits::defaultValue() { return 0; }
4286 uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
4289 int32_t Int32ArrayTraits::defaultValue() { return 0; }
4292 float Float32ArrayTraits::defaultValue() {
4293 return std::numeric_limits<float>::quiet_NaN();
4297 double Float64ArrayTraits::defaultValue() {
4298 return std::numeric_limits<double>::quiet_NaN();
4302 template <class Traits>
4303 typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
4304 DCHECK((index >= 0) && (index < this->length()));
4305 ElementType* ptr = reinterpret_cast<ElementType*>(
4306 FIELD_ADDR(this, kDataOffset));
4312 FixedTypedArray<Float64ArrayTraits>::ElementType
4313 FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
4314 DCHECK((index >= 0) && (index < this->length()));
4315 return READ_DOUBLE_FIELD(this, ElementOffset(index));
4319 template <class Traits>
4320 void FixedTypedArray<Traits>::set(int index, ElementType value) {
4321 DCHECK((index >= 0) && (index < this->length()));
4322 ElementType* ptr = reinterpret_cast<ElementType*>(
4323 FIELD_ADDR(this, kDataOffset));
4329 void FixedTypedArray<Float64ArrayTraits>::set(
4330 int index, Float64ArrayTraits::ElementType value) {
4331 DCHECK((index >= 0) && (index < this->length()));
4332 WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
4336 template <class Traits>
4337 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4338 return static_cast<ElementType>(value);
4343 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4344 if (value < 0) return 0;
4345 if (value > 0xFF) return 0xFF;
4346 return static_cast<uint8_t>(value);
4350 template <class Traits>
4351 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4353 return static_cast<ElementType>(DoubleToInt32(value));
4358 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4359 // Handle NaNs and values less than zero, which clamp to zero.
4360 if (!(value > 0)) return 0;
4361 if (value > 0xFF) return 0xFF;
4362 return static_cast<uint8_t>(lrint(value));
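// Sketch of the clamping behaviour implemented by the specializations above,
// useful as a mental model (standalone; it mirrors the code rather than
// being called by it): integers clamp to [0, 255]; doubles additionally send
// NaN to 0 via the !(value > 0) test and are rounded by lrint(), which uses
// the current rounding mode (round-to-nearest, ties-to-even by default, so
// 2.5 becomes 2 and 3.5 becomes 4).
inline uint8_t ClampedUint8FromDoubleSketch(double value) {
  if (!(value > 0)) return 0;     // NaN and all values <= 0
  if (value > 0xFF) return 0xFF;  // overflow clamps to 255
  return static_cast<uint8_t>(lrint(value));
}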
4367 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4368 return static_cast<float>(value);
4373 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4378 template <class Traits>
4379 Handle<Object> FixedTypedArray<Traits>::get(
4380 Handle<FixedTypedArray<Traits> > array,
4382 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4386 template <class Traits>
4387 Handle<Object> FixedTypedArray<Traits>::SetValue(
4388 Handle<FixedTypedArray<Traits> > array,
4390 Handle<Object> value) {
4391 ElementType cast_value = Traits::defaultValue();
4392 if (index < static_cast<uint32_t>(array->length())) {
4393 if (value->IsSmi()) {
4394 int int_value = Handle<Smi>::cast(value)->value();
4395 cast_value = from_int(int_value);
4396 } else if (value->IsHeapNumber()) {
4397 double double_value = Handle<HeapNumber>::cast(value)->value();
4398 cast_value = from_double(double_value);
4400 // Clamp undefined to the default value. All other types have been
4401 // converted to a number type further up in the call chain.
4402 DCHECK(value->IsUndefined());
4404 array->set(index, cast_value);
4406 return Traits::ToHandle(array->GetIsolate(), cast_value);
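// Sketch of the conversion dispatch in SetValue() above: Smi values go
// through from_int(), HeapNumbers through from_double(), and undefined falls
// back to Traits::defaultValue(); an out-of-range index skips both the
// conversion and the store, so the default value is what gets returned. A
// standalone model with a plain int element type (hypothetical helper, not
// part of the real Traits machinery):
inline int TypedArrayConvertSketch(bool is_smi, int smi_value,
                                   bool is_heap_number, double heap_number,
                                   int default_value) {
  if (is_smi) return smi_value;                              // from_int()
  if (is_heap_number) return static_cast<int>(heap_number);  // from_double()
  return default_value;                                      // undefined
}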
4410 Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
4411 return handle(Smi::FromInt(scalar), isolate);
4415 Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
4417 return handle(Smi::FromInt(scalar), isolate);
4421 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4422 return handle(Smi::FromInt(scalar), isolate);
4426 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
4427 return handle(Smi::FromInt(scalar), isolate);
4431 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
4432 return handle(Smi::FromInt(scalar), isolate);
4436 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
4437 return isolate->factory()->NewNumberFromUint(scalar);
4441 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
4442 return isolate->factory()->NewNumberFromInt(scalar);
4446 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
4447 return isolate->factory()->NewNumber(scalar);
4451 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
4452 return isolate->factory()->NewNumber(scalar);
4456 int Map::visitor_id() {
4457 return READ_BYTE_FIELD(this, kVisitorIdOffset);
4461 void Map::set_visitor_id(int id) {
4462 DCHECK(0 <= id && id < 256);
4463 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
4467 int Map::instance_size() {
4468 return NOBARRIER_READ_BYTE_FIELD(
4469 this, kInstanceSizeOffset) << kPointerSizeLog2;
4473 int Map::inobject_properties() {
4474 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
4478 int Map::pre_allocated_property_fields() {
4479 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
4483 int Map::GetInObjectPropertyOffset(int index) {
4484 // Adjust for the number of properties stored in the object.
4485 index -= inobject_properties();
4487 return instance_size() + (index * kPointerSize);
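// In-object properties live in the last inobject_properties() pointer-sized
// slots of the instance, which is why the index is made relative to the end
// of the object above. A worked sketch: with instance_size() == 64,
// inobject_properties() == 4 and kPointerSize == 8, property 0 is at offset
// 64 + (0 - 4) * 8 == 32 and property 3 at offset 56.
inline int InObjectPropertyOffsetSketch(int instance_size,
                                        int inobject_properties, int index,
                                        int pointer_size) {
  return instance_size + (index - inobject_properties) * pointer_size;
}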
4491 Handle<Map> Map::CopyInstallDescriptorsForTesting(
4492 Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
4493 Handle<LayoutDescriptor> layout_descriptor) {
4494 return CopyInstallDescriptors(map, new_descriptor, descriptors,
4499 int HeapObject::SizeFromMap(Map* map) {
4500 int instance_size = map->instance_size();
4501 if (instance_size != kVariableSizeSentinel) return instance_size;
4502 // Only inline the most frequent cases.
4503 InstanceType instance_type = map->instance_type();
4504 if (instance_type == FIXED_ARRAY_TYPE) {
4505 return FixedArray::BodyDescriptor::SizeOf(map, this);
4507 if (instance_type == ONE_BYTE_STRING_TYPE ||
4508 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
4509 // Strings may get concurrently truncated, hence we have to access their
4510 // length in a synchronized way.
4511 return SeqOneByteString::SizeFor(
4512 reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
4514 if (instance_type == BYTE_ARRAY_TYPE) {
4515 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
4517 if (instance_type == FREE_SPACE_TYPE) {
4518 return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
4520 if (instance_type == STRING_TYPE ||
4521 instance_type == INTERNALIZED_STRING_TYPE) {
4522 // Strings may get concurrently truncated, hence we have to access their
4523 // length in a synchronized way.
4524 return SeqTwoByteString::SizeFor(
4525 reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
4527 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
4528 return FixedDoubleArray::SizeFor(
4529 reinterpret_cast<FixedDoubleArray*>(this)->length());
4531 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
4532 return reinterpret_cast<ConstantPoolArray*>(this)->size();
4534 if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
4535 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
4536 return reinterpret_cast<FixedTypedArrayBase*>(
4537 this)->TypedArraySize(instance_type);
4539 DCHECK(instance_type == CODE_TYPE);
4540 return reinterpret_cast<Code*>(this)->CodeSize();
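// Sketch of the dispatch above: maps of fixed-size objects store the size
// directly, and only instance types whose objects are variable-sized (the
// map then stores kVariableSizeSentinel) fall through to the per-type,
// length-based computations.
inline int SizeFromMapSketch(int stored_instance_size, int sentinel,
                             int length_based_size) {
  return stored_instance_size != sentinel ? stored_instance_size
                                          : length_based_size;
}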
4544 void Map::set_instance_size(int value) {
4545 DCHECK_EQ(0, value & (kPointerSize - 1));
4546 value >>= kPointerSizeLog2;
4547 DCHECK(0 <= value && value < 256);
4548 NOBARRIER_WRITE_BYTE_FIELD(
4549 this, kInstanceSizeOffset, static_cast<byte>(value));
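// The instance size is kept in words so that it fits into a single byte of
// the map (hence the DCHECKs above); a sketch of the encode/decode round
// trip used by set_instance_size() and instance_size():
inline int EncodeInstanceSizeSketch(int size_in_bytes, int pointer_size_log2) {
  return size_in_bytes >> pointer_size_log2;  // what the setter stores
}
inline int DecodeInstanceSizeSketch(int stored_words, int pointer_size_log2) {
  return stored_words << pointer_size_log2;   // what the getter returns
}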
4553 void Map::set_inobject_properties(int value) {
4554 DCHECK(0 <= value && value < 256);
4555 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
4559 void Map::set_pre_allocated_property_fields(int value) {
4560 DCHECK(0 <= value && value < 256);
4561 WRITE_BYTE_FIELD(this,
4562 kPreAllocatedPropertyFieldsOffset,
4563 static_cast<byte>(value));
4567 InstanceType Map::instance_type() {
4568 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
4572 void Map::set_instance_type(InstanceType value) {
4573 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
4577 int Map::unused_property_fields() {
4578 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
4582 void Map::set_unused_property_fields(int value) {
4583 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
4587 byte Map::bit_field() {
4588 return READ_BYTE_FIELD(this, kBitFieldOffset);
4592 void Map::set_bit_field(byte value) {
4593 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
4597 byte Map::bit_field2() {
4598 return READ_BYTE_FIELD(this, kBitField2Offset);
4602 void Map::set_bit_field2(byte value) {
4603 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
4607 void Map::set_non_instance_prototype(bool value) {
4609 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4611 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4616 bool Map::has_non_instance_prototype() {
4617 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4621 void Map::set_function_with_prototype(bool value) {
4622 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4626 bool Map::function_with_prototype() {
4627 return FunctionWithPrototype::decode(bit_field());
4631 void Map::set_is_access_check_needed(bool access_check_needed) {
4632 if (access_check_needed) {
4633 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4635 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4640 bool Map::is_access_check_needed() {
4641 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4645 void Map::set_is_extensible(bool value) {
4647 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4649 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4653 bool Map::is_extensible() {
4654 return ((1 << kIsExtensible) & bit_field2()) != 0;
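// The boolean map flags above are packed into the bit_field/bit_field2
// bytes, written either with a raw mask (as in set_is_extensible()) or via a
// BitField helper's update()/decode(). A minimal model of the raw-mask idiom:
inline byte SetMapBitSketch(byte bits, int bit, bool value) {
  return value ? static_cast<byte>(bits | (1 << bit))
               : static_cast<byte>(bits & ~(1 << bit));
}
inline bool GetMapBitSketch(byte bits, int bit) {
  return ((1 << bit) & bits) != 0;
}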
4658 void Map::set_is_prototype_map(bool value) {
4659 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4662 bool Map::is_prototype_map() {
4663 return IsPrototypeMapBits::decode(bit_field2());
4667 void Map::set_dictionary_map(bool value) {
4668 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4669 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4670 set_bit_field3(new_bit_field3);
4674 bool Map::is_dictionary_map() {
4675 return DictionaryMap::decode(bit_field3());
4679 Code::Flags Code::flags() {
4680 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
4684 void Map::set_owns_descriptors(bool owns_descriptors) {
4685 set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
4689 bool Map::owns_descriptors() {
4690 return OwnsDescriptors::decode(bit_field3());
4694 void Map::set_has_instance_call_handler() {
4695 set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
4699 bool Map::has_instance_call_handler() {
4700 return HasInstanceCallHandler::decode(bit_field3());
4704 void Map::deprecate() {
4705 set_bit_field3(Deprecated::update(bit_field3(), true));
4709 bool Map::is_deprecated() {
4710 return Deprecated::decode(bit_field3());
4714 void Map::set_migration_target(bool value) {
4715 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
4719 bool Map::is_migration_target() {
4720 return IsMigrationTarget::decode(bit_field3());
4724 void Map::set_counter(int value) {
4725 set_bit_field3(Counter::update(bit_field3(), value));
4729 int Map::counter() { return Counter::decode(bit_field3()); }
4732 void Map::mark_unstable() {
4733 set_bit_field3(IsUnstable::update(bit_field3(), true));
4737 bool Map::is_stable() {
4738 return !IsUnstable::decode(bit_field3());
4742 bool Map::has_code_cache() {
4743 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
4747 bool Map::CanBeDeprecated() {
4748 int descriptor = LastAdded();
4749 for (int i = 0; i <= descriptor; i++) {
4750 PropertyDetails details = instance_descriptors()->GetDetails(i);
4751 if (details.representation().IsNone()) return true;
4752 if (details.representation().IsSmi()) return true;
4753 if (details.representation().IsDouble()) return true;
4754 if (details.representation().IsHeapObject()) return true;
4755 if (details.type() == DATA_CONSTANT) return true;
4761 void Map::NotifyLeafMapLayoutChange() {
4764 dependent_code()->DeoptimizeDependentCodeGroup(
4766 DependentCode::kPrototypeCheckGroup);
4771 bool Map::CanOmitMapChecks() {
4772 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4776 int DependentCode::number_of_entries(DependencyGroup group) {
4777 if (length() == 0) return 0;
4778 return Smi::cast(get(group))->value();
4782 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
4783 set(group, Smi::FromInt(value));
4787 void DependentCode::set_object_at(int i, Object* object) {
4788 set(kCodesStartIndex + i, object);
4792 Object* DependentCode::object_at(int i) {
4793 return get(kCodesStartIndex + i);
4797 void DependentCode::clear_at(int i) {
4798 set_undefined(kCodesStartIndex + i);
4802 void DependentCode::copy(int from, int to) {
4803 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
4807 void DependentCode::ExtendGroup(DependencyGroup group) {
4808 GroupStartIndexes starts(this);
4809 for (int g = kGroupCount - 1; g > group; g--) {
4810 if (starts.at(g) < starts.at(g + 1)) {
4811 copy(starts.at(g), starts.at(g + 1));
4817 void Code::set_flags(Code::Flags flags) {
4818 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
4819 WRITE_INT_FIELD(this, kFlagsOffset, flags);
4823 Code::Kind Code::kind() {
4824 return ExtractKindFromFlags(flags());
4828 bool Code::IsCodeStubOrIC() {
4829 return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4830 kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4831 kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4832 kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4833 kind() == TO_BOOLEAN_IC;
4837 InlineCacheState Code::ic_state() {
4838 InlineCacheState result = ExtractICStateFromFlags(flags());
4839 // Only allow uninitialized or debugger states for non-IC code
4840 // objects. This is used in the debugger to determine whether or not
4841 // a call to a code object has been replaced with a debug break call.
4842 DCHECK(is_inline_cache_stub() ||
4843 result == UNINITIALIZED ||
4844 result == DEBUG_STUB);
4849 ExtraICState Code::extra_ic_state() {
4850 DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
4851 return ExtractExtraICStateFromFlags(flags());
4855 Code::StubType Code::type() {
4856 return ExtractTypeFromFlags(flags());
4860 // For initialization.
4861 void Code::set_raw_kind_specific_flags1(int value) {
4862 WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
4866 void Code::set_raw_kind_specific_flags2(int value) {
4867 WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
4871 inline bool Code::is_crankshafted() {
4872 return IsCrankshaftedField::decode(
4873 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4877 inline bool Code::is_hydrogen_stub() {
4878 return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
4882 inline void Code::set_is_crankshafted(bool value) {
4883 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4884 int updated = IsCrankshaftedField::update(previous, value);
4885 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4889 inline bool Code::is_turbofanned() {
4890 return IsTurbofannedField::decode(
4891 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4895 inline void Code::set_is_turbofanned(bool value) {
4896 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4897 int updated = IsTurbofannedField::update(previous, value);
4898 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4902 inline bool Code::can_have_weak_objects() {
4903 DCHECK(kind() == OPTIMIZED_FUNCTION);
4904 return CanHaveWeakObjectsField::decode(
4905 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4909 inline void Code::set_can_have_weak_objects(bool value) {
4910 DCHECK(kind() == OPTIMIZED_FUNCTION);
4911 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4912 int updated = CanHaveWeakObjectsField::update(previous, value);
4913 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4917 bool Code::optimizable() {
4918 DCHECK_EQ(FUNCTION, kind());
4919 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
4923 void Code::set_optimizable(bool value) {
4924 DCHECK_EQ(FUNCTION, kind());
4925 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
4929 bool Code::has_deoptimization_support() {
4930 DCHECK_EQ(FUNCTION, kind());
4931 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4932 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
4936 void Code::set_has_deoptimization_support(bool value) {
4937 DCHECK_EQ(FUNCTION, kind());
4938 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4939 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
4940 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4944 bool Code::has_debug_break_slots() {
4945 DCHECK_EQ(FUNCTION, kind());
4946 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4947 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
4951 void Code::set_has_debug_break_slots(bool value) {
4952 DCHECK_EQ(FUNCTION, kind());
4953 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4954 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
4955 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4959 bool Code::is_compiled_optimizable() {
4960 DCHECK_EQ(FUNCTION, kind());
4961 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4962 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
4966 void Code::set_compiled_optimizable(bool value) {
4967 DCHECK_EQ(FUNCTION, kind());
4968 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4969 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
4970 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4974 bool Code::has_reloc_info_for_serialization() {
4975 DCHECK_EQ(FUNCTION, kind());
4976 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4977 return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
4981 void Code::set_has_reloc_info_for_serialization(bool value) {
4982 DCHECK_EQ(FUNCTION, kind());
4983 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
4984 flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
4985 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
4989 int Code::allow_osr_at_loop_nesting_level() {
4990 DCHECK_EQ(FUNCTION, kind());
4991 int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4992 return AllowOSRAtLoopNestingLevelField::decode(fields);
4996 void Code::set_allow_osr_at_loop_nesting_level(int level) {
4997 DCHECK_EQ(FUNCTION, kind());
4998 DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
4999 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5000 int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
5001 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5005 int Code::profiler_ticks() {
5006 DCHECK_EQ(FUNCTION, kind());
5007 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
5011 void Code::set_profiler_ticks(int ticks) {
5012 DCHECK(ticks < 256);
5013 if (kind() == FUNCTION) {
5014 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
5019 int Code::builtin_index() {
5020 return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
5024 void Code::set_builtin_index(int index) {
5025 WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
5029 unsigned Code::stack_slots() {
5030 DCHECK(is_crankshafted());
5031 return StackSlotsField::decode(
5032 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5036 void Code::set_stack_slots(unsigned slots) {
5037 CHECK(slots <= (1 << kStackSlotsBitCount));
5038 DCHECK(is_crankshafted());
5039 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5040 int updated = StackSlotsField::update(previous, slots);
5041 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5045 unsigned Code::safepoint_table_offset() {
5046 DCHECK(is_crankshafted());
5047 return SafepointTableOffsetField::decode(
5048 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
5052 void Code::set_safepoint_table_offset(unsigned offset) {
5053 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5054 DCHECK(is_crankshafted());
5055 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5056 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5057 int updated = SafepointTableOffsetField::update(previous, offset);
5058 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5062 unsigned Code::back_edge_table_offset() {
5063 DCHECK_EQ(FUNCTION, kind());
5064 return BackEdgeTableOffsetField::decode(
5065 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
5069 void Code::set_back_edge_table_offset(unsigned offset) {
5070 DCHECK_EQ(FUNCTION, kind());
5071 DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
5072 offset = offset >> kPointerSizeLog2;
5073 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5074 int updated = BackEdgeTableOffsetField::update(previous, offset);
5075 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
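// Because the back edge table offset is always kPointerSize-aligned, the
// setter above stores it right-shifted by kPointerSizeLog2 (and the getter
// shifts it back), which lets a larger range fit into the bit field. Sketch
// of the round trip:
inline unsigned PackAlignedOffsetSketch(unsigned offset, int log2_alignment) {
  return offset >> log2_alignment;  // as stored by the setter
}
inline unsigned UnpackAlignedOffsetSketch(unsigned stored, int log2_alignment) {
  return stored << log2_alignment;  // as recovered by the getter
}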
5079 bool Code::back_edges_patched_for_osr() {
5080 DCHECK_EQ(FUNCTION, kind());
5081 return allow_osr_at_loop_nesting_level() > 0;
5085 byte Code::to_boolean_state() {
5086 return extra_ic_state();
5090 bool Code::has_function_cache() {
5091 DCHECK(kind() == STUB);
5092 return HasFunctionCacheField::decode(
5093 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5097 void Code::set_has_function_cache(bool flag) {
5098 DCHECK(kind() == STUB);
5099 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5100 int updated = HasFunctionCacheField::update(previous, flag);
5101 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5105 bool Code::marked_for_deoptimization() {
5106 DCHECK(kind() == OPTIMIZED_FUNCTION);
5107 return MarkedForDeoptimizationField::decode(
5108 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
5112 void Code::set_marked_for_deoptimization(bool flag) {
5113 DCHECK(kind() == OPTIMIZED_FUNCTION);
5114 DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
5115 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5116 int updated = MarkedForDeoptimizationField::update(previous, flag);
5117 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5121 bool Code::is_inline_cache_stub() {
5122 Kind kind = this->kind();
5124 #define CASE(name) case name: return true;
5127 default: return false;
5132 bool Code::is_keyed_stub() {
5133 return is_keyed_load_stub() || is_keyed_store_stub();
5137 bool Code::is_debug_stub() {
5138 return ic_state() == DEBUG_STUB;
5142 ConstantPoolArray* Code::constant_pool() {
5143 return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
5147 void Code::set_constant_pool(Object* value) {
5148 DCHECK(value->IsConstantPoolArray());
5149 WRITE_FIELD(this, kConstantPoolOffset, value);
5150 WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
5154 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5155 ExtraICState extra_ic_state, StubType type,
5156 CacheHolderFlag holder) {
5157 // Compute the bit mask.
5158 unsigned int bits = KindField::encode(kind)
5159 | ICStateField::encode(ic_state)
5160 | TypeField::encode(type)
5161 | ExtraICStateField::encode(extra_ic_state)
5162 | CacheHolderField::encode(holder);
5163 return static_cast<Flags>(bits);
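// The flags word packs each component into its own BitField range, so the
// individual encode() results can simply be OR-ed together and are recovered
// independently by the Extract*FromFlags() helpers below. A standalone
// sketch with made-up bit ranges (the real ranges are defined by KindField,
// ICStateField, etc.):
inline unsigned EncodeTwoFieldsSketch(unsigned kind, unsigned ic_state) {
  return (kind & 0xF) | ((ic_state & 0xF) << 4);  // kind: bits 0-3, state: 4-7
}
inline unsigned DecodeKindSketch(unsigned flags) { return flags & 0xF; }
inline unsigned DecodeICStateSketch(unsigned flags) { return (flags >> 4) & 0xF; }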
5167 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
5168 ExtraICState extra_ic_state,
5169 CacheHolderFlag holder,
5171 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
5175 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
5176 CacheHolderFlag holder) {
5177 return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
5181 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5182 return KindField::decode(flags);
5186 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
5187 return ICStateField::decode(flags);
5191 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5192 return ExtraICStateField::decode(flags);
5196 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
5197 return TypeField::decode(flags);
5201 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5202 return CacheHolderField::decode(flags);
5206 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5207 int bits = flags & ~TypeField::kMask;
5208 return static_cast<Flags>(bits);
5212 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5213 int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5214 return static_cast<Flags>(bits);
5218 Code* Code::GetCodeFromTargetAddress(Address address) {
5219 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5220 // GetCodeFromTargetAddress might be called when marking objects during
5221 // mark-sweep. reinterpret_cast is therefore used instead of the more
5222 // appropriate Code::cast, which does not work when the object's map is marked.
5224 Code* result = reinterpret_cast<Code*>(code);
5229 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5231 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5235 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5236 if (!FLAG_collect_maps) return false;
5237 if (object->IsMap()) {
5238 return Map::cast(object)->CanTransition() &&
5239 FLAG_weak_embedded_maps_in_optimized_code;
5241 if (object->IsCell()) {
5242 object = Cell::cast(object)->value();
5243 } else if (object->IsPropertyCell()) {
5244 object = PropertyCell::cast(object)->value();
5246 if (object->IsJSObject()) {
5247 return FLAG_weak_embedded_objects_in_optimized_code;
5249 if (object->IsFixedArray()) {
5250 // Contexts of inlined functions are embedded in optimized code.
5251 Map* map = HeapObject::cast(object)->map();
5252 Heap* heap = map->GetHeap();
5253 return FLAG_weak_embedded_objects_in_optimized_code &&
5254 map == heap->function_context_map();
5260 class Code::FindAndReplacePattern {
5262 FindAndReplacePattern() : count_(0) { }
5263 void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
5264 DCHECK(count_ < kMaxCount);
5265 find_[count_] = map_to_find;
5266 replace_[count_] = obj_to_replace;
5270 static const int kMaxCount = 4;
5272 Handle<Map> find_[kMaxCount];
5273 Handle<Object> replace_[kMaxCount];
5278 Object* Map::prototype() const {
5279 return READ_FIELD(this, kPrototypeOffset);
5283 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
5284 DCHECK(value->IsNull() || value->IsJSReceiver());
5285 WRITE_FIELD(this, kPrototypeOffset, value);
5286 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
5290 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5291 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5292 return LayoutDescriptor::cast_gc_safe(layout_desc);
5296 bool Map::HasFastPointerLayout() const {
5297 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5298 return LayoutDescriptor::IsFastPointerLayout(layout_desc);
5302 void Map::UpdateDescriptors(DescriptorArray* descriptors,
5303 LayoutDescriptor* layout_desc) {
5304 set_instance_descriptors(descriptors);
5305 if (FLAG_unbox_double_fields) {
5306 if (layout_descriptor()->IsSlowLayout()) {
5307 set_layout_descriptor(layout_desc);
5310 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5311 if (FLAG_verify_heap) {
5312 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5313 CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5316 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5317 DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5323 void Map::InitializeDescriptors(DescriptorArray* descriptors,
5324 LayoutDescriptor* layout_desc) {
5325 int len = descriptors->number_of_descriptors();
5326 set_instance_descriptors(descriptors);
5327 SetNumberOfOwnDescriptors(len);
5329 if (FLAG_unbox_double_fields) {
5330 set_layout_descriptor(layout_desc);
5332 // TODO(ishell): remove these checks from VERIFY_HEAP mode.
5333 if (FLAG_verify_heap) {
5334 CHECK(layout_descriptor()->IsConsistentWithMap(this));
5337 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5339 set_visitor_id(StaticVisitorBase::GetVisitorId(this));
5344 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5345 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5348 void Map::set_bit_field3(uint32_t bits) {
5349 if (kInt32Size != kPointerSize) {
5350 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5352 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5356 uint32_t Map::bit_field3() {
5357 return READ_UINT32_FIELD(this, kBitField3Offset);
5361 LayoutDescriptor* Map::GetLayoutDescriptor() {
5362 return FLAG_unbox_double_fields ? layout_descriptor()
5363 : LayoutDescriptor::FastPointerLayout();
5367 void Map::AppendDescriptor(Descriptor* desc) {
5368 DescriptorArray* descriptors = instance_descriptors();
5369 int number_of_own_descriptors = NumberOfOwnDescriptors();
5370 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5371 descriptors->Append(desc);
5372 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5374 // This function does not support appending double field descriptors and
5375 // should never be asked to (otherwise, the layout descriptor would have to be updated too).
5377 PropertyDetails details = desc->GetDetails();
5378 CHECK(details.type() != DATA || !details.representation().IsDouble());
5383 Object* Map::GetBackPointer() {
5384 Object* object = constructor_or_backpointer();
5385 if (object->IsMap()) {
5388 return GetIsolate()->heap()->undefined_value();
5392 Map* Map::ElementsTransitionMap() {
5393 return TransitionArray::SearchSpecial(
5394 this, GetHeap()->elements_transition_symbol());
5398 ACCESSORS(Map, raw_transitions, Object, kTransitionsOffset)
5401 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
5402 DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
5403 DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
5404 (value->IsMap() && GetBackPointer()->IsUndefined()));
5405 DCHECK(!value->IsMap() ||
5406 Map::cast(value)->GetConstructor() == constructor_or_backpointer());
5407 set_constructor_or_backpointer(value, mode);
5411 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
5412 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
5413 ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
5414 ACCESSORS(Map, constructor_or_backpointer, Object,
5415 kConstructorOrBackPointerOffset)
5418 Object* Map::GetConstructor() const {
5419 Object* maybe_constructor = constructor_or_backpointer();
5420 // Follow any back pointers.
5421 while (maybe_constructor->IsMap()) {
5423 Map::cast(maybe_constructor)->constructor_or_backpointer();
5425 return maybe_constructor;
5429 void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
5430 // Never overwrite a back pointer with a constructor.
5431 DCHECK(!constructor_or_backpointer()->IsMap());
5432 set_constructor_or_backpointer(constructor, mode);
5436 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
5437 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
5438 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
5440 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
5441 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
5442 ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
5444 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
5445 ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)
5447 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
5448 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
5449 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
5450 kExpectedReceiverTypeOffset)
5452 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
5453 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
5454 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
5456 ACCESSORS(Box, value, Object, kValueOffset)
5458 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
5459 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
5461 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
5462 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
5463 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
5465 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
5466 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
5467 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
5468 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
5469 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
5470 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
5471 SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
5472 BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
5473 kCanInterceptSymbolsBit)
5474 BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
5475 BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)
5477 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
5478 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
5480 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
5481 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
5482 ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
5484 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
5485 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
5486 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
5487 kPrototypeTemplateOffset)
5488 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
5489 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
5490 kNamedPropertyHandlerOffset)
5491 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
5492 kIndexedPropertyHandlerOffset)
5493 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
5494 kInstanceTemplateOffset)
5495 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
5496 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
5497 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
5498 kInstanceCallHandlerOffset)
5499 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
5500 kAccessCheckInfoOffset)
5501 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
5503 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
5504 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
5505 kInternalFieldCountOffset)
5507 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
5509 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
5510 ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
5511 ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
5512 ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
5513 kPretenureCreateCountOffset)
5514 ACCESSORS(AllocationSite, dependent_code, DependentCode,
5515 kDependentCodeOffset)
5516 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
5517 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
5519 ACCESSORS(Script, source, Object, kSourceOffset)
5520 ACCESSORS(Script, name, Object, kNameOffset)
5521 ACCESSORS(Script, id, Smi, kIdOffset)
5522 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
5523 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
5524 ACCESSORS(Script, context_data, Object, kContextOffset)
5525 ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
5526 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
5527 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
5528 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
5529 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
5530 kEvalFrominstructionsOffsetOffset)
5531 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
5532 BOOL_ACCESSORS(Script, flags, is_embedder_debug_script,
5533 kIsEmbedderDebugScriptBit)
5534 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
5535 ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
5536 ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5538 Script::CompilationType Script::compilation_type() {
5539 return BooleanBit::get(flags(), kCompilationTypeBit) ?
5540 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5542 void Script::set_compilation_type(CompilationType type) {
5543 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5544 type == COMPILATION_TYPE_EVAL));
5546 Script::CompilationState Script::compilation_state() {
5547 return BooleanBit::get(flags(), kCompilationStateBit) ?
5548 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5550 void Script::set_compilation_state(CompilationState state) {
5551 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5552 state == COMPILATION_STATE_COMPILED));
5556 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
5557 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
5558 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
5559 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
5561 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
5562 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
5563 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
5564 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
5566 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
5567 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
5568 kOptimizedCodeMapOffset)
5569 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5570 ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
5571 kFeedbackVectorOffset)
5573 SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
5575 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
5576 kInstanceClassNameOffset)
5577 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
5578 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
5579 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
5580 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
5583 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
5584 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
5585 kHiddenPrototypeBit)
5586 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
5587 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
5588 kNeedsAccessCheckBit)
5589 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
5590 kReadOnlyPrototypeBit)
5591 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
5592 kRemovePrototypeBit)
5593 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
5595 BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
5596 BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
5598 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
5600 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
5603 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
5604 kAllowLazyCompilation)
5605 BOOL_ACCESSORS(SharedFunctionInfo,
5607 allows_lazy_compilation_without_context,
5608 kAllowLazyCompilationWithoutContext)
5609 BOOL_ACCESSORS(SharedFunctionInfo,
5613 BOOL_ACCESSORS(SharedFunctionInfo,
5615 has_duplicate_parameters,
5616 kHasDuplicateParameters)
5617 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5618 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
5621 #if V8_HOST_ARCH_32_BIT
5622 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
5623 SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
5624 kFormalParameterCountOffset)
5625 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
5626 kExpectedNofPropertiesOffset)
5627 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5628 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
5629 kStartPositionAndTypeOffset)
5630 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
5631 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
5632 kFunctionTokenPositionOffset)
5633 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
5634 kCompilerHintsOffset)
5635 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
5636 kOptCountAndBailoutReasonOffset)
5637 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
5638 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
5639 SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
5643 #if V8_TARGET_LITTLE_ENDIAN
5644 #define PSEUDO_SMI_LO_ALIGN 0
5645 #define PSEUDO_SMI_HI_ALIGN kIntSize
5647 #define PSEUDO_SMI_LO_ALIGN kIntSize
5648 #define PSEUDO_SMI_HI_ALIGN 0
5651 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5652 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN); \
5653 int holder::name() const { \
5654 int value = READ_INT_FIELD(this, offset); \
5655 DCHECK(kHeapObjectTag == 1); \
5656 DCHECK((value & kHeapObjectTag) == 0); \
5657 return value >> 1; \
5659 void holder::set_##name(int value) { \
5660 DCHECK(kHeapObjectTag == 1); \
5661 DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
5662 WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag); \
5665 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5666 STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
5667 INT_ACCESSORS(holder, name, offset)
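// Model of the pseudo-Smi encoding used by PSEUDO_SMI_ACCESSORS_LO above,
// assuming kHeapObjectTag == 1 as the DCHECKs require: the int payload is
// shifted left by one so the low (tag) bit stays clear and the word still
// reads as a Smi rather than a heap pointer; reads simply undo the shift.
inline int PseudoSmiEncodeSketch(int value) {
  return (value << 1) & ~1;  // what set_##name() writes
}
inline int PseudoSmiDecodeSketch(int stored) {
  return stored >> 1;        // what name() returns
}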
5670 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
5671 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
5672 kFormalParameterCountOffset)
5674 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5675 expected_nof_properties,
5676 kExpectedNofPropertiesOffset)
5677 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
5679 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
5680 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5681 start_position_and_type,
5682 kStartPositionAndTypeOffset)
5684 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5685 function_token_position,
5686 kFunctionTokenPositionOffset)
5687 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5689 kCompilerHintsOffset)
5691 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5692 opt_count_and_bailout_reason,
5693 kOptCountAndBailoutReasonOffset)
5694 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
5696 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
5698 kAstNodeCountOffset)
5699 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
5701 kProfilerTicksOffset)
5706 BOOL_GETTER(SharedFunctionInfo,
5708 optimization_disabled,
5709 kOptimizationDisabled)
5712 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
5713 set_compiler_hints(BooleanBit::set(compiler_hints(),
5714 kOptimizationDisabled,
5716 // When disabling optimizations, we reflect that in the code object so
5717 // that it will not be counted as optimizable code.
5718 if ((code()->kind() == Code::FUNCTION) && disable) {
5719 code()->set_optimizable(false);
5724 LanguageMode SharedFunctionInfo::language_mode() {
5725 STATIC_ASSERT(LANGUAGE_END == 3);
5726 return construct_language_mode(
5727 BooleanBit::get(compiler_hints(), kStrictModeFunction),
5728 BooleanBit::get(compiler_hints(), kStrongModeFunction));
5732 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
5733 STATIC_ASSERT(LANGUAGE_END == 3);
5734 // We only allow language mode transitions that set the same language mode
5735 // again or go up in the chain:
5736 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
5737 int hints = compiler_hints();
5738 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
5739 hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
5740 set_compiler_hints(hints);
5744 FunctionKind SharedFunctionInfo::kind() {
5745 return FunctionKindBits::decode(compiler_hints());
5749 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5750 DCHECK(IsValidFunctionKind(kind));
5751 int hints = compiler_hints();
5752 hints = FunctionKindBits::update(hints, kind);
5753 set_compiler_hints(hints);
5757 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_super_property,
5759 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
5760 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
5762 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
5763 name_should_print_as_anonymous,
5764 kNameShouldPrintAsAnonymous)
5765 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
5766 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
5767 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
5768 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
5769 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
5770 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
5771 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
5772 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
5774 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
5775 kIsAccessorFunction)
5776 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
5777 kIsDefaultConstructor)
5779 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
5780 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
5782 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5784 bool Script::HasValidSource() {
5785 Object* src = this->source();
5786 if (!src->IsString()) return true;
5787 String* src_str = String::cast(src);
5788 if (!StringShape(src_str).IsExternal()) return true;
5789 if (src_str->IsOneByteRepresentation()) {
5790 return ExternalOneByteString::cast(src)->resource() != NULL;
5791 } else if (src_str->IsTwoByteRepresentation()) {
5792 return ExternalTwoByteString::cast(src)->resource() != NULL;
5798 void SharedFunctionInfo::DontAdaptArguments() {
5799 DCHECK(code()->kind() == Code::BUILTIN);
5800 set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
5804 int SharedFunctionInfo::start_position() const {
5805 return start_position_and_type() >> kStartPositionShift;
5809 void SharedFunctionInfo::set_start_position(int start_position) {
5810 set_start_position_and_type((start_position << kStartPositionShift)
5811 | (start_position_and_type() & ~kStartPositionMask));
5815 Code* SharedFunctionInfo::code() const {
5816 return Code::cast(READ_FIELD(this, kCodeOffset));
5820 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
5821 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5822 WRITE_FIELD(this, kCodeOffset, value);
5823 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
5827 void SharedFunctionInfo::ReplaceCode(Code* value) {
5828 // If the GC metadata field is already in use, then the function was
5829 // enqueued as a code-flushing candidate and we remove it now.
5830 if (code()->gc_metadata() != NULL) {
5831 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5832 flusher->EvictCandidate(this);
5835 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5841 ScopeInfo* SharedFunctionInfo::scope_info() const {
5842 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5846 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5847 WriteBarrierMode mode) {
5848 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5849 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5852 reinterpret_cast<Object*>(value),
5857 bool SharedFunctionInfo::is_compiled() {
5858 Builtins* builtins = GetIsolate()->builtins();
5859 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
5860 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
5861 return code() != builtins->builtin(Builtins::kCompileLazy);
5865 bool SharedFunctionInfo::is_simple_parameter_list() {
5866 return scope_info()->IsSimpleParameterList();
5870 bool SharedFunctionInfo::IsApiFunction() {
5871 return function_data()->IsFunctionTemplateInfo();
5875 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5876 DCHECK(IsApiFunction());
5877 return FunctionTemplateInfo::cast(function_data());
5881 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5882 return function_data()->IsSmi();
5886 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5887 DCHECK(HasBuiltinFunctionId());
5888 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5892 int SharedFunctionInfo::ic_age() {
5893 return ICAgeBits::decode(counters());
5897 void SharedFunctionInfo::set_ic_age(int ic_age) {
5898 set_counters(ICAgeBits::update(counters(), ic_age));
5902 int SharedFunctionInfo::deopt_count() {
5903 return DeoptCountBits::decode(counters());
5907 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5908 set_counters(DeoptCountBits::update(counters(), deopt_count));
5912 void SharedFunctionInfo::increment_deopt_count() {
5913 int value = counters();
5914 int deopt_count = DeoptCountBits::decode(value);
5915 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5916 set_counters(DeoptCountBits::update(value, deopt_count));
5920 int SharedFunctionInfo::opt_reenable_tries() {
5921 return OptReenableTriesBits::decode(counters());
5925 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
5926 set_counters(OptReenableTriesBits::update(counters(), tries));
5930 int SharedFunctionInfo::opt_count() {
5931 return OptCountBits::decode(opt_count_and_bailout_reason());
5935 void SharedFunctionInfo::set_opt_count(int opt_count) {
5936 set_opt_count_and_bailout_reason(
5937 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
5941 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
5942 return static_cast<BailoutReason>(
5943 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5947 bool SharedFunctionInfo::has_deoptimization_support() {
5948 Code* code = this->code();
5949 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
5953 void SharedFunctionInfo::TryReenableOptimization() {
5954 int tries = opt_reenable_tries();
5955 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
5956 // We reenable optimization whenever the number of tries is a large
5957 // enough power of 2.
5958 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5959 set_optimization_disabled(false);
5962 code()->set_optimizable(true);
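// The reenable heuristic above fires when the (wrapped) try counter is a
// power of two of at least 16: x & (x - 1) clears the lowest set bit and is
// therefore zero exactly for powers of two (and for zero). Sketch of the
// predicate:
inline bool ShouldReenableOptimizationSketch(int tries) {
  return tries >= 16 && ((tries - 1) & tries) == 0;
}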
5967 bool JSFunction::IsBuiltin() {
5968 return context()->global_object()->IsJSBuiltinsObject();
5972 bool JSFunction::IsFromNativeScript() {
5973 Object* script = shared()->script();
5974 bool native = script->IsScript() &&
5975 Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
5976 DCHECK(!IsBuiltin() || native); // All builtins are also native.
5981 bool JSFunction::IsFromExtensionScript() {
5982 Object* script = shared()->script();
5983 return script->IsScript() &&
5984 Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
5988 bool JSFunction::NeedsArgumentsAdaption() {
5989 return shared()->internal_formal_parameter_count() !=
5990 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5994 bool JSFunction::IsOptimized() {
5995 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5999 bool JSFunction::IsOptimizable() {
6000 return code()->kind() == Code::FUNCTION && code()->optimizable();
6004 bool JSFunction::IsMarkedForOptimization() {
6005 return code() == GetIsolate()->builtins()->builtin(
6006 Builtins::kCompileOptimized);
6010 bool JSFunction::IsMarkedForConcurrentOptimization() {
6011 return code() == GetIsolate()->builtins()->builtin(
6012 Builtins::kCompileOptimizedConcurrent);
6016 bool JSFunction::IsInOptimizationQueue() {
6017 return code() == GetIsolate()->builtins()->builtin(
6018 Builtins::kInOptimizationQueue);
6022 bool JSFunction::IsInobjectSlackTrackingInProgress() {
6023 return has_initial_map() &&
6024 initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
6028 Code* JSFunction::code() {
6029 return Code::cast(
6030 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6031 }
6034 void JSFunction::set_code(Code* value) {
6035 DCHECK(!GetHeap()->InNewSpace(value));
6036 Address entry = value->entry();
6037 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6038 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6039 this,
6040 HeapObject::RawField(this, kCodeEntryOffset),
6041 value);
6042 }
6045 void JSFunction::set_code_no_write_barrier(Code* value) {
6046 DCHECK(!GetHeap()->InNewSpace(value));
6047 Address entry = value->entry();
6048 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6052 void JSFunction::ReplaceCode(Code* code) {
6053 bool was_optimized = IsOptimized();
6054 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6056 if (was_optimized && is_optimized) {
6057 shared()->EvictFromOptimizedCodeMap(this->code(),
6058 "Replacing with another optimized code");
6059 }
6061 set_code(code);
6063 // Add/remove the function from the list of optimized functions for this
6064 // context based on the state change.
6065 if (!was_optimized && is_optimized) {
6066 context()->native_context()->AddOptimizedFunction(this);
6067 }
6068 if (was_optimized && !is_optimized) {
6069 // TODO(titzer): linear in the number of optimized functions; fix!
6070 context()->native_context()->RemoveOptimizedFunction(this);
6071 }
6072 }
6075 Context* JSFunction::context() {
6076 return Context::cast(READ_FIELD(this, kContextOffset));
6080 JSObject* JSFunction::global_proxy() {
6081 return context()->global_proxy();
6085 void JSFunction::set_context(Object* value) {
6086 DCHECK(value->IsUndefined() || value->IsContext());
6087 WRITE_FIELD(this, kContextOffset, value);
6088 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6091 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6092 kPrototypeOrInitialMapOffset)
6095 Map* JSFunction::initial_map() {
6096 return Map::cast(prototype_or_initial_map());
6100 bool JSFunction::has_initial_map() {
6101 return prototype_or_initial_map()->IsMap();
6105 bool JSFunction::has_instance_prototype() {
6106 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6110 bool JSFunction::has_prototype() {
6111 return map()->has_non_instance_prototype() || has_instance_prototype();
6115 Object* JSFunction::instance_prototype() {
6116 DCHECK(has_instance_prototype());
6117 if (has_initial_map()) return initial_map()->prototype();
6118 // When there is no initial map and the prototype is a JSObject, the
6119 // initial map field is used for the prototype field.
6120 return prototype_or_initial_map();
6124 Object* JSFunction::prototype() {
6125 DCHECK(has_prototype());
6126 // If the function's prototype property has been set to a non-JSObject
6127 // value, that value is stored in the constructor field of the map.
6128 if (map()->has_non_instance_prototype()) {
6129 Object* prototype = map()->GetConstructor();
6130 // The map must have a prototype in that field, not a back pointer.
6131 DCHECK(!prototype->IsMap());
6132 return prototype;
6133 }
6134 return instance_prototype();
6135 }
6138 bool JSFunction::should_have_prototype() {
6139 return map()->function_with_prototype();
6143 bool JSFunction::is_compiled() {
6144 Builtins* builtins = GetIsolate()->builtins();
6145 return code() != builtins->builtin(Builtins::kCompileLazy) &&
6146 code() != builtins->builtin(Builtins::kCompileOptimized) &&
6147 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6151 bool JSFunction::is_simple_parameter_list() {
6152 return shared()->is_simple_parameter_list();
6156 FixedArray* JSFunction::literals() {
6157 DCHECK(!shared()->bound());
6158 return literals_or_bindings();
6162 void JSFunction::set_literals(FixedArray* literals) {
6163 DCHECK(!shared()->bound());
6164 set_literals_or_bindings(literals);
6168 FixedArray* JSFunction::function_bindings() {
6169 DCHECK(shared()->bound());
6170 return literals_or_bindings();
6174 void JSFunction::set_function_bindings(FixedArray* bindings) {
6175 DCHECK(shared()->bound());
6176 // Bound function literal may be initialized to the empty fixed array
6177 // before the bindings are set.
6178 DCHECK(bindings == GetHeap()->empty_fixed_array() ||
6179 bindings->map() == GetHeap()->fixed_array_map());
6180 set_literals_or_bindings(bindings);
6184 int JSFunction::NumberOfLiterals() {
6185 DCHECK(!shared()->bound());
6186 return literals()->length();
6190 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
6191 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6192 return READ_FIELD(this, OffsetOfFunctionWithId(id));
6196 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
6197 Object* value) {
6198 DCHECK(id < kJSBuiltinsCount); // id is unsigned.
6199 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
6200 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
6201 }
6204 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
6205 ACCESSORS(JSProxy, hash, Object, kHashOffset)
6206 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
6207 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
6210 void JSProxy::InitializeBody(int object_size, Object* value) {
6211 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6212 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
6213 WRITE_FIELD(this, offset, value);
6214 }
6215 }
6218 ACCESSORS(JSCollection, table, Object, kTableOffset)
6221 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6222 template<class Derived, class TableType> \
6223 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6224 return type::cast(READ_FIELD(this, offset)); \
6225 } \
6226 template<class Derived, class TableType> \
6227 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6228 type* value, WriteBarrierMode mode) { \
6229 WRITE_FIELD(this, offset, value); \
6230 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6231 }
6233 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6234 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6235 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6237 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6240 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6241 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6244 Address Foreign::foreign_address() {
6245 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6249 void Foreign::set_foreign_address(Address value) {
6250 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6254 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6255 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6256 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6257 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6258 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6260 bool JSGeneratorObject::is_suspended() {
6261 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6262 DCHECK_EQ(kGeneratorClosed, 0);
6263 return continuation() > 0;
6266 bool JSGeneratorObject::is_closed() {
6267 return continuation() == kGeneratorClosed;
6270 bool JSGeneratorObject::is_executing() {
6271 return continuation() == kGeneratorExecuting;
6274 ACCESSORS(JSModule, context, Object, kContextOffset)
6275 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6278 ACCESSORS(JSValue, value, Object, kValueOffset)
6281 HeapNumber* HeapNumber::cast(Object* object) {
6282 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6283 return reinterpret_cast<HeapNumber*>(object);
6287 const HeapNumber* HeapNumber::cast(const Object* object) {
6288 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6289 return reinterpret_cast<const HeapNumber*>(object);
6293 ACCESSORS(JSDate, value, Object, kValueOffset)
6294 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
6295 ACCESSORS(JSDate, year, Object, kYearOffset)
6296 ACCESSORS(JSDate, month, Object, kMonthOffset)
6297 ACCESSORS(JSDate, day, Object, kDayOffset)
6298 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
6299 ACCESSORS(JSDate, hour, Object, kHourOffset)
6300 ACCESSORS(JSDate, min, Object, kMinOffset)
6301 ACCESSORS(JSDate, sec, Object, kSecOffset)
6304 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6305 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6306 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
6307 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
6308 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6309 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6312 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6313 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6314 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
6315 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6316 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6317 ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
6318 ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6321 void Code::WipeOutHeader() {
6322 WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6323 WRITE_FIELD(this, kHandlerTableOffset, NULL);
6324 WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6325 WRITE_FIELD(this, kConstantPoolOffset, NULL);
6326 // Do not wipe out major/minor keys on a code stub or IC
6327 if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6328 WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6329 }
6330 }
6333 Object* Code::type_feedback_info() {
6334 DCHECK(kind() == FUNCTION);
6335 return raw_type_feedback_info();
6339 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6340 DCHECK(kind() == FUNCTION);
6341 set_raw_type_feedback_info(value, mode);
6342 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6343 value, mode);
6344 }
6347 uint32_t Code::stub_key() {
6348 DCHECK(IsCodeStubOrIC());
6349 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6350 return static_cast<uint32_t>(smi_key->value());
6354 void Code::set_stub_key(uint32_t key) {
6355 DCHECK(IsCodeStubOrIC());
6356 set_raw_type_feedback_info(Smi::FromInt(key));
6360 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6361 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6364 byte* Code::instruction_start() {
6365 return FIELD_ADDR(this, kHeaderSize);
6369 byte* Code::instruction_end() {
6370 return instruction_start() + instruction_size();
6374 int Code::body_size() {
6375 return RoundUp(instruction_size(), kObjectAlignment);
6379 ByteArray* Code::unchecked_relocation_info() {
6380 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6384 byte* Code::relocation_start() {
6385 return unchecked_relocation_info()->GetDataStartAddress();
6389 int Code::relocation_size() {
6390 return unchecked_relocation_info()->length();
6394 byte* Code::entry() {
6395 return instruction_start();
6399 bool Code::contains(byte* inner_pointer) {
6400 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6404 ACCESSORS(JSArray, length, Object, kLengthOffset)
6407 void* JSArrayBuffer::backing_store() const {
6408 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6409 return reinterpret_cast<void*>(ptr);
6413 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6414 intptr_t ptr = reinterpret_cast<intptr_t>(value);
6415 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6419 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6420 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6423 bool JSArrayBuffer::is_external() {
6424 return BooleanBit::get(flag(), kIsExternalBit);
6428 void JSArrayBuffer::set_is_external(bool value) {
6429 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
6433 bool JSArrayBuffer::should_be_freed() {
6434 return BooleanBit::get(flag(), kShouldBeFreed);
6438 void JSArrayBuffer::set_should_be_freed(bool value) {
6439 set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
6443 bool JSArrayBuffer::is_neuterable() {
6444 return BooleanBit::get(flag(), kIsNeuterableBit);
6448 void JSArrayBuffer::set_is_neuterable(bool value) {
6449 set_flag(BooleanBit::set(flag(), kIsNeuterableBit, value));
6453 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
6454 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
6457 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6458 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
6459 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
6460 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
6461 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
6463 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6466 JSRegExp::Type JSRegExp::TypeTag() {
6467 Object* data = this->data();
6468 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6469 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6470 return static_cast<JSRegExp::Type>(smi->value());
6474 int JSRegExp::CaptureCount() {
6475 switch (TypeTag()) {
6476 case ATOM:
6477 return 0;
6478 case IRREGEXP:
6479 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6480 default:
6481 UNREACHABLE();
6482 return -1;
6483 }
6484 }
6487 JSRegExp::Flags JSRegExp::GetFlags() {
6488 DCHECK(this->data()->IsFixedArray());
6489 Object* data = this->data();
6490 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6491 return Flags(smi->value());
6495 String* JSRegExp::Pattern() {
6496 DCHECK(this->data()->IsFixedArray());
6497 Object* data = this->data();
6498 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6499 return pattern;
6500 }
6503 Object* JSRegExp::DataAt(int index) {
6504 DCHECK(TypeTag() != NOT_COMPILED);
6505 return FixedArray::cast(data())->get(index);
6509 void JSRegExp::SetDataAt(int index, Object* value) {
6510 DCHECK(TypeTag() != NOT_COMPILED);
6511 DCHECK(index >= kDataIndex); // Only implementation data can be set this way.
6512 FixedArray::cast(data())->set(index, value);
6516 ElementsKind JSObject::GetElementsKind() {
6517 ElementsKind kind = map()->elements_kind();
6518 #if VERIFY_HEAP && DEBUG
6519 FixedArrayBase* fixed_array =
6520 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6522 // If a GC was caused while constructing this object, the elements
6523 // pointer may point to a one pointer filler map.
6524 if (ElementsAreSafeToExamine()) {
6525 Map* map = fixed_array->map();
6526 DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6527 (map == GetHeap()->fixed_array_map() ||
6528 map == GetHeap()->fixed_cow_array_map())) ||
6529 (IsFastDoubleElementsKind(kind) &&
6530 (fixed_array->IsFixedDoubleArray() ||
6531 fixed_array == GetHeap()->empty_fixed_array())) ||
6532 (kind == DICTIONARY_ELEMENTS &&
6533 fixed_array->IsFixedArray() &&
6534 fixed_array->IsDictionary()) ||
6535 (kind > DICTIONARY_ELEMENTS));
6536 DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
6537 (elements()->IsFixedArray() && elements()->length() >= 2));
6538 }
6539 #endif
6540 return kind;
6541 }
6544 ElementsAccessor* JSObject::GetElementsAccessor() {
6545 return ElementsAccessor::ForKind(GetElementsKind());
6549 bool JSObject::HasFastObjectElements() {
6550 return IsFastObjectElementsKind(GetElementsKind());
6554 bool JSObject::HasFastSmiElements() {
6555 return IsFastSmiElementsKind(GetElementsKind());
6559 bool JSObject::HasFastSmiOrObjectElements() {
6560 return IsFastSmiOrObjectElementsKind(GetElementsKind());
6564 bool JSObject::HasFastDoubleElements() {
6565 return IsFastDoubleElementsKind(GetElementsKind());
6569 bool JSObject::HasFastHoleyElements() {
6570 return IsFastHoleyElementsKind(GetElementsKind());
6574 bool JSObject::HasFastElements() {
6575 return IsFastElementsKind(GetElementsKind());
6579 bool JSObject::HasDictionaryElements() {
6580 return GetElementsKind() == DICTIONARY_ELEMENTS;
6584 bool JSObject::HasSloppyArgumentsElements() {
6585 return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
6589 bool JSObject::HasExternalArrayElements() {
6590 HeapObject* array = elements();
6591 DCHECK(array != NULL);
6592 return array->IsExternalArray();
6596 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6597 bool JSObject::HasExternal##Type##Elements() { \
6598 HeapObject* array = elements(); \
6599 DCHECK(array != NULL); \
6600 if (!array->IsHeapObject()) \
6601 return false; \
6602 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6603 }
6605 TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)
6607 #undef EXTERNAL_ELEMENTS_CHECK
6610 bool JSObject::HasFixedTypedArrayElements() {
6611 HeapObject* array = elements();
6612 DCHECK(array != NULL);
6613 return array->IsFixedTypedArrayBase();
6617 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6618 bool JSObject::HasFixed##Type##Elements() { \
6619 HeapObject* array = elements(); \
6620 DCHECK(array != NULL); \
6621 if (!array->IsHeapObject()) \
6622 return false; \
6623 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6624 }
6626 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
6628 #undef FIXED_TYPED_ELEMENTS_CHECK
6631 bool JSObject::HasNamedInterceptor() {
6632 return map()->has_named_interceptor();
6636 bool JSObject::HasIndexedInterceptor() {
6637 return map()->has_indexed_interceptor();
6641 NameDictionary* JSObject::property_dictionary() {
6642 DCHECK(!HasFastProperties());
6643 return NameDictionary::cast(properties());
6647 SeededNumberDictionary* JSObject::element_dictionary() {
6648 DCHECK(HasDictionaryElements());
6649 return SeededNumberDictionary::cast(elements());
6653 bool Name::IsHashFieldComputed(uint32_t field) {
6654 return (field & kHashNotComputedMask) == 0;
6658 bool Name::HasHashCode() {
6659 return IsHashFieldComputed(hash_field());
6663 uint32_t Name::Hash() {
6664 // Fast case: has hash code already been computed?
6665 uint32_t field = hash_field();
6666 if (IsHashFieldComputed(field)) return field >> kHashShift;
6667 // Slow case: compute hash code and set it. Has to be a string.
6668 return String::cast(this)->ComputeAndSetHash();
6671 bool Name::IsOwn() {
6672 return this->IsSymbol() && Symbol::cast(this)->is_own();
6676 StringHasher::StringHasher(int length, uint32_t seed)
6677 : length_(length),
6678 raw_running_hash_(seed),
6679 array_index_(0),
6680 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
6681 is_first_char_(true) {
6682 DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
6683 }
6686 bool StringHasher::has_trivial_hash() {
6687 return length_ > String::kMaxHashCalcLength;
6691 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6692 running_hash += c;
6693 running_hash += (running_hash << 10);
6694 running_hash ^= (running_hash >> 6);
6695 return running_hash;
6696 }
6699 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6700 running_hash += (running_hash << 3);
6701 running_hash ^= (running_hash >> 11);
6702 running_hash += (running_hash << 15);
6703 if ((running_hash & String::kHashBitMask) == 0) {
6704 return kZeroHash;
6705 }
6706 return running_hash;
6707 }
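// Illustration (derived from the code above): AddCharacterCore() is the
// per-character step and GetHashCore() the finalization step of the Jenkins
// one-at-a-time hash referenced in AddCharacter() below. The final check
// substitutes kZeroHash whenever the significant hash bits would otherwise be
// all zero, so GetHashCore() never yields a zero hash value.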
6710 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
6711 const uc16* chars, int length) {
6712 DCHECK_NOT_NULL(chars);
6713 DCHECK(length >= 0);
6714 for (int i = 0; i < length; ++i) {
6715 running_hash = AddCharacterCore(running_hash, *chars++);
6716 }
6717 return running_hash;
6718 }
6721 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
6722 const char* chars,
6723 int length) {
6724 DCHECK_NOT_NULL(chars);
6725 DCHECK(length >= 0);
6726 for (int i = 0; i < length; ++i) {
6727 uint16_t c = static_cast<uint16_t>(*chars++);
6728 running_hash = AddCharacterCore(running_hash, c);
6729 }
6730 return running_hash;
6731 }
6734 void StringHasher::AddCharacter(uint16_t c) {
6735 // Use the Jenkins one-at-a-time hash function to update the hash
6736 // for the given character.
6737 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6741 bool StringHasher::UpdateIndex(uint16_t c) {
6742 DCHECK(is_array_index_);
6743 if (c < '0' || c > '9') {
6744 is_array_index_ = false;
6745 return false;
6746 }
6747 int d = c - '0';
6748 if (is_first_char_) {
6749 is_first_char_ = false;
6750 if (c == '0' && length_ > 1) {
6751 is_array_index_ = false;
6752 return false;
6753 }
6754 }
6755 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
6756 is_array_index_ = false;
6757 return false;
6758 }
6759 array_index_ = array_index_ * 10 + d;
6760 return true;
6761 }
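// Illustration (derived from the guard above): it rejects digits that would push
// array_index_ * 10 + d past the largest valid array index. 429496729 is
// 2^32 / 10 rounded down, so once the running value exceeds it (give or take the
// small (d + 2) >> 3 correction for the final digit), appending another decimal
// digit can no longer fit in a 32-bit array index.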
6764 template<typename Char>
6765 inline void StringHasher::AddCharacters(const Char* chars, int length) {
6766 DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
6767 int i = 0;
6768 if (is_array_index_) {
6769 for (; i < length; i++) {
6770 AddCharacter(chars[i]);
6771 if (!UpdateIndex(chars[i])) {
6772 i++;
6773 break;
6774 }
6775 }
6776 }
6777 for (; i < length; i++) {
6778 DCHECK(!is_array_index_);
6779 AddCharacter(chars[i]);
6780 }
6781 }
6784 template <typename schar>
6785 uint32_t StringHasher::HashSequentialString(const schar* chars,
6786 int length,
6787 uint32_t seed) {
6788 StringHasher hasher(length, seed);
6789 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6790 return hasher.GetHashField();
6791 }
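// Hypothetical usage sketch (names assumed, not from this file): hashing a flat
// sequential string would look roughly like
//   uint32_t field = StringHasher::HashSequentialString(chars, length, seed);
// where seed is the heap's hash seed. Note the result is a complete hash field
// (hash bits plus flags), not a bare hash value.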
6794 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6795 IteratingStringHasher hasher(string->length(), seed);
6797 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6798 ConsString* cons_string = String::VisitFlat(&hasher, string);
6799 if (cons_string == nullptr) return hasher.GetHashField();
6800 hasher.VisitConsString(cons_string);
6801 return hasher.GetHashField();
6802 }
6805 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6806 int length) {
6807 AddCharacters(chars, length);
6808 }
6811 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6812 int length) {
6813 AddCharacters(chars, length);
6814 }
6817 bool Name::AsArrayIndex(uint32_t* index) {
6818 return IsString() && String::cast(this)->AsArrayIndex(index);
6822 bool String::AsArrayIndex(uint32_t* index) {
6823 uint32_t field = hash_field();
6824 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6825 return false;
6826 }
6827 return SlowAsArrayIndex(index);
6828 }
6831 void String::SetForwardedInternalizedString(String* canonical) {
6832 DCHECK(IsInternalizedString());
6833 DCHECK(HasHashCode());
6834 if (canonical == this) return; // No need to forward.
6835 DCHECK(SlowEquals(canonical));
6836 DCHECK(canonical->IsInternalizedString());
6837 DCHECK(canonical->HasHashCode());
6838 WRITE_FIELD(this, kHashFieldSlot, canonical);
6839 // Setting the hash field to a tagged value sets the LSB, causing the hash
6840 // code to be interpreted as uninitialized. We use this fact to recognize
6841 // that we have a forwarded string.
6842 DCHECK(!HasHashCode());
6843 }
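// Minimal sketch of the forwarding scheme (illustrative, assuming a duplicate
// internalized string s and its canonical copy):
//   s->SetForwardedInternalizedString(canonical);    // hash slot now holds a tagged pointer
//   DCHECK(!s->HasHashCode());                       // LSB set reads as "hash not computed"
//   String* c = s->GetForwardedInternalizedString(); // recovers canonical via the slot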
6846 String* String::GetForwardedInternalizedString() {
6847 DCHECK(IsInternalizedString());
6848 if (HasHashCode()) return this;
6849 String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
6850 DCHECK(canonical->IsInternalizedString());
6851 DCHECK(SlowEquals(canonical));
6852 DCHECK(canonical->HasHashCode());
6853 return canonical;
6854 }
6857 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
6858 Handle<Name> name) {
6859 if (object->IsJSProxy()) {
6860 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6861 return JSProxy::HasPropertyWithHandler(proxy, name);
6862 }
6863 Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
6864 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
6865 }
6868 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
6869 Handle<Name> name) {
6870 if (object->IsJSProxy()) {
6871 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6872 return JSProxy::HasPropertyWithHandler(proxy, name);
6873 }
6874 Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
6875 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
6876 }
6879 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
6880 Handle<JSReceiver> object, Handle<Name> key) {
6881 uint32_t index;
6882 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6883 return GetElementAttribute(object, index);
6884 }
6885 LookupIterator it(object, key);
6886 return GetPropertyAttributes(&it);
6887 }
6890 Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
6891 Handle<JSReceiver> object, uint32_t index) {
6892 if (object->IsJSProxy()) {
6893 return JSProxy::GetElementAttributeWithHandler(
6894 Handle<JSProxy>::cast(object), object, index);
6895 }
6896 return JSObject::GetElementAttributeWithReceiver(
6897 Handle<JSObject>::cast(object), object, index, true);
6898 }
6901 bool JSGlobalObject::IsDetached() {
6902 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
6906 bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
6907 const PrototypeIterator iter(this->GetIsolate(),
6908 const_cast<JSGlobalProxy*>(this));
6909 return iter.GetCurrent() != global;
6913 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
6914 return object->IsJSProxy()
6915 ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
6916 : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
6920 Object* JSReceiver::GetIdentityHash() {
6921 return IsJSProxy()
6922 ? JSProxy::cast(this)->GetIdentityHash()
6923 : JSObject::cast(this)->GetIdentityHash();
6924 }
6927 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
6928 if (object->IsJSProxy()) {
6929 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6930 return JSProxy::HasElementWithHandler(proxy, index);
6931 }
6932 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6933 Handle<JSObject>::cast(object), object, index, true);
6934 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
6935 }
6938 Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
6939 uint32_t index) {
6940 if (object->IsJSProxy()) {
6941 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
6942 return JSProxy::HasElementWithHandler(proxy, index);
6943 }
6944 Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
6945 Handle<JSObject>::cast(object), object, index, false);
6946 return result.IsJust() ? Just(result.FromJust() != ABSENT) : Nothing<bool>();
6947 }
6950 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
6951 Handle<JSReceiver> object, uint32_t index) {
6952 if (object->IsJSProxy()) {
6953 return JSProxy::GetElementAttributeWithHandler(
6954 Handle<JSProxy>::cast(object), object, index);
6955 }
6956 return JSObject::GetElementAttributeWithReceiver(
6957 Handle<JSObject>::cast(object), object, index, false);
6958 }
6961 bool AccessorInfo::all_can_read() {
6962 return BooleanBit::get(flag(), kAllCanReadBit);
6966 void AccessorInfo::set_all_can_read(bool value) {
6967 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
6971 bool AccessorInfo::all_can_write() {
6972 return BooleanBit::get(flag(), kAllCanWriteBit);
6976 void AccessorInfo::set_all_can_write(bool value) {
6977 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
6981 PropertyAttributes AccessorInfo::property_attributes() {
6982 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6986 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6987 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6991 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6992 if (!HasExpectedReceiverType()) return true;
6993 if (!receiver->IsJSObject()) return false;
6994 return FunctionTemplateInfo::cast(expected_receiver_type())
6995 ->IsTemplateFor(JSObject::cast(receiver)->map());
6999 void ExecutableAccessorInfo::clear_setter() {
7000 auto foreign = GetIsolate()->factory()->NewForeign(
7001 reinterpret_cast<v8::internal::Address>(
7002 reinterpret_cast<intptr_t>(nullptr)));
7003 set_setter(*foreign);
7007 template<typename Derived, typename Shape, typename Key>
7008 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7009 Handle<Object> key,
7010 Handle<Object> value) {
7011 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7012 }
7015 template<typename Derived, typename Shape, typename Key>
7016 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7017 Handle<Object> key,
7018 Handle<Object> value,
7019 PropertyDetails details) {
7020 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7021 int index = DerivedHashTable::EntryToIndex(entry);
7022 DisallowHeapAllocation no_gc;
7023 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
7024 FixedArray::set(index, *key, mode);
7025 FixedArray::set(index+1, *value, mode);
7026 FixedArray::set(index+2, details.AsSmi());
7027 }
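// Layout note (derived from the code above): each dictionary entry occupies
// three consecutive FixedArray slots starting at EntryToIndex(entry):
// [index] = key, [index + 1] = value, [index + 2] = details encoded as a Smi.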
7030 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7031 DCHECK(other->IsNumber());
7032 return key == static_cast<uint32_t>(other->Number());
7036 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7037 return ComputeIntegerHash(key, 0);
7041 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7042 Object* other) {
7043 DCHECK(other->IsNumber());
7044 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7045 }
7048 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7049 return ComputeIntegerHash(key, seed);
7053 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7054 uint32_t seed,
7055 Object* other) {
7056 DCHECK(other->IsNumber());
7057 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7058 }
7061 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7062 return isolate->factory()->NewNumberFromUint(key);
7066 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7067 // We know that all entries in a hash table have had their hash values computed.
7068 // Use that knowledge for a fast failure path.
7069 if (key->Hash() != Name::cast(other)->Hash()) return false;
7070 return key->Equals(Name::cast(other));
7074 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7075 return key->Hash();
7076 }
7079 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7080 return Name::cast(other)->Hash();
7081 }
7084 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7085 Handle<Name> key) {
7086 DCHECK(key->IsUniqueName());
7087 return key;
7088 }
7091 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7092 Handle<NameDictionary> dictionary) {
7093 return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7097 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7098 return key->SameValue(other);
7102 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7103 return Smi::cast(key->GetHash())->value();
7107 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7108 Object* other) {
7109 return Smi::cast(other->GetHash())->value();
7110 }
7113 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7114 Handle<Object> key) {
7115 return key;
7116 }
7119 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7120 Handle<ObjectHashTable> table, Handle<Object> key) {
7121 return DerivedHashTable::Shrink(table, key);
7125 template <int entrysize>
7126 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7127 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7128 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7129 : *key == other;
7130 }
7133 template <int entrysize>
7134 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7135 intptr_t hash =
7136 key->IsWeakCell()
7137 ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7138 : reinterpret_cast<intptr_t>(*key);
7139 return (uint32_t)(hash & 0xFFFFFFFF);
7140 }
7143 template <int entrysize>
7144 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7145 Object* other) {
7146 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7147 intptr_t hash = reinterpret_cast<intptr_t>(other);
7148 return (uint32_t)(hash & 0xFFFFFFFF);
7149 }
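// Note (derived from the code above): WeakHashTableShape hashes and compares
// keys by the address of the underlying object, unwrapping WeakCells first, so
// lookups follow pointer identity rather than structural equality.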
7152 template <int entrysize>
7153 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7154 Handle<Object> key) {
7155 return key;
7156 }
7159 void Map::ClearCodeCache(Heap* heap) {
7160 // No write barrier is needed since empty_fixed_array is not in new space.
7161 // Please note this function is used during marking:
7162 // - MarkCompactCollector::MarkUnmarkedObject
7163 // - IncrementalMarking::Step
7164 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
7165 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
7169 int Map::SlackForArraySize(int old_size, int size_limit) {
7170 const int max_slack = size_limit - old_size;
7171 CHECK(max_slack >= 0);
7172 if (old_size < 4) return Min(max_slack, 1);
7173 return Min(max_slack, old_size / 2);
7174 }
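// Worked example (illustrative): with old_size = 8 and size_limit = 20,
// max_slack = 12 and the result is Min(12, 8 / 2) = 4; with old_size = 2 the
// array is still tiny, so the slack is capped at Min(max_slack, 1) = 1.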
7177 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
7178 DCHECK(array->HasFastSmiOrObjectElements());
7179 Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
7180 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
7181 if (elts->length() < required_size) {
7182 // Doubling in size would be overkill, but leave some slack to avoid
7183 // constantly growing.
7184 Expand(array, required_size + (required_size >> 3));
7185 // It's a performance benefit to keep a frequently used array in new-space.
7186 } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
7187 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
7188 // Expand will allocate a new backing store in new space even if the size
7189 // we asked for isn't larger than what we had before.
7190 Expand(array, required_size);
7191 }
7192 }
7195 void JSArray::set_length(Smi* length) {
7196 // Don't need a write barrier for a Smi.
7197 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7201 bool JSArray::AllowsSetElementsLength() {
7202 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
7203 DCHECK(result == !HasExternalArrayElements());
7204 return result;
7205 }
7208 void JSArray::SetContent(Handle<JSArray> array,
7209 Handle<FixedArrayBase> storage) {
7210 EnsureCanContainElements(array, storage, storage->length(),
7211 ALLOW_COPIED_DOUBLE_ELEMENTS);
7213 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
7214 IsFastDoubleElementsKind(array->GetElementsKind())) ||
7215 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
7216 (IsFastObjectElementsKind(array->GetElementsKind()) ||
7217 (IsFastSmiElementsKind(array->GetElementsKind()) &&
7218 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
7219 array->set_elements(*storage);
7220 array->set_length(Smi::FromInt(storage->length()));
7221 }
7224 int TypeFeedbackInfo::ic_total_count() {
7225 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7226 return ICTotalCountField::decode(current);
7230 void TypeFeedbackInfo::set_ic_total_count(int count) {
7231 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7232 value = ICTotalCountField::update(value,
7233 ICTotalCountField::decode(count));
7234 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7238 int TypeFeedbackInfo::ic_with_type_info_count() {
7239 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7240 return ICsWithTypeInfoCountField::decode(current);
7244 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7245 if (delta == 0) return;
7246 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7247 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7248 // We can get negative count here when the type-feedback info is
7249 // shared between two code objects. This can only happen when
7250 // the debugger makes a shallow copy of a code object (see Heap::CopyCode).
7251 // Since we do not optimize when the debugger is active, we can skip
7252 // this counter update.
7253 if (new_count >= 0) {
7254 new_count &= ICsWithTypeInfoCountField::kMask;
7255 value = ICsWithTypeInfoCountField::update(value, new_count);
7256 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7257 }
7258 }
7261 int TypeFeedbackInfo::ic_generic_count() {
7262 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7266 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7267 if (delta == 0) return;
7268 int new_count = ic_generic_count() + delta;
7269 if (new_count >= 0) {
7270 new_count &= ~Smi::kMinValue;
7271 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7276 void TypeFeedbackInfo::initialize_storage() {
7277 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7278 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7279 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7283 void TypeFeedbackInfo::change_own_type_change_checksum() {
7284 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7285 int checksum = OwnTypeChangeChecksum::decode(value);
7286 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
7287 value = OwnTypeChangeChecksum::update(value, checksum);
7288 // Ensure packed bit field is in Smi range.
7289 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7290 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7291 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
7295 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
7296 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7297 int mask = (1 << kTypeChangeChecksumBits) - 1;
7298 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
7299 // Ensure packed bit field is in Smi range.
7300 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
7301 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
7302 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7306 int TypeFeedbackInfo::own_type_change_checksum() {
7307 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7308 return OwnTypeChangeChecksum::decode(value);
7312 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7313 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7314 int mask = (1 << kTypeChangeChecksumBits) - 1;
7315 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7319 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7322 Relocatable::Relocatable(Isolate* isolate) {
7323 isolate_ = isolate;
7324 prev_ = isolate->relocatable_top();
7325 isolate->set_relocatable_top(this);
7326 }
7329 Relocatable::~Relocatable() {
7330 DCHECK_EQ(isolate_->relocatable_top(), this);
7331 isolate_->set_relocatable_top(prev_);
7335 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7336 return map->instance_size();
7340 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7341 v->VisitExternalReference(
7342 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7346 template<typename StaticVisitor>
7347 void Foreign::ForeignIterateBody() {
7348 StaticVisitor::VisitExternalReference(
7349 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7353 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7354 typedef v8::String::ExternalOneByteStringResource Resource;
7355 v->VisitExternalOneByteString(
7356 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7360 template <typename StaticVisitor>
7361 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7362 typedef v8::String::ExternalOneByteStringResource Resource;
7363 StaticVisitor::VisitExternalOneByteString(
7364 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7368 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7369 typedef v8::String::ExternalStringResource Resource;
7370 v->VisitExternalTwoByteString(
7371 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7375 template<typename StaticVisitor>
7376 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7377 typedef v8::String::ExternalStringResource Resource;
7378 StaticVisitor::VisitExternalTwoByteString(
7379 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7383 static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
7384 int start_offset,
7385 int end_offset,
7386 ObjectVisitor* v) {
7387 DCHECK(FLAG_unbox_double_fields);
7388 DCHECK(IsAligned(start_offset, kPointerSize) &&
7389 IsAligned(end_offset, kPointerSize));
7391 LayoutDescriptorHelper helper(object->map());
7392 DCHECK(!helper.all_fields_tagged());
7394 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
7395 // Visit all tagged fields.
7396 if (helper.IsTagged(offset)) {
7397 v->VisitPointer(HeapObject::RawField(object, offset));
7398 }
7399 }
7400 }
7403 template<int start_offset, int end_offset, int size>
7404 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7405 HeapObject* obj,
7406 ObjectVisitor* v) {
7407 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7408 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7409 HeapObject::RawField(obj, end_offset));
7410 } else {
7411 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
7412 }
7413 }
7416 template<int start_offset>
7417 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7418 int object_size,
7419 ObjectVisitor* v) {
7420 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
7421 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7422 HeapObject::RawField(obj, object_size));
7423 } else {
7424 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
7425 }
7426 }
7429 template<class Derived, class TableType>
7430 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7431 TableType* table(TableType::cast(this->table()));
7432 int index = Smi::cast(this->index())->value();
7433 Object* key = table->KeyAt(index);
7434 DCHECK(!key->IsTheHole());
7435 return key;
7436 }
7439 void JSSetIterator::PopulateValueArray(FixedArray* array) {
7440 array->set(0, CurrentKey());
7441 }
7444 void JSMapIterator::PopulateValueArray(FixedArray* array) {
7445 array->set(0, CurrentKey());
7446 array->set(1, CurrentValue());
7447 }
7450 Object* JSMapIterator::CurrentValue() {
7451 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7452 int index = Smi::cast(this->index())->value();
7453 Object* value = table->ValueAt(index);
7454 DCHECK(!value->IsTheHole());
7455 return value;
7456 }
7459 class String::SubStringRange::iterator FINAL {
7460 public:
7461 typedef std::forward_iterator_tag iterator_category;
7462 typedef int difference_type;
7463 typedef uc16 value_type;
7464 typedef uc16* pointer;
7465 typedef uc16& reference;
7467 iterator(const iterator& other)
7468 : content_(other.content_), offset_(other.offset_) {}
7470 uc16 operator*() { return content_.Get(offset_); }
7471 bool operator==(const iterator& other) const {
7472 return content_.UsesSameString(other.content_) && offset_ == other.offset_;
7473 }
7474 bool operator!=(const iterator& other) const {
7475 return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
7476 }
7477 iterator& operator++() {
7478 ++offset_;
7479 return *this;
7480 }
7481 iterator operator++(int);
7483 private:
7484 friend class String;
7485 iterator(String* from, int offset)
7486 : content_(from->GetFlatContent()), offset_(offset) {}
7487 String::FlatContent content_;
7488 int offset_;
7489 };
7492 String::SubStringRange::iterator String::SubStringRange::begin() {
7493 return String::SubStringRange::iterator(string_, first_);
7497 String::SubStringRange::iterator String::SubStringRange::end() {
7498 return String::SubStringRange::iterator(string_, first_ + length_);
7503 #undef CAST_ACCESSOR
7504 #undef INT_ACCESSORS
7505 #undef ACCESSORS
7506 #undef ACCESSORS_TO_SMI
7507 #undef SMI_ACCESSORS
7508 #undef SYNCHRONIZED_SMI_ACCESSORS
7509 #undef NOBARRIER_SMI_ACCESSORS
7510 #undef BOOL_GETTER
7511 #undef BOOL_ACCESSORS
7512 #undef FIELD_ADDR
7513 #undef FIELD_ADDR_CONST
7514 #undef READ_FIELD
7515 #undef NOBARRIER_READ_FIELD
7516 #undef WRITE_FIELD
7517 #undef NOBARRIER_WRITE_FIELD
7518 #undef WRITE_BARRIER
7519 #undef CONDITIONAL_WRITE_BARRIER
7520 #undef READ_DOUBLE_FIELD
7521 #undef WRITE_DOUBLE_FIELD
7522 #undef READ_INT_FIELD
7523 #undef WRITE_INT_FIELD
7524 #undef READ_INTPTR_FIELD
7525 #undef WRITE_INTPTR_FIELD
7526 #undef READ_UINT32_FIELD
7527 #undef WRITE_UINT32_FIELD
7528 #undef READ_SHORT_FIELD
7529 #undef WRITE_SHORT_FIELD
7530 #undef READ_BYTE_FIELD
7531 #undef WRITE_BYTE_FIELD
7532 #undef NOBARRIER_READ_BYTE_FIELD
7533 #undef NOBARRIER_WRITE_BYTE_FIELD
7535 } } // namespace v8::internal
7537 #endif // V8_OBJECTS_INL_H_