// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/elements.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}

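// Illustration (not part of the original source): on a 32-bit value whose
// bit 30 carries payload, the shift-left-then-arithmetic-shift-right round
// trip copies bit 30 into bit 31, e.g.
//
//   int v = 0x40000001;            // bit 30 set
//   int smi_safe = (v << 1) >> 1;  // 0xC0000001: bits 31 and 30 now agree
//
// which is exactly the sign extension the Smi encoding requires.
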
PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}


int PropertyDetails::field_width_in_words() const {
  DCHECK(location() == kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}

#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() const {                                       \
    return Object::IsHeapObject() &&                                    \
           HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }

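// Illustration (not part of the original source): TYPE_CHECKER(Symbol,
// SYMBOL_TYPE) expands to roughly
//
//   bool Object::IsSymbol() const {
//     return Object::IsHeapObject() &&
//            HeapObject::cast(this)->map()->instance_type() == SYMBOL_TYPE;
//   }
//
// i.e. a type test is a single instance-type comparison on the map.
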
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }


#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }

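// Illustration (not part of the original source): SMI_ACCESSORS(String,
// length, kLengthOffset) expands to roughly
//
//   int String::length() const {
//     Object* value = READ_FIELD(this, kLengthOffset);
//     return Smi::cast(value)->value();
//   }
//   void String::set_length(int value) {
//     WRITE_FIELD(this, kLengthOffset, Smi::FromInt(value));
//   }
//
// so callers work with plain ints while the stored representation stays a
// tagged Smi.
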
bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
         HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }

bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() const {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}

bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}

Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}


Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}

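// Illustration (not part of the original source): a field tracked with the
// Double representation stores its value boxed in a MutableHeapNumber, so
// storing the Smi 1 into such a field conceptually goes through
//
//   Handle<Object> storage =
//       Object::NewStorageFor(isolate, handle(Smi::FromInt(1), isolate),
//                             Representation::Double());
//   // storage is now a MutableHeapNumber holding 1.0
//
// while WrapForRead re-boxes the raw double into a fresh (immutable)
// HeapNumber before the value escapes to JavaScript.
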
StringShape::StringShape(const String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}

bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);


bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);

uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}


template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}

Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}


template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};


class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};


class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() OVERRIDE {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) OVERRIDE;
  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};


class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};


// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
};

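// Illustration (not part of the original source): these key classes let the
// string table look up (and, on a miss, create) an internalized copy of a
// raw character buffer without allocating a String up front, e.g.
//
//   Vector<const uint8_t> chars = ...;  // raw one-byte characters
//   OneByteStringKey key(chars, isolate->heap()->HashSeed());
//   // The string table uses key.Hash() to probe, key.IsMatch() to compare
//   // against existing entries, and calls key.AsHandle() only when it must
//   // actually allocate the internalized string.
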
bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


bool Object::IsExternalArray() const {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)     \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER


bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}

bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)


bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}


bool Object::IsTransitionArray() const {
  return IsFixedArray();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }

bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}

bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->script_context_map());
}


bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}


bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}

TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)

bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsWeakHashTable() const {
  return IsHashTable();
}


bool Object::IsDictionary() const {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}

bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsStringTable() const {
  return IsHashTable();
}


bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}

bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}

bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsMapCache() const {
  return IsHashTable();
}


bool Object::IsObjectHashTable() const {
  return IsHashTable();
}


bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}


bool Object::IsPrimitive() const {
  return IsOddball() || IsNumber() || IsString();
}


bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}


bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}

TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() const {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
  bool Object::Is##Name() const { \
    return Object::IsHeapObject() \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}

double Object::Number() {
  DCHECK(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}


MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
    }
  }
  return Handle<Smi>();
}

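// Illustration (not part of the original source): ToSmi only succeeds for
// numbers whose double value round-trips through an int in Smi range, so
//
//   Object::ToSmi(isolate, handle_to(42.0))   // -> Smi 42
//   Object::ToSmi(isolate, handle_to(42.5))   // -> empty MaybeHandle
//   Object::ToSmi(isolate, handle_to(2.0e10)) // -> empty (outside Smi range)
//
// where handle_to() stands in for a hypothetical helper that produces a
// Handle<Object> for the given number.
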
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);
}


MaybeHandle<Object> Object::GetElement(Isolate* isolate,
                                       Handle<Object> object,
                                       uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This DCHECK is here to catch
  // leftover incorrect uses.
  DCHECK(AllowHeapAllocation::IsAllowed());
  return Object::GetElementWithReceiver(isolate, object, object, index);
}


Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
    Isolate* isolate, Handle<Object> receiver) {
  PrototypeIterator iter(isolate, receiver);
  while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return PrototypeIterator::GetCurrent(iter);
    }
    iter.Advance();
  }
  return PrototypeIterator::GetCurrent(iter);
}


MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  uint32_t index;
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
  return GetProperty(object, name);
}


MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
                                        Handle<Object> object,
                                        const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  DCHECK(!str.is_null());
#ifdef DEBUG
  uint32_t index;  // Assert that the name is not an array index.
  DCHECK(!str->AsArrayIndex(&index));
#endif  // DEBUG
  return GetProperty(object, str);
}

MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
}


MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   uint32_t index,
                                                   Handle<Object> value,
                                                   LanguageMode language_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(proxy, receiver, name, value, language_mode);
}


Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
                                           uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}

#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  if (heap->InNewSpace(value)) {                            \
    heap->RecordWrite(object->address(), offset);           \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode == UPDATE_WRITE_BARRIER) {                                \
    heap->incremental_marking()->RecordWrite(                        \
        object, HeapObject::RawField(object, offset), value);        \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }

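// Illustration (not part of the original source): the write barrier keeps
// two GC invariants. For a store like
//
//   WRITE_FIELD(obj, kFooOffset, new_value);
//   WRITE_BARRIER(obj->GetHeap(), obj, kFooOffset, new_value);
//
// the barrier (1) tells the incremental marker about the new edge so a
// black (fully scanned) object never ends up pointing at an unmarked white
// one, and (2) records old-to-new pointers in the store buffer so the
// scavenger can find them without scanning old space. Passing
// SKIP_WRITE_BARRIER to the conditional form is only safe when the stored
// value cannot need either treatment, e.g. when it is a Smi.
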
#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
#else   // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(const void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<const uint32_t*>(
        FIELD_ADDR_CONST(p, offset)));
    c.u[1] = (*reinterpret_cast<const uint32_t*>(
        FIELD_ADDR_CONST(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else   // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));


Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}

int Smi::value() const {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  DCHECK(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  DCHECK(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}

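// Illustration (not part of the original source): on a 32-bit target
// (kSmiTagSize == 1, kSmiShiftSize == 0, kSmiTag == 0), Smi::FromIntptr(5)
// computes (5 << 1) | 0 == 10, i.e. the payload lives in the upper 31 bits
// and the low tag bit is 0; Internals::SmiValue() reverses this with an
// arithmetic shift right. HeapObject pointers use a low tag of 1 instead,
// which is what HAS_SMI_TAG() distinguishes.
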
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}

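// Illustration (not part of the original source): a map pointer is always
// heap-object-tagged (low bit 1), so subtracting kHeapObjectTag when
// encoding a forwarding address leaves the low bit 0, which reads as a smi
// tag. That is the trick behind IsForwardingAddress(): during scavenge,
//
//   obj->set_map_word(MapWord::FromForwardingAddress(new_location));
//
// overwrites the map slot, and any later visitor can cheaply test the tag
// to see whether the object has already been moved.
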
#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}


Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}

void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}

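// Illustration (not part of the original source): a HeapObject* is the
// object's real address plus kHeapObjectTag (1), so for an object placed at
// 0x1000, FromAddress(0x1000) yields the pointer value 0x1001 and address()
// maps it back to 0x1000. Untagged field access therefore bakes the -1 into
// FIELD_ADDR rather than untagging on every dereference.
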
int HeapObject::Size() {
  return SizeFromMap(map());
}


bool HeapObject::MayContainRawValues() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      return false;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides
    // the map-word).
    return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
  }
  // The ConstantPoolArray contains heap pointers, but also raw values.
  if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
  return (type <= LAST_DATA_TYPE);
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}

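// Illustration (not part of the original source): in an IEEE-754 double the
// biased exponent occupies bits 52..62, so kExponentOffset addresses the
// 32-bit word holding the high bits. E.g. 8.0 = 1.0 * 2^3 is stored with
// biased exponent 3 + 1023 = 1026; masking, shifting, and subtracting
// kExponentBias in get_exponent() recovers the 3.
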
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
  }
#endif
}

void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}


// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_pretenuring_call_new ||
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_pretenuring_call_new ||
      (IsFastSmiElementsKind(from) &&
       IsMoreGeneralElementsKindTransition(from, to))) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


inline bool AllocationSite::CanTrack(InstanceType type) {
  if (FLAG_allocation_site_pretenuring) {
    return type == JS_ARRAY_TYPE ||
        type == JS_OBJECT_TYPE ||
        type < FIRST_NONSTRING_TYPE;
  }
  return type == JS_ARRAY_TYPE;
}


inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}


inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}


inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}

inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}


inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
        static_cast<void*>(this), create_count, found_count, ratio,
        PretenureDecisionName(current_decision),
        PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}

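// Illustration (not part of the original source): suppose a GC cycle created
// 100 mementos at a site and the scavenger found 90 of them still reachable.
// Then ratio = 90/100 = 0.9; if that meets kPretenureRatio the site moves
// toward tenuring (kTenure when the semi-space was at maximum capacity,
// otherwise kMaybeTenure), and the return value tells the caller whether
// dependent code must be deoptimized to pick up the new allocation decision.
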
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}


void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}


void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}


void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}

void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}


Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != DATA) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}


Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
  DCHECK(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}


Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(kData, *key, NONE);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails details = transitions->GetTargetDetails(transition);
  if (details.type() != DATA) return Handle<Map>::null();
  DCHECK_EQ(NONE, details.attributes());
  return Handle<Map>(transitions->GetTarget(transition));
}

ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* Cell::value() const {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  DCHECK(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

Object* PropertyCell::type_raw() const {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}


Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


void WeakCell::clear() {
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}


void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
}


bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }


Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }


void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}

int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}

int JSObject::GetInternalFieldCount() {
  DCHECK(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}

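// Illustration (not part of the original source, figures hypothetical): for
// a JS_OBJECT_TYPE object of total size 56 on a 64-bit target (header of
// 24 bytes, kPointerSize == 8) with 2 in-object properties, the count is
// (56 - 24) / 8 - 2 == 2 internal fields, which sit right after the header
// and before the in-object properties.
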
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}

bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  return map()->IsUnboxedDoubleField(index);
}


bool Map::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}


// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}


double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}


void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}


void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
  WRITE_DOUBLE_FIELD(this, index.offset(), value);
}


void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value->IsMutableHeapNumber());
    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
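// Note on the two stores above: an unboxed double field (selected by the
// map's LayoutDescriptor when FLAG_unbox_double_fields is on) holds a raw
// 64-bit payload rather than a tagged pointer, so FastPropertyAtPut routes
// it through RawFastDoublePropertyAtPut with no write barrier; every other
// field is a tagged value and takes the barriered RawFastPropertyAtPut path.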
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}


void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
bool JSObject::HasFastProperties() {
  DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}


MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
                                            uint32_t index,
                                            Handle<Object> value,
                                            LanguageMode language_mode) {
  return JSObject::SetOwnElement(object, index, value, NONE, language_mode);
}


bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, inobject_properties());
  int external = NumberOfFields() - inobject_properties();
  return external > limit;
}
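// Worked example (illustrative numbers): a non-prototype map with zero
// unused property fields, inobject_properties() == 2 and a named store
// (CERTAINLY_NOT_STORE_FROM_KEYED) gets limit = Max(128, 2) == 128, so it
// only goes slow after more than 128 out-of-object fields accumulate; a
// keyed store uses minimum 12 and trips the limit far sooner.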
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
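// Example behavior, following the code above: Smi 7 yields index 7;
// HeapNumber 3.0 yields 3 because the uint32 round-trip is exact; a
// negative Smi, HeapNumber 3.5, or anything outside uint32 range fails and
// leaves *index untouched.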
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}


void Object::VerifyApiCallResultType() {
#if DEBUG
  if (!(IsSmi() || IsString() || IsSymbol() || IsSpecObject() ||
        IsHeapNumber() || IsUndefined() || IsTrue() || IsFalse() ||
        IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}


bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}


uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}


Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
                                     int index) {
  if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  } else {
    return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}


void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}


bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}
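// The hole is one specific NaN bit pattern (kHoleNanInt64). set() above
// canonicalizes every incoming NaN to the quiet NaN so no ordinary double
// can alias the hole, which is what lets is_the_hole() compare raw 64-bit
// representations instead of doing a floating-point comparison.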
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  if (raw->IsSmi()) return raw;
  return WeakCell::cast(raw)->value();
}


bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}


void WeakFixedArray::clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
}


int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}


int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}


void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}
void ConstantPoolArray::NumberOfEntries::increment(Type type) {
  DCHECK(type < NUMBER_OF_TYPES);
  element_counts_[type]++;
}


bool ConstantPoolArray::NumberOfEntries::equals(
    const ConstantPoolArray::NumberOfEntries& other) const {
  for (int i = 0; i < NUMBER_OF_TYPES; i++) {
    if (element_counts_[i] != other.element_counts_[i]) return false;
  }
  return true;
}


bool ConstantPoolArray::NumberOfEntries::is_empty() const {
  return total_count() == 0;
}


int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
  DCHECK(type < NUMBER_OF_TYPES);
  return element_counts_[type];
}


int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
  int base = 0;
  DCHECK(type < NUMBER_OF_TYPES);
  for (int i = 0; i < type; i++) {
    base += element_counts_[i];
  }
  return base;
}


int ConstantPoolArray::NumberOfEntries::total_count() const {
  int count = 0;
  for (int i = 0; i < NUMBER_OF_TYPES; i++) {
    count += element_counts_[i];
  }
  return count;
}


bool ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
  for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
    if (element_counts_[i] < min || element_counts_[i] > max) {
      return false;
    }
  }
  return true;
}
int ConstantPoolArray::Iterator::next_index() {
  DCHECK(!is_finished());
  int ret = next_index_++;
  update_section();
  return ret;
}


bool ConstantPoolArray::Iterator::is_finished() {
  return next_index_ > array_->last_index(type_, final_section_);
}


void ConstantPoolArray::Iterator::update_section() {
  if (next_index_ > array_->last_index(type_, current_section_) &&
      current_section_ != final_section_) {
    DCHECK(final_section_ == EXTENDED_SECTION);
    current_section_ = EXTENDED_SECTION;
    next_index_ = array_->first_index(type_, EXTENDED_SECTION);
  }
}
bool ConstantPoolArray::is_extended_layout() {
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  return IsExtendedField::decode(small_layout_1);
}


ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
  return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
}


int ConstantPoolArray::first_extended_section_index() {
  DCHECK(is_extended_layout());
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return TotalCountField::decode(small_layout_2);
}


int ConstantPoolArray::get_extended_section_header_offset() {
  return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
}


ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return WeakObjectStateField::decode(small_layout_2);
}


void ConstantPoolArray::set_weak_object_state(
    ConstantPoolArray::WeakObjectState state) {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
  WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
}
int ConstantPoolArray::first_index(Type type, LayoutSection section) {
  int index = 0;
  if (section == EXTENDED_SECTION) {
    DCHECK(is_extended_layout());
    index += first_extended_section_index();
  }

  for (Type type_iter = FIRST_TYPE; type_iter < type;
       type_iter = next_type(type_iter)) {
    index += number_of_entries(type_iter, section);
  }

  return index;
}


int ConstantPoolArray::last_index(Type type, LayoutSection section) {
  return first_index(type, section) + number_of_entries(type, section) - 1;
}
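// Layout sketch (illustrative counts): within a section, entries are grouped
// contiguously in Type order, so for a small section holding
// {INT64: 2, CODE_PTR: 1, HEAP_PTR: 3, INT32: 2},
// first_index(INT32, SMALL_SECTION) == 2 + 1 + 3 == 6 and
// last_index(INT32, SMALL_SECTION) == 6 + 2 - 1 == 7.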
int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
  if (section == SMALL_SECTION) {
    uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
    uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
    switch (type) {
      case INT64:
        return Int64CountField::decode(small_layout_1);
      case CODE_PTR:
        return CodePtrCountField::decode(small_layout_1);
      case HEAP_PTR:
        return HeapPtrCountField::decode(small_layout_1);
      case INT32:
        return Int32CountField::decode(small_layout_2);
      default:
        UNREACHABLE();
        return 0;
    }
  } else {
    DCHECK(section == EXTENDED_SECTION && is_extended_layout());
    int offset = get_extended_section_header_offset();
    switch (type) {
      case INT64:
        offset += kExtendedInt64CountOffset;
        break;
      case CODE_PTR:
        offset += kExtendedCodePtrCountOffset;
        break;
      case HEAP_PTR:
        offset += kExtendedHeapPtrCountOffset;
        break;
      case INT32:
        offset += kExtendedInt32CountOffset;
        break;
      default:
        UNREACHABLE();
    }
    return READ_INT_FIELD(this, offset);
  }
}
bool ConstantPoolArray::offset_is_type(int offset, Type type) {
  return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
          offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
         (is_extended_layout() &&
          offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
          offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
}


ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
  LayoutSection section;
  if (is_extended_layout() && index >= first_extended_section_index()) {
    section = EXTENDED_SECTION;
  } else {
    section = SMALL_SECTION;
  }

  Type type = FIRST_TYPE;
  while (index > last_index(type, section)) {
    type = next_type(type);
  }
  DCHECK(type <= LAST_TYPE);
  return type;
}
int64_t ConstantPoolArray::get_int64_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
}


double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
}


Address ConstantPoolArray::get_code_ptr_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == CODE_PTR);
  return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
}


Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == HEAP_PTR);
  return READ_FIELD(this, OffsetOfElementAt(index));
}


int32_t ConstantPoolArray::get_int32_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT32);
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
}
void ConstantPoolArray::set(int index, int64_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, Address value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == CODE_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
}


void ConstantPoolArray::set(int index, Object* value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(!GetHeap()->InNewSpace(value));
  DCHECK(get_type(index) == HEAP_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, int32_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT32);
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}
void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT32));
  WRITE_INT32_FIELD(this, offset, value);
}


void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT64));
  WRITE_INT64_FIELD(this, offset, value);
}


void ConstantPoolArray::set_at_offset(int offset, double value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT64));
  WRITE_DOUBLE_FIELD(this, offset, value);
}


void ConstantPoolArray::set_at_offset(int offset, Address value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, CODE_PTR));
  WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
  WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
}


void ConstantPoolArray::set_at_offset(int offset, Object* value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(!GetHeap()->InNewSpace(value));
  DCHECK(offset_is_type(offset, HEAP_PTR));
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
void ConstantPoolArray::Init(const NumberOfEntries& small) {
  uint32_t small_layout_1 =
      Int64CountField::encode(small.count_of(INT64)) |
      CodePtrCountField::encode(small.count_of(CODE_PTR)) |
      HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
      IsExtendedField::encode(false);
  uint32_t small_layout_2 =
      Int32CountField::encode(small.count_of(INT32)) |
      TotalCountField::encode(small.total_count()) |
      WeakObjectStateField::encode(NO_WEAK_OBJECTS);
  WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
  WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
  if (kHeaderSize != kFirstEntryOffset) {
    DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
    WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
  }
}
void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
                                     const NumberOfEntries& extended) {
  // Initialize small layout fields first.
  Init(small);

  // Set is_extended_layout field.
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  small_layout_1 = IsExtendedField::update(small_layout_1, true);
  WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);

  // Initialize the extended layout fields.
  int extended_header_offset = get_extended_section_header_offset();
  WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
                    extended.count_of(INT64));
  WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
                    extended.count_of(CODE_PTR));
  WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
                    extended.count_of(HEAP_PTR));
  WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
                    extended.count_of(INT32));
}
int ConstantPoolArray::size() {
  NumberOfEntries small(this, SMALL_SECTION);
  if (!is_extended_layout()) {
    return SizeFor(small);
  } else {
    NumberOfEntries extended(this, EXTENDED_SECTION);
    return SizeForExtended(small, extended);
  }
}


int ConstantPoolArray::length() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  int length = TotalCountField::decode(small_layout_2);
  if (is_extended_layout()) {
    length += number_of_entries(INT64, EXTENDED_SECTION) +
              number_of_entries(CODE_PTR, EXTENDED_SECTION) +
              number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
              number_of_entries(INT32, EXTENDED_SECTION);
  }
  return length;
}
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
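// Rationale implied by the order of checks above: while incremental marking
// is active every store must be visible to the marker, so UPDATE wins even
// for new-space objects; otherwise new-space objects can skip the barrier
// because the store buffer only needs to track old-to-new pointers.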
bool HeapObject::NeedsToEnsureDoubleAlignment() {
#ifndef V8_HOST_ARCH_64_BIT
  return (IsFixedFloat64Array() || IsFixedDoubleArray() ||
          IsConstantPoolArray()) &&
         FixedArrayBase::cast(this)->length() != 0;
#else
  return false;
#endif  // V8_HOST_ARCH_64_BIT
}
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}


void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}


void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
                 int* out_insertion_index) {
  DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
  uint32_t hash = name->Hash();
  int limit = high;

  DCHECK(low <= high);

  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    uint32_t current_hash = entry->Hash();
    if (current_hash != hash) {
      if (out_insertion_index != NULL) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
    if (entry->Equals(name)) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
  return T::kNotFound;
}
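// Usage note: the loop above binary-searches on hashes only and then scans
// forward from the first hash match, resolving collisions with
// Name::Equals. E.g. with sorted hashes [3, 7, 7, 9] and a name hashing to
// 7, the search lands on the first 7 and probes at most the two colliding
// entries before giving up with T::kNotFound.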
// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int len, int valid_entries,
                 int* out_insertion_index) {
  uint32_t hash = name->Hash();
  if (search_mode == ALL_ENTRIES) {
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->Hash();
      if (current_hash > hash) {
        if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
    if (out_insertion_index != NULL) *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK(len >= valid_entries);
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      Name* entry = array->GetKey(number);
      uint32_t current_hash = entry->Hash();
      if (current_hash == hash && entry->Equals(name)) return number;
    }
    return T::kNotFound;
  }
}
template <SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
  if (search_mode == VALID_ENTRIES) {
    SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_DCHECK(array->IsSortedNoDuplicates());
  }

  int nof = array->number_of_entries();
  if (nof == 0) {
    if (out_insertion_index != NULL) *out_insertion_index = 0;
    return T::kNotFound;
  }

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if ((search_mode == ALL_ENTRIES &&
       nof <= kMaxElementsForLinearSearch) ||
      (search_mode == VALID_ENTRIES &&
       valid_entries <= (kMaxElementsForLinearSearch * 3))) {
    return LinearSearch<search_mode>(array, name, nof, valid_entries,
                                     out_insertion_index);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
                                   out_insertion_index);
}
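// The thresholds above are a small-array heuristic: up to 8 entries (or up
// to 24 valid entries in VALID_ENTRIES mode, which probes keys directly
// without the sorted-key indirection) a linear scan is cheaper than the
// branchy binary search.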
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
}


int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
        GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else {
    UNREACHABLE();
  }
  return NULL;
}
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}


Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}


Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}


int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}


PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return GetDetails(descriptor_number).field_index();
}


HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return HeapType::cast(GetValue(descriptor_number));
}


Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}


void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
  NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}


void DescriptorArray::SwapSortedKeys(int first, int second) {
  int first_key = GetSortedKeyIndex(first);
  SetSortedKey(first, GetSortedKeyIndex(second));
  SetSortedKey(second, first_key);
}
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  DCHECK(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);
}


DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.
  }
  return capacity;
}
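// Worked example: at_least_space_for == 20 doubles to 40 and rounds up to a
// capacity of 64; at_least_space_for == 4 doubles to 8 but is raised to the
// 32-entry floor. Doubling before rounding keeps the table at most half
// full on creation.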
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}
// Find entry for key otherwise return kNotFound.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
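// Probe-sequence sketch: FirstProbe maps the hash into [0, capacity) and
// NextProbe advances by an increasing step (count = 1, 2, 3, ...), which for
// a power-of-two capacity visits every slot. The undefined sentinel marks a
// never-used slot and ends the probe; the-hole marks a deleted slot that is
// skipped but does not terminate the chain.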
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}


uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}


void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
// ------------------------------------
// Cast operations
CAST_ACCESSOR(AccessorInfo)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalFloat32Array)
CAST_ACCESSOR(ExternalFloat64Array)
CAST_ACCESSOR(ExternalInt16Array)
CAST_ACCESSOR(ExternalInt32Array)
CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(ExternalUint16Array)
CAST_ACCESSOR(ExternalUint32Array)
CAST_ACCESSOR(ExternalUint8Array)
CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;


template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}


template <class Traits>
const FixedTypedArray<Traits>*
FixedTypedArray<Traits>::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<const FixedTypedArray<Traits>*>(object);
}


#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST


template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}


FreeSpace** FreeSpace::next_address() {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
}


void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}


FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}


bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
}
ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}


bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}


Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
void String::Set(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  DCHECK(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}


bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}


String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
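// Minimal visitor sketch (hypothetical CountingVisitor; only the two
// callbacks invoked above are required):
//
//   class CountingVisitor {
//    public:
//     void VisitOneByteString(const uint8_t* chars, int length) {
//       count_ += length;
//     }
//     void VisitTwoByteString(const uint16_t* chars, int length) {
//       count_ += length;
//     }
//     int count_ = 0;
//   };
//
// A non-NULL result means iteration stopped at a ConsString, which the
// caller must descend into, as StringCharacterStream::Reset does below.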
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}


template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)


String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}


bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
    int start) {
  return GetChars() + start;
}
int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }


void ConsStringIterator::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


void ConsStringIterator::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


void ConsStringIterator::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}


void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}
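// Typical consumption pattern for the stream above (illustrative):
//
//   StringCharacterStream stream(string, 0);
//   while (stream.HasMore()) {
//     uint16_t c = stream.GetNext();
//     // ... process c ...
//   }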
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  return ptr[index];
}


Handle<Object> ExternalUint8ClampedArray::get(
    Handle<ExternalUint8ClampedArray> array,
    int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalUint8ClampedArray::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  ptr[index] = value;
}


void* ExternalArray::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
4030 int8_t ExternalInt8Array::get_scalar(int index) {
4031 DCHECK((index >= 0) && (index < this->length()));
4032 int8_t* ptr = static_cast<int8_t*>(external_pointer());
4037 Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
4039 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4040 array->GetIsolate());
4044 void ExternalInt8Array::set(int index, int8_t value) {
4045 DCHECK((index >= 0) && (index < this->length()));
4046 int8_t* ptr = static_cast<int8_t*>(external_pointer());
4051 uint8_t ExternalUint8Array::get_scalar(int index) {
4052 DCHECK((index >= 0) && (index < this->length()));
4053 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
4058 Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
4060 return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
4061 array->GetIsolate());
4065 void ExternalUint8Array::set(int index, uint8_t value) {
4066 DCHECK((index >= 0) && (index < this->length()));
4067 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
int16_t ExternalInt16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}

Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}

void ExternalInt16Array::set(int index, int16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}

uint16_t ExternalUint16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}

Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
                                        int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}

void ExternalUint16Array::set(int index, uint16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}

int32_t ExternalInt32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}

Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
                                       int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromInt(array->get_scalar(index));
}

void ExternalInt32Array::set(int index, int32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}

uint32_t ExternalUint32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}

Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
                                        int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromUint(array->get_scalar(index));
}

void ExternalUint32Array::set(int index, uint32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
float ExternalFloat32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}

Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}

void ExternalFloat32Array::set(int index, float value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}

double ExternalFloat64Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}

Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}

void ExternalFloat64Array::set(int index, double value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
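// Pattern note for the External*Array family above: get_scalar()/set() touch
// the off-heap buffer directly through external_pointer(), while the static
// get() re-boxes the scalar for JS: as a Smi where the value always fits
// (the 8- and 16-bit types), or via the factory for 32-bit and floating
// point values, which may need a fresh HeapNumber.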
void* FixedTypedArrayBase::DataPtr() {
  return FIELD_ADDR(this, kDataOffset);
}

int FixedTypedArrayBase::DataSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case FIXED_##TYPE##_ARRAY_TYPE:                     \
      element_size = size;                              \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return length() * element_size;
}

int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}

int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}

int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}
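// Sizing sketch (illustrative): for a fixed uint8 array of length 10,
// DataSize() is 10 * 1 bytes, so size() rounds kDataOffset + 10 up to the
// next pointer-aligned boundary via OBJECT_POINTER_ALIGN; that rounded value
// is what the heap uses as the object's allocation size.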
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }

uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }

int8_t Int8ArrayTraits::defaultValue() { return 0; }

uint16_t Uint16ArrayTraits::defaultValue() { return 0; }

int16_t Int16ArrayTraits::defaultValue() { return 0; }

uint32_t Uint32ArrayTraits::defaultValue() { return 0; }

int32_t Int32ArrayTraits::defaultValue() { return 0; }

float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}

double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
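// These defaults mirror JS typed-array semantics: integer-typed arrays fall
// back to 0 and floating-point arrays to NaN, e.g. when SetValue() below
// receives undefined instead of a number.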
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  return ptr[index];
}

template<> inline
FixedTypedArray<Float64ArrayTraits>::ElementType
    FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  return READ_DOUBLE_FIELD(this, ElementOffset(index));
}

template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  ptr[index] = value;
}

template<> inline
void FixedTypedArray<Float64ArrayTraits>::set(
    int index, Float64ArrayTraits::ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
}

template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}

template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
  if (value < 0) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(value);
}
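// Illustrative values for the clamped integer conversion above:
//   from_int(-5)   -> 0
//   from_int(42)   -> 42
//   from_int(1000) -> 0xFF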
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}

template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}
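// Illustrative values for the clamped double conversion above, assuming the
// default round-to-nearest-ties-to-even mode used by lrint():
//   from_double(-1.5)  -> 0    (NaN and negatives clamp to 0)
//   from_double(0.5)   -> 0    (tie rounds to even)
//   from_double(1.5)   -> 2
//   from_double(300.0) -> 255  (values above 0xFF clamp to 255)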
template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}

template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}

template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}

template <class Traits>
Handle<Object> FixedTypedArray<Traits>::SetValue(
    Handle<FixedTypedArray<Traits> > array,
    uint32_t index,
    Handle<Object> value) {
  ElementType cast_value = Traits::defaultValue();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = from_int(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = from_double(double_value);
    } else {
      // Clamp undefined to the default value. All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Traits::ToHandle(array->GetIsolate(), cast_value);
}
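// Call-chain sketch (descriptive): by the time a store like array[i] = x
// reaches SetValue(), x has already been converted to a Smi, a HeapNumber,
// or undefined further up the call chain, which is why the DCHECK above can
// rule out every other type. Out-of-bounds indices are silently ignored,
// matching JS typed-array store semantics.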
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}

Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}

Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}

Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}

Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}

void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}

int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}

int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}

int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
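// Worked example (illustrative): with inobject_properties() == 4, index 0
// maps to instance_size() - 4 * kPointerSize and index 3 to
// instance_size() - kPointerSize, i.e. the in-object fields occupy the last
// inobject_properties() words of the instance.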
Handle<Map> Map::CopyInstallDescriptorsForTesting(
    Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> layout_descriptor) {
  return CopyInstallDescriptors(map, new_descriptor, descriptors,
                                layout_descriptor);
}

int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access the
    // length synchronized.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access the
    // length synchronized.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return reinterpret_cast<ConstantPoolArray*>(this)->size();
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}

void Map::set_inobject_properties(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}

void Map::set_pre_allocated_property_fields(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}

InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}

int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}

void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}

byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}

bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}

void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}

bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}

void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}

bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}

void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}

void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() {
  return IsPrototypeMapBits::decode(bit_field2());
}

void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}

void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}

bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}

void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}

bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}

void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}

bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}

void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}

bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}

void Map::set_counter(int value) {
  set_bit_field3(Counter::update(bit_field3(), value));
}

int Map::counter() { return Counter::decode(bit_field3()); }

void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}

bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}

bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.type() == DATA_CONSTANT) return true;
  }
  return false;
}
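// In other words: a map can be deprecated only while some field could still
// be generalized (e.g. a Smi, Double, or HeapObject representation, or a
// constant that may become a mutable field); once every field is a tagged,
// mutable DATA field there is nothing left to generalize.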
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}

int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}

void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}

void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}

Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}

void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}

void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}

void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}

bool Code::IsCodeStubOrIC() {
  return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
         kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == TO_BOOLEAN_IC;
}

InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to a code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}
ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}

Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}

// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}

void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}

inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}

inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}

void Code::set_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}

bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}

void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}

void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}

bool Code::is_compiled_optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}

void Code::set_compiled_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}

bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}

void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}

int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}

void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}

void Code::set_profiler_ticks(int ticks) {
  DCHECK(ticks < 256);
  if (kind() == FUNCTION) {
    WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
  }
}
int Code::builtin_index() {
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}

void Code::set_builtin_index(int index) {
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}

unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}

void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
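// Example of the encoding above (illustrative): on a 64-bit target
// (kPointerSizeLog2 == 3) a byte offset of 0x40 is stored as 0x40 >> 3 == 0x8
// in BackEdgeTableOffsetField and scaled back on the read side; storing the
// pre-shifted value keeps the bit field kPointerSizeLog2 bits narrower.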
bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}

byte Code::to_boolean_state() {
  return extra_ic_state();
}

bool Code::has_function_cache() {
  DCHECK(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_has_function_cache(bool flag) {
  DCHECK(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}

bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}

bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}

ConstantPoolArray* Code::constant_pool() {
  return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
}

void Code::set_constant_pool(Object* value) {
  DCHECK(value->IsConstantPoolArray());
  WRITE_FIELD(this, kConstantPoolOffset, value);
  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
}

Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
                               ExtraICState extra_ic_state, StubType type,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}
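// Encoding sketch (illustrative, identifiers assumed from the surrounding
// enums): a monomorphic LOAD_IC's flags could be built as
//   ComputeFlags(LOAD_IC, MONOMORPHIC, kNoExtraICState, NORMAL,
//                kCacheOnReceiver)
// and each component recovered with the Extract*FromFlags helpers below.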
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}

Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}

Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}

InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}

ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}

Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}

CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}

Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}

Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}

bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (!FLAG_collect_maps) return false;
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSObject()) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  if (object->IsFixedArray()) {
    // Contexts of inlined functions are embedded in optimized code.
    Map* map = HeapObject::cast(object)->map();
    Heap* heap = map->GetHeap();
    return FLAG_weak_embedded_objects_in_optimized_code &&
           map == heap->function_context_map();
  }
  return false;
}
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }

 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}

// If the descriptor is using the empty transition array, install a new empty
// transition array with room for an element transition.
static void EnsureHasTransitionArray(Handle<Map> map) {
  Handle<TransitionArray> transitions;
  if (!map->HasTransitionArray()) {
    transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    transitions = TransitionArray::ExtendToFullTransitionArray(map);
  } else {
    return;
  }
  map->set_transitions(*transitions);
}
LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
#endif
  }
}

void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(StaticVisitorBase::GetVisitorId(this));
  }
}
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)

void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

LayoutDescriptor* Map::GetLayoutDescriptor() {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, the layout descriptor would have to
  // be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}
Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    DCHECK(object->IsMap() || object->IsUndefined());
    return object;
  }
}

bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}

bool Map::HasTransitionArray() const {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}

Map* Map::elements_transition_map() {
  int index =
      transitions()->SearchSpecial(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}

bool Map::CanHaveMoreTransitions() {
  if (!HasTransitionArray()) return true;
  return transitions()->number_of_transitions() <
         TransitionArray::kMaxNumberOfTransitions;
}
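// Usage note (descriptive): callers consult CanHaveMoreTransitions() before
// caching a new transition; once the transition array has reached
// kMaxNumberOfTransitions, new maps are created as standalone copies instead
// of being recorded as transition targets.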
Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}

int Map::SearchSpecialTransition(Symbol* name) {
  if (HasTransitionArray()) {
    return transitions()->SearchSpecial(name);
  }
  return TransitionArray::kNotFound;
}

int Map::SearchTransition(PropertyKind kind, Name* name,
                          PropertyAttributes attributes) {
  if (HasTransitionArray()) {
    return transitions()->Search(kind, name, attributes);
  }
  return TransitionArray::kNotFound;
}

FixedArray* Map::GetPrototypeTransitions() {
  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
  if (!transitions()->HasPrototypeTransitions()) {
    return GetHeap()->empty_fixed_array();
  }
  return transitions()->GetPrototypeTransitions();
}

void Map::SetPrototypeTransitions(
    Handle<Map> map, Handle<FixedArray> proto_transitions) {
  EnsureHasTransitionArray(map);
  int old_number_of_transitions = map->NumberOfProtoTransitions();
  if (Heap::ShouldZapGarbage() && map->HasPrototypeTransitions()) {
    DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
    map->ZapPrototypeTransitions();
  }
  map->transitions()->SetPrototypeTransitions(*proto_transitions);
  map->SetNumberOfProtoTransitions(old_number_of_transitions);
}

bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}

TransitionArray* Map::transitions() const {
  DCHECK(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}
void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index;
        if (TransitionArray::IsSpecialTransition(key)) {
          new_target_index = transition_array->SearchSpecial(Symbol::cast(key));
        } else {
          PropertyDetails details =
              TransitionArray::GetTargetDetails(key, target);
          new_target_index = transition_array->Search(details.kind(), key,
                                                      details.attributes());
        }
        DCHECK_NE(TransitionArray::kNotFound, new_target_index);
        DCHECK_EQ(target, transition_array->GetTarget(new_target_index));
      }
    }
#endif
    DCHECK(transitions() != transition_array);
    ZapTransitions();
  }
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}

void Map::init_back_pointer(Object* undefined) {
  DCHECK(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}

void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_embedder_debug_script,
               kIsEmbedderDebugScriptBit)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)

Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}

void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}

Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}

void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)

SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else
#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif

BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}

LanguageMode SharedFunctionInfo::language_mode() {
  STATIC_ASSERT(LANGUAGE_END == 3);
  return construct_language_mode(
      BooleanBit::get(compiler_hints(), kStrictModeFunction),
      BooleanBit::get(compiler_hints(), kStrongModeFunction));
}

void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LANGUAGE_END == 3);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
  hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
  set_compiler_hints(hints);
}
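// Example of the transitions permitted by the DCHECK above: a sloppy
// function may stay sloppy or become strict (or strong), and a strict
// function may only stay strict or become strong; no transition ever goes
// back toward sloppy.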
FunctionKind SharedFunctionInfo::kind() {
  return FunctionKindBits::decode(compiler_hints());
}

void SharedFunctionInfo::set_kind(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  int hints = compiler_hints();
  hints = FunctionKindBits::update(hints, kind);
  set_compiler_hints(hints);
}

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_super_property,
               kUsesSuperProperty)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
               kIsAccessorFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
               kIsDefaultConstructor)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
ACCESSORS(CodeCache, weak_cell_cache, Object, kWeakCellCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalOneByteString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}

void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}

int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}

void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}

Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}

void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}

void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
  set_code(value);
}
5918 ScopeInfo* SharedFunctionInfo::scope_info() const {
5919 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5923 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5924 WriteBarrierMode mode) {
5925 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5926 CONDITIONAL_WRITE_BARRIER(GetHeap(),
5929 reinterpret_cast<Object*>(value),
bool SharedFunctionInfo::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}

bool SharedFunctionInfo::is_simple_parameter_list() {
  return scope_info()->IsSimpleParameterList();
}

bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}

FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}

bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}

BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}

int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}

void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}

int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}

void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}

void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}

int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}

void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}

int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}

void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}

BailoutReason SharedFunctionInfo::disable_optimization_reason() {
  return static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
}

bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}

void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}

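// Note: ((tries - 1) & tries) == 0 holds exactly when tries is a power of
// two, so optimization is re-enabled after 16, 32, 64, ... attempts,
// i.e. with exponential backoff on retrying optimization.
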
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}

bool JSFunction::IsFromNativeScript() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  DCHECK(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}

bool JSFunction::IsFromExtensionScript() {
  Object* script = shared()->script();
  return script->IsScript() &&
         Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
}

bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->internal_formal_parameter_count() !=
         SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}

bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}

bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}

bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}

bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}

bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}

bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
         initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
}

Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}

void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}

void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}

void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}

Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}

JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}

void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)

Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}

bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}

bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}

bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}

Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}

Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}

bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}

bool JSFunction::is_simple_parameter_list() {
  return shared()->is_simple_parameter_list();
}

FixedArray* JSFunction::literals() {
  DCHECK(!shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_literals(FixedArray* literals) {
  DCHECK(!shared()->bound());
  set_literals_or_bindings(literals);
}

FixedArray* JSFunction::function_bindings() {
  DCHECK(shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_function_bindings(FixedArray* bindings) {
  DCHECK(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  DCHECK(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_array_map());
  set_literals_or_bindings(bindings);
}

int JSFunction::NumberOfLiterals() {
  DCHECK(!shared()->bound());
  return literals()->length();
}

Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}

void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}

Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}

void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  DCHECK(!GetHeap()->InNewSpace(value));
}

ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)

void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

ACCESSORS(JSCollection, table, Object, kTableOffset)

#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS

ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)

Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}

void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}

ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

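// Note: the DCHECKs in is_suspended() document the continuation encoding:
// kGeneratorClosed is 0, kGeneratorExecuting is negative, and any positive
// continuation value denotes a suspended generator.
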
ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)

ACCESSORS(JSValue, value, Object, kValueOffset)

HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}

const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}

ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)

ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)

void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}

Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}

void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}

uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}

void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}

ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)

byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}

byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}

int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}

ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}

int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}

byte* Code::entry() {
  return instruction_start();
}

bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

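// Note: as the accessors above show, a Code object's instructions are stored
// inline, directly after the fixed header. contains() uses <= on the upper
// bound so that an inner pointer one past the object (e.g. a return address)
// still counts as belonging to this code object.
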
ACCESSORS(JSArray, length, Object, kLengthOffset)

void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}

void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}

ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)

bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}

void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}

bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}

void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}

bool JSArrayBuffer::is_neuterable() {
  return BooleanBit::get(flag(), kIsNeuterableBit);
}

void JSArrayBuffer::set_is_neuterable(bool value) {
  set_flag(BooleanBit::set(flag(), kIsNeuterableBit, value));
}

ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)

ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)

JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}

int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}

JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}

String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}

Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}

void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}

ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}

ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}

bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}

bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}

bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}

bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}

bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}

bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}

bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}

bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}

bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
}

#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)            \
  bool JSObject::HasExternal##Type##Elements() {                          \
    HeapObject* array = elements();                                       \
    DCHECK(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK

bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}

#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject())                                        \
      return false;                                                    \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK

bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}

bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}

NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  return NameDictionary::cast(properties());
}

SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}

bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}

bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}

uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}

bool Name::IsOwn() {
  return this->IsSymbol() && Symbol::cast(this)->is_own();
}

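// Note on Hash() above: the low bits of the hash field are flags, and a
// cleared kHashNotComputedMask bit marks the field as valid, so the fast
// path is a single load and shift by kHashShift. Only strings take the slow
// ComputeAndSetHash() path, hence the cast.
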
StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}

bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}

uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}

uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}

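// Note: AddCharacterCore() and GetHashCore() together implement the Jenkins
// one-at-a-time hash: per character
//   hash += c;  hash += hash << 10;  hash ^= hash >> 6;
// followed by the finalization
//   hash += hash << 3;  hash ^= hash >> 11;  hash += hash << 15;
// with kZeroHash substituted whenever the masked result would be 0, so the
// hash bits of a computed hash are never all zero.
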
uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
                                          const uc16* chars, int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    running_hash = AddCharacterCore(running_hash, *chars++);
  }
  return running_hash;
}

uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
                                                 const char* chars,
                                                 int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    uint16_t c = static_cast<uint16_t>(*chars++);
    running_hash = AddCharacterCore(running_hash, c);
  }
  return running_hash;
}

void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}

bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}

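// Note on the magic bound in UpdateIndex() above: appending digit d requires
// array_index_ * 10 + d <= 2^32 - 1 (4294967295). 429496729 is 2^32 / 10,
// and ((d + 2) >> 3) is 1 exactly for d in 6..9, the digits for which
// 429496729 * 10 + d would overflow uint32, so the test rejects precisely
// the values whose next step would overflow.
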
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}

template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}

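// Illustrative use (hypothetical call site; |chars| and |length| are
// assumptions, not code from this header):
//
//   const uint8_t* chars = ...;  // sequential one-byte character data
//   uint32_t field = StringHasher::HashSequentialString<uint8_t>(
//       chars, length, isolate->heap()->HashSeed());
//
// The result is a complete hash field (hash bits plus flags such as the
// array-index bit), not a bare hash code.
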
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}

void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}

void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}

bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}

bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}

void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}

String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}

Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}

Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}

Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> key) {
  uint32_t index;
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  }
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);
}

Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
}

bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}

bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}

Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}

Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}

Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}

Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}

Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
}

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}

void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}

bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}

void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}

PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}

void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}

bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}

void ExecutableAccessorInfo::clear_setter() {
  auto foreign = GetIsolate()->factory()->NewForeign(
      reinterpret_cast<v8::internal::Address>(
          reinterpret_cast<intptr_t>(nullptr)));
  set_setter(*foreign);
}

template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}

template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
                                               Handle<Object> key,
                                               Handle<Object> value,
                                               PropertyDetails details) {
  DCHECK(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = DerivedHashTable::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, *key, mode);
  FixedArray::set(index+1, *value, mode);
  FixedArray::set(index+2, details.AsSmi());
}

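// Note: as SetEntry() above shows, each dictionary entry occupies three
// consecutive FixedArray slots: the key at EntryToIndex(entry), the value at
// index + 1, and the PropertyDetails, packed as a Smi, at index + 2.
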
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  DCHECK(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}

uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}

uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  DCHECK(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}

bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}

uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}

uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}

Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}

Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}

bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}

uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}

uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}

Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}

Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}

template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
                           : *key == other;
}

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash =
      key->IsWeakCell()
          ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
          : reinterpret_cast<intptr_t>(*key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}

template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}

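// Note: WeakHashTableShape hashes and compares by object identity: keys are
// looked through to the WeakCell's value where applicable, and the hash is
// simply the heap address truncated to 32 bits.
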
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}

int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK(max_slack >= 0);
  if (old_size < 4) return Min(max_slack, 1);
  return Min(max_slack, old_size / 2);
}

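// Worked example: for old_size 8 and size_limit 12 this returns
// Min(12 - 8, 8 / 2) = 4 extra slots, while an array with old_size < 4 gets
// at most one slot of slack.
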
void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
  DCHECK(array->HasFastSmiOrObjectElements());
  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(array, required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(array, required_size);
  }
}

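// Note: required_size + (required_size >> 3) asks for roughly 12.5% slack on
// top of the requested size, so a series of slowly growing EnsureSize()
// calls does not have to reallocate the backing store every time.
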
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}

bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasExternalArrayElements());
  return result;
}

void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}

int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}

void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}

int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}

void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}

int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}

void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}

void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}

void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}

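// Note: the own type change checksum above is a kTypeChangeChecksumBits-wide
// counter that wraps modulo 2^kTypeChangeChecksumBits; the final two
// adjustments merely force the packed word back into Smi range.
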
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}

int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}

bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}

SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)

Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}

Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}

int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}

void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}

template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}

void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalOneByteStringResource Resource;
  v->VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template <typename StaticVisitor>
void ExternalOneByteString::ExternalOneByteStringIterateBody() {
  typedef v8::String::ExternalOneByteStringResource Resource;
  StaticVisitor::VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
                                                    int start_offset,
                                                    int end_offset,
                                                    ObjectVisitor* v) {
  DCHECK(FLAG_unbox_double_fields);
  DCHECK(IsAligned(start_offset, kPointerSize) &&
         IsAligned(end_offset, kPointerSize));

  LayoutDescriptorHelper helper(object->map());
  DCHECK(!helper.all_fields_tagged());

  for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
    // Visit all tagged fields.
    if (helper.IsTagged(offset)) {
      v->VisitPointer(HeapObject::RawField(object, offset));
    }
  }
}

template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, end_offset));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
  }
}

template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, object_size));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
  }
}

template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}

void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}

void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}

Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}

class String::SubStringRange::iterator FINAL {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};

String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}

String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}

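// Illustrative use (hypothetical; assumes SubStringRange's
// (string, first, length) constructor and that |string| is flat, with no
// allocation while iterating, since the iterators hold a FlatContent):
//
//   for (uc16 c : String::SubStringRange(string, first, length)) {
//     // ... consume c ...
//   }
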
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_