1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
50 #include "transitions-inl.h"
55 PropertyDetails::PropertyDetails(Smi* smi) {
56 value_ = smi->value();
60 Smi* PropertyDetails::AsSmi() {
61 // Ensure the upper 2 bits have the same value by sign extending it. This is
62 // necessary to be able to use the 31st bit of the property details.
63 int value = value_ << 1;
64 return Smi::FromInt(value >> 1);
68 PropertyDetails PropertyDetails::AsDeleted() {
69 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
70 return PropertyDetails(smi);
// Defines Object::Is<type>() as an exact instance-type comparison.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


// Checked downcast: asserts the type in slow-assert builds, then
// reinterprets the pointer.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }


// Raw (untagged) int field getter/setter pair.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Tagged-pointer field getter/setter pair; the setter emits a write
// barrier unless the caller's mode suppresses it.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }
// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// No write barrier is needed: Smis are immediates, never heap-allocated.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


// Read-only access to a single boolean bit of a bit field.
#define BOOL_GETTER(holder, field, name, offset)        \
  bool holder::name() {                                 \
    return BooleanBit::get(field(), offset);            \
  }


// Read/write access to a single boolean bit of a bit field.
#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
135 bool Object::IsFixedArrayBase() {
136 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray();
140 // External objects are not extensible, so the map check is enough.
141 bool Object::IsExternal() {
142 return Object::IsHeapObject() &&
143 HeapObject::cast(this)->map() ==
144 HeapObject::cast(this)->GetHeap()->external_map();
148 bool Object::IsAccessorInfo() {
149 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
153 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
154 // There is a constraint on the object; check.
155 if (!this->IsJSObject()) return false;
156 // Fetch the constructor function of the object.
157 Object* cons_obj = JSObject::cast(this)->map()->constructor();
158 if (!cons_obj->IsJSFunction()) return false;
159 JSFunction* fun = JSFunction::cast(cons_obj);
160 // Iterate through the chain of inheriting function templates to
161 // see if the required one occurs.
162 for (Object* type = fun->shared()->function_data();
163 type->IsFunctionTemplateInfo();
164 type = FunctionTemplateInfo::cast(type)->parent_template()) {
165 if (type == expected) return true;
167 // Didn't find the required type in the inheritance chain.
172 bool Object::IsSmi() {
173 return HAS_SMI_TAG(this);
177 bool Object::IsHeapObject() {
178 return Internals::HasHeapObjectTag(this);
182 bool Object::NonFailureIsHeapObject() {
183 ASSERT(!this->IsFailure());
184 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
188 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
189 TYPE_CHECKER(Symbol, SYMBOL_TYPE)
192 bool Object::IsString() {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
198 bool Object::IsName() {
199 return IsString() || IsSymbol();
203 bool Object::IsUniqueName() {
204 return IsInternalizedString() || IsSymbol();
208 bool Object::IsSpecObject() {
209 return Object::IsHeapObject()
210 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
214 bool Object::IsSpecFunction() {
215 if (!Object::IsHeapObject()) return false;
216 InstanceType type = HeapObject::cast(this)->map()->instance_type();
217 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
221 bool Object::IsInternalizedString() {
222 if (!this->IsHeapObject()) return false;
223 uint32_t type = HeapObject::cast(this)->map()->instance_type();
224 STATIC_ASSERT(kNotInternalizedTag != 0);
225 return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
226 (kStringTag | kInternalizedTag);
230 bool Object::IsConsString() {
231 if (!IsString()) return false;
232 return StringShape(String::cast(this)).IsCons();
236 bool Object::IsSlicedString() {
237 if (!IsString()) return false;
238 return StringShape(String::cast(this)).IsSliced();
242 bool Object::IsSeqString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential();
248 bool Object::IsSeqOneByteString() {
249 if (!IsString()) return false;
250 return StringShape(String::cast(this)).IsSequential() &&
251 String::cast(this)->IsOneByteRepresentation();
255 bool Object::IsSeqTwoByteString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsSequential() &&
258 String::cast(this)->IsTwoByteRepresentation();
262 bool Object::IsExternalString() {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal();
268 bool Object::IsExternalAsciiString() {
269 if (!IsString()) return false;
270 return StringShape(String::cast(this)).IsExternal() &&
271 String::cast(this)->IsOneByteRepresentation();
275 bool Object::IsExternalTwoByteString() {
276 if (!IsString()) return false;
277 return StringShape(String::cast(this)).IsExternal() &&
278 String::cast(this)->IsTwoByteRepresentation();
281 bool Object::HasValidElements() {
282 // Dictionary is covered under FixedArray.
283 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
287 MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
288 Representation representation) {
289 if (!FLAG_track_double_fields) return this;
290 if (!representation.IsDouble()) return this;
291 if (IsUninitialized()) {
292 return heap->AllocateHeapNumber(0);
294 return heap->AllocateHeapNumber(Number());
298 StringShape::StringShape(String* str)
299 : type_(str->map()->instance_type()) {
301 ASSERT((type_ & kIsNotStringMask) == kStringTag);
305 StringShape::StringShape(Map* map)
306 : type_(map->instance_type()) {
308 ASSERT((type_ & kIsNotStringMask) == kStringTag);
312 StringShape::StringShape(InstanceType t)
313 : type_(static_cast<uint32_t>(t)) {
315 ASSERT((type_ & kIsNotStringMask) == kStringTag);
319 bool StringShape::IsInternalized() {
321 STATIC_ASSERT(kNotInternalizedTag != 0);
322 return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
323 (kStringTag | kInternalizedTag);
327 bool String::IsOneByteRepresentation() {
328 uint32_t type = map()->instance_type();
329 return (type & kStringEncodingMask) == kOneByteStringTag;
333 bool String::IsTwoByteRepresentation() {
334 uint32_t type = map()->instance_type();
335 return (type & kStringEncodingMask) == kTwoByteStringTag;
339 bool String::IsOneByteRepresentationUnderneath() {
340 uint32_t type = map()->instance_type();
341 STATIC_ASSERT(kIsIndirectStringTag != 0);
342 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
344 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
345 case kOneByteStringTag:
347 case kTwoByteStringTag:
349 default: // Cons or sliced string. Need to go deeper.
350 return GetUnderlying()->IsOneByteRepresentation();
355 bool String::IsTwoByteRepresentationUnderneath() {
356 uint32_t type = map()->instance_type();
357 STATIC_ASSERT(kIsIndirectStringTag != 0);
358 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
360 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
361 case kOneByteStringTag:
363 case kTwoByteStringTag:
365 default: // Cons or sliced string. Need to go deeper.
366 return GetUnderlying()->IsTwoByteRepresentation();
371 bool String::HasOnlyOneByteChars() {
372 uint32_t type = map()->instance_type();
373 return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
374 IsOneByteRepresentation();
378 bool StringShape::IsCons() {
379 return (type_ & kStringRepresentationMask) == kConsStringTag;
383 bool StringShape::IsSliced() {
384 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
388 bool StringShape::IsIndirect() {
389 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
393 bool StringShape::IsExternal() {
394 return (type_ & kStringRepresentationMask) == kExternalStringTag;
398 bool StringShape::IsSequential() {
399 return (type_ & kStringRepresentationMask) == kSeqStringTag;
403 StringRepresentationTag StringShape::representation_tag() {
404 uint32_t tag = (type_ & kStringRepresentationMask);
405 return static_cast<StringRepresentationTag>(tag);
409 uint32_t StringShape::encoding_tag() {
410 return type_ & kStringEncodingMask;
414 uint32_t StringShape::full_representation_tag() {
415 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
419 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
420 Internals::kFullStringRepresentationMask);
422 STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
423 Internals::kStringEncodingMask);
426 bool StringShape::IsSequentialAscii() {
427 return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
431 bool StringShape::IsSequentialTwoByte() {
432 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
436 bool StringShape::IsExternalAscii() {
437 return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
441 STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
442 Internals::kExternalAsciiRepresentationTag);
444 STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);
447 bool StringShape::IsExternalTwoByte() {
448 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
452 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
453 Internals::kExternalTwoByteRepresentationTag);
455 STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
457 uc32 FlatStringReader::Get(int index) {
458 ASSERT(0 <= index && index <= length_);
460 return static_cast<const byte*>(start_)[index];
462 return static_cast<const uc16*>(start_)[index];
467 bool Object::IsNumber() {
468 return IsSmi() || IsHeapNumber();
472 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
473 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
476 bool Object::IsFiller() {
477 if (!Object::IsHeapObject()) return false;
478 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
479 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
483 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
486 bool Object::IsExternalArray() {
487 if (!Object::IsHeapObject())
489 InstanceType instance_type =
490 HeapObject::cast(this)->map()->instance_type();
491 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
492 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
496 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
497 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
498 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
499 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
500 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
501 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
502 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
503 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
506 bool MaybeObject::IsFailure() {
507 return HAS_FAILURE_TAG(this);
511 bool MaybeObject::IsRetryAfterGC() {
512 return HAS_FAILURE_TAG(this)
513 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
517 bool MaybeObject::IsOutOfMemory() {
518 return HAS_FAILURE_TAG(this)
519 && Failure::cast(this)->IsOutOfMemoryException();
523 bool MaybeObject::IsException() {
524 return this == Failure::Exception();
528 bool MaybeObject::IsTheHole() {
529 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
533 bool MaybeObject::IsUninitialized() {
534 return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
538 Failure* Failure::cast(MaybeObject* obj) {
539 ASSERT(HAS_FAILURE_TAG(obj));
540 return reinterpret_cast<Failure*>(obj);
544 bool Object::IsJSReceiver() {
545 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
546 return IsHeapObject() &&
547 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
551 bool Object::IsJSObject() {
552 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
553 return IsHeapObject() &&
554 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
558 bool Object::IsJSProxy() {
559 if (!Object::IsHeapObject()) return false;
560 InstanceType type = HeapObject::cast(this)->map()->instance_type();
561 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
565 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
566 TYPE_CHECKER(JSSet, JS_SET_TYPE)
567 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
568 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
569 TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
570 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
571 TYPE_CHECKER(Map, MAP_TYPE)
572 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
573 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
574 TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
577 bool Object::IsJSWeakCollection() {
578 return IsJSWeakMap() || IsJSWeakSet();
582 bool Object::IsDescriptorArray() {
583 return IsFixedArray();
587 bool Object::IsTransitionArray() {
588 return IsFixedArray();
592 bool Object::IsDeoptimizationInputData() {
593 // Must be a fixed array.
594 if (!IsFixedArray()) return false;
596 // There's no sure way to detect the difference between a fixed array and
597 // a deoptimization data array. Since this is used for asserts we can
598 // check that the length is zero or else the fixed size plus a multiple of
600 int length = FixedArray::cast(this)->length();
601 if (length == 0) return true;
603 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
604 return length >= 0 &&
605 length % DeoptimizationInputData::kDeoptEntrySize == 0;
609 bool Object::IsDeoptimizationOutputData() {
610 if (!IsFixedArray()) return false;
611 // There's actually no way to see the difference between a fixed array and
612 // a deoptimization data array. Since this is used for asserts we can check
613 // that the length is plausible though.
614 if (FixedArray::cast(this)->length() % 2 != 0) return false;
619 bool Object::IsDependentCode() {
620 if (!IsFixedArray()) return false;
621 // There's actually no way to see the difference between a fixed array and
622 // a dependent codes array.
627 bool Object::IsTypeFeedbackCells() {
628 if (!IsFixedArray()) return false;
629 // There's actually no way to see the difference between a fixed array and
630 // a cache cells array. Since this is used for asserts we can check that
631 // the length is plausible though.
632 if (FixedArray::cast(this)->length() % 2 != 0) return false;
637 bool Object::IsContext() {
638 if (!Object::IsHeapObject()) return false;
639 Map* map = HeapObject::cast(this)->map();
640 Heap* heap = map->GetHeap();
641 return (map == heap->function_context_map() ||
642 map == heap->catch_context_map() ||
643 map == heap->with_context_map() ||
644 map == heap->native_context_map() ||
645 map == heap->block_context_map() ||
646 map == heap->module_context_map() ||
647 map == heap->global_context_map());
651 bool Object::IsNativeContext() {
652 return Object::IsHeapObject() &&
653 HeapObject::cast(this)->map() ==
654 HeapObject::cast(this)->GetHeap()->native_context_map();
658 bool Object::IsScopeInfo() {
659 return Object::IsHeapObject() &&
660 HeapObject::cast(this)->map() ==
661 HeapObject::cast(this)->GetHeap()->scope_info_map();
665 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
668 template <> inline bool Is<JSFunction>(Object* obj) {
669 return obj->IsJSFunction();
673 TYPE_CHECKER(Code, CODE_TYPE)
674 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
675 TYPE_CHECKER(Cell, CELL_TYPE)
676 TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
677 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
678 TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
679 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
680 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
681 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
682 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
685 bool Object::IsStringWrapper() {
686 return IsJSValue() && JSValue::cast(this)->value()->IsString();
690 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
693 bool Object::IsBoolean() {
694 return IsOddball() &&
695 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
699 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
700 TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
701 TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
702 TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
705 bool Object::IsJSArrayBufferView() {
706 return IsJSDataView() || IsJSTypedArray();
710 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
713 template <> inline bool Is<JSArray>(Object* obj) {
714 return obj->IsJSArray();
718 bool Object::IsHashTable() {
719 return Object::IsHeapObject() &&
720 HeapObject::cast(this)->map() ==
721 HeapObject::cast(this)->GetHeap()->hash_table_map();
725 bool Object::IsDictionary() {
726 return IsHashTable() &&
727 this != HeapObject::cast(this)->GetHeap()->string_table();
731 bool Object::IsStringTable() {
732 return IsHashTable() &&
733 this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
737 bool Object::IsJSFunctionResultCache() {
738 if (!IsFixedArray()) return false;
739 FixedArray* self = FixedArray::cast(this);
740 int length = self->length();
741 if (length < JSFunctionResultCache::kEntriesIndex) return false;
742 if ((length - JSFunctionResultCache::kEntriesIndex)
743 % JSFunctionResultCache::kEntrySize != 0) {
747 if (FLAG_verify_heap) {
748 reinterpret_cast<JSFunctionResultCache*>(this)->
749 JSFunctionResultCacheVerify();
756 bool Object::IsNormalizedMapCache() {
757 if (!IsFixedArray()) return false;
758 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
762 if (FLAG_verify_heap) {
763 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
770 bool Object::IsCompilationCacheTable() {
771 return IsHashTable();
775 bool Object::IsCodeCacheHashTable() {
776 return IsHashTable();
780 bool Object::IsPolymorphicCodeCacheHashTable() {
781 return IsHashTable();
785 bool Object::IsMapCache() {
786 return IsHashTable();
790 bool Object::IsObjectHashTable() {
791 return IsHashTable();
795 bool Object::IsPrimitive() {
796 return IsOddball() || IsNumber() || IsString();
800 bool Object::IsJSGlobalProxy() {
801 bool result = IsHeapObject() &&
802 (HeapObject::cast(this)->map()->instance_type() ==
803 JS_GLOBAL_PROXY_TYPE);
804 ASSERT(!result || IsAccessCheckNeeded());
809 bool Object::IsGlobalObject() {
810 if (!IsHeapObject()) return false;
812 InstanceType type = HeapObject::cast(this)->map()->instance_type();
813 return type == JS_GLOBAL_OBJECT_TYPE ||
814 type == JS_BUILTINS_OBJECT_TYPE;
818 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
819 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
822 bool Object::IsUndetectableObject() {
823 return IsHeapObject()
824 && HeapObject::cast(this)->map()->is_undetectable();
828 bool Object::IsAccessCheckNeeded() {
829 return IsHeapObject()
830 && HeapObject::cast(this)->map()->is_access_check_needed();
834 bool Object::IsStruct() {
835 if (!IsHeapObject()) return false;
836 switch (HeapObject::cast(this)->map()->instance_type()) {
837 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
838 STRUCT_LIST(MAKE_STRUCT_CASE)
839 #undef MAKE_STRUCT_CASE
840 default: return false;
845 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
846 bool Object::Is##Name() { \
847 return Object::IsHeapObject() \
848 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
850 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
851 #undef MAKE_STRUCT_PREDICATE
854 bool Object::IsUndefined() {
855 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
859 bool Object::IsNull() {
860 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
864 bool Object::IsTheHole() {
865 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
869 bool Object::IsUninitialized() {
870 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
874 bool Object::IsTrue() {
875 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
879 bool Object::IsFalse() {
880 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
884 bool Object::IsArgumentsMarker() {
885 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
889 double Object::Number() {
892 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
893 : reinterpret_cast<HeapNumber*>(this)->value();
897 bool Object::IsNaN() {
898 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
902 MaybeObject* Object::ToSmi() {
903 if (IsSmi()) return this;
904 if (IsHeapNumber()) {
905 double value = HeapNumber::cast(this)->value();
906 int int_value = FastD2I(value);
907 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
908 return Smi::FromInt(int_value);
911 return Failure::Exception();
915 bool Object::HasSpecificClassOf(String* name) {
916 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
920 MaybeObject* Object::GetElement(Isolate* isolate, uint32_t index) {
921 // GetElement can trigger a getter which can cause allocation.
922 // This was not always the case. This ASSERT is here to catch
923 // leftover incorrect uses.
924 ASSERT(AllowHeapAllocation::IsAllowed());
925 return GetElementWithReceiver(isolate, this, index);
929 Object* Object::GetElementNoExceptionThrown(Isolate* isolate, uint32_t index) {
930 MaybeObject* maybe = GetElementWithReceiver(isolate, this, index);
931 ASSERT(!maybe->IsFailure());
932 Object* result = NULL; // Initialization to please compiler.
933 maybe->ToObject(&result);
938 MaybeObject* Object::GetProperty(Name* key) {
939 PropertyAttributes attributes;
940 return GetPropertyWithReceiver(this, key, &attributes);
944 MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
945 return GetPropertyWithReceiver(this, key, attributes);
// Address of a field, compensating for the heap-object tag on the pointer.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Unconditional write barrier: notify incremental marking, and record the
// slot when the value lives in new space.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

// Same as WRITE_BARRIER but skipped entirely unless the caller requested
// UPDATE_WRITE_BARRIER mode.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS
1013 #define READ_INT_FIELD(p, offset) \
1014 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
1016 #define WRITE_INT_FIELD(p, offset, value) \
1017 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
1019 #define READ_INTPTR_FIELD(p, offset) \
1020 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
1022 #define WRITE_INTPTR_FIELD(p, offset, value) \
1023 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
1025 #define READ_UINT32_FIELD(p, offset) \
1026 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
1028 #define WRITE_UINT32_FIELD(p, offset, value) \
1029 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
1031 #define READ_INT32_FIELD(p, offset) \
1032 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))
1034 #define WRITE_INT32_FIELD(p, offset, value) \
1035 (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
1037 #define READ_INT64_FIELD(p, offset) \
1038 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
1040 #define WRITE_INT64_FIELD(p, offset, value) \
1041 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
1043 #define READ_SHORT_FIELD(p, offset) \
1044 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
1046 #define WRITE_SHORT_FIELD(p, offset, value) \
1047 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
1049 #define READ_BYTE_FIELD(p, offset) \
1050 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
1052 #define WRITE_BYTE_FIELD(p, offset, value) \
1053 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
1056 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1057 return &READ_FIELD(obj, byte_offset);
1062 return Internals::SmiValue(this);
1066 Smi* Smi::FromInt(int value) {
1067 ASSERT(Smi::IsValid(value));
1068 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1072 Smi* Smi::FromIntptr(intptr_t value) {
1073 ASSERT(Smi::IsValid(value));
1074 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1075 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1079 Failure::Type Failure::type() const {
1080 return static_cast<Type>(value() & kFailureTypeTagMask);
1084 bool Failure::IsInternalError() const {
1085 return type() == INTERNAL_ERROR;
1089 bool Failure::IsOutOfMemoryException() const {
1090 return type() == OUT_OF_MEMORY_EXCEPTION;
1094 AllocationSpace Failure::allocation_space() const {
1095 ASSERT_EQ(RETRY_AFTER_GC, type());
1096 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1101 Failure* Failure::InternalError() {
1102 return Construct(INTERNAL_ERROR);
1106 Failure* Failure::Exception() {
1107 return Construct(EXCEPTION);
1111 Failure* Failure::OutOfMemoryException(intptr_t value) {
1112 return Construct(OUT_OF_MEMORY_EXCEPTION, value);
1116 intptr_t Failure::value() const {
1117 return static_cast<intptr_t>(
1118 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1122 Failure* Failure::RetryAfterGC() {
1123 return RetryAfterGC(NEW_SPACE);
1127 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1128 ASSERT((space & ~kSpaceTagMask) == 0);
1129 return Construct(RETRY_AFTER_GC, space);
1133 Failure* Failure::Construct(Type type, intptr_t value) {
1135 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1136 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1137 // Fill the unused bits with a pattern that's easy to recognize in crash
1139 static const int kFailureMagicPattern = 0x0BAD0000;
1140 return reinterpret_cast<Failure*>(
1141 (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
1145 bool Smi::IsValid(intptr_t value) {
1146 bool result = Internals::IsValidSmi(value);
1147 ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
1152 MapWord MapWord::FromMap(Map* map) {
1153 return MapWord(reinterpret_cast<uintptr_t>(map));
1157 Map* MapWord::ToMap() {
1158 return reinterpret_cast<Map*>(value_);
1162 bool MapWord::IsForwardingAddress() {
1163 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1167 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1168 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1169 return MapWord(reinterpret_cast<uintptr_t>(raw));
1173 HeapObject* MapWord::ToForwardingAddress() {
1174 ASSERT(IsForwardingAddress());
1175 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
// Debug-only check that the field at |offset| holds a valid pointer.
1180 void HeapObject::VerifyObjectField(int offset) {
1181 VerifyPointer(READ_FIELD(this, offset));

// Debug-only check that the field at |offset| holds a Smi.
1184 void HeapObject::VerifySmiField(int offset) {
1185 CHECK(READ_FIELD(this, offset)->IsSmi());

// Recovers the owning Heap from the object's address via its MemoryChunk —
// works without an isolate pointer on the object itself.
1190 Heap* HeapObject::GetHeap() {
1192 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1193 SLOW_ASSERT(heap != NULL);

1198 Isolate* HeapObject::GetIsolate() {
1199 return GetHeap()->isolate();

// The object's map, decoded from the map word.
1203 Map* HeapObject::map() {
1204 return map_word().ToMap();

// Sets the map and notifies the incremental marker (write barrier for the
// map slot). Maps are never evacuation candidates, hence the NULL slot.
1208 void HeapObject::set_map(Map* value) {
1209 set_map_word(MapWord::FromMap(value));
1210 if (value != NULL) {
1211 // TODO(1600) We are passing NULL as a slot because maps can never be on
1212 // evacuation candidate.
1213 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);

1218 // Unsafe accessor omitting write barrier.
1219 void HeapObject::set_map_no_write_barrier(Map* value) {
1220 set_map_word(MapWord::FromMap(value));

// Raw read of the map word (may be a map pointer or a forwarding address).
1224 MapWord HeapObject::map_word() {
1225 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));

1229 void HeapObject::set_map_word(MapWord map_word) {
1230 // WRITE_FIELD does not invoke write barrier, but there is no need
1232 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));

// Converts a raw address into a tagged HeapObject pointer.
1236 HeapObject* HeapObject::FromAddress(Address address) {
1237 ASSERT_TAG_ALIGNED(address);
1238 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);

// The object's raw (untagged) start address.
1242 Address HeapObject::address() {
1243 return reinterpret_cast<Address>(this) - kHeapObjectTag;

// Object size in bytes, derived from the map.
1247 int HeapObject::Size() {
1248 return SizeFromMap(map());

// Visits all pointer slots in [start, end) field offsets.
1252 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1253 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1254 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));

// Visits the single pointer slot at |offset|.
1258 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1259 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
// The boxed double payload of a HeapNumber.
1263 double HeapNumber::value() {
1264 return READ_DOUBLE_FIELD(this, kValueOffset);

1268 void HeapNumber::set_value(double value) {
1269 WRITE_DOUBLE_FIELD(this, kValueOffset, value);

// Unbiased IEEE-754 exponent, extracted from the high word of the double.
1273 int HeapNumber::get_exponent() {
1274 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1275 kExponentShift) - kExponentBias;

// Nonzero iff the sign bit is set (value is negative or -0).
1279 int HeapNumber::get_sign() {
1280 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;

// Generates JSObject::properties() / set_properties() for the backing store.
1284 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
// Address of element 0, usable for fast linear scans over the array.
1287 Object** FixedArray::GetFirstElementAddress() {
1288 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));

// True when every element is a Smi or the hole — i.e. the array could be
// stored with FAST_SMI (possibly holey) elements kind.
1292 bool FixedArray::ContainsOnlySmisOrHoles() {
1293 Object* the_hole = GetHeap()->the_hole_value();
1294 Object** current = GetFirstElementAddress();
1295 for (int i = 0; i < length(); ++i) {
1296 Object* candidate = *current++;
1297 if (!candidate->IsSmi() && candidate != the_hole) return false;

// The elements backing store (FixedArray, FixedDoubleArray, or dictionary).
1303 FixedArrayBase* JSObject::elements() {
1304 Object* array = READ_FIELD(this, kElementsOffset);
1305 return static_cast<FixedArrayBase*>(array);

// Slow-mode consistency check of the elements backing store, only active
// under ENABLE_SLOW_ASSERTS + --enable-slow-asserts.
1309 void JSObject::ValidateElements() {
1310 #ifdef ENABLE_SLOW_ASSERTS
1311 if (FLAG_enable_slow_asserts) {
1312 ElementsAccessor* accessor = GetElementsAccessor();
1313 accessor->Validate(this);

// Whether allocation-site info should be recorded for this object: only for
// trackable instance types whose current elements kind is still trackable.
1319 bool JSObject::ShouldTrackAllocationInfo() {
1320 if (AllocationSite::CanTrack(map()->instance_type())) {
1325 return AllocationSite::GetMode(GetElementsKind()) ==
1326 TRACK_ALLOCATION_SITE;
// Resets a fresh AllocationSite: initial elements kind, no nested site, and
// dependent code pointing at the (immortal) empty fixed array, so no write
// barrier is needed.
1332 void AllocationSite::Initialize() {
1333 SetElementsKind(GetInitialFastElementsKind());
1334 set_nested_site(Smi::FromInt(0));
1335 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1336 SKIP_WRITE_BARRIER);

1340 // Heuristic: We only need to create allocation site info if the boilerplate
1341 // elements kind is the initial elements kind.
1342 AllocationSiteMode AllocationSite::GetMode(
1343 ElementsKind boilerplate_elements_kind) {
1344 if (FLAG_track_allocation_sites &&
1345 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1346 return TRACK_ALLOCATION_SITE;
1349 return DONT_TRACK_ALLOCATION_SITE;

// Track only transitions that generalize away from a fast-Smi kind.
1353 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1355 if (FLAG_track_allocation_sites &&
1356 IsFastSmiElementsKind(from) &&
1357 IsMoreGeneralElementsKindTransition(from, to)) {
1358 return TRACK_ALLOCATION_SITE;
1361 return DONT_TRACK_ALLOCATION_SITE;

// Allocation-site tracking currently applies to JSArray instances only.
1365 inline bool AllocationSite::CanTrack(InstanceType type) {
1366 return type == JS_ARRAY_TYPE;
// Transitions |object| to an elements kind that can hold arbitrary heap
// objects, preserving holeyness (FAST_HOLEY_ELEMENTS vs FAST_ELEMENTS).
1370 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1371 object->ValidateElements();
1372 ElementsKind elements_kind = object->map()->elements_kind();
1373 if (!IsFastObjectElementsKind(elements_kind)) {
1374 if (IsFastHoleyElementsKind(elements_kind)) {
1375 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1377 TransitionElementsKind(object, FAST_ELEMENTS);

// Scans |count| values and computes the most general elements kind needed to
// store them (Smi -> double -> object, holey variants for holes), then
// transitions if the current kind is insufficient. FAST_HOLEY_ELEMENTS is
// already maximally general, so it bails out early.
// NOTE(review): several interior lines (else branches / closing braces) are
// missing from this sampled listing.
1383 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1385 EnsureElementsMode mode) {
1386 ElementsKind current_kind = map()->elements_kind();
1387 ElementsKind target_kind = current_kind;
1388 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1389 bool is_holey = IsFastHoleyElementsKind(current_kind);
1390 if (current_kind == FAST_HOLEY_ELEMENTS) return this;
1391 Heap* heap = GetHeap();
1392 Object* the_hole = heap->the_hole_value();
1393 for (uint32_t i = 0; i < count; ++i) {
1394 Object* current = *objects++;
1395 if (current == the_hole) {
1397 target_kind = GetHoleyElementsKind(target_kind);
1398 } else if (!current->IsSmi()) {
1399 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1400 if (IsFastSmiElementsKind(target_kind)) {
1402 target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1404 target_kind = FAST_DOUBLE_ELEMENTS;
1407 } else if (is_holey) {
1408 target_kind = FAST_HOLEY_ELEMENTS;
1411 target_kind = FAST_ELEMENTS;
1416 if (target_kind != current_kind) {
1417 return TransitionElementsKind(target_kind);

// Variant taking a backing store. For a FixedArray source it delegates to the
// pointer-based overload above; for a FixedDoubleArray it transitions to a
// double kind, holey if any source slot is the hole.
1423 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1425 EnsureElementsMode mode) {
1426 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1427 ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1428 elements->map() == GetHeap()->fixed_cow_array_map());
1429 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1430 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1432 Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1433 return EnsureCanContainElements(objects, length, mode);
1436 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1437 if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1438 return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1439 } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
1440 FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
1441 for (uint32_t i = 0; i < length; ++i) {
1442 if (double_array->is_the_hole(i)) {
1443 return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1446 return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
// Fast path for elements-kind map transitions: JSArray maps for all fast
// kinds are cached per native context, so if the current map is the cached
// one for |from_kind|, the target map can be fetched without allocation.
// Falls back to the slow (possibly allocating) path otherwise.
1453 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1454 ElementsKind to_kind) {
1455 Map* current_map = map();
1456 ElementsKind from_kind = current_map->elements_kind();
1457 if (from_kind == to_kind) return current_map;
1459 Context* native_context = isolate->context()->native_context();
1460 Object* maybe_array_maps = native_context->js_array_maps();
1461 if (maybe_array_maps->IsFixedArray()) {
1462 FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
1463 if (array_maps->get(from_kind) == current_map) {
1464 Object* maybe_transitioned_map = array_maps->get(to_kind);
1465 if (maybe_transitioned_map->IsMap()) {
1466 return Map::cast(maybe_transitioned_map);
1471 return GetElementsTransitionMapSlow(to_kind);

// Atomically (from the mutator's perspective) installs a new map and a new
// elements backing store. The ASSERTs check that the store's map agrees with
// the elements kind the object's map advertises. |new_map| may be NULL to
// leave the map unchanged (used by set_elements below).
1475 void JSObject::set_map_and_elements(Map* new_map,
1476 FixedArrayBase* value,
1477 WriteBarrierMode mode) {
1478 ASSERT(value->HasValidElements());
1479 if (new_map != NULL) {
1480 if (mode == UPDATE_WRITE_BARRIER) {
1483 ASSERT(mode == SKIP_WRITE_BARRIER);
1484 set_map_no_write_barrier(new_map);
1487 ASSERT((map()->has_fast_smi_or_object_elements() ||
1488 (value == GetHeap()->empty_fixed_array())) ==
1489 (value->map() == GetHeap()->fixed_array_map() ||
1490 value->map() == GetHeap()->fixed_cow_array_map()));
1491 ASSERT((value == GetHeap()->empty_fixed_array()) ||
1492 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1493 WRITE_FIELD(this, kElementsOffset, value);
1494 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);

// Replaces only the elements store, keeping the current map.
1498 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1499 set_map_and_elements(NULL, value, mode);
// Points the properties store at the shared empty fixed array (old space, so
// no write barrier is needed).
1503 void JSObject::initialize_properties() {
1504 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1505 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());

// Installs the canonical empty backing store matching the map's elements
// kind: empty fixed array for fast kinds, the per-type empty external array
// for external kinds.
1509 void JSObject::initialize_elements() {
1510 if (map()->has_fast_smi_or_object_elements() ||
1511 map()->has_fast_double_elements()) {
1512 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1513 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1514 } else if (map()->has_external_array_elements()) {
1515 ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
1516 ASSERT(!GetHeap()->InNewSpace(empty_array));
1517 WRITE_FIELD(this, kElementsOffset, empty_array);

// Resets elements to an empty store. Observed objects must keep dictionary
// elements; otherwise the object transitions back to the initial fast kind
// (object kind when --smi-only-arrays is off). May fail on allocation.
1524 MaybeObject* JSObject::ResetElements() {
1525 if (map()->is_observed()) {
1526 // Maintain invariant that observed elements are always in dictionary mode.
1527 SeededNumberDictionary* dictionary;
1528 MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
1529 if (!maybe->To(&dictionary)) return maybe;
1530 if (map() == GetHeap()->non_strict_arguments_elements_map()) {
1531 FixedArray::cast(elements())->set(1, dictionary);
1533 set_elements(dictionary);
1538 ElementsKind elements_kind = GetInitialFastElementsKind();
1539 if (!FLAG_smi_only_arrays) {
1540 elements_kind = FastSmiToObjectElementsKind(elements_kind);
1542 MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
1544 if (!maybe->To(&map)) return maybe;
1546 initialize_elements();
// If |map| has exactly one (simple) transition and it adds a plain FIELD
// property with no attributes under a string key, returns that key;
// otherwise returns the null handle. Used by the JSON/object-literal fast
// path to predict the next map.
1552 Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
1553 DisallowHeapAllocation no_gc;
1554 if (!map->HasTransitionArray()) return Handle<String>::null();
1555 TransitionArray* transitions = map->transitions();
1556 if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1557 int transition = TransitionArray::kSimpleTransitionIndex;
1558 PropertyDetails details = transitions->GetTargetDetails(transition);
1559 Name* name = transitions->GetKey(transition);
1560 if (details.type() != FIELD) return Handle<String>::null();
1561 if (details.attributes() != NONE) return Handle<String>::null();
1562 if (!name->IsString()) return Handle<String>::null();
1563 return Handle<String>(String::cast(name));

// Companion to ExpectedTransitionKey: the map that simple transition leads to.
// Only valid when ExpectedTransitionKey returned a non-null key.
1567 Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
1568 ASSERT(!ExpectedTransitionKey(map).is_null());
1569 return Handle<Map>(map->transitions()->GetTarget(
1570 TransitionArray::kSimpleTransitionIndex));

// Looks up a transition on |map| keyed by |key| that introduces an
// attribute-free FIELD property; null handle when absent or not matching.
1574 Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1575 DisallowHeapAllocation no_allocation;
1576 if (!map->HasTransitionArray()) return Handle<Map>::null();
1577 TransitionArray* transitions = map->transitions();
1578 int transition = transitions->Search(*key);
1579 if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1580 PropertyDetails target_details = transitions->GetTargetDetails(transition);
1581 if (target_details.type() != FIELD) return Handle<Map>::null();
1582 if (target_details.attributes() != NONE) return Handle<Map>::null();
1583 return Handle<Map>(transitions->GetTarget(transition));
// Generated accessors for Oddball's cached string/number representations.
1587 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1588 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)

// The oddball kind (undefined/null/true/false/...), stored as a Smi.
1591 byte Oddball::kind() {
1592 return Smi::cast(READ_FIELD(this, kKindOffset))->value();

1596 void Oddball::set_kind(byte value) {
1597 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));

1601 Object* Cell::value() {
1602 return READ_FIELD(this, kValueOffset);

// Cells store their payload without a write barrier; the ASSERT guards the
// invariant that cells never point at other cells.
1606 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
1607 // The write barrier is not used for global property cells.
1608 ASSERT(!val->IsPropertyCell() && !val->IsCell());
1609 WRITE_FIELD(this, kValueOffset, val);

1612 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

// Raw (untyped) access to the cell's type field; no write barrier.
1614 Object* PropertyCell::type_raw() {
1615 return READ_FIELD(this, kTypeOffset);

1619 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
1620 WRITE_FIELD(this, kTypeOffset, val);
// Header size (bytes before internal fields / in-object properties) for each
// JSObject-like instance type. JS_OBJECT_TYPE is special-cased before the
// switch because it is by far the most common.
1624 int JSObject::GetHeaderSize() {
1625 InstanceType type = map()->instance_type();
1626 // Check for the most common kind of JavaScript object before
1627 // falling into the generic switch. This speeds up the internal
1628 // field operations considerably on average.
1629 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1631 case JS_GENERATOR_OBJECT_TYPE:
1632 return JSGeneratorObject::kSize;
1633 case JS_MODULE_TYPE:
1634 return JSModule::kSize;
1635 case JS_GLOBAL_PROXY_TYPE:
1636 return JSGlobalProxy::kSize;
1637 case JS_GLOBAL_OBJECT_TYPE:
1638 return JSGlobalObject::kSize;
1639 case JS_BUILTINS_OBJECT_TYPE:
1640 return JSBuiltinsObject::kSize;
1641 case JS_FUNCTION_TYPE:
1642 return JSFunction::kSize;
1644 return JSValue::kSize;
1646 return JSDate::kSize;
1648 return JSArray::kSize;
1649 case JS_ARRAY_BUFFER_TYPE:
1650 return JSArrayBuffer::kSize;
1651 case JS_TYPED_ARRAY_TYPE:
1652 return JSTypedArray::kSize;
1653 case JS_DATA_VIEW_TYPE:
1654 return JSDataView::kSize;
1656 return JSSet::kSize;
1658 return JSMap::kSize;
1659 case JS_WEAK_MAP_TYPE:
1660 return JSWeakMap::kSize;
1661 case JS_WEAK_SET_TYPE:
1662 return JSWeakSet::kSize;
1663 case JS_REGEXP_TYPE:
1664 return JSRegExp::kSize;
1665 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1666 return JSObject::kHeaderSize;
1667 case JS_MESSAGE_OBJECT_TYPE:
1668 return JSMessageObject::kSize;
1670 // TODO(jkummerow): Re-enable this. Blink currently hits this
1671 // from its CustomElementConstructorBuilder.
// Number of embedder internal fields: total pointer slots past the header,
// minus the in-object property slots that share the same region.
1678 int JSObject::GetInternalFieldCount() {
1679 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1680 // Make sure to adjust for the number of in-object properties. These
1681 // properties do contribute to the size, but are not internal fields.
1682 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1683 map()->inobject_properties();

// Byte offset of internal field |index| (fields sit right after the header).
1687 int JSObject::GetInternalFieldOffset(int index) {
1688 ASSERT(index < GetInternalFieldCount() && index >= 0);
1689 return GetHeaderSize() + (kPointerSize * index);

1693 Object* JSObject::GetInternalField(int index) {
1694 ASSERT(index < GetInternalFieldCount() && index >= 0);
1695 // Internal objects do follow immediately after the header, whereas in-object
1696 // properties are at the end of the object. Therefore there is no need
1697 // to adjust the index here.
1698 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));

// Stores an arbitrary object into an internal field, with write barrier.
1702 void JSObject::SetInternalField(int index, Object* value) {
1703 ASSERT(index < GetInternalFieldCount() && index >= 0);
1704 // Internal objects do follow immediately after the header, whereas in-object
1705 // properties are at the end of the object. Therefore there is no need
1706 // to adjust the index here.
1707 int offset = GetHeaderSize() + (kPointerSize * index);
1708 WRITE_FIELD(this, offset, value);
1709 WRITE_BARRIER(GetHeap(), this, offset, value);

// Smi overload: Smis are not heap pointers, so no write barrier is needed.
1713 void JSObject::SetInternalField(int index, Smi* value) {
1714 ASSERT(index < GetInternalFieldCount() && index >= 0);
1715 // Internal objects do follow immediately after the header, whereas in-object
1716 // properties are at the end of the object. Therefore there is no need
1717 // to adjust the index here.
1718 int offset = GetHeaderSize() + (kPointerSize * index);
1719 WRITE_FIELD(this, offset, value);
// Reads a fast property and, if |representation| requires it (e.g. double),
// allocates fresh storage for the value; hence MaybeObject.
1723 MaybeObject* JSObject::FastPropertyAt(Representation representation,
1725 Object* raw_value = RawFastPropertyAt(index);
1726 return raw_value->AllocateNewStorageFor(GetHeap(), representation);

1730 // Access fast-case object properties at index. The use of these routines
1731 // is needed to correctly distinguish between properties stored in-object and
1732 // properties stored in the properties array.
// Negative adjusted index => in-object slot (addressed from the instance
// end); non-negative => slot in the external properties array.
1733 Object* JSObject::RawFastPropertyAt(int index) {
1734 // Adjust for the number of properties stored in the object.
1735 index -= map()->inobject_properties();
1737 int offset = map()->instance_size() + (index * kPointerSize);
1738 return READ_FIELD(this, offset);
1740 ASSERT(index < properties()->length());
1741 return properties()->get(index);

// Write counterpart of RawFastPropertyAt, with write barrier on the
// in-object path (the properties-array path barriers via FixedArray::set).
1746 void JSObject::FastPropertyAtPut(int index, Object* value) {
1747 // Adjust for the number of properties stored in the object.
1748 index -= map()->inobject_properties();
1750 int offset = map()->instance_size() + (index * kPointerSize);
1751 WRITE_FIELD(this, offset, value);
1752 WRITE_BARRIER(GetHeap(), this, offset, value);
1754 ASSERT(index < properties()->length());
1755 properties()->set(index, value);

// Byte offset of in-object property |index| (indices count back from the
// end of the instance).
1760 int JSObject::GetInObjectPropertyOffset(int index) {
1761 // Adjust for the number of properties stored in the object.
1762 index -= map()->inobject_properties();
1764 return map()->instance_size() + (index * kPointerSize);

1768 Object* JSObject::InObjectPropertyAt(int index) {
1769 // Adjust for the number of properties stored in the object.
1770 index -= map()->inobject_properties();
1772 int offset = map()->instance_size() + (index * kPointerSize);
1773 return READ_FIELD(this, offset);

// Stores an in-object property and returns |value| for caller convenience.
1777 Object* JSObject::InObjectPropertyAtPut(int index,
1779 WriteBarrierMode mode) {
1780 // Adjust for the number of properties stored in the object.
1781 index -= map()->inobject_properties();
1783 int offset = map()->instance_size() + (index * kPointerSize);
1784 WRITE_FIELD(this, offset, value);
1785 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Fills the body of a freshly allocated object: the first
// pre_allocated_property_fields slots get |pre_allocated_value|, the rest
// get |filler_value|. Both fillers must be immortal immovable objects (the
// ASSERTs reject new-space values) so no write barriers are required.
1791 void JSObject::InitializeBody(Map* map,
1792 Object* pre_allocated_value,
1793 Object* filler_value) {
1794 ASSERT(!filler_value->IsHeapObject() ||
1795 !GetHeap()->InNewSpace(filler_value));
1796 ASSERT(!pre_allocated_value->IsHeapObject() ||
1797 !GetHeap()->InNewSpace(pre_allocated_value));
1798 int size = map->instance_size();
1799 int offset = kHeaderSize;
1800 if (filler_value != pre_allocated_value) {
1801 int pre_allocated = map->pre_allocated_property_fields();
1802 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1803 for (int i = 0; i < pre_allocated; i++) {
1804 WRITE_FIELD(this, offset, pre_allocated_value);
1805 offset += kPointerSize;
1808 while (offset < size) {
1809 WRITE_FIELD(this, offset, filler_value);
1810 offset += kPointerSize;

// Fast properties == properties not stored in a dictionary.
1815 bool JSObject::HasFastProperties() {
1816 ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
1817 return !properties()->IsDictionary();

// Heuristic for switching to dictionary (slow) properties: compare the
// out-of-object property count against a soft limit (raised for non-keyed
// stores and for objects with many in-object slots).
1821 bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
1822 // Allow extra fast properties if the object has more than
1823 // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
1824 // very unlikely that the object is being used as a dictionary and there is a
1825 // good chance that allowing more map transitions will be worth it.
1826 Map* map = this->map();
1827 if (map->unused_property_fields() != 0) return false;
1829 int inobject = map->inobject_properties();
1832 if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
1833 limit = Max(inobject, kMaxFastProperties);
1835 limit = Max(inobject, kFastPropertiesSoftLimit);
1837 return properties()->length() > limit;

// Fills every field of a Struct with undefined.
1841 void Struct::InitializeBody(int object_size) {
1842 Object* value = GetHeap()->undefined_value();
1843 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1844 WRITE_FIELD(this, offset, value);
// Converts a Smi or HeapNumber to a uint32 array index. Succeeds only for
// non-negative values that round-trip exactly through uint32.
1849 bool Object::ToArrayIndex(uint32_t* index) {
1851 int value = Smi::cast(this)->value();
1852 if (value < 0) return false;
1856 if (IsHeapNumber()) {
1857 double value = HeapNumber::cast(this)->value();
1858 uint32_t uint_value = static_cast<uint32_t>(value);
1859 if (value == static_cast<double>(uint_value)) {
1860 *index = uint_value;

// True when this is a JSValue wrapping a String and |index| is a valid
// character position within that string.
1868 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1869 if (!this->IsJSValue()) return false;
1871 JSValue* js_value = JSValue::cast(this);
1872 if (!js_value->value()->IsString()) return false;
1874 String* str = String::cast(js_value->value());
1875 if (index >= static_cast<uint32_t>(str->length())) return false;

// Extra-checks build only: aborts if an API callback returned an object of a
// type embedders are not allowed to hand back to V8.
1882 void Object::VerifyApiCallResultType() {
1883 #if ENABLE_EXTRA_CHECKS
1892 FATAL("API call returned invalid object");
1894 #endif // ENABLE_EXTRA_CHECKS
// Checked downcast: any of the three fixed-array-like representations.
1898 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1899 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray() ||
1900 object->IsConstantPoolArray());
1901 return reinterpret_cast<FixedArrayBase*>(object);

// Bounds-checked (slow-assert) element read.
1905 Object* FixedArray::get(int index) {
1906 SLOW_ASSERT(index >= 0 && index < this->length());
1907 return READ_FIELD(this, kHeaderSize + index * kPointerSize);

1911 bool FixedArray::is_the_hole(int index) {
1912 return get(index) == GetHeap()->the_hole_value();

// Smi store: no write barrier needed since Smis are not heap pointers.
// COW arrays must never be written through these setters.
1916 void FixedArray::set(int index, Smi* value) {
1917 ASSERT(map() != GetHeap()->fixed_cow_array_map());
1918 ASSERT(index >= 0 && index < this->length());
1919 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1920 int offset = kHeaderSize + index * kPointerSize;
1921 WRITE_FIELD(this, offset, value);

// General store with full write barrier.
1925 void FixedArray::set(int index, Object* value) {
1926 ASSERT(map() != GetHeap()->fixed_cow_array_map());
1927 ASSERT(index >= 0 && index < this->length());
1928 int offset = kHeaderSize + index * kPointerSize;
1929 WRITE_FIELD(this, offset, value);
1930 WRITE_BARRIER(GetHeap(), this, offset, value);
// Holes in double arrays are encoded as one specific NaN bit pattern
// (kHoleNanInt64); comparison must be on the bits, since NaN != NaN.
1934 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1935 return BitCast<uint64_t, double>(value) == kHoleNanInt64;

1939 inline double FixedDoubleArray::hole_nan_as_double() {
1940 return BitCast<double, uint64_t>(kHoleNanInt64);

// The canonical NaN used for real NaN values, asserted to be distinct from
// the hole NaN so the two can never be confused.
1944 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1945 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1946 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1947 return OS::nan_value();

// Raw double read; must not be called on a hole slot.
1951 double FixedDoubleArray::get_scalar(int index) {
1952 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
1953 map() != GetHeap()->fixed_array_map());
1954 ASSERT(index >= 0 && index < this->length());
1955 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1956 ASSERT(!is_the_hole_nan(result));

// Raw bit-pattern read (valid for holes too).
1960 int64_t FixedDoubleArray::get_representation(int index) {
1961 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
1962 map() != GetHeap()->fixed_array_map());
1963 ASSERT(index >= 0 && index < this->length());
1964 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);

// Boxed read: the hole object for hole slots, else a Number (may allocate).
1967 MaybeObject* FixedDoubleArray::get(int index) {
1968 if (is_the_hole(index)) {
1969 return GetHeap()->the_hole_value();
1971 return GetHeap()->NumberFromDouble(get_scalar(index));

// Stores a double, canonicalizing NaNs so no real value aliases the hole.
1976 void FixedDoubleArray::set(int index, double value) {
1977 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
1978 map() != GetHeap()->fixed_array_map());
1979 int offset = kHeaderSize + index * kDoubleSize;
1980 if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
1981 WRITE_DOUBLE_FIELD(this, offset, value);

1985 void FixedDoubleArray::set_the_hole(int index) {
1986 ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
1987 map() != GetHeap()->fixed_array_map());
1988 int offset = kHeaderSize + index * kDoubleSize;
1989 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());

1993 bool FixedDoubleArray::is_the_hole(int index) {
1994 int offset = kHeaderSize + index * kDoubleSize;
1995 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
// ConstantPoolArray layout: [int64 entries][pointer entries][int32 entries].
// The two stored indices mark the section boundaries; int64s start at 0.
1999 SMI_ACCESSORS(ConstantPoolArray, first_ptr_index, kFirstPointerIndexOffset)
2000 SMI_ACCESSORS(ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset)

2003 int ConstantPoolArray::first_int64_index() {

2008 int ConstantPoolArray::count_of_int64_entries() {
2009 return first_ptr_index();

2013 int ConstantPoolArray::count_of_ptr_entries() {
2014 return first_int32_index() - first_ptr_index();

2018 int ConstantPoolArray::count_of_int32_entries() {
2019 return length() - first_int32_index();

// Records the section sizes; total length is the sum of all three.
2023 void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries,
2024 int number_of_ptr_entries,
2025 int number_of_int32_entries) {
2026 set_first_ptr_index(number_of_int64_entries);
2027 set_first_int32_index(number_of_int64_entries + number_of_ptr_entries);
2028 set_length(number_of_int64_entries + number_of_ptr_entries +
2029 number_of_int32_entries);

// Typed getters; each asserts |index| lies inside the matching section.
2033 int64_t ConstantPoolArray::get_int64_entry(int index) {
2034 ASSERT(map() == GetHeap()->constant_pool_array_map());
2035 ASSERT(index >= 0 && index < first_ptr_index());
2036 return READ_INT64_FIELD(this, OffsetOfElementAt(index));

2039 double ConstantPoolArray::get_int64_entry_as_double(int index) {
2040 STATIC_ASSERT(kDoubleSize == kInt64Size);
2041 ASSERT(map() == GetHeap()->constant_pool_array_map());
2042 ASSERT(index >= 0 && index < first_ptr_index());
2043 return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));

2047 Object* ConstantPoolArray::get_ptr_entry(int index) {
2048 ASSERT(map() == GetHeap()->constant_pool_array_map());
2049 ASSERT(index >= first_ptr_index() && index < first_int32_index());
2050 return READ_FIELD(this, OffsetOfElementAt(index));

2054 int32_t ConstantPoolArray::get_int32_entry(int index) {
2055 ASSERT(map() == GetHeap()->constant_pool_array_map());
2056 ASSERT(index >= first_int32_index() && index < length());
2057 return READ_INT32_FIELD(this, OffsetOfElementAt(index));

// Typed setters; only the pointer overload needs a write barrier.
2061 void ConstantPoolArray::set(int index, Object* value) {
2062 ASSERT(map() == GetHeap()->constant_pool_array_map());
2063 ASSERT(index >= first_ptr_index() && index < first_int32_index());
2064 WRITE_FIELD(this, OffsetOfElementAt(index), value);
2065 WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);

2069 void ConstantPoolArray::set(int index, int64_t value) {
2070 ASSERT(map() == GetHeap()->constant_pool_array_map());
2071 ASSERT(index >= first_int64_index() && index < first_ptr_index());
2072 WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);

2076 void ConstantPoolArray::set(int index, double value) {
2077 STATIC_ASSERT(kDoubleSize == kInt64Size);
2078 ASSERT(map() == GetHeap()->constant_pool_array_map());
2079 ASSERT(index >= first_int64_index() && index < first_ptr_index());
2080 WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);

2084 void ConstantPoolArray::set(int index, int32_t value) {
2085 ASSERT(map() == GetHeap()->constant_pool_array_map());
2086 ASSERT(index >= this->first_int32_index() && index < length());
2087 WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
// Barrier can be skipped only for new-space objects while incremental
// marking is off. Taking DisallowHeapAllocation by reference proves the
// answer cannot be invalidated by a GC before it is used.
2091 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2092 const DisallowHeapAllocation& promise) {
2093 Heap* heap = GetHeap();
2094 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2095 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2096 return UPDATE_WRITE_BARRIER;

// Store with caller-chosen barrier mode (see GetWriteBarrierMode above).
2100 void FixedArray::set(int index,
2102 WriteBarrierMode mode) {
2103 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2104 ASSERT(index >= 0 && index < this->length());
2105 int offset = kHeaderSize + index * kPointerSize;
2106 WRITE_FIELD(this, offset, value);
2107 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);

// Store that performs only the old-to-new remembered-set update, skipping
// the incremental-marking barrier (safe only in contexts that guarantee it).
2111 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
2114 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2115 ASSERT(index >= 0 && index < array->length());
2116 int offset = kHeaderSize + index * kPointerSize;
2117 WRITE_FIELD(array, offset, value);
2118 Heap* heap = array->GetHeap();
2119 if (heap->InNewSpace(value)) {
2120 heap->RecordWrite(array->address(), offset);

// Barrier-free store; asserts the value is not in new space so no
// remembered-set entry could be required.
2125 void FixedArray::NoWriteBarrierSet(FixedArray* array,
2128 ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
2129 ASSERT(index >= 0 && index < array->length());
2130 ASSERT(!array->GetHeap()->InNewSpace(value));
2131 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);

// Convenience setters for the immortal singletons undefined/null/the-hole;
// all barrier-free since those values live outside new space.
2135 void FixedArray::set_undefined(int index) {
2136 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2137 ASSERT(index >= 0 && index < this->length());
2138 ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
2140 kHeaderSize + index * kPointerSize,
2141 GetHeap()->undefined_value());

2145 void FixedArray::set_null(int index) {
2146 ASSERT(index >= 0 && index < this->length());
2147 ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2149 kHeaderSize + index * kPointerSize,
2150 GetHeap()->null_value());

2154 void FixedArray::set_the_hole(int index) {
2155 ASSERT(map() != GetHeap()->fixed_cow_array_map());
2156 ASSERT(index >= 0 && index < this->length());
2157 ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
2159 kHeaderSize + index * kPointerSize,
2160 GetHeap()->the_hole_value());
// Pointer to the first raw double element.
2164 double* FixedDoubleArray::data_start() {
2165 return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));

// Pointer to the first element slot.
2169 Object** FixedArray::data_start() {
2170 return HeapObject::RawField(this, kHeaderSize);

// Only the canonical empty descriptor array is shorter than kFirstIndex.
2174 bool DescriptorArray::IsEmpty() {
2175 ASSERT(length() >= kFirstIndex ||
2176 this == GetHeap()->empty_descriptor_array());
2177 return length() < kFirstIndex;

// Stores the descriptor count as a Smi in the header slot.
2181 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2183 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2187 // Perform a binary search in a fixed array. Low and high are entry indices. If
2188 // there are three entries in this array it should be called with low=0 and
// Binary search over keys sorted by hash, then a linear scan across the run
// of equal hashes to find an exact name match. In VALID_ENTRIES mode only
// entries whose sorted index is below |valid_entries| count as hits.
// NOTE(review): interior lines (e.g. the |limit| computation) are missing
// from this sampled listing.
2190 template<SearchMode search_mode, typename T>
2191 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2192 uint32_t hash = name->Hash();
2195 ASSERT(low <= high);
2197 while (low != high) {
2198 int mid = (low + high) / 2;
2199 Name* mid_name = array->GetSortedKey(mid);
2200 uint32_t mid_hash = mid_name->Hash();
2202 if (mid_hash >= hash) {
2209 for (; low <= limit; ++low) {
2210 int sort_index = array->GetSortedKeyIndex(low);
2211 Name* entry = array->GetKey(sort_index);
2212 if (entry->Hash() != hash) break;
2213 if (entry->Equals(name)) {
2214 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2217 return T::kNotFound;
2221 return T::kNotFound;

2225 // Perform a linear search in this fixed array. len is the number of entry
2226 // indices that are valid.
// ALL_ENTRIES walks in hash order (early-exit once hashes exceed the
// target); VALID_ENTRIES walks the first |valid_entries| unsorted slots.
2227 template<SearchMode search_mode, typename T>
2228 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2229 uint32_t hash = name->Hash();
2230 if (search_mode == ALL_ENTRIES) {
2231 for (int number = 0; number < len; number++) {
2232 int sorted_index = array->GetSortedKeyIndex(number);
2233 Name* entry = array->GetKey(sorted_index);
2234 uint32_t current_hash = entry->Hash();
2235 if (current_hash > hash) break;
2236 if (current_hash == hash && entry->Equals(name)) return sorted_index;
2239 ASSERT(len >= valid_entries);
2240 for (int number = 0; number < valid_entries; number++) {
2241 Name* entry = array->GetKey(number);
2242 uint32_t current_hash = entry->Hash();
2243 if (current_hash == hash && entry->Equals(name)) return number;
2246 return T::kNotFound;

// Dispatcher: linear search for small arrays, binary search otherwise.
2250 template<SearchMode search_mode, typename T>
2251 int Search(T* array, Name* name, int valid_entries) {
2252 if (search_mode == VALID_ENTRIES) {
2253 SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2255 SLOW_ASSERT(array->IsSortedNoDuplicates());
2258 int nof = array->number_of_entries();
2259 if (nof == 0) return T::kNotFound;
2261 // Fast case: do linear search for small arrays.
2262 const int kMaxElementsForLinearSearch = 8;
2263 if ((search_mode == ALL_ENTRIES &&
2264 nof <= kMaxElementsForLinearSearch) ||
2265 (search_mode == VALID_ENTRIES &&
2266 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2267 return LinearSearch<search_mode>(array, name, nof, valid_entries);
2270 // Slow case: perform binary search.
2271 return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
// Finds |name| among the first |valid_descriptors| descriptors.
2275 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2276 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);

// Search memoized through the isolate-wide DescriptorLookupCache, keyed by
// (map, name).
2280 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2281 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2282 if (number_of_own_descriptors == 0) return kNotFound;
2284 DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2285 int number = cache->Lookup(map, name);
2287 if (number == DescriptorLookupCache::kAbsent) {
2288 number = Search(name, number_of_own_descriptors);
2289 cache->Update(map, name, number);

// Resolves |name| against this map's own descriptors, filling |result| with
// either a descriptor hit or NotFound.
2296 void Map::LookupDescriptor(JSObject* holder,
2298 LookupResult* result) {
2299 DescriptorArray* descriptors = this->instance_descriptors();
2300 int number = descriptors->SearchWithCache(name, this);
2301 if (number == DescriptorArray::kNotFound) return result->NotFound();
2302 result->DescriptorResult(holder, descriptors->GetDetails(number), number);

// Resolves |name| against this map's transition array, if any.
2306 void Map::LookupTransition(JSObject* holder,
2308 LookupResult* result) {
2309 if (HasTransitionArray()) {
2310 TransitionArray* transition_array = transitions();
2311 int number = transition_array->Search(name);
2312 if (number != TransitionArray::kNotFound) {
2313 return result->TransitionResult(holder, number);
// Raw slot address of the key stored for |descriptor_number|.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToKeyIndex(descriptor_number)));

// A descriptor's storage begins at its key slot.
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);

// One-past-the-end slot of the previous descriptor, i.e. the end of
// descriptor |descriptor_number - 1|'s storage.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;

// The property key (a Name) of descriptor |descriptor_number|.
Name* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));

// Maps a position in hash-sorted order to the underlying descriptor index;
// the mapping is stored in the descriptor's details word ("pointer").
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();

// The key at position |descriptor_number| of the hash-sorted order.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
// Records |pointer| as the sorted-order index of |descriptor_index|.  The
// pointer lives inside the details Smi, so this rewrites the details slot.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());

// Overwrites the representation recorded in a descriptor's details word.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  ASSERT(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());

// Stamps every descriptor with the same representation.
void DescriptorArray::InitializeRepresentations(Representation representation) {
  int length = number_of_descriptors();
  for (int i = 0; i < length; i++) {
    SetRepresentation(i, representation);
// Raw slot address of the value stored for |descriptor_number|.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToValueIndex(descriptor_number)));

// The value of descriptor |descriptor_number|.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));

// Decodes the details Smi (type, attributes, representation, pointer).
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));

// Property type (FIELD, CONSTANT, CALLBACKS, ...) of a descriptor.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
2403 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2404 ASSERT(GetDetails(descriptor_number).type() == FIELD);
2405 return GetDetails(descriptor_number).field_index();
// The constant value of a descriptor (same slot as GetValue).
Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);

// The callbacks object (a Foreign or AccessorInfo-style object) of a
// CALLBACKS descriptor.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);

// Unwraps the Foreign callbacks object into a raw AccessorDescriptor*.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());

// Copies descriptor |descriptor_number| (key, value, details) into |desc|.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
// Writes |desc| into slot |descriptor_number| without incremental-marking
// write barriers; the WhitenessWitness certifies the array is still white
// (unmarked), so the barriers can be skipped safely.
void DescriptorArray::Set(int descriptor_number,
                          const WhitenessWitness&) {
  ASSERT(descriptor_number < number_of_descriptors());
  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());

// Barrier-taking variant: writes |desc| via the normal set() path.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  ASSERT(descriptor_number < number_of_descriptors());
  set(ToKeyIndex(descriptor_number), desc->GetKey());
  set(ToValueIndex(descriptor_number), desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
// Appends |desc| (witness variant: skips write barriers on a white array)
// and then insertion-sorts its sorted-order pointer into place by hash.
void DescriptorArray::Append(Descriptor* desc,
                             const WhitenessWitness& witness) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc, witness);
  uint32_t hash = desc->GetKey()->Hash();
  // Shift sorted-order pointers with a larger hash one slot right.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  SetSortedKey(insertion, descriptor_number);

// As above, but through the barrier-taking Set() variant.
void DescriptorArray::Append(Descriptor* desc) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);
  uint32_t hash = desc->GetKey()->Hash();
  // Shift sorted-order pointers with a larger hash one slot right.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  SetSortedKey(insertion, descriptor_number);
2501 void DescriptorArray::SwapSortedKeys(int first, int second) {
2502 int first_key = GetSortedKeyIndex(first);
2503 SetSortedKey(first, GetSortedKeyIndex(second));
2504 SetSortedKey(second, first_key);
// RAII witness that the array is white (unmarked by incremental marking);
// while it lives, descriptor writes may skip incremental write barriers.
DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);

// Re-enables marking when the witness goes out of scope.
DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
// Computes the backing capacity for a table expected to hold
// |at_least_space_for| entries: twice the requested size rounded up to a
// power of two, with a floor of 32.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.

// Convenience overload that uses this table's own isolate.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
// Find entry for key otherwise return kNotFound.
// Open-addressing probe: starts at FirstProbe(hash) and follows NextProbe
// until the key matches or an undefined (never-used) slot terminates it.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
  // EnsureCapacity will guarantee the hash table is never full.
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    // The hole marks a deleted slot: probing continues past it.
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
// The max-number-key slot packs a flag bit (kRequiresSlowElementsMask)
// alongside the largest numeric key seen; non-Smi means "not set".
bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);

// Largest numeric key stored, with the flag bits shifted off.  Only
// meaningful while the slow-elements flag is clear.
uint32_t SeededNumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;

// Forces the slow-elements flag on (discards the stored max key).
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2577 // ------------------------------------
// Generated Type::cast(Object*) helpers: each CAST_ACCESSOR expands to a
// checked downcast for the named heap-object type.
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(TypeFeedbackCells)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalByteArray)
CAST_ACCESSOR(ExternalUnsignedByteArray)
CAST_ACCESSOR(ExternalShortArray)
CAST_ACCESSOR(ExternalUnsignedShortArray)
CAST_ACCESSOR(ExternalIntArray)
CAST_ACCESSOR(ExternalUnsignedIntArray)
CAST_ACCESSOR(ExternalFloatArray)
CAST_ACCESSOR(ExternalDoubleArray)
CAST_ACCESSOR(ExternalPixelArray)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(AccessorInfo)

// Emit a cast accessor for every struct type in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
// Checked downcast to a HashTable instantiation.  Only the generic
// IsHashTable() shape is asserted; Shape/Key are trusted by the caller.
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
// Generated Smi-field getter/setter pairs for length/size fields.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
SMI_ACCESSORS(String, length, kLengthOffset)
// Raw 32-bit hash field (hash plus flag bits) of this name.
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);

// Stores the hash field; on 64-bit hosts also zeroes the adjacent upper
// 32 bits so the whole pointer-sized word is deterministic.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
// Name equality.  Identity answers immediately; two internalized strings
// (or any symbol) are equal only by identity; otherwise fall back to a
// character-wise string comparison.
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
  return String::cast(this)->SlowEquals(String::cast(other));

// Symbol's description slot ("name") accessor pair.
ACCESSORS(Symbol, name, Object, kNameOffset)
// String equality: identity fast path; two internalized strings can only
// be equal by identity; otherwise compare characters.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
  return SlowEquals(other);
// Attempts to flatten this string.  Non-cons strings are already flat;
// a flat cons returns its first part; otherwise the (allocating, hence
// MaybeObject) slow path runs.
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);

// TryFlatten variant that swallows allocation failure: on failure the
// original (unflattened) string is returned instead.
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (!flat->ToObject(&successfully_flattened)) return this;
  return String::cast(successfully_flattened);
// Reads the character at |index|, dispatching on the string's
// representation (sequential/cons/external/sliced x one-/two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
// Writes character |value| at |index|.  Only sequential strings are
// mutable; dispatches on the byte width of the representation.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());
  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
// A string is flat unless it is a cons with a non-empty second part.
bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
// Returns the string an indirect (cons or sliced) string wraps.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  ASSERT(this->IsFlat());
  ASSERT(StringShape(this).IsIndirect());
  // ConsString::first and SlicedString::parent share the same offset,
  // so one field read covers both shapes.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
// Walks a string's representation, handing contiguous character runs to
// |visitor| (VisitOneByteString/VisitTwoByteString).  Sliced strings are
// unwrapped in place; cons strings are delegated to |cons_op|.
// NOTE(review): the signature lines are elided in this view — parameters
// appear to be (string, offset, visitor, cons_op, type, length); confirm
// against the full header.
template<class Visitor, class ConsOp>
  ASSERT(length == static_cast<unsigned>(string->length()));
  ASSERT(offset <= length);
  unsigned slice_offset = offset;
    ASSERT(type == string->map()->instance_type());
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor.VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
      case kSeqStringTag | kTwoByteStringTag:
        visitor.VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
      case kExternalStringTag | kOneByteStringTag:
        visitor.VisitOneByteString(
            ExternalAsciiString::cast(string)->GetChars() + slice_offset,
      case kExternalStringTag | kTwoByteStringTag:
        visitor.VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Fold the slice's offset in and continue with its parent.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        type = string->map()->instance_type();
      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        // Let the cons operation pick the next leaf; NULL ends the walk.
        string = cons_op.Operate(string, &offset, &type, &length);
        if (string == NULL) return;
        slice_offset = offset;
        ASSERT(length == static_cast<unsigned>(string->length()));
// TODO(dcarney): Remove this class after conversion to VisitFlat.
// Cons-op that does not traverse: it just records the first ConsString
// encountered so VisitFlat can report it to the caller.
class ConsStringCaptureOp {
  inline ConsStringCaptureOp() : cons_string_(NULL) {}
  inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
    cons_string_ = ConsString::cast(string);
  // Captured cons string, or NULL if the string was flat.
  ConsString* cons_string_;
// Visits the flat content of a string.  Returns NULL if the string was
// flat (fully visited), or the ConsString where flat traversal stopped.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
  ASSERT(length >= 0 && length == string->length());
  ASSERT(offset >= 0 && offset <= length);
  ConsStringCaptureOp op;
  Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
  return op.cons_string_;
// Reads the byte-wide character at |index|.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);

// Writes a character; the value must fit in one byte.
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));

// Address of the first character (directly after the header).
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);

// Typed pointer to the character payload.
uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
// Address of the first UC16 character (directly after the header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
2893 uc16* SeqTwoByteString::GetChars() {
2894 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
// Reads the 16-bit character at |index|.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);

// Writes the 16-bit character at |index|.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);

// Object size in bytes; |instance_type| is accepted for interface
// uniformity but unused — the size depends only on length().
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());

// Object size in bytes; |instance_type| unused, as above.
int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
// The string this slice is a view into.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));

// Sets the parent; slices may only point at flat backing strings
// (sequential or external), never at another indirect string.
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  ASSERT(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);

// Start offset of the slice within its parent (Smi field pair).
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
// First (left) half of the cons pair.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));

// Raw first field without the String type check.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);

// Writes the first half, with a conditional write barrier.
void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);

// Second (right) half of the cons pair.
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));

// Raw second field without the String type check.
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);

// Writes the second half, with a conditional write barrier.
void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
// True for "short" external strings, which do not carry the cached
// resource-data field that update_data_cache() maintains.
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
// The embedder-provided resource backing this external string.
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));

// Refreshes the cached data pointer from the resource (no-op for short
// external strings, which have no cache field).
void ExternalAsciiString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();

// Installs a new resource and refreshes the data cache.
void ExternalAsciiString::set_resource(
    const ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();

// Character payload of the external resource.
const uint8_t* ExternalAsciiString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());

// Reads the character at |index| from the external payload.
uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
// The embedder-provided resource backing this external string.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));

// Refreshes the cached data pointer from the resource (no-op for short
// external strings, which have no cache field).
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();

// Installs a new resource and refreshes the data cache.
void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();

// UC16 payload of the external resource.
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();

// Reads the character at |index| from the external payload.
uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];

// Payload pointer starting at character |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
  return GetChars() + start;
// Null cons-op: String::Visit stops at the first cons string.
String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {

// Maps an absolute depth onto the circular frame buffer.
unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
  return depth & kDepthMask;

// Pushes a cons whose left side is being descended into.
void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;

// Replaces the current top frame when moving to a right child.
void ConsStringIteratorOp::PushRight(ConsString* string) {
  frames_[(depth_-1) & kDepthMask] = string;

// Tracks the deepest point reached during traversal.
void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;

// Pops the current frame.
void ConsStringIteratorOp::Pop() {
  ASSERT(depth_ <= maximum_depth_);

// True while there are frames left to visit.
bool ConsStringIteratorOp::HasMore() {

// Clears the iterator state.
void ConsStringIteratorOp::Reset() {
// Produces the next leaf string of the cons-tree traversal.  If the fixed
// frame stack overflowed, the traversal restarts from the root via
// Search(); returns NULL when the traversal is complete.
String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out,
                                                unsigned* length_out) {
  bool blew_stack = false;
  String* string = NextLeaf(&blew_stack, type_out, length_out);
  if (string != NULL) {
    ASSERT(*length_out == static_cast<unsigned>(string->length()));
    ASSERT(*type_out == string->map()->instance_type());
  // Traversal complete.
  if (!blew_stack) return NULL;
  // Restart search from root.
  unsigned offset_out;
  string = Search(&offset_out, type_out, length_out);
  ASSERT(string == NULL || offset_out == 0);
  ASSERT(string == NULL ||
         *length_out == static_cast<unsigned>(string->length()));
  ASSERT(string == NULL || *type_out == string->map()->instance_type());
// Returns the next character, refilling the buffer from the iterator when
// the current run is exhausted.  buffer8_/buffer16_ alias the same cursor;
// is_one_byte_ selects the width to read.
uint16_t StringCharacterStream::GetNext() {
  ASSERT(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this.
  if (buffer8_ == end_) HasMore();
  ASSERT(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
// Builds a character stream over |string| starting at |offset|, using
// |op| to iterate cons trees.
StringCharacterStream::StringCharacterStream(String* string,
                                             ConsStringIteratorOp* op,
    : is_one_byte_(false),
  Reset(string, offset);

// (Re)positions the stream: visits |string| so the first flat character
// run is loaded into the buffer.
void StringCharacterStream::Reset(String* string, unsigned offset) {
  int32_t type = string->map()->instance_type();
  unsigned length = string->length();
  String::Visit(string, offset, *this, *op_, type, length);
// True if another character is available; pulls the next leaf from the
// cons iterator and loads its characters when the current run ends.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  if (!op_->HasMore()) return false;
  String* string = op_->ContinueOperation(&type, &length);
  if (string == NULL) return false;
  ASSERT(!string->IsConsString());
  ASSERT(string->length() != 0);
  // The leaf is flat, so a null cons-op suffices to load its characters.
  ConsStringNullOp null_op;
  String::Visit(string, 0, *this, null_op, type, length);
  ASSERT(buffer8_ != end_);
// Visitor callback: points the stream at a one-byte character run.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, unsigned length) {
  is_one_byte_ = true;
  end_ = chars + length;

// Visitor callback: points the stream at a two-byte character run; end_
// is kept as a byte pointer shared with the one-byte view.
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, unsigned length) {
  is_one_byte_ = false;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
// Shrinks the cache to zero entries (size and finger at the first entry).
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);

// Clears all entry slots to the-hole and resets the size/finger.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);

// Smi-encoded bookkeeping slots: current size and clock-hand finger.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();

void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));

int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();

void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
// Reads the byte at |index|.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);

// Writes the byte at |index|.
void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);

// Reads the int at int-slot |index| (index is in ints, bound in bytes).
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);

// Recovers the tagged ByteArray pointer from its payload address.
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);

// Untagged address of the first payload byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
// Off-heap pixel buffer backing this array.
uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());

// Reads the raw pixel byte at |index|.
uint8_t ExternalPixelArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();

// Boxes the pixel value as a Smi.
MaybeObject* ExternalPixelArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));

// Writes the raw pixel byte at |index|.
void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
// Off-heap data pointer, stored as an untagged intptr field.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);

// Stores the off-heap data pointer (no barrier needed: not a heap ref).
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
// Typed accessors for the external (off-heap) array kinds.  Each family
// provides get_scalar (raw element read), get (boxed as Smi or heap
// number), and set (raw element write).

int8_t ExternalByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());

MaybeObject* ExternalByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));

void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());

uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());

MaybeObject* ExternalUnsignedByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));

void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());

int16_t ExternalShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());

MaybeObject* ExternalShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));

void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());

uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());

MaybeObject* ExternalUnsignedShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));

void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());

// 32-bit and floating-point elements may not fit a Smi, so get() boxes
// through the heap's number factory (may allocate).
int32_t ExternalIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());

MaybeObject* ExternalIntArray::get(int index) {
  return GetHeap()->NumberFromInt32(get_scalar(index));

void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());

uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());

MaybeObject* ExternalUnsignedIntArray::get(int index) {
  return GetHeap()->NumberFromUint32(get_scalar(index));

void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());

float ExternalFloatArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());

MaybeObject* ExternalFloatArray::get(int index) {
  return GetHeap()->NumberFromDouble(get_scalar(index));

void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());

double ExternalDoubleArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());

MaybeObject* ExternalDoubleArray::get(int index) {
  return GetHeap()->NumberFromDouble(get_scalar(index));

void ExternalDoubleArray::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
// GC visitor id for objects with this map (single byte field).
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);

void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));

// Instance size is stored in pointer-size units in one byte.
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;

// Number of in-object property slots.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);

// Number of property fields pre-allocated at construction.
int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
3451 int HeapObject::SizeFromMap(Map* map) {
3452 int instance_size = map->instance_size();
3453 if (instance_size != kVariableSizeSentinel) return instance_size;
3454 // Only inline the most frequent cases.
3455 int instance_type = static_cast<int>(map->instance_type());
3456 if (instance_type == FIXED_ARRAY_TYPE) {
3457 return FixedArray::BodyDescriptor::SizeOf(map, this);
3459 if (instance_type == ASCII_STRING_TYPE ||
3460 instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
3461 return SeqOneByteString::SizeFor(
3462 reinterpret_cast<SeqOneByteString*>(this)->length());
3464 if (instance_type == BYTE_ARRAY_TYPE) {
3465 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
3467 if (instance_type == FREE_SPACE_TYPE) {
3468 return reinterpret_cast<FreeSpace*>(this)->size();
3470 if (instance_type == STRING_TYPE ||
3471 instance_type == INTERNALIZED_STRING_TYPE) {
3472 return SeqTwoByteString::SizeFor(
3473 reinterpret_cast<SeqTwoByteString*>(this)->length());
3475 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
3476 return FixedDoubleArray::SizeFor(
3477 reinterpret_cast<FixedDoubleArray*>(this)->length());
3479 if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
3480 return ConstantPoolArray::SizeFor(
3481 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
3482 reinterpret_cast<ConstantPoolArray*>(this)->count_of_ptr_entries(),
3483 reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
3485 ASSERT(instance_type == CODE_TYPE);
3486 return reinterpret_cast<Code*>(this)->CodeSize();
3490 void Map::set_instance_size(int value) {
3491 ASSERT_EQ(0, value & (kPointerSize - 1));
3492 value >>= kPointerSizeLog2;
3493 ASSERT(0 <= value && value < 256);
3494 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
3498 void Map::set_inobject_properties(int value) {
3499 ASSERT(0 <= value && value < 256);
3500 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
3504 void Map::set_pre_allocated_property_fields(int value) {
3505 ASSERT(0 <= value && value < 256);
3506 WRITE_BYTE_FIELD(this,
3507 kPreAllocatedPropertyFieldsOffset,
3508 static_cast<byte>(value));
3512 InstanceType Map::instance_type() {
3513 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
3517 void Map::set_instance_type(InstanceType value) {
3518 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
3522 int Map::unused_property_fields() {
3523 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
3527 void Map::set_unused_property_fields(int value) {
3528 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
3532 byte Map::bit_field() {
3533 return READ_BYTE_FIELD(this, kBitFieldOffset);
3537 void Map::set_bit_field(byte value) {
3538 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
3542 byte Map::bit_field2() {
3543 return READ_BYTE_FIELD(this, kBitField2Offset);
3547 void Map::set_bit_field2(byte value) {
3548 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
3552 void Map::set_non_instance_prototype(bool value) {
3554 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
3556 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
3561 bool Map::has_non_instance_prototype() {
3562 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
3566 void Map::set_function_with_prototype(bool value) {
3567 set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
3571 bool Map::function_with_prototype() {
3572 return FunctionWithPrototype::decode(bit_field3());
3576 void Map::set_is_access_check_needed(bool access_check_needed) {
3577 if (access_check_needed) {
3578 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
3580 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
3585 bool Map::is_access_check_needed() {
3586 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
3590 void Map::set_is_extensible(bool value) {
3592 set_bit_field2(bit_field2() | (1 << kIsExtensible));
3594 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
3598 bool Map::is_extensible() {
3599 return ((1 << kIsExtensible) & bit_field2()) != 0;
3603 void Map::set_attached_to_shared_function_info(bool value) {
3605 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
3607 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
3611 bool Map::attached_to_shared_function_info() {
3612 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
3616 void Map::set_is_shared(bool value) {
3617 set_bit_field3(IsShared::update(bit_field3(), value));
3621 bool Map::is_shared() {
3622 return IsShared::decode(bit_field3());
3626 void Map::set_dictionary_map(bool value) {
3627 if (value) mark_unstable();
3628 set_bit_field3(DictionaryMap::update(bit_field3(), value));
3632 bool Map::is_dictionary_map() {
3633 return DictionaryMap::decode(bit_field3());
3637 Code::Flags Code::flags() {
3638 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
3642 void Map::set_owns_descriptors(bool is_shared) {
3643 set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
3647 bool Map::owns_descriptors() {
3648 return OwnsDescriptors::decode(bit_field3());
3652 void Map::set_is_observed(bool is_observed) {
3653 ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
3654 instance_type() > LAST_JS_OBJECT_TYPE ||
3655 has_slow_elements_kind() || has_external_array_elements());
3656 set_bit_field3(IsObserved::update(bit_field3(), is_observed));
3660 bool Map::is_observed() {
3661 return IsObserved::decode(bit_field3());
3665 void Map::deprecate() {
3666 set_bit_field3(Deprecated::update(bit_field3(), true));
3670 bool Map::is_deprecated() {
3671 if (!FLAG_track_fields) return false;
3672 return Deprecated::decode(bit_field3());
3676 void Map::set_migration_target(bool value) {
3677 set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
3681 bool Map::is_migration_target() {
3682 if (!FLAG_track_fields) return false;
3683 return IsMigrationTarget::decode(bit_field3());
3687 void Map::freeze() {
3688 set_bit_field3(IsFrozen::update(bit_field3(), true));
3692 bool Map::is_frozen() {
3693 return IsFrozen::decode(bit_field3());
3697 void Map::mark_unstable() {
3698 set_bit_field3(IsUnstable::update(bit_field3(), true));
3702 bool Map::is_stable() {
3703 return !IsUnstable::decode(bit_field3());
3707 bool Map::has_code_cache() {
3708 return code_cache() != GetIsolate()->heap()->empty_fixed_array();
3712 bool Map::CanBeDeprecated() {
3713 int descriptor = LastAdded();
3714 for (int i = 0; i <= descriptor; i++) {
3715 PropertyDetails details = instance_descriptors()->GetDetails(i);
3716 if (FLAG_track_fields && details.representation().IsNone()) {
3719 if (FLAG_track_fields && details.representation().IsSmi()) {
3722 if (FLAG_track_double_fields && details.representation().IsDouble()) {
3725 if (FLAG_track_heap_object_fields &&
3726 details.representation().IsHeapObject()) {
3729 if (FLAG_track_fields && details.type() == CONSTANT) {
3737 void Map::NotifyLeafMapLayoutChange() {
3740 dependent_code()->DeoptimizeDependentCodeGroup(
3742 DependentCode::kPrototypeCheckGroup);
3747 bool Map::CanOmitMapChecks() {
3748 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
3752 int DependentCode::number_of_entries(DependencyGroup group) {
3753 if (length() == 0) return 0;
3754 return Smi::cast(get(group))->value();
3758 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
3759 set(group, Smi::FromInt(value));
3763 bool DependentCode::is_code_at(int i) {
3764 return get(kCodesStartIndex + i)->IsCode();
3767 Code* DependentCode::code_at(int i) {
3768 return Code::cast(get(kCodesStartIndex + i));
3772 CompilationInfo* DependentCode::compilation_info_at(int i) {
3773 return reinterpret_cast<CompilationInfo*>(
3774 Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
3778 void DependentCode::set_object_at(int i, Object* object) {
3779 set(kCodesStartIndex + i, object);
3783 Object* DependentCode::object_at(int i) {
3784 return get(kCodesStartIndex + i);
3788 Object** DependentCode::slot_at(int i) {
3789 return HeapObject::RawField(
3790 this, FixedArray::OffsetOfElementAt(kCodesStartIndex + i));
3794 void DependentCode::clear_at(int i) {
3795 set_undefined(kCodesStartIndex + i);
3799 void DependentCode::copy(int from, int to) {
3800 set(kCodesStartIndex + to, get(kCodesStartIndex + from));
3804 void DependentCode::ExtendGroup(DependencyGroup group) {
3805 GroupStartIndexes starts(this);
3806 for (int g = kGroupCount - 1; g > group; g--) {
3807 if (starts.at(g) < starts.at(g + 1)) {
3808 copy(starts.at(g), starts.at(g + 1));
3814 void Code::set_flags(Code::Flags flags) {
3815 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
3816 // Make sure that all call stubs have an arguments count.
3817 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
3818 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
3819 ExtractArgumentsCountFromFlags(flags) >= 0);
3820 WRITE_INT_FIELD(this, kFlagsOffset, flags);
3824 Code::Kind Code::kind() {
3825 return ExtractKindFromFlags(flags());
3829 InlineCacheState Code::ic_state() {
3830 InlineCacheState result = ExtractICStateFromFlags(flags());
3831 // Only allow uninitialized or debugger states for non-IC code
3832 // objects. This is used in the debugger to determine whether or not
3833 // a call to code object has been replaced with a debug break call.
3834 ASSERT(is_inline_cache_stub() ||
3835 result == UNINITIALIZED ||
3836 result == DEBUG_STUB);
3841 Code::ExtraICState Code::extra_ic_state() {
3842 ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind()))
3843 || ic_state() == DEBUG_STUB);
3844 return ExtractExtraICStateFromFlags(flags());
3848 Code::ExtraICState Code::extended_extra_ic_state() {
3849 ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
3850 ASSERT(needs_extended_extra_ic_state(kind()));
3851 return ExtractExtendedExtraICStateFromFlags(flags());
3855 Code::StubType Code::type() {
3856 return ExtractTypeFromFlags(flags());
3860 int Code::arguments_count() {
3861 ASSERT(is_call_stub() || is_keyed_call_stub() ||
3862 kind() == STUB || is_handler());
3863 return ExtractArgumentsCountFromFlags(flags());
3867 inline bool Code::is_crankshafted() {
3868 return IsCrankshaftedField::decode(
3869 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
3873 inline void Code::set_is_crankshafted(bool value) {
3874 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
3875 int updated = IsCrankshaftedField::update(previous, value);
3876 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
3880 int Code::major_key() {
3881 ASSERT(kind() == STUB ||
3882 kind() == HANDLER ||
3883 kind() == BINARY_OP_IC ||
3884 kind() == COMPARE_IC ||
3885 kind() == COMPARE_NIL_IC ||
3886 kind() == STORE_IC ||
3887 kind() == LOAD_IC ||
3888 kind() == KEYED_LOAD_IC ||
3889 kind() == TO_BOOLEAN_IC);
3890 return StubMajorKeyField::decode(
3891 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
3895 void Code::set_major_key(int major) {
3896 ASSERT(kind() == STUB ||
3897 kind() == HANDLER ||
3898 kind() == BINARY_OP_IC ||
3899 kind() == COMPARE_IC ||
3900 kind() == COMPARE_NIL_IC ||
3901 kind() == LOAD_IC ||
3902 kind() == KEYED_LOAD_IC ||
3903 kind() == STORE_IC ||
3904 kind() == KEYED_STORE_IC ||
3905 kind() == TO_BOOLEAN_IC);
3906 ASSERT(0 <= major && major < 256);
3907 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
3908 int updated = StubMajorKeyField::update(previous, major);
3909 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
3913 bool Code::is_pregenerated() {
3914 return (kind() == STUB && IsPregeneratedField::decode(flags()));
3918 void Code::set_is_pregenerated(bool value) {
3919 ASSERT(kind() == STUB);
3921 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3926 bool Code::optimizable() {
3927 ASSERT_EQ(FUNCTION, kind());
3928 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3932 void Code::set_optimizable(bool value) {
3933 ASSERT_EQ(FUNCTION, kind());
3934 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3938 bool Code::has_deoptimization_support() {
3939 ASSERT_EQ(FUNCTION, kind());
3940 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3941 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3945 void Code::set_has_deoptimization_support(bool value) {
3946 ASSERT_EQ(FUNCTION, kind());
3947 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3948 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3949 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3953 bool Code::has_debug_break_slots() {
3954 ASSERT_EQ(FUNCTION, kind());
3955 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3956 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3960 void Code::set_has_debug_break_slots(bool value) {
3961 ASSERT_EQ(FUNCTION, kind());
3962 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3963 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3964 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3968 bool Code::is_compiled_optimizable() {
3969 ASSERT_EQ(FUNCTION, kind());
3970 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3971 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3975 void Code::set_compiled_optimizable(bool value) {
3976 ASSERT_EQ(FUNCTION, kind());
3977 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3978 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3979 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3983 int Code::allow_osr_at_loop_nesting_level() {
3984 ASSERT_EQ(FUNCTION, kind());
3985 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3989 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3990 ASSERT_EQ(FUNCTION, kind());
3991 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3992 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3996 int Code::profiler_ticks() {
3997 ASSERT_EQ(FUNCTION, kind());
3998 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
4002 void Code::set_profiler_ticks(int ticks) {
4003 ASSERT_EQ(FUNCTION, kind());
4004 ASSERT(ticks < 256);
4005 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4009 unsigned Code::stack_slots() {
4010 ASSERT(is_crankshafted());
4011 return StackSlotsField::decode(
4012 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4016 void Code::set_stack_slots(unsigned slots) {
4017 CHECK(slots <= (1 << kStackSlotsBitCount));
4018 ASSERT(is_crankshafted());
4019 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4020 int updated = StackSlotsField::update(previous, slots);
4021 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4025 unsigned Code::safepoint_table_offset() {
4026 ASSERT(is_crankshafted());
4027 return SafepointTableOffsetField::decode(
4028 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4032 void Code::set_safepoint_table_offset(unsigned offset) {
4033 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
4034 ASSERT(is_crankshafted());
4035 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4036 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4037 int updated = SafepointTableOffsetField::update(previous, offset);
4038 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4042 unsigned Code::back_edge_table_offset() {
4043 ASSERT_EQ(FUNCTION, kind());
4044 return BackEdgeTableOffsetField::decode(
4045 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4049 void Code::set_back_edge_table_offset(unsigned offset) {
4050 ASSERT_EQ(FUNCTION, kind());
4051 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
4052 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4053 int updated = BackEdgeTableOffsetField::update(previous, offset);
4054 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4058 bool Code::back_edges_patched_for_osr() {
4059 ASSERT_EQ(FUNCTION, kind());
4060 return BackEdgesPatchedForOSRField::decode(
4061 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
4065 void Code::set_back_edges_patched_for_osr(bool value) {
4066 ASSERT_EQ(FUNCTION, kind());
4067 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
4068 int updated = BackEdgesPatchedForOSRField::update(previous, value);
4069 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
4074 CheckType Code::check_type() {
4075 ASSERT(is_call_stub() || is_keyed_call_stub());
4076 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
4077 return static_cast<CheckType>(type);
4081 void Code::set_check_type(CheckType value) {
4082 ASSERT(is_call_stub() || is_keyed_call_stub());
4083 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
4087 byte Code::to_boolean_state() {
4088 return extended_extra_ic_state();
4092 bool Code::has_function_cache() {
4093 ASSERT(kind() == STUB);
4094 return HasFunctionCacheField::decode(
4095 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4099 void Code::set_has_function_cache(bool flag) {
4100 ASSERT(kind() == STUB);
4101 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4102 int updated = HasFunctionCacheField::update(previous, flag);
4103 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4107 bool Code::marked_for_deoptimization() {
4108 ASSERT(kind() == OPTIMIZED_FUNCTION);
4109 return MarkedForDeoptimizationField::decode(
4110 READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
4114 void Code::set_marked_for_deoptimization(bool flag) {
4115 ASSERT(kind() == OPTIMIZED_FUNCTION);
4116 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
4117 int updated = MarkedForDeoptimizationField::update(previous, flag);
4118 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
4122 bool Code::is_inline_cache_stub() {
4123 Kind kind = this->kind();
4125 #define CASE(name) case name: return true;
4128 default: return false;
4133 bool Code::is_keyed_stub() {
4134 return is_keyed_load_stub() || is_keyed_store_stub() || is_keyed_call_stub();
4138 bool Code::is_debug_stub() {
4139 return ic_state() == DEBUG_STUB;
4143 Code::Flags Code::ComputeFlags(Kind kind,
4144 InlineCacheState ic_state,
4145 ExtraICState extra_ic_state,
4148 InlineCacheHolderFlag holder) {
4149 ASSERT(argc <= Code::kMaxArguments);
4150 // Since the extended extra ic state overlaps with the argument count
4151 // for CALL_ICs, do so checks to make sure that they don't interfere.
4152 ASSERT((kind != Code::CALL_IC &&
4153 kind != Code::KEYED_CALL_IC) ||
4154 (ExtraICStateField::encode(extra_ic_state) | true));
4155 // Compute the bit mask.
4156 unsigned int bits = KindField::encode(kind)
4157 | ICStateField::encode(ic_state)
4158 | TypeField::encode(type)
4159 | ExtendedExtraICStateField::encode(extra_ic_state)
4160 | CacheHolderField::encode(holder);
4161 if (!Code::needs_extended_extra_ic_state(kind)) {
4162 bits |= (argc << kArgumentsCountShift);
4164 return static_cast<Flags>(bits);
4168 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
4169 ExtraICState extra_ic_state,
4172 InlineCacheHolderFlag holder) {
4173 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
4177 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
4178 return KindField::decode(flags);
4182 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
4183 return ICStateField::decode(flags);
4187 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
4188 return ExtraICStateField::decode(flags);
4192 Code::ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
4194 return ExtendedExtraICStateField::decode(flags);
4198 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
4199 return TypeField::decode(flags);
4203 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
4204 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
4208 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
4209 return CacheHolderField::decode(flags);
4213 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
4214 int bits = flags & ~TypeField::kMask;
4215 return static_cast<Flags>(bits);
4219 Code* Code::GetCodeFromTargetAddress(Address address) {
4220 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
4221 // GetCodeFromTargetAddress might be called when marking objects during mark
4222 // sweep. reinterpret_cast is therefore used instead of the more appropriate
4223 // Code::cast. Code::cast does not work when the object's map is
4225 Code* result = reinterpret_cast<Code*>(code);
4230 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
4232 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
4236 Object* Map::prototype() {
4237 return READ_FIELD(this, kPrototypeOffset);
4241 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
4242 ASSERT(value->IsNull() || value->IsJSReceiver());
4243 WRITE_FIELD(this, kPrototypeOffset, value);
4244 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
4248 // If the descriptor is using the empty transition array, install a new empty
4249 // transition array that will have place for an element transition.
4250 static MaybeObject* EnsureHasTransitionArray(Map* map) {
4251 TransitionArray* transitions;
4252 MaybeObject* maybe_transitions;
4253 if (!map->HasTransitionArray()) {
4254 maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
4255 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4256 transitions->set_back_pointer_storage(map->GetBackPointer());
4257 } else if (!map->transitions()->IsFullTransitionArray()) {
4258 maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
4259 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4263 map->set_transitions(transitions);
4268 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
4269 int len = descriptors->number_of_descriptors();
4270 set_instance_descriptors(descriptors);
4271 SetNumberOfOwnDescriptors(len);
4275 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
4278 void Map::set_bit_field3(uint32_t bits) {
4279 // Ensure the upper 2 bits have the same value by sign extending it. This is
4280 // necessary to be able to use the 31st bit.
4281 int value = bits << 1;
4282 WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
4286 uint32_t Map::bit_field3() {
4287 Object* value = READ_FIELD(this, kBitField3Offset);
4288 return Smi::cast(value)->value();
4292 void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
4293 Object* back_pointer = GetBackPointer();
4295 if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
4299 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
4300 CONDITIONAL_WRITE_BARRIER(
4301 heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
4305 void Map::AppendDescriptor(Descriptor* desc,
4306 const DescriptorArray::WhitenessWitness& witness) {
4307 DescriptorArray* descriptors = instance_descriptors();
4308 int number_of_own_descriptors = NumberOfOwnDescriptors();
4309 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
4310 descriptors->Append(desc, witness);
4311 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
4315 Object* Map::GetBackPointer() {
4316 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4317 if (object->IsDescriptorArray()) {
4318 return TransitionArray::cast(object)->back_pointer_storage();
4320 ASSERT(object->IsMap() || object->IsUndefined());
4326 bool Map::HasElementsTransition() {
4327 return HasTransitionArray() && transitions()->HasElementsTransition();
4331 bool Map::HasTransitionArray() {
4332 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4333 return object->IsTransitionArray();
4337 Map* Map::elements_transition_map() {
4338 int index = transitions()->Search(GetHeap()->elements_transition_symbol());
4339 return transitions()->GetTarget(index);
4343 bool Map::CanHaveMoreTransitions() {
4344 if (!HasTransitionArray()) return true;
4345 return FixedArray::SizeFor(transitions()->length() +
4346 TransitionArray::kTransitionSize)
4347 <= Page::kMaxNonCodeHeapObjectSize;
4351 MaybeObject* Map::AddTransition(Name* key,
4353 SimpleTransitionFlag flag) {
4354 if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
4355 return TransitionArray::NewWith(flag, key, target, GetBackPointer());
4359 void Map::SetTransition(int transition_index, Map* target) {
4360 transitions()->SetTarget(transition_index, target);
4364 Map* Map::GetTransition(int transition_index) {
4365 return transitions()->GetTarget(transition_index);
4369 MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
4370 TransitionArray* transitions;
4371 MaybeObject* maybe_transitions = AddTransition(
4372 GetHeap()->elements_transition_symbol(),
4375 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
4376 set_transitions(transitions);
4381 FixedArray* Map::GetPrototypeTransitions() {
4382 if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
4383 if (!transitions()->HasPrototypeTransitions()) {
4384 return GetHeap()->empty_fixed_array();
4386 return transitions()->GetPrototypeTransitions();
4390 MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
4391 MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
4392 if (allow_prototype->IsFailure()) return allow_prototype;
4393 int old_number_of_transitions = NumberOfProtoTransitions();
4395 if (HasPrototypeTransitions()) {
4396 ASSERT(GetPrototypeTransitions() != proto_transitions);
4397 ZapPrototypeTransitions();
4400 transitions()->SetPrototypeTransitions(proto_transitions);
4401 SetNumberOfProtoTransitions(old_number_of_transitions);
4406 bool Map::HasPrototypeTransitions() {
4407 return HasTransitionArray() && transitions()->HasPrototypeTransitions();
4411 TransitionArray* Map::transitions() {
4412 ASSERT(HasTransitionArray());
4413 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4414 return TransitionArray::cast(object);
4418 void Map::set_transitions(TransitionArray* transition_array,
4419 WriteBarrierMode mode) {
4420 // Transition arrays are not shared. When one is replaced, it should not
4421 // keep referenced objects alive, so we zap it.
4422 // When there is another reference to the array somewhere (e.g. a handle),
4423 // not zapping turns from a waste of memory into a source of crashes.
4424 if (HasTransitionArray()) {
4425 ASSERT(transitions() != transition_array);
4429 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
4430 CONDITIONAL_WRITE_BARRIER(
4431 GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
4435 void Map::init_back_pointer(Object* undefined) {
4436 ASSERT(undefined->IsUndefined());
4437 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
4441 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
4442 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
4443 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
4444 (value->IsMap() && GetBackPointer()->IsUndefined()));
4445 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
4446 if (object->IsTransitionArray()) {
4447 TransitionArray::cast(object)->set_back_pointer_storage(value);
4449 WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
4450 CONDITIONAL_WRITE_BARRIER(
4451 GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
4456 // Can either be Smi (no transitions), normal transition array, or a transition
4457 // array with the header overwritten as a Smi (thus iterating).
4458 TransitionArray* Map::unchecked_transition_array() {
4459 Object* object = *HeapObject::RawField(this,
4460 Map::kTransitionsOrBackPointerOffset);
4461 TransitionArray* transition_array = static_cast<TransitionArray*>(object);
4462 return transition_array;
4466 HeapObject* Map::UncheckedPrototypeTransitions() {
4467 ASSERT(HasTransitionArray());
4468 ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
4469 return unchecked_transition_array()->UncheckedPrototypeTransitions();
// Field accessors for Map, JSFunction, GlobalObject, JSGlobalProxy,
// AccessorInfo and DeclaredAccessorDescriptor, generated by the ACCESSORS /
// ACCESSORS_TO_SMI macros (typed READ_FIELD/WRITE_FIELD pairs with write
// barriers).
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)
ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)
4496 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
// Field accessors for the various Struct subclasses (accessor infos,
// interceptors, templates, allocation sites and Script), generated by the
// ACCESSORS / ACCESSORS_TO_SMI / BOOL_ACCESSORS macros.
ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
ACCESSORS(Box, value, Object, kValueOffset)
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)
ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)
ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
4578 Script::CompilationType Script::compilation_type() {
4579 return BooleanBit::get(flags(), kCompilationTypeBit) ?
4580 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
4582 void Script::set_compilation_type(CompilationType type) {
4583 set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
4584 type == COMPILATION_TYPE_EVAL));
4586 Script::CompilationState Script::compilation_state() {
4587 return BooleanBit::get(flags(), kCompilationStateBit) ?
4588 COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
4590 void Script::set_compilation_state(CompilationState state) {
4591 set_flags(BooleanBit::set(flags(), kCompilationStateBit,
4592 state == COMPILATION_STATE_COMPILED));
#ifdef ENABLE_DEBUGGER_SUPPORT
// Debugger bookkeeping: DebugInfo keeps the original code alongside the
// patched (break-slotted) code, plus the active break point state.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif  // ENABLE_DEBUGGER_SUPPORT
4608 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
4609 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
4610 kOptimizedCodeMapOffset)
4611 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
4612 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
4613 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
4614 kInstanceClassNameOffset)
4615 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
4616 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
4617 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
4618 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
4619 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
4622 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
4623 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
4624 kHiddenPrototypeBit)
4625 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
4626 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
4627 kNeedsAccessCheckBit)
4628 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
4629 kReadOnlyPrototypeBit)
4630 BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
4631 kRemovePrototypeBit)
4632 BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
4634 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
4636 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
4639 BOOL_ACCESSORS(SharedFunctionInfo,
4641 allows_lazy_compilation,
4642 kAllowLazyCompilation)
4643 BOOL_ACCESSORS(SharedFunctionInfo,
4645 allows_lazy_compilation_without_context,
4646 kAllowLazyCompilationWithoutContext)
4647 BOOL_ACCESSORS(SharedFunctionInfo,
4651 BOOL_ACCESSORS(SharedFunctionInfo,
4653 has_duplicate_parameters,
4654 kHasDuplicateParameters)
4657 #if V8_HOST_ARCH_32_BIT
4658 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
4659 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
4660 kFormalParameterCountOffset)
4661 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
4662 kExpectedNofPropertiesOffset)
4663 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
4664 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
4665 kStartPositionAndTypeOffset)
4666 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
4667 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
4668 kFunctionTokenPositionOffset)
4669 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
4670 kCompilerHintsOffset)
4671 SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
4672 kOptCountAndBailoutReasonOffset)
4673 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
4677 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
4678 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
4679 int holder::name() { \
4680 int value = READ_INT_FIELD(this, offset); \
4681 ASSERT(kHeapObjectTag == 1); \
4682 ASSERT((value & kHeapObjectTag) == 0); \
4683 return value >> 1; \
4685 void holder::set_##name(int value) { \
4686 ASSERT(kHeapObjectTag == 1); \
4687 ASSERT((value & 0xC0000000) == 0xC0000000 || \
4688 (value & 0xC0000000) == 0x000000000); \
4689 WRITE_INT_FIELD(this, \
4691 (value << 1) & ~kHeapObjectTag); \
4694 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
4695 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
4696 INT_ACCESSORS(holder, name, offset)
4699 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
4700 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4701 formal_parameter_count,
4702 kFormalParameterCountOffset)
4704 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
4705 expected_nof_properties,
4706 kExpectedNofPropertiesOffset)
4707 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
4709 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
4710 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4711 start_position_and_type,
4712 kStartPositionAndTypeOffset)
4714 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
4715 function_token_position,
4716 kFunctionTokenPositionOffset)
4717 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4719 kCompilerHintsOffset)
4721 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
4722 opt_count_and_bailout_reason,
4723 kOptCountAndBailoutReasonOffset)
4725 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
4730 int SharedFunctionInfo::construction_count() {
4731 return READ_BYTE_FIELD(this, kConstructionCountOffset);
4735 void SharedFunctionInfo::set_construction_count(int value) {
4736 ASSERT(0 <= value && value < 256);
4737 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
4741 BOOL_ACCESSORS(SharedFunctionInfo,
4743 live_objects_may_exist,
4744 kLiveObjectsMayExist)
4747 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
4748 return initial_map() != GetHeap()->undefined_value();
4752 BOOL_GETTER(SharedFunctionInfo,
4754 optimization_disabled,
4755 kOptimizationDisabled)
4758 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
4759 set_compiler_hints(BooleanBit::set(compiler_hints(),
4760 kOptimizationDisabled,
4762 // If disabling optimizations we reflect that in the code object so
4763 // it will not be counted as optimizable code.
4764 if ((code()->kind() == Code::FUNCTION) && disable) {
4765 code()->set_optimizable(false);
4770 int SharedFunctionInfo::profiler_ticks() {
4771 if (code()->kind() != Code::FUNCTION) return 0;
4772 return code()->profiler_ticks();
4776 LanguageMode SharedFunctionInfo::language_mode() {
4777 int hints = compiler_hints();
4778 if (BooleanBit::get(hints, kExtendedModeFunction)) {
4779 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
4780 return EXTENDED_MODE;
4782 return BooleanBit::get(hints, kStrictModeFunction)
4783 ? STRICT_MODE : CLASSIC_MODE;
4787 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
4788 // We only allow language mode transitions that go set the same language mode
4789 // again or go up in the chain:
4790 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
4791 ASSERT(this->language_mode() == CLASSIC_MODE ||
4792 this->language_mode() == language_mode ||
4793 language_mode == EXTENDED_MODE);
4794 int hints = compiler_hints();
4795 hints = BooleanBit::set(
4796 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
4797 hints = BooleanBit::set(
4798 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
4799 set_compiler_hints(hints);
4803 bool SharedFunctionInfo::is_classic_mode() {
4804 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
4807 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
4808 kExtendedModeFunction)
4809 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
4810 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
4811 name_should_print_as_anonymous,
4812 kNameShouldPrintAsAnonymous)
4813 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
4814 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
4815 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
4816 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
4818 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
4819 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
4820 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
4821 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
4823 void SharedFunctionInfo::BeforeVisitingPointers() {
4824 if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
4828 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
4829 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
4831 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
4833 bool Script::HasValidSource() {
4834 Object* src = this->source();
4835 if (!src->IsString()) return true;
4836 String* src_str = String::cast(src);
4837 if (!StringShape(src_str).IsExternal()) return true;
4838 if (src_str->IsOneByteRepresentation()) {
4839 return ExternalAsciiString::cast(src)->resource() != NULL;
4840 } else if (src_str->IsTwoByteRepresentation()) {
4841 return ExternalTwoByteString::cast(src)->resource() != NULL;
4847 void SharedFunctionInfo::DontAdaptArguments() {
4848 ASSERT(code()->kind() == Code::BUILTIN);
4849 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
4853 int SharedFunctionInfo::start_position() {
4854 return start_position_and_type() >> kStartPositionShift;
4858 void SharedFunctionInfo::set_start_position(int start_position) {
4859 set_start_position_and_type((start_position << kStartPositionShift)
4860 | (start_position_and_type() & ~kStartPositionMask));
4864 Code* SharedFunctionInfo::code() {
4865 return Code::cast(READ_FIELD(this, kCodeOffset));
4869 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
4870 WRITE_FIELD(this, kCodeOffset, value);
4871 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
4875 void SharedFunctionInfo::ReplaceCode(Code* value) {
4876 // If the GC metadata field is already used then the function was
4877 // enqueued as a code flushing candidate and we remove it now.
4878 if (code()->gc_metadata() != NULL) {
4879 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
4880 flusher->EvictCandidate(this);
4883 ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
4888 ScopeInfo* SharedFunctionInfo::scope_info() {
4889 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
4893 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
4894 WriteBarrierMode mode) {
4895 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
4896 CONDITIONAL_WRITE_BARRIER(GetHeap(),
4899 reinterpret_cast<Object*>(value),
4904 bool SharedFunctionInfo::is_compiled() {
4906 GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
4910 bool SharedFunctionInfo::IsApiFunction() {
4911 return function_data()->IsFunctionTemplateInfo();
4915 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
4916 ASSERT(IsApiFunction());
4917 return FunctionTemplateInfo::cast(function_data());
4921 bool SharedFunctionInfo::HasBuiltinFunctionId() {
4922 return function_data()->IsSmi();
4926 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
4927 ASSERT(HasBuiltinFunctionId());
4928 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
4932 int SharedFunctionInfo::ic_age() {
4933 return ICAgeBits::decode(counters());
4937 void SharedFunctionInfo::set_ic_age(int ic_age) {
4938 set_counters(ICAgeBits::update(counters(), ic_age));
4942 int SharedFunctionInfo::deopt_count() {
4943 return DeoptCountBits::decode(counters());
4947 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
4948 set_counters(DeoptCountBits::update(counters(), deopt_count));
4952 void SharedFunctionInfo::increment_deopt_count() {
4953 int value = counters();
4954 int deopt_count = DeoptCountBits::decode(value);
4955 deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
4956 set_counters(DeoptCountBits::update(value, deopt_count));
4960 int SharedFunctionInfo::opt_reenable_tries() {
4961 return OptReenableTriesBits::decode(counters());
4965 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
4966 set_counters(OptReenableTriesBits::update(counters(), tries));
4970 int SharedFunctionInfo::opt_count() {
4971 return OptCountBits::decode(opt_count_and_bailout_reason());
4975 void SharedFunctionInfo::set_opt_count(int opt_count) {
4976 set_opt_count_and_bailout_reason(
4977 OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
4981 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
4982 BailoutReason reason = static_cast<BailoutReason>(
4983 DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
4988 bool SharedFunctionInfo::has_deoptimization_support() {
4989 Code* code = this->code();
4990 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
4994 void SharedFunctionInfo::TryReenableOptimization() {
4995 int tries = opt_reenable_tries();
4996 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
4997 // We reenable optimization whenever the number of tries is a large
4998 // enough power of 2.
4999 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5000 set_optimization_disabled(false);
5003 code()->set_optimizable(true);
5008 bool JSFunction::IsBuiltin() {
5009 return context()->global_object()->IsJSBuiltinsObject();
5013 bool JSFunction::NeedsArgumentsAdaption() {
5014 return shared()->formal_parameter_count() !=
5015 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
5019 bool JSFunction::IsOptimized() {
5020 return code()->kind() == Code::OPTIMIZED_FUNCTION;
5024 bool JSFunction::IsOptimizable() {
5025 return code()->kind() == Code::FUNCTION && code()->optimizable();
5029 bool JSFunction::IsMarkedForLazyRecompilation() {
5030 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
5034 bool JSFunction::IsMarkedForConcurrentRecompilation() {
5035 return code() == GetIsolate()->builtins()->builtin(
5036 Builtins::kConcurrentRecompile);
5040 bool JSFunction::IsInRecompileQueue() {
5041 return code() == GetIsolate()->builtins()->builtin(
5042 Builtins::kInRecompileQueue);
5046 Code* JSFunction::code() {
5048 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
5052 void JSFunction::set_code(Code* value) {
5053 ASSERT(!GetHeap()->InNewSpace(value));
5054 Address entry = value->entry();
5055 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5056 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
5058 HeapObject::RawField(this, kCodeEntryOffset),
5063 void JSFunction::set_code_no_write_barrier(Code* value) {
5064 ASSERT(!GetHeap()->InNewSpace(value));
5065 Address entry = value->entry();
5066 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
5070 void JSFunction::ReplaceCode(Code* code) {
5071 bool was_optimized = IsOptimized();
5072 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
5076 // Add/remove the function from the list of optimized functions for this
5077 // context based on the state change.
5078 if (!was_optimized && is_optimized) {
5079 context()->native_context()->AddOptimizedFunction(this);
5081 if (was_optimized && !is_optimized) {
5082 // TODO(titzer): linear in the number of optimized functions; fix!
5083 context()->native_context()->RemoveOptimizedFunction(this);
5088 Context* JSFunction::context() {
5089 return Context::cast(READ_FIELD(this, kContextOffset));
5093 void JSFunction::set_context(Object* value) {
5094 ASSERT(value->IsUndefined() || value->IsContext());
5095 WRITE_FIELD(this, kContextOffset, value);
5096 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
5099 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
5100 kPrototypeOrInitialMapOffset)
5103 Map* JSFunction::initial_map() {
5104 return Map::cast(prototype_or_initial_map());
5108 void JSFunction::set_initial_map(Map* value) {
5109 set_prototype_or_initial_map(value);
5113 bool JSFunction::has_initial_map() {
5114 return prototype_or_initial_map()->IsMap();
5118 bool JSFunction::has_instance_prototype() {
5119 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5123 bool JSFunction::has_prototype() {
5124 return map()->has_non_instance_prototype() || has_instance_prototype();
5128 Object* JSFunction::instance_prototype() {
5129 ASSERT(has_instance_prototype());
5130 if (has_initial_map()) return initial_map()->prototype();
5131 // When there is no initial map and the prototype is a JSObject, the
5132 // initial map field is used for the prototype field.
5133 return prototype_or_initial_map();
5137 Object* JSFunction::prototype() {
5138 ASSERT(has_prototype());
5139 // If the function's prototype property has been set to a non-JSObject
5140 // value, that value is stored in the constructor field of the map.
5141 if (map()->has_non_instance_prototype()) return map()->constructor();
5142 return instance_prototype();
5146 bool JSFunction::should_have_prototype() {
5147 return map()->function_with_prototype();
5151 bool JSFunction::is_compiled() {
5152 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
5156 FixedArray* JSFunction::literals() {
5157 ASSERT(!shared()->bound());
5158 return literals_or_bindings();
5162 void JSFunction::set_literals(FixedArray* literals) {
5163 ASSERT(!shared()->bound());
5164 set_literals_or_bindings(literals);
5168 FixedArray* JSFunction::function_bindings() {
5169 ASSERT(shared()->bound());
5170 return literals_or_bindings();
5174 void JSFunction::set_function_bindings(FixedArray* bindings) {
5175 ASSERT(shared()->bound());
5176 // Bound function literal may be initialized to the empty fixed array
5177 // before the bindings are set.
5178 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
5179 bindings->map() == GetHeap()->fixed_cow_array_map());
5180 set_literals_or_bindings(bindings);
5184 int JSFunction::NumberOfLiterals() {
5185 ASSERT(!shared()->bound());
5186 return literals()->length();
5190 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
5191 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5192 return READ_FIELD(this, OffsetOfFunctionWithId(id));
5196 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
5198 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5199 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
5200 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
5204 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
5205 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5206 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
5210 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
5212 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
5213 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
5214 ASSERT(!GetHeap()->InNewSpace(value));
5218 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
5219 ACCESSORS(JSProxy, hash, Object, kHashOffset)
5220 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
5221 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
5224 void JSProxy::InitializeBody(int object_size, Object* value) {
5225 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
5226 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
5227 WRITE_FIELD(this, offset, value);
5232 ACCESSORS(JSSet, table, Object, kTableOffset)
5233 ACCESSORS(JSMap, table, Object, kTableOffset)
5234 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
5235 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
5238 Address Foreign::foreign_address() {
5239 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
5243 void Foreign::set_foreign_address(Address value) {
5244 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
5248 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
5249 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
5250 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
5251 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
5252 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
5253 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
5256 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
5257 ASSERT(obj->IsJSGeneratorObject());
5258 ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
5259 return reinterpret_cast<JSGeneratorObject*>(obj);
5263 ACCESSORS(JSModule, context, Object, kContextOffset)
5264 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
5267 JSModule* JSModule::cast(Object* obj) {
5268 ASSERT(obj->IsJSModule());
5269 ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
5270 return reinterpret_cast<JSModule*>(obj);
5274 ACCESSORS(JSValue, value, Object, kValueOffset)
5277 JSValue* JSValue::cast(Object* obj) {
5278 ASSERT(obj->IsJSValue());
5279 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
5280 return reinterpret_cast<JSValue*>(obj);
5284 ACCESSORS(JSDate, value, Object, kValueOffset)
5285 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
5286 ACCESSORS(JSDate, year, Object, kYearOffset)
5287 ACCESSORS(JSDate, month, Object, kMonthOffset)
5288 ACCESSORS(JSDate, day, Object, kDayOffset)
5289 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
5290 ACCESSORS(JSDate, hour, Object, kHourOffset)
5291 ACCESSORS(JSDate, min, Object, kMinOffset)
5292 ACCESSORS(JSDate, sec, Object, kSecOffset)
5295 JSDate* JSDate::cast(Object* obj) {
5296 ASSERT(obj->IsJSDate());
5297 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
5298 return reinterpret_cast<JSDate*>(obj);
5302 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
5303 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
5304 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
5305 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
5306 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
5307 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
5308 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
5311 JSMessageObject* JSMessageObject::cast(Object* obj) {
5312 ASSERT(obj->IsJSMessageObject());
5313 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
5314 return reinterpret_cast<JSMessageObject*>(obj);
5318 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
5319 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
5320 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
5321 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
5322 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
5325 // Type feedback slot: type_feedback_info for FUNCTIONs, stub_info for STUBs.
5326 void Code::InitializeTypeFeedbackInfoNoWriteBarrier(Object* value) {
5327 WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
5331 Object* Code::type_feedback_info() {
5332 ASSERT(kind() == FUNCTION);
5333 return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
5337 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
5338 ASSERT(kind() == FUNCTION);
5339 WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
5340 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5345 Object* Code::next_code_link() {
5346 CHECK(kind() == OPTIMIZED_FUNCTION);
5347 return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
5351 void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
5352 CHECK(kind() == OPTIMIZED_FUNCTION);
5353 WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
5354 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
5359 int Code::stub_info() {
5360 ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
5361 kind() == BINARY_OP_IC || kind() == LOAD_IC);
5362 Object* value = READ_FIELD(this, kTypeFeedbackInfoOffset);
5363 return Smi::cast(value)->value();
5367 void Code::set_stub_info(int value) {
5368 ASSERT(kind() == COMPARE_IC ||
5369 kind() == COMPARE_NIL_IC ||
5370 kind() == BINARY_OP_IC ||
5372 kind() == LOAD_IC ||
5373 kind() == KEYED_LOAD_IC ||
5374 kind() == STORE_IC ||
5375 kind() == KEYED_STORE_IC);
5376 WRITE_FIELD(this, kTypeFeedbackInfoOffset, Smi::FromInt(value));
5380 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
5381 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
5384 byte* Code::instruction_start() {
5385 return FIELD_ADDR(this, kHeaderSize);
5389 byte* Code::instruction_end() {
5390 return instruction_start() + instruction_size();
5394 int Code::body_size() {
5395 return RoundUp(instruction_size(), kObjectAlignment);
5399 ByteArray* Code::unchecked_relocation_info() {
5400 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
5404 byte* Code::relocation_start() {
5405 return unchecked_relocation_info()->GetDataStartAddress();
5409 int Code::relocation_size() {
5410 return unchecked_relocation_info()->length();
5414 byte* Code::entry() {
5415 return instruction_start();
5419 bool Code::contains(byte* inner_pointer) {
5420 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
5424 ACCESSORS(JSArray, length, Object, kLengthOffset)
5427 void* JSArrayBuffer::backing_store() {
5428 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
5429 return reinterpret_cast<void*>(ptr);
5433 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
5434 intptr_t ptr = reinterpret_cast<intptr_t>(value);
5435 WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
5439 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
5440 ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
5443 bool JSArrayBuffer::is_external() {
5444 return BooleanBit::get(flag(), kIsExternalBit);
5448 void JSArrayBuffer::set_is_external(bool value) {
5449 set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
5453 ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
5454 ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
5457 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
5458 ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
5459 ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
5460 ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
5461 ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
5463 ACCESSORS(JSRegExp, data, Object, kDataOffset)
5466 JSRegExp::Type JSRegExp::TypeTag() {
5467 Object* data = this->data();
5468 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
5469 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
5470 return static_cast<JSRegExp::Type>(smi->value());
5474 int JSRegExp::CaptureCount() {
5475 switch (TypeTag()) {
5479 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
5487 JSRegExp::Flags JSRegExp::GetFlags() {
5488 ASSERT(this->data()->IsFixedArray());
5489 Object* data = this->data();
5490 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
5491 return Flags(smi->value());
5495 String* JSRegExp::Pattern() {
5496 ASSERT(this->data()->IsFixedArray());
5497 Object* data = this->data();
5498 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
5503 Object* JSRegExp::DataAt(int index) {
5504 ASSERT(TypeTag() != NOT_COMPILED);
5505 return FixedArray::cast(data())->get(index);
5509 void JSRegExp::SetDataAt(int index, Object* value) {
5510 ASSERT(TypeTag() != NOT_COMPILED);
5511 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
5512 FixedArray::cast(data())->set(index, value);
5516 ElementsKind JSObject::GetElementsKind() {
5517 ElementsKind kind = map()->elements_kind();
5519 FixedArrayBase* fixed_array =
5520 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
5522 // If a GC was caused while constructing this object, the elements
5523 // pointer may point to a one pointer filler map.
5524 if (ElementsAreSafeToExamine()) {
5525 Map* map = fixed_array->map();
5526 ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
5527 (map == GetHeap()->fixed_array_map() ||
5528 map == GetHeap()->fixed_cow_array_map())) ||
5529 (IsFastDoubleElementsKind(kind) &&
5530 (fixed_array->IsFixedDoubleArray() ||
5531 fixed_array == GetHeap()->empty_fixed_array())) ||
5532 (kind == DICTIONARY_ELEMENTS &&
5533 fixed_array->IsFixedArray() &&
5534 fixed_array->IsDictionary()) ||
5535 (kind > DICTIONARY_ELEMENTS));
5536 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
5537 (elements()->IsFixedArray() && elements()->length() >= 2));
5544 ElementsAccessor* JSObject::GetElementsAccessor() {
5545 return ElementsAccessor::ForKind(GetElementsKind());
5549 bool JSObject::HasFastObjectElements() {
5550 return IsFastObjectElementsKind(GetElementsKind());
5554 bool JSObject::HasFastSmiElements() {
5555 return IsFastSmiElementsKind(GetElementsKind());
5559 bool JSObject::HasFastSmiOrObjectElements() {
5560 return IsFastSmiOrObjectElementsKind(GetElementsKind());
5564 bool JSObject::HasFastDoubleElements() {
5565 return IsFastDoubleElementsKind(GetElementsKind());
5569 bool JSObject::HasFastHoleyElements() {
5570 return IsFastHoleyElementsKind(GetElementsKind());
5574 bool JSObject::HasFastElements() {
5575 return IsFastElementsKind(GetElementsKind());
5579 bool JSObject::HasDictionaryElements() {
5580 return GetElementsKind() == DICTIONARY_ELEMENTS;
5584 bool JSObject::HasNonStrictArgumentsElements() {
5585 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
5589 bool JSObject::HasExternalArrayElements() {
5590 HeapObject* array = elements();
5591 ASSERT(array != NULL);
5592 return array->IsExternalArray();
5596 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
5597 bool JSObject::HasExternal##name##Elements() { \
5598 HeapObject* array = elements(); \
5599 ASSERT(array != NULL); \
5600 if (!array->IsHeapObject()) \
5602 return array->map()->instance_type() == type; \
5606 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
5607 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
5608 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
5609 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
5610 EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
5611 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
5612 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
5613 EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
5614 EXTERNAL_ELEMENTS_CHECK(Float,
5615 EXTERNAL_FLOAT_ARRAY_TYPE)
5616 EXTERNAL_ELEMENTS_CHECK(Double,
5617 EXTERNAL_DOUBLE_ARRAY_TYPE)
5618 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
5621 bool JSObject::HasNamedInterceptor() {
5622 return map()->has_named_interceptor();
5626 bool JSObject::HasIndexedInterceptor() {
5627 return map()->has_indexed_interceptor();
5631 MaybeObject* JSObject::EnsureWritableFastElements() {
5632 ASSERT(HasFastSmiOrObjectElements());
5633 FixedArray* elems = FixedArray::cast(elements());
5634 Isolate* isolate = GetIsolate();
5635 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
5636 Object* writable_elems;
5637 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
5638 elems, isolate->heap()->fixed_array_map());
5639 if (!maybe_writable_elems->ToObject(&writable_elems)) {
5640 return maybe_writable_elems;
5643 set_elements(FixedArray::cast(writable_elems));
5644 isolate->counters()->cow_arrays_converted()->Increment();
5645 return writable_elems;
5649 NameDictionary* JSObject::property_dictionary() {
5650 ASSERT(!HasFastProperties());
5651 return NameDictionary::cast(properties());
5655 SeededNumberDictionary* JSObject::element_dictionary() {
5656 ASSERT(HasDictionaryElements());
5657 return SeededNumberDictionary::cast(elements());
5661 bool Name::IsHashFieldComputed(uint32_t field) {
5662 return (field & kHashNotComputedMask) == 0;
5666 bool Name::HasHashCode() {
5667 return IsHashFieldComputed(hash_field());
5671 uint32_t Name::Hash() {
5672 // Fast case: has hash code already been computed?
5673 uint32_t field = hash_field();
5674 if (IsHashFieldComputed(field)) return field >> kHashShift;
5675 // Slow case: compute hash code and set it. Has to be a string.
5676 return String::cast(this)->ComputeAndSetHash();
5680 StringHasher::StringHasher(int length, uint32_t seed)
5682 raw_running_hash_(seed),
5684 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
5685 is_first_char_(true) {
5686 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
5690 bool StringHasher::has_trivial_hash() {
5691 return length_ > String::kMaxHashCalcLength;
5695 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
5697 running_hash += (running_hash << 10);
5698 running_hash ^= (running_hash >> 6);
5699 return running_hash;
5703 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
5704 running_hash += (running_hash << 3);
5705 running_hash ^= (running_hash >> 11);
5706 running_hash += (running_hash << 15);
5707 if ((running_hash & String::kHashBitMask) == 0) {
5710 return running_hash;
5714 void StringHasher::AddCharacter(uint16_t c) {
5715 // Use the Jenkins one-at-a-time hash function to update the hash
5716 // for the given character.
5717 raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
5721 bool StringHasher::UpdateIndex(uint16_t c) {
5722 ASSERT(is_array_index_);
5723 if (c < '0' || c > '9') {
5724 is_array_index_ = false;
5728 if (is_first_char_) {
5729 is_first_char_ = false;
5730 if (c == '0' && length_ > 1) {
5731 is_array_index_ = false;
5735 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
5736 is_array_index_ = false;
5739 array_index_ = array_index_ * 10 + d;
5744 template<typename Char>
5745 inline void StringHasher::AddCharacters(const Char* chars, int length) {
5746 ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
5748 if (is_array_index_) {
5749 for (; i < length; i++) {
5750 AddCharacter(chars[i]);
5751 if (!UpdateIndex(chars[i])) {
5757 for (; i < length; i++) {
5758 ASSERT(!is_array_index_);
5759 AddCharacter(chars[i]);
5764 template <typename schar>
5765 uint32_t StringHasher::HashSequentialString(const schar* chars,
5768 StringHasher hasher(length, seed);
5769 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
5770 return hasher.GetHashField();
5774 bool Name::AsArrayIndex(uint32_t* index) {
5775 return IsString() && String::cast(this)->AsArrayIndex(index);
5779 bool String::AsArrayIndex(uint32_t* index) {
5780 uint32_t field = hash_field();
5781 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
5784 return SlowAsArrayIndex(index);
5788 Object* JSReceiver::GetPrototype() {
5789 return map()->prototype();
5793 Object* JSReceiver::GetConstructor() {
5794 return map()->constructor();
5798 bool JSReceiver::HasProperty(Handle<JSReceiver> object,
5799 Handle<Name> name) {
5800 if (object->IsJSProxy()) {
5801 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
5802 return JSProxy::HasPropertyWithHandler(proxy, name);
5804 return object->GetPropertyAttribute(*name) != ABSENT;
5808 bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
5809 Handle<Name> name) {
5810 if (object->IsJSProxy()) {
5811 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
5812 return JSProxy::HasPropertyWithHandler(proxy, name);
5814 return object->GetLocalPropertyAttribute(*name) != ABSENT;
5818 PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
5820 if (IsJSObject() && key->AsArrayIndex(&index)) {
5821 return GetElementAttribute(index);
5823 return GetPropertyAttributeWithReceiver(this, key);
5827 PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
5829 return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
5831 return JSObject::cast(this)->GetElementAttributeWithReceiver(
5836 // TODO(504): this may be useful in other places too where JSGlobalProxy
5838 Object* JSObject::BypassGlobalProxy() {
5839 if (IsJSGlobalProxy()) {
5840 Object* proto = GetPrototype();
5841 if (proto->IsNull()) return GetHeap()->undefined_value();
5842 ASSERT(proto->IsJSGlobalObject());
5849 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
5851 ? JSProxy::cast(this)->GetIdentityHash(flag)
5852 : JSObject::cast(this)->GetIdentityHash(flag);
5856 bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
5857 if (object->IsJSProxy()) {
5858 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
5859 return JSProxy::HasElementWithHandler(proxy, index);
5861 return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
5862 *object, index, true) != ABSENT;
5866 bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
5867 if (object->IsJSProxy()) {
5868 Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
5869 return JSProxy::HasElementWithHandler(proxy, index);
5871 return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
5872 *object, index, false) != ABSENT;
5876 PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
5878 return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
5880 return JSObject::cast(this)->GetElementAttributeWithReceiver(
5881 this, index, false);
5885 bool AccessorInfo::all_can_read() {
5886 return BooleanBit::get(flag(), kAllCanReadBit);
5890 void AccessorInfo::set_all_can_read(bool value) {
5891 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
5895 bool AccessorInfo::all_can_write() {
5896 return BooleanBit::get(flag(), kAllCanWriteBit);
5900 void AccessorInfo::set_all_can_write(bool value) {
5901 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
5905 bool AccessorInfo::prohibits_overwriting() {
5906 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
5910 void AccessorInfo::set_prohibits_overwriting(bool value) {
5911 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
5915 PropertyAttributes AccessorInfo::property_attributes() {
5916 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
5920 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
5921 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
5925 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
5926 Object* function_template = expected_receiver_type();
5927 if (!function_template->IsFunctionTemplateInfo()) return true;
5928 return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
5932 void AccessorPair::set_access_flags(v8::AccessControl access_control) {
5933 int current = access_flags()->value();
5934 current = BooleanBit::set(current,
5935 kProhibitsOverwritingBit,
5936 access_control & PROHIBITS_OVERWRITING);
5937 current = BooleanBit::set(current,
5939 access_control & ALL_CAN_READ);
5940 current = BooleanBit::set(current,
5942 access_control & ALL_CAN_WRITE);
5943 set_access_flags(Smi::FromInt(current));
5947 bool AccessorPair::all_can_read() {
5948 return BooleanBit::get(access_flags(), kAllCanReadBit);
5952 bool AccessorPair::all_can_write() {
5953 return BooleanBit::get(access_flags(), kAllCanWriteBit);
5957 bool AccessorPair::prohibits_overwriting() {
5958 return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
5962 template<typename Shape, typename Key>
5963 void Dictionary<Shape, Key>::SetEntry(int entry,
5966 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
5970 template<typename Shape, typename Key>
5971 void Dictionary<Shape, Key>::SetEntry(int entry,
5974 PropertyDetails details) {
5975 ASSERT(!key->IsName() ||
5976 details.IsDeleted() ||
5977 details.dictionary_index() > 0);
5978 int index = HashTable<Shape, Key>::EntryToIndex(entry);
5979 DisallowHeapAllocation no_gc;
5980 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
5981 FixedArray::set(index, key, mode);
5982 FixedArray::set(index+1, value, mode);
5983 FixedArray::set(index+2, details.AsSmi());
5987 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
5988 ASSERT(other->IsNumber());
5989 return key == static_cast<uint32_t>(other->Number());
5993 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
5994 return ComputeIntegerHash(key, 0);
5998 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6000 ASSERT(other->IsNumber());
6001 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
6004 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
6005 return ComputeIntegerHash(key, seed);
6008 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
6011 ASSERT(other->IsNumber());
6012 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6015 MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
6016 return heap->NumberFromUint32(key);
6020 bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
6021 // We know that all entries in a hash table had their hash keys created.
6022 // Use that knowledge to have fast failure.
6023 if (key->Hash() != Name::cast(other)->Hash()) return false;
6024 return key->Equals(Name::cast(other));
6028 uint32_t NameDictionaryShape::Hash(Name* key) {
6033 uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
6034 return Name::cast(other)->Hash();
6038 MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
6039 ASSERT(key->IsUniqueName());
6044 template <int entrysize>
6045 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
6046 return key->SameValue(other);
6050 template <int entrysize>
6051 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
6052 MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
6053 return Smi::cast(maybe_hash->ToObjectChecked())->value();
6057 template <int entrysize>
6058 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
6060 MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
6061 return Smi::cast(maybe_hash->ToObjectChecked())->value();
6065 template <int entrysize>
6066 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
6072 template <int entrysize>
6073 bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
6074 return key->SameValue(other);
6078 template <int entrysize>
6079 uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
6080 intptr_t hash = reinterpret_cast<intptr_t>(key);
6081 return (uint32_t)(hash & 0xFFFFFFFF);
6085 template <int entrysize>
6086 uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
6088 intptr_t hash = reinterpret_cast<intptr_t>(other);
6089 return (uint32_t)(hash & 0xFFFFFFFF);
6093 template <int entrysize>
6094 MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap,
6100 void Map::ClearCodeCache(Heap* heap) {
6101 // No write barrier is needed since empty_fixed_array is not in new space.
6102 // Please note this function is used during marking:
6103 // - MarkCompactCollector::MarkUnmarkedObject
6104 // - IncrementalMarking::Step
6105 ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
6106 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6110 void JSArray::EnsureSize(int required_size) {
6111 ASSERT(HasFastSmiOrObjectElements());
6112 FixedArray* elts = FixedArray::cast(elements());
6113 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6114 if (elts->length() < required_size) {
6115 // Doubling in size would be overkill, but leave some slack to avoid
6116 // constantly growing.
6117 Expand(required_size + (required_size >> 3));
6118 // It's a performance benefit to keep a frequently used array in new-space.
6119 } else if (!GetHeap()->new_space()->Contains(elts) &&
6120 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6121 // Expand will allocate a new backing store in new space even if the size
6122 // we asked for isn't larger than what we had before.
6123 Expand(required_size);
6128 void JSArray::set_length(Smi* length) {
6129 // Don't need a write barrier for a Smi.
6130 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
6134 bool JSArray::AllowsSetElementsLength() {
6135 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6136 ASSERT(result == !HasExternalArrayElements());
6141 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
6142 MaybeObject* maybe_result = EnsureCanContainElements(
6143 storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
6144 if (maybe_result->IsFailure()) return maybe_result;
6145 ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
6146 IsFastDoubleElementsKind(GetElementsKind())) ||
6147 ((storage->map() != GetHeap()->fixed_double_array_map()) &&
6148 (IsFastObjectElementsKind(GetElementsKind()) ||
6149 (IsFastSmiElementsKind(GetElementsKind()) &&
6150 FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
6151 set_elements(storage);
6152 set_length(Smi::FromInt(storage->length()));
6157 MaybeObject* FixedArray::Copy() {
6158 if (length() == 0) return this;
6159 return GetHeap()->CopyFixedArray(this);
6163 MaybeObject* FixedDoubleArray::Copy() {
6164 if (length() == 0) return this;
6165 return GetHeap()->CopyFixedDoubleArray(this);
6169 MaybeObject* ConstantPoolArray::Copy() {
6170 if (length() == 0) return this;
6171 return GetHeap()->CopyConstantPoolArray(this);
6175 void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
6176 set(1 + index * 2, Smi::FromInt(id.ToInt()));
6180 TypeFeedbackId TypeFeedbackCells::AstId(int index) {
6181 return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
6185 void TypeFeedbackCells::SetCell(int index, Cell* cell) {
6186 set(index * 2, cell);
6190 Cell* TypeFeedbackCells::GetCell(int index) {
6191 return Cell::cast(get(index * 2));
6195 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
6196 return isolate->factory()->the_hole_value();
6200 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
6201 return isolate->factory()->undefined_value();
6205 Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate,
6206 ElementsKind elements_kind) {
6207 return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
6211 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
6212 return heap->the_hole_value();
6216 int TypeFeedbackInfo::ic_total_count() {
6217 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6218 return ICTotalCountField::decode(current);
6222 void TypeFeedbackInfo::set_ic_total_count(int count) {
6223 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6224 value = ICTotalCountField::update(value,
6225 ICTotalCountField::decode(count));
6226 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6230 int TypeFeedbackInfo::ic_with_type_info_count() {
6231 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6232 return ICsWithTypeInfoCountField::decode(current);
6236 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
6237 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6238 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
6239 // We can get negative count here when the type-feedback info is
6240 // shared between two code objects. The can only happen when
6241 // the debugger made a shallow copy of code object (see Heap::CopyCode).
6242 // Since we do not optimize when the debugger is active, we can skip
6243 // this counter update.
6244 if (new_count >= 0) {
6245 new_count &= ICsWithTypeInfoCountField::kMask;
6246 value = ICsWithTypeInfoCountField::update(value, new_count);
6247 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6252 void TypeFeedbackInfo::initialize_storage() {
6253 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
6254 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
6258 void TypeFeedbackInfo::change_own_type_change_checksum() {
6259 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6260 int checksum = OwnTypeChangeChecksum::decode(value);
6261 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
6262 value = OwnTypeChangeChecksum::update(value, checksum);
6263 // Ensure packed bit field is in Smi range.
6264 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
6265 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
6266 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
6270 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
6271 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6272 int mask = (1 << kTypeChangeChecksumBits) - 1;
6273 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
6274 // Ensure packed bit field is in Smi range.
6275 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
6276 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
6277 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
6281 int TypeFeedbackInfo::own_type_change_checksum() {
6282 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
6283 return OwnTypeChangeChecksum::decode(value);
6287 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
6288 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
6289 int mask = (1 << kTypeChangeChecksumBits) - 1;
6290 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
6294 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
6295 kTypeFeedbackCellsOffset)
6298 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
6301 Relocatable::Relocatable(Isolate* isolate) {
6303 prev_ = isolate->relocatable_top();
6304 isolate->set_relocatable_top(this);
6308 Relocatable::~Relocatable() {
6309 ASSERT_EQ(isolate_->relocatable_top(), this);
6310 isolate_->set_relocatable_top(prev_);
6314 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
6315 return map->instance_size();
6319 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
6320 v->VisitExternalReference(
6321 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
6325 template<typename StaticVisitor>
6326 void Foreign::ForeignIterateBody() {
6327 StaticVisitor::VisitExternalReference(
6328 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
6332 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
6333 typedef v8::String::ExternalAsciiStringResource Resource;
6334 v->VisitExternalAsciiString(
6335 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6339 template<typename StaticVisitor>
6340 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
6341 typedef v8::String::ExternalAsciiStringResource Resource;
6342 StaticVisitor::VisitExternalAsciiString(
6343 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6347 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
6348 typedef v8::String::ExternalStringResource Resource;
6349 v->VisitExternalTwoByteString(
6350 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6354 template<typename StaticVisitor>
6355 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
6356 typedef v8::String::ExternalStringResource Resource;
6357 StaticVisitor::VisitExternalTwoByteString(
6358 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
6362 template<int start_offset, int end_offset, int size>
6363 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
6366 v->VisitPointers(HeapObject::RawField(obj, start_offset),
6367 HeapObject::RawField(obj, end_offset));
6371 template<int start_offset>
6372 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
6375 v->VisitPointers(HeapObject::RawField(obj, start_offset),
6376 HeapObject::RawField(obj, object_size));
6381 #undef CAST_ACCESSOR
6382 #undef INT_ACCESSORS
6384 #undef ACCESSORS_TO_SMI
6385 #undef SMI_ACCESSORS
6387 #undef BOOL_ACCESSORS
6391 #undef WRITE_BARRIER
6392 #undef CONDITIONAL_WRITE_BARRIER
6393 #undef READ_DOUBLE_FIELD
6394 #undef WRITE_DOUBLE_FIELD
6395 #undef READ_INT_FIELD
6396 #undef WRITE_INT_FIELD
6397 #undef READ_INTPTR_FIELD
6398 #undef WRITE_INTPTR_FIELD
6399 #undef READ_UINT32_FIELD
6400 #undef WRITE_UINT32_FIELD
6401 #undef READ_SHORT_FIELD
6402 #undef WRITE_SHORT_FIELD
6403 #undef READ_BYTE_FIELD
6404 #undef WRITE_BYTE_FIELD
6407 } } // namespace v8::internal
6409 #endif // V8_OBJECTS_INL_H_