1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 // Review notes:
29 //
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73         HeapObject::cast(this)->map()->instance_type() == instancetype; \
74   }
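// Editorial illustration (not part of the original header): for example,
// TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) below expands to
//
//   bool Object::IsHeapNumber() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
//   }
//
// i.e. each checker stays a single, trivially inlinable function, which is
// what the review note above about gcc and deep inlining is aiming for.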
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
97 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
98 #define ACCESSORS_TO_SMI(holder, name, offset) \
99 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
100 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
101 WRITE_FIELD(this, offset, value); \
105 // Getter that returns a Smi as an int and writes an int as a Smi.
106 #define SMI_ACCESSORS(holder, name, offset) \
107 int holder::name() { \
108 Object* value = READ_FIELD(this, offset); \
109 return Smi::cast(value)->value(); \
111 void holder::set_##name(int value) { \
112 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define BOOL_GETTER(holder, field, name, offset) \
117 bool holder::name() { \
118 return BooleanBit::get(field(), offset); \
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123 bool holder::name() { \
124 return BooleanBit::get(field(), offset); \
126 void holder::set_##name(bool value) { \
127 set_##field(BooleanBit::set(field(), offset, value)); \
131 bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
132 ElementsKind to_kind) {
133 if (to_kind == FAST_ELEMENTS) {
134 return from_kind == FAST_SMI_ONLY_ELEMENTS ||
135       from_kind == FAST_DOUBLE_ELEMENTS;
136   }
137   return to_kind == FAST_DOUBLE_ELEMENTS &&
138       from_kind == FAST_SMI_ONLY_ELEMENTS;
139 }
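// Editorial note (added commentary): with the fast kinds ordered
// FAST_SMI_ONLY_ELEMENTS < FAST_DOUBLE_ELEMENTS < FAST_ELEMENTS in generality,
// the function above only answers true for transitions that move "up" this
// ordering, e.g.
//
//   IsMoreGeneralElementsKindTransition(FAST_SMI_ONLY_ELEMENTS,
//                                       FAST_DOUBLE_ELEMENTS)   // true
//   IsMoreGeneralElementsKindTransition(FAST_ELEMENTS,
//                                       FAST_DOUBLE_ELEMENTS)   // false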
143 bool Object::IsFixedArrayBase() {
144 return IsFixedArray() || IsFixedDoubleArray();
148 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
149 // There is a constraint on the object; check.
150 if (!this->IsJSObject()) return false;
151 // Fetch the constructor function of the object.
152 Object* cons_obj = JSObject::cast(this)->map()->constructor();
153 if (!cons_obj->IsJSFunction()) return false;
154 JSFunction* fun = JSFunction::cast(cons_obj);
155 // Iterate through the chain of inheriting function templates to
156 // see if the required one occurs.
157 for (Object* type = fun->shared()->function_data();
158 type->IsFunctionTemplateInfo();
159 type = FunctionTemplateInfo::cast(type)->parent_template()) {
160 if (type == expected) return true;
162   // Didn't find the required type in the inheritance chain.
163   return false;
164 }
167 bool Object::IsSmi() {
168 return HAS_SMI_TAG(this);
172 bool Object::IsHeapObject() {
173 return Internals::HasHeapObjectTag(this);
177 bool Object::NonFailureIsHeapObject() {
178 ASSERT(!this->IsFailure());
179 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
183 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
186 bool Object::IsString() {
187 return Object::IsHeapObject()
188 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
192 bool Object::IsSpecObject() {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
198 bool Object::IsSpecFunction() {
199 if (!Object::IsHeapObject()) return false;
200 InstanceType type = HeapObject::cast(this)->map()->instance_type();
201 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
205 bool Object::IsSymbol() {
206 if (!this->IsHeapObject()) return false;
207 uint32_t type = HeapObject::cast(this)->map()->instance_type();
208 // Because the symbol tag is non-zero and no non-string types have the
209   // symbol bit set we can test for symbols with a very simple test
210   // operation.
211 STATIC_ASSERT(kSymbolTag != 0);
212 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
213 return (type & kIsSymbolMask) != 0;
217 bool Object::IsConsString() {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();
223 bool Object::IsSlicedString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();
229 bool Object::IsSeqString() {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();
235 bool Object::IsSeqAsciiString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsAsciiRepresentation();
242 bool Object::IsSeqTwoByteString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();
249 bool Object::IsExternalString() {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();
255 bool Object::IsExternalAsciiString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsAsciiRepresentation();
262 bool Object::IsExternalTwoByteString() {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
273 StringShape::StringShape(String* str)
274 : type_(str->map()->instance_type()) {
276 ASSERT((type_ & kIsNotStringMask) == kStringTag);
280 StringShape::StringShape(Map* map)
281 : type_(map->instance_type()) {
283 ASSERT((type_ & kIsNotStringMask) == kStringTag);
287 StringShape::StringShape(InstanceType t)
288 : type_(static_cast<uint32_t>(t)) {
290 ASSERT((type_ & kIsNotStringMask) == kStringTag);
294 bool StringShape::IsSymbol() {
296 STATIC_ASSERT(kSymbolTag != 0);
297 return (type_ & kIsSymbolMask) != 0;
301 bool String::IsAsciiRepresentation() {
302 uint32_t type = map()->instance_type();
303 return (type & kStringEncodingMask) == kAsciiStringTag;
307 bool String::IsTwoByteRepresentation() {
308 uint32_t type = map()->instance_type();
309 return (type & kStringEncodingMask) == kTwoByteStringTag;
313 bool String::IsAsciiRepresentationUnderneath() {
314 uint32_t type = map()->instance_type();
315 STATIC_ASSERT(kIsIndirectStringTag != 0);
316 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
318 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
319     case kAsciiStringTag:
320       return true;
321     case kTwoByteStringTag:
322       return false;
323     default:  // Cons or sliced string.  Need to go deeper.
324       return GetUnderlying()->IsAsciiRepresentation();
325   }
326 }
329 bool String::IsTwoByteRepresentationUnderneath() {
330 uint32_t type = map()->instance_type();
331 STATIC_ASSERT(kIsIndirectStringTag != 0);
332 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
334 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
335     case kAsciiStringTag:
336       return false;
337     case kTwoByteStringTag:
338       return true;
339     default:  // Cons or sliced string.  Need to go deeper.
340       return GetUnderlying()->IsTwoByteRepresentation();
341   }
342 }
345 bool String::HasOnlyAsciiChars() {
346 uint32_t type = map()->instance_type();
347 return (type & kStringEncodingMask) == kAsciiStringTag ||
348 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
352 bool StringShape::IsCons() {
353 return (type_ & kStringRepresentationMask) == kConsStringTag;
357 bool StringShape::IsSliced() {
358 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
362 bool StringShape::IsIndirect() {
363 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
367 bool StringShape::IsExternal() {
368 return (type_ & kStringRepresentationMask) == kExternalStringTag;
372 bool StringShape::IsSequential() {
373 return (type_ & kStringRepresentationMask) == kSeqStringTag;
377 StringRepresentationTag StringShape::representation_tag() {
378 uint32_t tag = (type_ & kStringRepresentationMask);
379 return static_cast<StringRepresentationTag>(tag);
383 uint32_t StringShape::encoding_tag() {
384 return type_ & kStringEncodingMask;
388 uint32_t StringShape::full_representation_tag() {
389 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
393 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
394 Internals::kFullStringRepresentationMask);
397 bool StringShape::IsSequentialAscii() {
398 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
402 bool StringShape::IsSequentialTwoByte() {
403 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
407 bool StringShape::IsExternalAscii() {
408 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
412 bool StringShape::IsExternalTwoByte() {
413 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
417 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
418 Internals::kExternalTwoByteRepresentationTag);
421 uc32 FlatStringReader::Get(int index) {
422 ASSERT(0 <= index && index <= length_);
423   if (is_ascii_) {
424     return static_cast<const byte*>(start_)[index];
425   } else {
426     return static_cast<const uc16*>(start_)[index];
427   }
428 }
431 bool Object::IsNumber() {
432 return IsSmi() || IsHeapNumber();
436 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
437 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
440 bool Object::IsFiller() {
441 if (!Object::IsHeapObject()) return false;
442 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
443 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
447 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
450 bool Object::IsExternalArray() {
451   if (!Object::IsHeapObject())
452     return false;
453 InstanceType instance_type =
454 HeapObject::cast(this)->map()->instance_type();
455 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
456 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
460 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
461 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
462 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
463 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
464 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
465 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
466 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
467 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
470 bool MaybeObject::IsFailure() {
471 return HAS_FAILURE_TAG(this);
475 bool MaybeObject::IsRetryAfterGC() {
476 return HAS_FAILURE_TAG(this)
477 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
481 bool MaybeObject::IsOutOfMemory() {
482 return HAS_FAILURE_TAG(this)
483 && Failure::cast(this)->IsOutOfMemoryException();
487 bool MaybeObject::IsException() {
488 return this == Failure::Exception();
492 bool MaybeObject::IsTheHole() {
493 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
497 Failure* Failure::cast(MaybeObject* obj) {
498 ASSERT(HAS_FAILURE_TAG(obj));
499 return reinterpret_cast<Failure*>(obj);
503 bool Object::IsJSReceiver() {
504 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
505 return IsHeapObject() &&
506 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
510 bool Object::IsJSObject() {
511 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
512 return IsHeapObject() &&
513 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
517 bool Object::IsJSProxy() {
518 if (!Object::IsHeapObject()) return false;
519 InstanceType type = HeapObject::cast(this)->map()->instance_type();
520 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
524 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
525 TYPE_CHECKER(JSSet, JS_SET_TYPE)
526 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
527 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
528 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
529 TYPE_CHECKER(Map, MAP_TYPE)
530 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
531 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
534 bool Object::IsDescriptorArray() {
535 return IsFixedArray();
539 bool Object::IsDeoptimizationInputData() {
540 // Must be a fixed array.
541 if (!IsFixedArray()) return false;
543 // There's no sure way to detect the difference between a fixed array and
544 // a deoptimization data array. Since this is used for asserts we can
545   // check that the length is zero or else the fixed size plus a multiple of
546   // the entry size.
547 int length = FixedArray::cast(this)->length();
548 if (length == 0) return true;
550 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
551 return length >= 0 &&
552 length % DeoptimizationInputData::kDeoptEntrySize == 0;
556 bool Object::IsDeoptimizationOutputData() {
557 if (!IsFixedArray()) return false;
558 // There's actually no way to see the difference between a fixed array and
559 // a deoptimization data array. Since this is used for asserts we can check
560 // that the length is plausible though.
561   if (FixedArray::cast(this)->length() % 2 != 0) return false;
562   return true;
563 }
566 bool Object::IsTypeFeedbackCells() {
567 if (!IsFixedArray()) return false;
568 // There's actually no way to see the difference between a fixed array and
569 // a cache cells array. Since this is used for asserts we can check that
570 // the length is plausible though.
571   if (FixedArray::cast(this)->length() % 2 != 0) return false;
572   return true;
573 }
576 bool Object::IsContext() {
577 if (Object::IsHeapObject()) {
578 Map* map = HeapObject::cast(this)->map();
579 Heap* heap = map->GetHeap();
580 return (map == heap->function_context_map() ||
581 map == heap->catch_context_map() ||
582 map == heap->with_context_map() ||
583 map == heap->global_context_map() ||
584             map == heap->block_context_map());
585   }
586   return false;
587 }
590 bool Object::IsGlobalContext() {
591 return Object::IsHeapObject() &&
592 HeapObject::cast(this)->map() ==
593 HeapObject::cast(this)->GetHeap()->global_context_map();
597 bool Object::IsScopeInfo() {
598 return Object::IsHeapObject() &&
599 HeapObject::cast(this)->map() ==
600 HeapObject::cast(this)->GetHeap()->scope_info_map();
604 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
607 template <> inline bool Is<JSFunction>(Object* obj) {
608 return obj->IsJSFunction();
612 TYPE_CHECKER(Code, CODE_TYPE)
613 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
614 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
615 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
616 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
617 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
618 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
621 bool Object::IsStringWrapper() {
622 return IsJSValue() && JSValue::cast(this)->value()->IsString();
626 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
629 bool Object::IsBoolean() {
630 return IsOddball() &&
631 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
635 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
636 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
639 template <> inline bool Is<JSArray>(Object* obj) {
640 return obj->IsJSArray();
644 bool Object::IsHashTable() {
645 return Object::IsHeapObject() &&
646 HeapObject::cast(this)->map() ==
647 HeapObject::cast(this)->GetHeap()->hash_table_map();
651 bool Object::IsDictionary() {
652 return IsHashTable() &&
653 this != HeapObject::cast(this)->GetHeap()->symbol_table();
657 bool Object::IsSymbolTable() {
658 return IsHashTable() && this ==
659 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
663 bool Object::IsJSFunctionResultCache() {
664 if (!IsFixedArray()) return false;
665 FixedArray* self = FixedArray::cast(this);
666 int length = self->length();
667 if (length < JSFunctionResultCache::kEntriesIndex) return false;
668 if ((length - JSFunctionResultCache::kEntriesIndex)
669       % JSFunctionResultCache::kEntrySize != 0) {
670     return false;
671   }
672 #ifdef DEBUG
673   if (FLAG_verify_heap) {
674     reinterpret_cast<JSFunctionResultCache*>(this)->
675         JSFunctionResultCacheVerify();
676   }
677 #endif
678   return true;
679 }
682 bool Object::IsNormalizedMapCache() {
683 if (!IsFixedArray()) return false;
684   if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
685     return false;
686   }
687 #ifdef DEBUG
688   if (FLAG_verify_heap) {
689     reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
690   }
691 #endif
692   return true;
693 }
696 bool Object::IsCompilationCacheTable() {
697 return IsHashTable();
701 bool Object::IsCodeCacheHashTable() {
702 return IsHashTable();
706 bool Object::IsPolymorphicCodeCacheHashTable() {
707 return IsHashTable();
711 bool Object::IsMapCache() {
712 return IsHashTable();
716 bool Object::IsPrimitive() {
717 return IsOddball() || IsNumber() || IsString();
721 bool Object::IsJSGlobalProxy() {
722 bool result = IsHeapObject() &&
723 (HeapObject::cast(this)->map()->instance_type() ==
724 JS_GLOBAL_PROXY_TYPE);
725   ASSERT(!result || IsAccessCheckNeeded());
726   return result;
727 }
730 bool Object::IsGlobalObject() {
731 if (!IsHeapObject()) return false;
733 InstanceType type = HeapObject::cast(this)->map()->instance_type();
734 return type == JS_GLOBAL_OBJECT_TYPE ||
735 type == JS_BUILTINS_OBJECT_TYPE;
739 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
740 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
743 bool Object::IsUndetectableObject() {
744 return IsHeapObject()
745 && HeapObject::cast(this)->map()->is_undetectable();
749 bool Object::IsAccessCheckNeeded() {
750 return IsHeapObject()
751 && HeapObject::cast(this)->map()->is_access_check_needed();
755 bool Object::IsStruct() {
756 if (!IsHeapObject()) return false;
757 switch (HeapObject::cast(this)->map()->instance_type()) {
758 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
759 STRUCT_LIST(MAKE_STRUCT_CASE)
760 #undef MAKE_STRUCT_CASE
761 default: return false;
766 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
767 bool Object::Is##Name() { \
768 return Object::IsHeapObject() \
769 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
771 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
772 #undef MAKE_STRUCT_PREDICATE
775 bool Object::IsUndefined() {
776 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
780 bool Object::IsNull() {
781 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
785 bool Object::IsTheHole() {
786 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
790 bool Object::IsTrue() {
791 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
795 bool Object::IsFalse() {
796 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
800 bool Object::IsArgumentsMarker() {
801 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
805 double Object::Number() {
806   ASSERT(IsNumber());
807   return IsSmi()
808     ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
809     : reinterpret_cast<HeapNumber*>(this)->value();
810 }
813 bool Object::IsNaN() {
814 return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
818 MaybeObject* Object::ToSmi() {
819 if (IsSmi()) return this;
820 if (IsHeapNumber()) {
821 double value = HeapNumber::cast(this)->value();
822 int int_value = FastD2I(value);
823 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
824 return Smi::FromInt(int_value);
827 return Failure::Exception();
831 bool Object::HasSpecificClassOf(String* name) {
832 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
836 MaybeObject* Object::GetElement(uint32_t index) {
837 // GetElement can trigger a getter which can cause allocation.
838 // This was not always the case. This ASSERT is here to catch
839 // leftover incorrect uses.
840 ASSERT(HEAP->IsAllocationAllowed());
841 return GetElementWithReceiver(this, index);
845 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
846 MaybeObject* maybe = GetElementWithReceiver(this, index);
847 ASSERT(!maybe->IsFailure());
848 Object* result = NULL; // Initialization to please compiler.
849   maybe->ToObject(&result);
850   return result;
851 }
854 MaybeObject* Object::GetProperty(String* key) {
855 PropertyAttributes attributes;
856 return GetPropertyWithReceiver(this, key, &attributes);
860 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
861 return GetPropertyWithReceiver(this, key, attributes);
865 #define FIELD_ADDR(p, offset) \
866 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
868 #define READ_FIELD(p, offset) \
869 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
871 #define WRITE_FIELD(p, offset, value) \
872 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
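// Worked example (editorial sketch, not in the original source): heap object
// pointers carry kHeapObjectTag == 1 in their low bits, so FIELD_ADDR undoes
// the tag before indexing.  For an object whose real start address is 0x1000,
// `p` is 0x1001 and
//
//   FIELD_ADDR(p, HeapObject::kMapOffset)   // 0x1001 + 0 - 1 == 0x1000
//
// points at the map word stored in the first slot of the object.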
874 #define WRITE_BARRIER(heap, object, offset, value) \
875 heap->incremental_marking()->RecordWrite( \
876 object, HeapObject::RawField(object, offset), value); \
877 if (heap->InNewSpace(value)) { \
878 heap->RecordWrite(object->address(), offset); \
881 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
882 if (mode == UPDATE_WRITE_BARRIER) { \
883 heap->incremental_marking()->RecordWrite( \
884 object, HeapObject::RawField(object, offset), value); \
885 if (heap->InNewSpace(value)) { \
886 heap->RecordWrite(object->address(), offset); \
890 #ifndef V8_TARGET_ARCH_MIPS
891 #define READ_DOUBLE_FIELD(p, offset) \
892 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
893 #else // V8_TARGET_ARCH_MIPS
894 // Prevent gcc from using load-double (mips ldc1) on (possibly)
895 // non-64-bit aligned HeapNumber::value.
896 static inline double read_double_field(void* p, int offset) {
897   union conversion {
898     double d;
899     uint32_t u[2];
900   } c;
901   c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
902   c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
903   return c.d;
904 }
905 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
906 #endif // V8_TARGET_ARCH_MIPS
908 #ifndef V8_TARGET_ARCH_MIPS
909 #define WRITE_DOUBLE_FIELD(p, offset, value) \
910 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
911 #else // V8_TARGET_ARCH_MIPS
912 // Prevent gcc from using store-double (mips sdc1) on (possibly)
913 // non-64-bit aligned HeapNumber::value.
914 static inline void write_double_field(void* p, int offset,
915                                       double value) {
916   union conversion {
917     double d;
918     uint32_t u[2];
919   } c;
920   c.d = value;
921   (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
922   (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
923 }
924 #define WRITE_DOUBLE_FIELD(p, offset, value) \
925 write_double_field(p, offset, value)
926 #endif // V8_TARGET_ARCH_MIPS
929 #define READ_INT_FIELD(p, offset) \
930 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
932 #define WRITE_INT_FIELD(p, offset, value) \
933 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
935 #define READ_INTPTR_FIELD(p, offset) \
936 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
938 #define WRITE_INTPTR_FIELD(p, offset, value) \
939 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
941 #define READ_UINT32_FIELD(p, offset) \
942 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
944 #define WRITE_UINT32_FIELD(p, offset, value) \
945 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
947 #define READ_INT64_FIELD(p, offset) \
948 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
950 #define WRITE_INT64_FIELD(p, offset, value) \
951 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
953 #define READ_SHORT_FIELD(p, offset) \
954 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
956 #define WRITE_SHORT_FIELD(p, offset, value) \
957 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
959 #define READ_BYTE_FIELD(p, offset) \
960 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
962 #define WRITE_BYTE_FIELD(p, offset, value) \
963 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
966 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
967   return &READ_FIELD(obj, byte_offset);
968 }
971 int Smi::value() {
972   return Internals::SmiValue(this);
973 }
976 Smi* Smi::FromInt(int value) {
977 ASSERT(Smi::IsValid(value));
978 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
979 intptr_t tagged_value =
980 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
981   return reinterpret_cast<Smi*>(tagged_value);
982 }
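// Worked example (editorial, assuming a 32-bit target where kSmiTagSize == 1
// and kSmiShiftSize == 0): Smi::FromInt(5) produces the tagged word
//
//   (5 << 1) | kSmiTag  ==  0xA
//
// and Smi::value() recovers 5 by shifting back.  On 64-bit targets the shift
// is kSmiTagSize + kSmiShiftSize == 32, so the payload occupies the upper
// half of the word and the low 32 bits hold only the tag.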
985 Smi* Smi::FromIntptr(intptr_t value) {
986 ASSERT(Smi::IsValid(value));
987 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
988 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
992 Failure::Type Failure::type() const {
993 return static_cast<Type>(value() & kFailureTypeTagMask);
997 bool Failure::IsInternalError() const {
998 return type() == INTERNAL_ERROR;
1002 bool Failure::IsOutOfMemoryException() const {
1003 return type() == OUT_OF_MEMORY_EXCEPTION;
1007 AllocationSpace Failure::allocation_space() const {
1008 ASSERT_EQ(RETRY_AFTER_GC, type());
1009   return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1010                                       & kSpaceTagMask);
1011 }
1014 Failure* Failure::InternalError() {
1015 return Construct(INTERNAL_ERROR);
1019 Failure* Failure::Exception() {
1020 return Construct(EXCEPTION);
1024 Failure* Failure::OutOfMemoryException() {
1025 return Construct(OUT_OF_MEMORY_EXCEPTION);
1029 intptr_t Failure::value() const {
1030 return static_cast<intptr_t>(
1031 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1035 Failure* Failure::RetryAfterGC() {
1036 return RetryAfterGC(NEW_SPACE);
1040 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1041 ASSERT((space & ~kSpaceTagMask) == 0);
1042 return Construct(RETRY_AFTER_GC, space);
1046 Failure* Failure::Construct(Type type, intptr_t value) {
1047   uintptr_t info =
1048       (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1049 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1050   return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
1051 }
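// Layout sketch (editorial, assuming kFailureTagSize == 2 and the failure tag
// being the low bit pattern 11): Construct() packs a Failure as
//
//   | value (e.g. allocation space) | type (2 bits) | 1 1 |
//
// so IsFailure() is a cheap low-bit test, and allocation_space() above
// recovers the space id for RETRY_AFTER_GC failures by shifting the type
// field back out.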
1054 bool Smi::IsValid(intptr_t value) {
1055 #ifdef DEBUG
1056   bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1057 #endif
1059 #ifdef V8_TARGET_ARCH_X64
1060 // To be representable as a long smi, the value must be a 32-bit integer.
1061   bool result = (value == static_cast<int32_t>(value));
1062 #else
1063   // To be representable as a tagged small integer, the two
1064 // most-significant bits of 'value' must be either 00 or 11 due to
1065 // sign-extension. To check this we add 01 to the two
1066 // most-significant bits, and check if the most-significant bit is 0
1068 // CAUTION: The original code below:
1069 // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1070 // may lead to incorrect results according to the C language spec, and
1071 // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1072 // compiler may produce undefined results in case of signed integer
1073 // overflow. The computation must be done w/ unsigned ints.
1074   bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1075 #endif
1076   ASSERT(result == in_range);
1077   return result;
1078 }
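// Quick check of the unsigned-add trick (editorial, 32-bit case where
// kMaxValue == 0x3FFFFFFF): for value == 0x3FFFFFFF the sum is 0x7FFFFFFF,
// which is < 0x80000000U, so the value is accepted; for value == 0x40000000
// (one too large) the sum is exactly 0x80000000U and the test rejects it,
// matching the in_range computation without relying on signed overflow.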
1081 MapWord MapWord::FromMap(Map* map) {
1082 return MapWord(reinterpret_cast<uintptr_t>(map));
1086 Map* MapWord::ToMap() {
1087 return reinterpret_cast<Map*>(value_);
1091 bool MapWord::IsForwardingAddress() {
1092 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1096 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1097 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1098 return MapWord(reinterpret_cast<uintptr_t>(raw));
1102 HeapObject* MapWord::ToForwardingAddress() {
1103 ASSERT(IsForwardingAddress());
1104   return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1105 }
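// Editorial sketch (added commentary): during evacuation the collector stores
// the new copy's address in the old object's map slot, so the same word is
// read back in one of two ways:
//
//   MapWord w = obj->map_word();
//   if (w.IsForwardingAddress()) {
//     HeapObject* copy = w.ToForwardingAddress();  // already moved
//   } else {
//     Map* map = w.ToMap();                        // ordinary map pointer
//   }
//
// The Smi-tag test works because FromForwardingAddress() strips
// kHeapObjectTag, leaving an even value that a real map pointer never has.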
1108 #ifdef DEBUG
1109 void HeapObject::VerifyObjectField(int offset) {
1110 VerifyPointer(READ_FIELD(this, offset));
1113 void HeapObject::VerifySmiField(int offset) {
1114   ASSERT(READ_FIELD(this, offset)->IsSmi());
1115 }
1116 #endif
1119 Heap* HeapObject::GetHeap() {
1120   Heap* heap =
1121       MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1122   ASSERT(heap != NULL);
1123   ASSERT(heap->isolate() == Isolate::Current());
1124   return heap;
1125 }
1128 Isolate* HeapObject::GetIsolate() {
1129 return GetHeap()->isolate();
1133 Map* HeapObject::map() {
1134 return map_word().ToMap();
1138 void HeapObject::set_map(Map* value) {
1139 set_map_word(MapWord::FromMap(value));
1140 if (value != NULL) {
1141 // TODO(1600) We are passing NULL as a slot because maps can never be on
1142 // evacuation candidate.
1143 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1148 // Unsafe accessor omitting write barrier.
1149 void HeapObject::set_map_no_write_barrier(Map* value) {
1150 set_map_word(MapWord::FromMap(value));
1154 MapWord HeapObject::map_word() {
1155 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1159 void HeapObject::set_map_word(MapWord map_word) {
1160   // WRITE_FIELD does not invoke write barrier, but there is no need
1161   // here.
1162 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1166 HeapObject* HeapObject::FromAddress(Address address) {
1167 ASSERT_TAG_ALIGNED(address);
1168 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1172 Address HeapObject::address() {
1173 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1177 int HeapObject::Size() {
1178 return SizeFromMap(map());
1182 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1183 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1184 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1188 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1189 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1193 double HeapNumber::value() {
1194 return READ_DOUBLE_FIELD(this, kValueOffset);
1198 void HeapNumber::set_value(double value) {
1199 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1203 int HeapNumber::get_exponent() {
1204 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1205 kExponentShift) - kExponentBias;
1209 int HeapNumber::get_sign() {
1210   return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1211 }
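// Worked example (editorial): for the value 1.0 the upper 32 bits of the
// double are 0x3FF00000, so
//
//   get_exponent() == ((0x3FF00000 & kExponentMask) >> kExponentShift)
//                     - kExponentBias  ==  1023 - 1023  ==  0
//
// and get_sign() returns 0 because the sign bit is clear for positive values.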
1214 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1217 Object** FixedArray::GetFirstElementAddress() {
1218 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1222 bool FixedArray::ContainsOnlySmisOrHoles() {
1223 Object* the_hole = GetHeap()->the_hole_value();
1224 Object** current = GetFirstElementAddress();
1225 for (int i = 0; i < length(); ++i) {
1226 Object* candidate = *current++;
1227     if (!candidate->IsSmi() && candidate != the_hole) return false;
1228   }
1229   return true;
1230 }
1233 FixedArrayBase* JSObject::elements() {
1234 Object* array = READ_FIELD(this, kElementsOffset);
1235 return static_cast<FixedArrayBase*>(array);
1238 void JSObject::ValidateSmiOnlyElements() {
1239 #if DEBUG
1240 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1241 Heap* heap = GetHeap();
1242 // Don't use elements, since integrity checks will fail if there
1243 // are filler pointers in the array.
1244 FixedArray* fixed_array =
1245 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1246 Map* map = fixed_array->map();
1247 // Arrays that have been shifted in place can't be verified.
1248 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1249 map != heap->raw_unchecked_two_pointer_filler_map() &&
1250 map != heap->free_space_map()) {
1251 for (int i = 0; i < fixed_array->length(); i++) {
1252 Object* current = fixed_array->get(i);
1253         ASSERT(current->IsSmi() || current->IsTheHole());
1254       }
1255     }
1256   }
1257 #endif
1258 }
1261 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
1262 #if DEBUG
1263   ValidateSmiOnlyElements();
1264 #endif
1265 if ((map()->elements_kind() != FAST_ELEMENTS)) {
1266     return TransitionElementsKind(FAST_ELEMENTS);
1267   }
1268   return this;
1269 }
1272 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1274 EnsureElementsMode mode) {
1275 ElementsKind current_kind = map()->elements_kind();
1276 ElementsKind target_kind = current_kind;
1277 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1278 if (current_kind == FAST_ELEMENTS) return this;
1280 Heap* heap = GetHeap();
1281 Object* the_hole = heap->the_hole_value();
1282 Object* heap_number_map = heap->heap_number_map();
1283 for (uint32_t i = 0; i < count; ++i) {
1284 Object* current = *objects++;
1285 if (!current->IsSmi() && current != the_hole) {
1286 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
1287 HeapObject::cast(current)->map() == heap_number_map) {
1288         target_kind = FAST_DOUBLE_ELEMENTS;
1289       } else {
1290         target_kind = FAST_ELEMENTS;
1291         break;
1292       }
1293     }
1294   }
1296 if (target_kind != current_kind) {
1297     return TransitionElementsKind(target_kind);
1298   }
1299   return this;
1300 }
1303 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1304 EnsureElementsMode mode) {
1305 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1306 ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1307 elements->map() == GetHeap()->fixed_cow_array_map());
1308 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1309 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1311 Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1312 return EnsureCanContainElements(objects, elements->length(), mode);
1315 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1316 if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
1317     return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
1318   }
1319   return this;
1320 }
1324 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1325 ElementsKind to_kind) {
1326 Map* current_map = map();
1327 ElementsKind from_kind = current_map->elements_kind();
1329 if (from_kind == to_kind) return current_map;
1331 Context* global_context = isolate->context()->global_context();
1332 if (current_map == global_context->smi_js_array_map()) {
1333 if (to_kind == FAST_ELEMENTS) {
1334 return global_context->object_js_array_map();
1336 if (to_kind == FAST_DOUBLE_ELEMENTS) {
1337 return global_context->double_js_array_map();
1339 ASSERT(to_kind == DICTIONARY_ELEMENTS);
1343 return GetElementsTransitionMapSlow(to_kind);
1347 void JSObject::set_map_and_elements(Map* new_map,
1348 FixedArrayBase* value,
1349 WriteBarrierMode mode) {
1350 ASSERT(value->HasValidElements());
1351 #if DEBUG
1352   ValidateSmiOnlyElements();
1353 #endif
1354 if (new_map != NULL) {
1355     if (mode == UPDATE_WRITE_BARRIER) {
1356       set_map(new_map);
1357     } else {
1358       ASSERT(mode == SKIP_WRITE_BARRIER);
1359       set_map_no_write_barrier(new_map);
1360     }
1361   }
1362 ASSERT((map()->has_fast_elements() ||
1363 map()->has_fast_smi_only_elements() ||
1364 (value == GetHeap()->empty_fixed_array())) ==
1365 (value->map() == GetHeap()->fixed_array_map() ||
1366 value->map() == GetHeap()->fixed_cow_array_map()));
1367 ASSERT((value == GetHeap()->empty_fixed_array()) ||
1368 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1369 WRITE_FIELD(this, kElementsOffset, value);
1370 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1374 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1375 set_map_and_elements(NULL, value, mode);
1379 void JSObject::initialize_properties() {
1380 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1381 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1385 void JSObject::initialize_elements() {
1386 ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
1387 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1388 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1392 MaybeObject* JSObject::ResetElements() {
1393   Object* obj;
1394   ElementsKind elements_kind = FLAG_smi_only_arrays
1395       ? FAST_SMI_ONLY_ELEMENTS
1396       : FAST_ELEMENTS;
1397   MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
1398                                                     elements_kind);
1399   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1400   set_map(Map::cast(obj));
1401   initialize_elements();
1402   return this;
1403 }
1406 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1407 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1410 byte Oddball::kind() {
1411 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1415 void Oddball::set_kind(byte value) {
1416 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1420 Object* JSGlobalPropertyCell::value() {
1421 return READ_FIELD(this, kValueOffset);
1425 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1426 // The write barrier is not used for global property cells.
1427 ASSERT(!val->IsJSGlobalPropertyCell());
1428 WRITE_FIELD(this, kValueOffset, val);
1432 int JSObject::GetHeaderSize() {
1433 InstanceType type = map()->instance_type();
1434 // Check for the most common kind of JavaScript object before
1435 // falling into the generic switch. This speeds up the internal
1436 // field operations considerably on average.
1437   if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1438   switch (type) {
1439 case JS_GLOBAL_PROXY_TYPE:
1440 return JSGlobalProxy::kSize;
1441 case JS_GLOBAL_OBJECT_TYPE:
1442 return JSGlobalObject::kSize;
1443 case JS_BUILTINS_OBJECT_TYPE:
1444 return JSBuiltinsObject::kSize;
1445 case JS_FUNCTION_TYPE:
1446       return JSFunction::kSize;
1447     case JS_VALUE_TYPE:
1448       return JSValue::kSize;
1449     case JS_DATE_TYPE:
1450       return JSDate::kSize;
1451     case JS_ARRAY_TYPE:
1452       return JSArray::kSize;
1453 case JS_WEAK_MAP_TYPE:
1454 return JSWeakMap::kSize;
1455 case JS_REGEXP_TYPE:
1456 return JSRegExp::kSize;
1457 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1458 return JSObject::kHeaderSize;
1459 case JS_MESSAGE_OBJECT_TYPE:
1460       return JSMessageObject::kSize;
1461     default:
1462       UNREACHABLE();
1463       return 0;
1464   }
1465 }
1468 int JSObject::GetInternalFieldCount() {
1469 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1470 // Make sure to adjust for the number of in-object properties. These
1471 // properties do contribute to the size, but are not internal fields.
1472 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1473 map()->inobject_properties();
1477 int JSObject::GetInternalFieldOffset(int index) {
1478 ASSERT(index < GetInternalFieldCount() && index >= 0);
1479 return GetHeaderSize() + (kPointerSize * index);
1483 Object* JSObject::GetInternalField(int index) {
1484 ASSERT(index < GetInternalFieldCount() && index >= 0);
1485 // Internal objects do follow immediately after the header, whereas in-object
1486 // properties are at the end of the object. Therefore there is no need
1487 // to adjust the index here.
1488 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1492 void JSObject::SetInternalField(int index, Object* value) {
1493 ASSERT(index < GetInternalFieldCount() && index >= 0);
1494 // Internal objects do follow immediately after the header, whereas in-object
1495 // properties are at the end of the object. Therefore there is no need
1496 // to adjust the index here.
1497 int offset = GetHeaderSize() + (kPointerSize * index);
1498 WRITE_FIELD(this, offset, value);
1499 WRITE_BARRIER(GetHeap(), this, offset, value);
1503 void JSObject::SetInternalField(int index, Smi* value) {
1504 ASSERT(index < GetInternalFieldCount() && index >= 0);
1505 // Internal objects do follow immediately after the header, whereas in-object
1506 // properties are at the end of the object. Therefore there is no need
1507 // to adjust the index here.
1508 int offset = GetHeaderSize() + (kPointerSize * index);
1509 WRITE_FIELD(this, offset, value);
1513 // Access fast-case object properties at index. The use of these routines
1514 // is needed to correctly distinguish between properties stored in-object and
1515 // properties stored in the properties array.
1516 Object* JSObject::FastPropertyAt(int index) {
1517 // Adjust for the number of properties stored in the object.
1518   index -= map()->inobject_properties();
1519   if (index < 0) {
1520     int offset = map()->instance_size() + (index * kPointerSize);
1521     return READ_FIELD(this, offset);
1522   } else {
1523     ASSERT(index < properties()->length());
1524     return properties()->get(index);
1525   }
1526 }
1529 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1530 // Adjust for the number of properties stored in the object.
1531   index -= map()->inobject_properties();
1532   if (index < 0) {
1533     int offset = map()->instance_size() + (index * kPointerSize);
1534     WRITE_FIELD(this, offset, value);
1535     WRITE_BARRIER(GetHeap(), this, offset, value);
1536   } else {
1537     ASSERT(index < properties()->length());
1538     properties()->set(index, value);
1539   }
1540   return value;
1541 }
1544 int JSObject::GetInObjectPropertyOffset(int index) {
1545 // Adjust for the number of properties stored in the object.
1546   index -= map()->inobject_properties();
1547   ASSERT(index < 0);
1548 return map()->instance_size() + (index * kPointerSize);
1552 Object* JSObject::InObjectPropertyAt(int index) {
1553 // Adjust for the number of properties stored in the object.
1554   index -= map()->inobject_properties();
1555   ASSERT(index < 0);
1556 int offset = map()->instance_size() + (index * kPointerSize);
1557 return READ_FIELD(this, offset);
1561 Object* JSObject::InObjectPropertyAtPut(int index,
1562                                         Object* value,
1563                                         WriteBarrierMode mode) {
1564 // Adjust for the number of properties stored in the object.
1565   index -= map()->inobject_properties();
1566   ASSERT(index < 0);
1567 int offset = map()->instance_size() + (index * kPointerSize);
1568 WRITE_FIELD(this, offset, value);
1569   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1570   return value;
1571 }
1575 void JSObject::InitializeBody(Map* map,
1576 Object* pre_allocated_value,
1577 Object* filler_value) {
1578 ASSERT(!filler_value->IsHeapObject() ||
1579 !GetHeap()->InNewSpace(filler_value));
1580 ASSERT(!pre_allocated_value->IsHeapObject() ||
1581 !GetHeap()->InNewSpace(pre_allocated_value));
1582 int size = map->instance_size();
1583 int offset = kHeaderSize;
1584 if (filler_value != pre_allocated_value) {
1585 int pre_allocated = map->pre_allocated_property_fields();
1586 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1587 for (int i = 0; i < pre_allocated; i++) {
1588 WRITE_FIELD(this, offset, pre_allocated_value);
1589 offset += kPointerSize;
1592 while (offset < size) {
1593 WRITE_FIELD(this, offset, filler_value);
1594 offset += kPointerSize;
1599 bool JSObject::HasFastProperties() {
1600 return !properties()->IsDictionary();
1604 int JSObject::MaxFastProperties() {
1605 // Allow extra fast properties if the object has more than
1606 // kMaxFastProperties in-object properties. When this is the case,
1607 // it is very unlikely that the object is being used as a dictionary
1608 // and there is a good chance that allowing more map transitions
1609 // will be worth it.
1610 return Max(map()->inobject_properties(), kMaxFastProperties);
1614 void Struct::InitializeBody(int object_size) {
1615 Object* value = GetHeap()->undefined_value();
1616 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1617 WRITE_FIELD(this, offset, value);
1622 bool Object::ToArrayIndex(uint32_t* index) {
1623   if (IsSmi()) {
1624     int value = Smi::cast(this)->value();
1625     if (value < 0) return false;
1626     *index = value;
1627     return true;
1628   }
1629 if (IsHeapNumber()) {
1630 double value = HeapNumber::cast(this)->value();
1631 uint32_t uint_value = static_cast<uint32_t>(value);
1632 if (value == static_cast<double>(uint_value)) {
1633       *index = uint_value;
1634       return true;
1635     }
1636   }
1637   return false;
1638 }
1641 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1642 if (!this->IsJSValue()) return false;
1644 JSValue* js_value = JSValue::cast(this);
1645 if (!js_value->value()->IsString()) return false;
1647 String* str = String::cast(js_value->value());
1648   if (index >= (uint32_t)str->length()) return false;
1650   return true;
1651 }
1654 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1655 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1656 return reinterpret_cast<FixedArrayBase*>(object);
1660 Object* FixedArray::get(int index) {
1661 ASSERT(index >= 0 && index < this->length());
1662 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1666 void FixedArray::set(int index, Smi* value) {
1667 ASSERT(map() != HEAP->fixed_cow_array_map());
1668 ASSERT(index >= 0 && index < this->length());
1669 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1670 int offset = kHeaderSize + index * kPointerSize;
1671 WRITE_FIELD(this, offset, value);
1675 void FixedArray::set(int index, Object* value) {
1676 ASSERT(map() != HEAP->fixed_cow_array_map());
1677 ASSERT(index >= 0 && index < this->length());
1678 int offset = kHeaderSize + index * kPointerSize;
1679 WRITE_FIELD(this, offset, value);
1680 WRITE_BARRIER(GetHeap(), this, offset, value);
1684 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1685 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1689 inline double FixedDoubleArray::hole_nan_as_double() {
1690 return BitCast<double, uint64_t>(kHoleNanInt64);
1694 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1695 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1696 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1697 return OS::nan_value();
1701 double FixedDoubleArray::get_scalar(int index) {
1702 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1703 map() != HEAP->fixed_array_map());
1704 ASSERT(index >= 0 && index < this->length());
1705 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1706   ASSERT(!is_the_hole_nan(result));
1707   return result;
1708 }
1710 int64_t FixedDoubleArray::get_representation(int index) {
1711 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1712 map() != HEAP->fixed_array_map());
1713 ASSERT(index >= 0 && index < this->length());
1714 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
1717 MaybeObject* FixedDoubleArray::get(int index) {
1718 if (is_the_hole(index)) {
1719     return GetHeap()->the_hole_value();
1720   } else {
1721     return GetHeap()->NumberFromDouble(get_scalar(index));
1722   }
1723 }
1726 void FixedDoubleArray::set(int index, double value) {
1727 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1728 map() != HEAP->fixed_array_map());
1729 int offset = kHeaderSize + index * kDoubleSize;
1730 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1731 WRITE_DOUBLE_FIELD(this, offset, value);
1735 void FixedDoubleArray::set_the_hole(int index) {
1736 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1737 map() != HEAP->fixed_array_map());
1738 int offset = kHeaderSize + index * kDoubleSize;
1739 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1743 bool FixedDoubleArray::is_the_hole(int index) {
1744 int offset = kHeaderSize + index * kDoubleSize;
1745 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
1749 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1750 Heap* heap = GetHeap();
1751 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1752 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1753   return UPDATE_WRITE_BARRIER;
1754 }
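// Typical usage sketch (editorial, illustrative only): callers pin down a
// no-allocation scope, ask once, and reuse the mode for a batch of stores:
//
//   AssertNoAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   for (int i = 0; i < n; i++) array->set(i, value, mode);
//
// SKIP_WRITE_BARRIER is only returned for new-space objects while incremental
// marking is off, and it stays valid only as long as no allocation can move
// the object, which is what the otherwise unused argument documents.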
1757 void FixedArray::set(int index,
1758                      Object* value,
1759                      WriteBarrierMode mode) {
1760 ASSERT(map() != HEAP->fixed_cow_array_map());
1761 ASSERT(index >= 0 && index < this->length());
1762 int offset = kHeaderSize + index * kPointerSize;
1763 WRITE_FIELD(this, offset, value);
1764 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1768 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
1769                                               int index,
1770                                               Object* value) {
1771 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1772 ASSERT(index >= 0 && index < array->length());
1773 int offset = kHeaderSize + index * kPointerSize;
1774 WRITE_FIELD(array, offset, value);
1775 Heap* heap = array->GetHeap();
1776 if (heap->InNewSpace(value)) {
1777 heap->RecordWrite(array->address(), offset);
1782 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1783                                    int index,
1784                                    Object* value) {
1785 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1786 ASSERT(index >= 0 && index < array->length());
1787 ASSERT(!HEAP->InNewSpace(value));
1788 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1792 void FixedArray::set_undefined(int index) {
1793 ASSERT(map() != HEAP->fixed_cow_array_map());
1794 set_undefined(GetHeap(), index);
1798 void FixedArray::set_undefined(Heap* heap, int index) {
1799 ASSERT(index >= 0 && index < this->length());
1800 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1801 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1802 heap->undefined_value());
1806 void FixedArray::set_null(int index) {
1807 set_null(GetHeap(), index);
1811 void FixedArray::set_null(Heap* heap, int index) {
1812 ASSERT(index >= 0 && index < this->length());
1813 ASSERT(!heap->InNewSpace(heap->null_value()));
1814 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1818 void FixedArray::set_the_hole(int index) {
1819 ASSERT(map() != HEAP->fixed_cow_array_map());
1820 ASSERT(index >= 0 && index < this->length());
1821   ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1822   WRITE_FIELD(this,
1823               kHeaderSize + index * kPointerSize,
1824               GetHeap()->the_hole_value());
1825 }
1828 void FixedArray::set_unchecked(int index, Smi* value) {
1829 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1830 int offset = kHeaderSize + index * kPointerSize;
1831 WRITE_FIELD(this, offset, value);
1835 void FixedArray::set_unchecked(Heap* heap,
1836                                int index,
1837                                Object* value,
1838                                WriteBarrierMode mode) {
1839 int offset = kHeaderSize + index * kPointerSize;
1840 WRITE_FIELD(this, offset, value);
1841 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1845 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1846 ASSERT(index >= 0 && index < this->length());
1847 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1848 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1852 Object** FixedArray::data_start() {
1853 return HeapObject::RawField(this, kHeaderSize);
1857 bool DescriptorArray::IsEmpty() {
1858 ASSERT(this->IsSmi() ||
1859 this->length() > kFirstIndex ||
1860 this == HEAP->empty_descriptor_array());
1861 return this->IsSmi() || length() <= kFirstIndex;
1865 int DescriptorArray::bit_field3_storage() {
1866 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1867 return Smi::cast(storage)->value();
1870 void DescriptorArray::set_bit_field3_storage(int value) {
1872 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
1876 void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
1877                                                     int first,
1878                                                     int second) {
1879 Object* tmp = array->get(first);
1880 NoIncrementalWriteBarrierSet(array, first, array->get(second));
1881 NoIncrementalWriteBarrierSet(array, second, tmp);
1885 int DescriptorArray::Search(String* name) {
1886 SLOW_ASSERT(IsSortedNoDuplicates());
1888 // Check for empty descriptor array.
1889 int nof = number_of_descriptors();
1890 if (nof == 0) return kNotFound;
1892 // Fast case: do linear search for small arrays.
1893 const int kMaxElementsForLinearSearch = 8;
1894 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1895 return LinearSearch(name, nof);
1898 // Slow case: perform binary search.
1899 return BinarySearch(name, 0, nof - 1);
1903 int DescriptorArray::SearchWithCache(String* name) {
1904 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1905 if (number == DescriptorLookupCache::kAbsent) {
1906 number = Search(name);
1907     GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1908   }
1909   return number;
1910 }
1913 String* DescriptorArray::GetKey(int descriptor_number) {
1914 ASSERT(descriptor_number < number_of_descriptors());
1915 return String::cast(get(ToKeyIndex(descriptor_number)));
1919 Object* DescriptorArray::GetValue(int descriptor_number) {
1920 ASSERT(descriptor_number < number_of_descriptors());
1921 return GetContentArray()->get(ToValueIndex(descriptor_number));
1925 Smi* DescriptorArray::GetDetails(int descriptor_number) {
1926 ASSERT(descriptor_number < number_of_descriptors());
1927 return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
1931 PropertyType DescriptorArray::GetType(int descriptor_number) {
1932 ASSERT(descriptor_number < number_of_descriptors());
1933 return PropertyDetails(GetDetails(descriptor_number)).type();
1937 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1938 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1942 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1943 return JSFunction::cast(GetValue(descriptor_number));
1947 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1948 ASSERT(GetType(descriptor_number) == CALLBACKS);
1949 return GetValue(descriptor_number);
1953 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1954 ASSERT(GetType(descriptor_number) == CALLBACKS);
1955 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1956 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
1960 bool DescriptorArray::IsProperty(int descriptor_number) {
1961 Entry entry(this, descriptor_number);
1962 return IsPropertyDescriptor(&entry);
1966 bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
1967 switch (GetType(descriptor_number)) {
1968 case MAP_TRANSITION:
1969 case CONSTANT_TRANSITION:
1970     case ELEMENTS_TRANSITION:
1971       return true;
1972     case CALLBACKS: {
1973       Object* value = GetValue(descriptor_number);
1974       if (!value->IsAccessorPair()) return false;
1975       AccessorPair* accessors = AccessorPair::cast(value);
1976       return accessors->getter()->IsMap() && accessors->setter()->IsMap();
1977     }
1978     case NORMAL:
1979     case FIELD:
1980     case CONSTANT_FUNCTION:
1981     case HANDLER:
1982     case INTERCEPTOR:
1983     case NULL_DESCRIPTOR:
1984       return false;
1985   }
1986   UNREACHABLE();  // Keep the compiler happy.
1987   return false;
1988 }
1991 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
1992 return GetType(descriptor_number) == NULL_DESCRIPTOR;
1996 bool DescriptorArray::IsDontEnum(int descriptor_number) {
1997 return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
2001 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2002 desc->Init(GetKey(descriptor_number),
2003 GetValue(descriptor_number),
2004 PropertyDetails(GetDetails(descriptor_number)));
2008 void DescriptorArray::Set(int descriptor_number,
2009                           Descriptor* desc,
2010                           const WhitenessWitness&) {
2012 ASSERT(descriptor_number < number_of_descriptors());
2014 NoIncrementalWriteBarrierSet(this,
2015                                ToKeyIndex(descriptor_number),
2016                                desc->GetKey());
2017   FixedArray* content_array = GetContentArray();
2018 NoIncrementalWriteBarrierSet(content_array,
2019                                ToValueIndex(descriptor_number),
2020                                desc->GetValue());
2021   NoIncrementalWriteBarrierSet(content_array,
2022 ToDetailsIndex(descriptor_number),
2023 desc->GetDetails().AsSmi());
2027 void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
2028 int first, int second) {
2029 NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
2030 FixedArray* content_array = GetContentArray();
2031 NoIncrementalWriteBarrierSwap(content_array,
2032 ToValueIndex(first),
2033 ToValueIndex(second));
2034 NoIncrementalWriteBarrierSwap(content_array,
2035 ToDetailsIndex(first),
2036 ToDetailsIndex(second));
2040 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2041 : marking_(array->GetHeap()->incremental_marking()) {
2042 marking_->EnterNoMarkingScope();
2043 if (array->number_of_descriptors() > 0) {
2044 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2045 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
2050 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2051 marking_->LeaveNoMarkingScope();
2055 template<typename Shape, typename Key>
2056 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2057 const int kMinCapacity = 32;
2058 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2059 if (capacity < kMinCapacity) {
2060     capacity = kMinCapacity;  // Guarantee min capacity.
2061   }
2062   return capacity;
2063 }
2066 template<typename Shape, typename Key>
2067 int HashTable<Shape, Key>::FindEntry(Key key) {
2068 return FindEntry(GetIsolate(), key);
2072 // Find entry for key otherwise return kNotFound.
2073 template<typename Shape, typename Key>
2074 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2075 uint32_t capacity = Capacity();
2076 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2077 uint32_t count = 1;
2078 // EnsureCapacity will guarantee the hash table is never full.
2079 while (true) {
2080 Object* element = KeyAt(entry);
2081 // Empty entry.
2082 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2083 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2084 Shape::IsMatch(key, element)) return entry;
2085 entry = NextProbe(entry, count++, capacity);
2086 }
2087 return kNotFound;
2088 }
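// Editor's illustrative sketch (not part of the original header): the probe
// loop above, rewritten against a plain power-of-two table. The wrap-by-mask
// and increasing-step probe are assumptions about FirstProbe/NextProbe, and
// all Sketch* names are hypothetical.
static const int kSketchNotFound = -1;

static int SketchFindEntry(const unsigned* table,   // 0 means "empty slot"
                           unsigned capacity,       // power of two
                           unsigned hash,
                           unsigned key) {
  unsigned entry = hash & (capacity - 1);            // "FirstProbe"
  unsigned count = 1;
  while (true) {
    unsigned element = table[entry];
    if (element == 0) return kSketchNotFound;        // empty slot: miss
    if (element == key) return static_cast<int>(entry);
    entry = (entry + count++) & (capacity - 1);      // "NextProbe"
  }
}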
2091 bool SeededNumberDictionary::requires_slow_elements() {
2092 Object* max_index_object = get(kMaxNumberKeyIndex);
2093 if (!max_index_object->IsSmi()) return false;
2094 return 0 !=
2095 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2096 }
2098 uint32_t SeededNumberDictionary::max_number_key() {
2099 ASSERT(!requires_slow_elements());
2100 Object* max_index_object = get(kMaxNumberKeyIndex);
2101 if (!max_index_object->IsSmi()) return 0;
2102 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2103 return value >> kRequiresSlowElementsTagSize;
2106 void SeededNumberDictionary::set_requires_slow_elements() {
2107 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2108 }
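// Editor's illustrative sketch (not part of the original header): the largest
// number key and the "requires slow elements" flag share a single Smi, with
// the flag in the low bits and the key shifted above it, matching the
// accessors above. The constants below are hypothetical stand-ins for
// kRequiresSlowElementsTagSize and kRequiresSlowElementsMask.
static const int kSketchSlowTagSize = 1;
static const int kSketchSlowMask = 1;

static int SketchEncodeMaxNumberKey(unsigned key, bool requires_slow) {
  return static_cast<int>(key << kSketchSlowTagSize) |
         (requires_slow ? kSketchSlowMask : 0);
}

static unsigned SketchDecodeMaxNumberKey(int encoded) {
  return static_cast<unsigned>(encoded) >> kSketchSlowTagSize;
}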
2111 // ------------------------------------
2115 CAST_ACCESSOR(FixedArray)
2116 CAST_ACCESSOR(FixedDoubleArray)
2117 CAST_ACCESSOR(DescriptorArray)
2118 CAST_ACCESSOR(DeoptimizationInputData)
2119 CAST_ACCESSOR(DeoptimizationOutputData)
2120 CAST_ACCESSOR(TypeFeedbackCells)
2121 CAST_ACCESSOR(SymbolTable)
2122 CAST_ACCESSOR(JSFunctionResultCache)
2123 CAST_ACCESSOR(NormalizedMapCache)
2124 CAST_ACCESSOR(ScopeInfo)
2125 CAST_ACCESSOR(CompilationCacheTable)
2126 CAST_ACCESSOR(CodeCacheHashTable)
2127 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2128 CAST_ACCESSOR(MapCache)
2129 CAST_ACCESSOR(String)
2130 CAST_ACCESSOR(SeqString)
2131 CAST_ACCESSOR(SeqAsciiString)
2132 CAST_ACCESSOR(SeqTwoByteString)
2133 CAST_ACCESSOR(SlicedString)
2134 CAST_ACCESSOR(ConsString)
2135 CAST_ACCESSOR(ExternalString)
2136 CAST_ACCESSOR(ExternalAsciiString)
2137 CAST_ACCESSOR(ExternalTwoByteString)
2138 CAST_ACCESSOR(JSReceiver)
2139 CAST_ACCESSOR(JSObject)
2141 CAST_ACCESSOR(HeapObject)
2142 CAST_ACCESSOR(HeapNumber)
2143 CAST_ACCESSOR(Oddball)
2144 CAST_ACCESSOR(JSGlobalPropertyCell)
2145 CAST_ACCESSOR(SharedFunctionInfo)
2147 CAST_ACCESSOR(JSFunction)
2148 CAST_ACCESSOR(GlobalObject)
2149 CAST_ACCESSOR(JSGlobalProxy)
2150 CAST_ACCESSOR(JSGlobalObject)
2151 CAST_ACCESSOR(JSBuiltinsObject)
2153 CAST_ACCESSOR(JSArray)
2154 CAST_ACCESSOR(JSRegExp)
2155 CAST_ACCESSOR(JSProxy)
2156 CAST_ACCESSOR(JSFunctionProxy)
2157 CAST_ACCESSOR(JSSet)
2158 CAST_ACCESSOR(JSMap)
2159 CAST_ACCESSOR(JSWeakMap)
2160 CAST_ACCESSOR(Foreign)
2161 CAST_ACCESSOR(ByteArray)
2162 CAST_ACCESSOR(FreeSpace)
2163 CAST_ACCESSOR(ExternalArray)
2164 CAST_ACCESSOR(ExternalByteArray)
2165 CAST_ACCESSOR(ExternalUnsignedByteArray)
2166 CAST_ACCESSOR(ExternalShortArray)
2167 CAST_ACCESSOR(ExternalUnsignedShortArray)
2168 CAST_ACCESSOR(ExternalIntArray)
2169 CAST_ACCESSOR(ExternalUnsignedIntArray)
2170 CAST_ACCESSOR(ExternalFloatArray)
2171 CAST_ACCESSOR(ExternalDoubleArray)
2172 CAST_ACCESSOR(ExternalPixelArray)
2173 CAST_ACCESSOR(Struct)
2176 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2177 STRUCT_LIST(MAKE_STRUCT_CAST)
2178 #undef MAKE_STRUCT_CAST
2181 template <typename Shape, typename Key>
2182 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2183 ASSERT(obj->IsHashTable());
2184 return reinterpret_cast<HashTable*>(obj);
2188 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2189 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2191 SMI_ACCESSORS(String, length, kLengthOffset)
2194 uint32_t String::hash_field() {
2195 return READ_UINT32_FIELD(this, kHashFieldOffset);
2199 void String::set_hash_field(uint32_t value) {
2200 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2201 #if V8_HOST_ARCH_64_BIT
2202 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2203 #endif
2204 }
2207 bool String::Equals(String* other) {
2208 if (other == this) return true;
2209 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2210 return false;
2211 }
2212 return SlowEquals(other);
2213 }
2216 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2217 if (!StringShape(this).IsCons()) return this;
2218 ConsString* cons = ConsString::cast(this);
2219 if (cons->IsFlat()) return cons->first();
2220 return SlowTryFlatten(pretenure);
2224 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2225 MaybeObject* flat = TryFlatten(pretenure);
2226 Object* successfully_flattened;
2227 if (!flat->ToObject(&successfully_flattened)) return this;
2228 return String::cast(successfully_flattened);
2232 uint16_t String::Get(int index) {
2233 ASSERT(index >= 0 && index < length());
2234 switch (StringShape(this).full_representation_tag()) {
2235 case kSeqStringTag | kAsciiStringTag:
2236 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2237 case kSeqStringTag | kTwoByteStringTag:
2238 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2239 case kConsStringTag | kAsciiStringTag:
2240 case kConsStringTag | kTwoByteStringTag:
2241 return ConsString::cast(this)->ConsStringGet(index);
2242 case kExternalStringTag | kAsciiStringTag:
2243 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2244 case kExternalStringTag | kTwoByteStringTag:
2245 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2246 case kSlicedStringTag | kAsciiStringTag:
2247 case kSlicedStringTag | kTwoByteStringTag:
2248 return SlicedString::cast(this)->SlicedStringGet(index);
2249 default:
2250 break;
2251 }
2252
2253 UNREACHABLE();
2254 return 0;
2255 }
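// Editor's illustrative sketch (not part of the original header): String::Get
// hides the representation dispatch above, so a caller can copy characters
// without knowing whether the string is sequential, cons, sliced or external.
// SketchCopyChars is a hypothetical helper; |out| must hold length() entries.
static void SketchCopyChars(String* str, uint16_t* out) {
  int len = str->length();
  for (int i = 0; i < len; i++) {
    out[i] = str->Get(i);  // dispatches on the full representation tag
  }
}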
2258 void String::Set(int index, uint16_t value) {
2259 ASSERT(index >= 0 && index < length());
2260 ASSERT(StringShape(this).IsSequential());
2262 return this->IsAsciiRepresentation()
2263 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2264 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2268 bool String::IsFlat() {
2269 if (!StringShape(this).IsCons()) return true;
2270 return ConsString::cast(this)->second()->length() == 0;
2274 String* String::GetUnderlying() {
2275 // Giving direct access to the underlying string only makes sense if the
2276 // wrapping string is already flattened.
2277 ASSERT(this->IsFlat());
2278 ASSERT(StringShape(this).IsIndirect());
2279 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2280 const int kUnderlyingOffset = SlicedString::kParentOffset;
2281 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2285 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2286 ASSERT(index >= 0 && index < length());
2287 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2291 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2292 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2293 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2294 static_cast<byte>(value));
2298 Address SeqAsciiString::GetCharsAddress() {
2299 return FIELD_ADDR(this, kHeaderSize);
2303 char* SeqAsciiString::GetChars() {
2304 return reinterpret_cast<char*>(GetCharsAddress());
2308 Address SeqTwoByteString::GetCharsAddress() {
2309 return FIELD_ADDR(this, kHeaderSize);
2313 uc16* SeqTwoByteString::GetChars() {
2314 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2318 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2319 ASSERT(index >= 0 && index < length());
2320 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2324 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2325 ASSERT(index >= 0 && index < length());
2326 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2330 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2331 return SizeFor(length());
2335 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2336 return SizeFor(length());
2340 String* SlicedString::parent() {
2341 return String::cast(READ_FIELD(this, kParentOffset));
2345 void SlicedString::set_parent(String* parent) {
2346 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2347 WRITE_FIELD(this, kParentOffset, parent);
2351 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2354 String* ConsString::first() {
2355 return String::cast(READ_FIELD(this, kFirstOffset));
2359 Object* ConsString::unchecked_first() {
2360 return READ_FIELD(this, kFirstOffset);
2364 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2365 WRITE_FIELD(this, kFirstOffset, value);
2366 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2370 String* ConsString::second() {
2371 return String::cast(READ_FIELD(this, kSecondOffset));
2375 Object* ConsString::unchecked_second() {
2376 return READ_FIELD(this, kSecondOffset);
2380 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2381 WRITE_FIELD(this, kSecondOffset, value);
2382 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2386 bool ExternalString::is_short() {
2387 InstanceType type = map()->instance_type();
2388 return (type & kShortExternalStringMask) == kShortExternalStringTag;
2392 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2393 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2397 void ExternalAsciiString::update_data_cache() {
2398 if (is_short()) return;
2399 const char** data_field =
2400 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2401 *data_field = resource()->data();
2405 void ExternalAsciiString::set_resource(
2406 const ExternalAsciiString::Resource* resource) {
2407 *reinterpret_cast<const Resource**>(
2408 FIELD_ADDR(this, kResourceOffset)) = resource;
2409 if (resource != NULL) update_data_cache();
2413 const char* ExternalAsciiString::GetChars() {
2414 return resource()->data();
2418 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
2419 ASSERT(index >= 0 && index < length());
2420 return GetChars()[index];
2424 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2425 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2429 void ExternalTwoByteString::update_data_cache() {
2430 if (is_short()) return;
2431 const uint16_t** data_field =
2432 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2433 *data_field = resource()->data();
2437 void ExternalTwoByteString::set_resource(
2438 const ExternalTwoByteString::Resource* resource) {
2439 *reinterpret_cast<const Resource**>(
2440 FIELD_ADDR(this, kResourceOffset)) = resource;
2441 if (resource != NULL) update_data_cache();
2445 const uint16_t* ExternalTwoByteString::GetChars() {
2446 return resource()->data();
2450 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
2451 ASSERT(index >= 0 && index < length());
2452 return GetChars()[index];
2456 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
2457 int start) {
2458 return GetChars() + start;
2459 }
2462 void JSFunctionResultCache::MakeZeroSize() {
2463 set_finger_index(kEntriesIndex);
2464 set_size(kEntriesIndex);
2468 void JSFunctionResultCache::Clear() {
2469 int cache_size = size();
2470 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2471 MemsetPointer(entries_start,
2472 GetHeap()->the_hole_value(),
2473 cache_size - kEntriesIndex);
2478 int JSFunctionResultCache::size() {
2479 return Smi::cast(get(kCacheSizeIndex))->value();
2483 void JSFunctionResultCache::set_size(int size) {
2484 set(kCacheSizeIndex, Smi::FromInt(size));
2488 int JSFunctionResultCache::finger_index() {
2489 return Smi::cast(get(kFingerIndex))->value();
2493 void JSFunctionResultCache::set_finger_index(int finger_index) {
2494 set(kFingerIndex, Smi::FromInt(finger_index));
2498 byte ByteArray::get(int index) {
2499 ASSERT(index >= 0 && index < this->length());
2500 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2504 void ByteArray::set(int index, byte value) {
2505 ASSERT(index >= 0 && index < this->length());
2506 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2510 int ByteArray::get_int(int index) {
2511 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2512 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2516 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2517 ASSERT_TAG_ALIGNED(address);
2518 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2522 Address ByteArray::GetDataStartAddress() {
2523 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2524 }
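// Editor's illustrative sketch (not part of the original header): the two
// conversions above are inverses -- a tagged ByteArray pointer and the address
// of its first data byte differ by (kHeaderSize - kHeapObjectTag), so a round
// trip recovers the original object.
static void SketchByteArrayRoundTrip(ByteArray* array) {
  Address data = array->GetDataStartAddress();
  ByteArray* same = ByteArray::FromDataStartAddress(data);
  ASSERT(same == array);  // no information is lost in either direction
}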
2527 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2528 return reinterpret_cast<uint8_t*>(external_pointer());
2532 uint8_t ExternalPixelArray::get_scalar(int index) {
2533 ASSERT((index >= 0) && (index < this->length()));
2534 uint8_t* ptr = external_pixel_pointer();
2535 return ptr[index];
2536 }
2539 MaybeObject* ExternalPixelArray::get(int index) {
2540 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2544 void ExternalPixelArray::set(int index, uint8_t value) {
2545 ASSERT((index >= 0) && (index < this->length()));
2546 uint8_t* ptr = external_pixel_pointer();
2547 ptr[index] = value;
2548 }
2551 void* ExternalArray::external_pointer() {
2552 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2553 return reinterpret_cast<void*>(ptr);
2557 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2558 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2559 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2563 int8_t ExternalByteArray::get_scalar(int index) {
2564 ASSERT((index >= 0) && (index < this->length()));
2565 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2566 return ptr[index];
2567 }
2570 MaybeObject* ExternalByteArray::get(int index) {
2571 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2575 void ExternalByteArray::set(int index, int8_t value) {
2576 ASSERT((index >= 0) && (index < this->length()));
2577 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2578 ptr[index] = value;
2579 }
2582 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2583 ASSERT((index >= 0) && (index < this->length()));
2584 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2585 return ptr[index];
2586 }
2589 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2590 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2594 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2595 ASSERT((index >= 0) && (index < this->length()));
2596 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2597 ptr[index] = value;
2598 }
2601 int16_t ExternalShortArray::get_scalar(int index) {
2602 ASSERT((index >= 0) && (index < this->length()));
2603 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2604 return ptr[index];
2605 }
2608 MaybeObject* ExternalShortArray::get(int index) {
2609 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2613 void ExternalShortArray::set(int index, int16_t value) {
2614 ASSERT((index >= 0) && (index < this->length()));
2615 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2616 ptr[index] = value;
2617 }
2620 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2621 ASSERT((index >= 0) && (index < this->length()));
2622 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2623 return ptr[index];
2624 }
2627 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2628 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2632 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2633 ASSERT((index >= 0) && (index < this->length()));
2634 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2635 ptr[index] = value;
2636 }
2639 int32_t ExternalIntArray::get_scalar(int index) {
2640 ASSERT((index >= 0) && (index < this->length()));
2641 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2642 return ptr[index];
2643 }
2646 MaybeObject* ExternalIntArray::get(int index) {
2647 return GetHeap()->NumberFromInt32(get_scalar(index));
2651 void ExternalIntArray::set(int index, int32_t value) {
2652 ASSERT((index >= 0) && (index < this->length()));
2653 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2654 ptr[index] = value;
2655 }
2658 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2659 ASSERT((index >= 0) && (index < this->length()));
2660 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2661 return ptr[index];
2662 }
2665 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2666 return GetHeap()->NumberFromUint32(get_scalar(index));
2670 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2671 ASSERT((index >= 0) && (index < this->length()));
2672 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2673 ptr[index] = value;
2674 }
2677 float ExternalFloatArray::get_scalar(int index) {
2678 ASSERT((index >= 0) && (index < this->length()));
2679 float* ptr = static_cast<float*>(external_pointer());
2680 return ptr[index];
2681 }
2684 MaybeObject* ExternalFloatArray::get(int index) {
2685 return GetHeap()->NumberFromDouble(get_scalar(index));
2689 void ExternalFloatArray::set(int index, float value) {
2690 ASSERT((index >= 0) && (index < this->length()));
2691 float* ptr = static_cast<float*>(external_pointer());
2692 ptr[index] = value;
2693 }
2696 double ExternalDoubleArray::get_scalar(int index) {
2697 ASSERT((index >= 0) && (index < this->length()));
2698 double* ptr = static_cast<double*>(external_pointer());
2699 return ptr[index];
2700 }
2703 MaybeObject* ExternalDoubleArray::get(int index) {
2704 return GetHeap()->NumberFromDouble(get_scalar(index));
2708 void ExternalDoubleArray::set(int index, double value) {
2709 ASSERT((index >= 0) && (index < this->length()));
2710 double* ptr = static_cast<double*>(external_pointer());
2711 ptr[index] = value;
2712 }
2715 int Map::visitor_id() {
2716 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2720 void Map::set_visitor_id(int id) {
2721 ASSERT(0 <= id && id < 256);
2722 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2726 int Map::instance_size() {
2727 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2731 int Map::inobject_properties() {
2732 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2736 int Map::pre_allocated_property_fields() {
2737 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2741 int HeapObject::SizeFromMap(Map* map) {
2742 int instance_size = map->instance_size();
2743 if (instance_size != kVariableSizeSentinel) return instance_size;
2744 // We can ignore the "symbol" bit because it is only set for symbols
2745 // and implies a string type.
2746 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2747 // Only inline the most frequent cases.
2748 if (instance_type == FIXED_ARRAY_TYPE) {
2749 return FixedArray::BodyDescriptor::SizeOf(map, this);
2751 if (instance_type == ASCII_STRING_TYPE) {
2752 return SeqAsciiString::SizeFor(
2753 reinterpret_cast<SeqAsciiString*>(this)->length());
2755 if (instance_type == BYTE_ARRAY_TYPE) {
2756 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2758 if (instance_type == FREE_SPACE_TYPE) {
2759 return reinterpret_cast<FreeSpace*>(this)->size();
2761 if (instance_type == STRING_TYPE) {
2762 return SeqTwoByteString::SizeFor(
2763 reinterpret_cast<SeqTwoByteString*>(this)->length());
2765 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2766 return FixedDoubleArray::SizeFor(
2767 reinterpret_cast<FixedDoubleArray*>(this)->length());
2769 ASSERT(instance_type == CODE_TYPE);
2770 return reinterpret_cast<Code*>(this)->CodeSize();
2774 void Map::set_instance_size(int value) {
2775 ASSERT_EQ(0, value & (kPointerSize - 1));
2776 value >>= kPointerSizeLog2;
2777 ASSERT(0 <= value && value < 256);
2778 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2779 }
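// Editor's illustrative sketch (not part of the original header): the instance
// size is stored as a single byte counting words, so the setter and getter are
// just a shift by kPointerSizeLog2 in opposite directions.
static int SketchEncodeInstanceSize(int size_in_bytes) {
  return size_in_bytes >> kPointerSizeLog2;  // what set_instance_size() stores
}

static int SketchDecodeInstanceSize(int stored_byte) {
  return stored_byte << kPointerSizeLog2;    // what instance_size() returns
}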
2782 void Map::set_inobject_properties(int value) {
2783 ASSERT(0 <= value && value < 256);
2784 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2788 void Map::set_pre_allocated_property_fields(int value) {
2789 ASSERT(0 <= value && value < 256);
2790 WRITE_BYTE_FIELD(this,
2791 kPreAllocatedPropertyFieldsOffset,
2792 static_cast<byte>(value));
2796 InstanceType Map::instance_type() {
2797 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2801 void Map::set_instance_type(InstanceType value) {
2802 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2806 int Map::unused_property_fields() {
2807 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2811 void Map::set_unused_property_fields(int value) {
2812 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2816 byte Map::bit_field() {
2817 return READ_BYTE_FIELD(this, kBitFieldOffset);
2821 void Map::set_bit_field(byte value) {
2822 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2826 byte Map::bit_field2() {
2827 return READ_BYTE_FIELD(this, kBitField2Offset);
2831 void Map::set_bit_field2(byte value) {
2832 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2836 void Map::set_non_instance_prototype(bool value) {
2837 if (value) {
2838 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2839 } else {
2840 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2841 }
2842 }
2845 bool Map::has_non_instance_prototype() {
2846 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2850 void Map::set_function_with_prototype(bool value) {
2851 if (value) {
2852 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2853 } else {
2854 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2855 }
2856 }
2859 bool Map::function_with_prototype() {
2860 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2864 void Map::set_is_access_check_needed(bool access_check_needed) {
2865 if (access_check_needed) {
2866 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2867 } else {
2868 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2869 }
2870 }
2873 bool Map::is_access_check_needed() {
2874 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2878 void Map::set_is_extensible(bool value) {
2879 if (value) {
2880 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2881 } else {
2882 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2883 }
2884 }
2886 bool Map::is_extensible() {
2887 return ((1 << kIsExtensible) & bit_field2()) != 0;
2891 void Map::set_attached_to_shared_function_info(bool value) {
2892 if (value) {
2893 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2894 } else {
2895 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2896 }
2897 }
2899 bool Map::attached_to_shared_function_info() {
2900 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2904 void Map::set_is_shared(bool value) {
2905 if (value) {
2906 set_bit_field3(bit_field3() | (1 << kIsShared));
2907 } else {
2908 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2909 }
2910 }
2912 bool Map::is_shared() {
2913 return ((1 << kIsShared) & bit_field3()) != 0;
2917 JSFunction* Map::unchecked_constructor() {
2918 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2922 Code::Flags Code::flags() {
2923 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2927 void Code::set_flags(Code::Flags flags) {
2928 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2929 // Make sure that all call stubs have an arguments count.
2930 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2931 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2932 ExtractArgumentsCountFromFlags(flags) >= 0);
2933 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2937 Code::Kind Code::kind() {
2938 return ExtractKindFromFlags(flags());
2942 InlineCacheState Code::ic_state() {
2943 InlineCacheState result = ExtractICStateFromFlags(flags());
2944 // Only allow uninitialized or debugger states for non-IC code
2945 // objects. This is used in the debugger to determine whether or not
2946 // a call to a code object has been replaced with a debug break call.
2947 ASSERT(is_inline_cache_stub() ||
2948 result == UNINITIALIZED ||
2949 result == DEBUG_BREAK ||
2950 result == DEBUG_PREPARE_STEP_IN);
2951 return result;
2952 }
2955 Code::ExtraICState Code::extra_ic_state() {
2956 ASSERT(is_inline_cache_stub());
2957 return ExtractExtraICStateFromFlags(flags());
2961 PropertyType Code::type() {
2962 return ExtractTypeFromFlags(flags());
2966 int Code::arguments_count() {
2967 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2968 return ExtractArgumentsCountFromFlags(flags());
2972 int Code::major_key() {
2973 ASSERT(kind() == STUB ||
2974 kind() == UNARY_OP_IC ||
2975 kind() == BINARY_OP_IC ||
2976 kind() == COMPARE_IC ||
2977 kind() == TO_BOOLEAN_IC);
2978 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2982 void Code::set_major_key(int major) {
2983 ASSERT(kind() == STUB ||
2984 kind() == UNARY_OP_IC ||
2985 kind() == BINARY_OP_IC ||
2986 kind() == COMPARE_IC ||
2987 kind() == TO_BOOLEAN_IC);
2988 ASSERT(0 <= major && major < 256);
2989 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
2993 bool Code::is_pregenerated() {
2994 return kind() == STUB && IsPregeneratedField::decode(flags());
2998 void Code::set_is_pregenerated(bool value) {
2999 ASSERT(kind() == STUB);
3000 Flags f = flags();
3001 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3002 set_flags(f);
3003 }
3006 bool Code::optimizable() {
3007 ASSERT_EQ(FUNCTION, kind());
3008 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3012 void Code::set_optimizable(bool value) {
3013 ASSERT_EQ(FUNCTION, kind());
3014 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3018 bool Code::has_deoptimization_support() {
3019 ASSERT_EQ(FUNCTION, kind());
3020 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3021 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3025 void Code::set_has_deoptimization_support(bool value) {
3026 ASSERT_EQ(FUNCTION, kind());
3027 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3028 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3029 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3033 bool Code::has_debug_break_slots() {
3034 ASSERT_EQ(FUNCTION, kind());
3035 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3036 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3040 void Code::set_has_debug_break_slots(bool value) {
3041 ASSERT_EQ(FUNCTION, kind());
3042 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3043 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3044 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3048 bool Code::is_compiled_optimizable() {
3049 ASSERT_EQ(FUNCTION, kind());
3050 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3051 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3055 void Code::set_compiled_optimizable(bool value) {
3056 ASSERT_EQ(FUNCTION, kind());
3057 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3058 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3059 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3063 int Code::allow_osr_at_loop_nesting_level() {
3064 ASSERT_EQ(FUNCTION, kind());
3065 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3069 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3070 ASSERT_EQ(FUNCTION, kind());
3071 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3072 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3076 int Code::profiler_ticks() {
3077 ASSERT_EQ(FUNCTION, kind());
3078 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3082 void Code::set_profiler_ticks(int ticks) {
3083 ASSERT_EQ(FUNCTION, kind());
3084 ASSERT(ticks < 256);
3085 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3089 unsigned Code::stack_slots() {
3090 ASSERT(kind() == OPTIMIZED_FUNCTION);
3091 return READ_UINT32_FIELD(this, kStackSlotsOffset);
3095 void Code::set_stack_slots(unsigned slots) {
3096 ASSERT(kind() == OPTIMIZED_FUNCTION);
3097 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3101 unsigned Code::safepoint_table_offset() {
3102 ASSERT(kind() == OPTIMIZED_FUNCTION);
3103 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3107 void Code::set_safepoint_table_offset(unsigned offset) {
3108 ASSERT(kind() == OPTIMIZED_FUNCTION);
3109 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3110 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3114 unsigned Code::stack_check_table_offset() {
3115 ASSERT_EQ(FUNCTION, kind());
3116 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3120 void Code::set_stack_check_table_offset(unsigned offset) {
3121 ASSERT_EQ(FUNCTION, kind());
3122 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3123 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3127 CheckType Code::check_type() {
3128 ASSERT(is_call_stub() || is_keyed_call_stub());
3129 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3130 return static_cast<CheckType>(type);
3134 void Code::set_check_type(CheckType value) {
3135 ASSERT(is_call_stub() || is_keyed_call_stub());
3136 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3140 byte Code::unary_op_type() {
3141 ASSERT(is_unary_op_stub());
3142 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3146 void Code::set_unary_op_type(byte value) {
3147 ASSERT(is_unary_op_stub());
3148 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3152 byte Code::binary_op_type() {
3153 ASSERT(is_binary_op_stub());
3154 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3158 void Code::set_binary_op_type(byte value) {
3159 ASSERT(is_binary_op_stub());
3160 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3164 byte Code::binary_op_result_type() {
3165 ASSERT(is_binary_op_stub());
3166 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3170 void Code::set_binary_op_result_type(byte value) {
3171 ASSERT(is_binary_op_stub());
3172 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3176 byte Code::compare_state() {
3177 ASSERT(is_compare_ic_stub());
3178 return READ_BYTE_FIELD(this, kCompareStateOffset);
3182 void Code::set_compare_state(byte value) {
3183 ASSERT(is_compare_ic_stub());
3184 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3188 byte Code::to_boolean_state() {
3189 ASSERT(is_to_boolean_ic_stub());
3190 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3194 void Code::set_to_boolean_state(byte value) {
3195 ASSERT(is_to_boolean_ic_stub());
3196 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3200 bool Code::has_function_cache() {
3201 ASSERT(kind() == STUB);
3202 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3206 void Code::set_has_function_cache(bool flag) {
3207 ASSERT(kind() == STUB);
3208 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3212 bool Code::is_inline_cache_stub() {
3213 Kind kind = this->kind();
3214 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3218 Code::Flags Code::ComputeFlags(Kind kind,
3219 InlineCacheState ic_state,
3220 ExtraICState extra_ic_state,
3221 PropertyType type,
3222 int argc,
3223 InlineCacheHolderFlag holder) {
3224 // Extra IC state is only allowed for call IC stubs or for store IC
3225 // stubs.
3226 ASSERT(extra_ic_state == kNoExtraICState ||
3227 kind == CALL_IC ||
3228 kind == STORE_IC ||
3229 kind == KEYED_STORE_IC);
3230 // Compute the bit mask.
3231 int bits = KindField::encode(kind)
3232 | ICStateField::encode(ic_state)
3233 | TypeField::encode(type)
3234 | ExtraICStateField::encode(extra_ic_state)
3235 | (argc << kArgumentsCountShift)
3236 | CacheHolderField::encode(holder);
3237 return static_cast<Flags>(bits);
3241 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3242 PropertyType type,
3243 ExtraICState extra_ic_state,
3244 InlineCacheHolderFlag holder,
3245 int argc) {
3246 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3247 }
3250 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3251 return KindField::decode(flags);
3255 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3256 return ICStateField::decode(flags);
3260 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3261 return ExtraICStateField::decode(flags);
3265 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3266 return TypeField::decode(flags);
3270 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3271 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3275 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3276 return CacheHolderField::decode(flags);
3280 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3281 int bits = flags & ~TypeField::kMask;
3282 return static_cast<Flags>(bits);
3283 }
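// Editor's illustrative sketch (not part of the original header): ComputeFlags
// packs kind, IC state, property type, extra IC state, argument count and
// cache holder into one flags word, and the Extract* helpers above decode the
// individual components again. The two-argument call below assumes the
// remaining parameters have the defaults declared in objects.h.
static void SketchRoundTripFlags() {
  Code::Flags flags = Code::ComputeFlags(Code::STUB, UNINITIALIZED);
  ASSERT(Code::ExtractKindFromFlags(flags) == Code::STUB);
  ASSERT(Code::ExtractICStateFromFlags(flags) == UNINITIALIZED);
}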
3286 Code* Code::GetCodeFromTargetAddress(Address address) {
3287 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3288 // GetCodeFromTargetAddress might be called when marking objects during mark
3289 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3290 // Code::cast. Code::cast does not work when the object's map is
3291 // marked.
3292 Code* result = reinterpret_cast<Code*>(code);
3293 return result;
3294 }
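// Editor's illustrative sketch (not part of the original header): a Code
// object and its first instruction are exactly one header apart, so converting
// from the entry address back to the Code pointer is pure pointer arithmetic
// and a round trip recovers the original object.
static void SketchCodeFromEntry(Code* code) {
  Address entry = code->instruction_start();
  Code* same = Code::GetCodeFromTargetAddress(entry);
  ASSERT(same == code);
}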
3297 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3298 return HeapObject::
3299 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3300 }
3303 Object* Map::prototype() {
3304 return READ_FIELD(this, kPrototypeOffset);
3308 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3309 ASSERT(value->IsNull() || value->IsJSReceiver());
3310 WRITE_FIELD(this, kPrototypeOffset, value);
3311 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3315 DescriptorArray* Map::instance_descriptors() {
3316 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3317 if (object->IsSmi()) {
3318 return GetHeap()->empty_descriptor_array();
3320 return DescriptorArray::cast(object);
3325 void Map::init_instance_descriptors() {
3326 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3330 void Map::clear_instance_descriptors() {
3331 Object* object = READ_FIELD(this,
3332 kInstanceDescriptorsOrBitField3Offset);
3333 if (!object->IsSmi()) {
3334 WRITE_FIELD(
3335 this,
3336 kInstanceDescriptorsOrBitField3Offset,
3337 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3338 }
3339 }
3342 void Map::set_instance_descriptors(DescriptorArray* value,
3343 WriteBarrierMode mode) {
3344 Object* object = READ_FIELD(this,
3345 kInstanceDescriptorsOrBitField3Offset);
3346 Heap* heap = GetHeap();
3347 if (value == heap->empty_descriptor_array()) {
3348 clear_instance_descriptors();
3349 return;
3350 } else {
3351 if (object->IsSmi()) {
3352 value->set_bit_field3_storage(Smi::cast(object)->value());
3353 } else {
3354 value->set_bit_field3_storage(
3355 DescriptorArray::cast(object)->bit_field3_storage());
3356 }
3357 }
3358 ASSERT(!is_shared());
3359 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3360 CONDITIONAL_WRITE_BARRIER(
3361 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3362 }
3365 int Map::bit_field3() {
3366 Object* object = READ_FIELD(this,
3367 kInstanceDescriptorsOrBitField3Offset);
3368 if (object->IsSmi()) {
3369 return Smi::cast(object)->value();
3370 } else {
3371 return DescriptorArray::cast(object)->bit_field3_storage();
3372 }
3373 }
3376 void Map::set_bit_field3(int value) {
3377 ASSERT(Smi::IsValid(value));
3378 Object* object = READ_FIELD(this,
3379 kInstanceDescriptorsOrBitField3Offset);
3380 if (object->IsSmi()) {
3381 WRITE_FIELD(this,
3382 kInstanceDescriptorsOrBitField3Offset,
3383 Smi::FromInt(value));
3384 } else {
3385 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3386 }
3387 }
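// Editor's illustrative sketch (not part of the original header): the map slot
// at kInstanceDescriptorsOrBitField3Offset is overloaded. It holds either a
// Smi (the raw bit_field3 of a map with no descriptors) or a DescriptorArray
// that keeps bit_field3 in its own storage, which is why every accessor above
// branches on IsSmi().
static int SketchReadBitField3(Object* slot) {
  if (slot->IsSmi()) {
    return Smi::cast(slot)->value();                         // no descriptors
  }
  return DescriptorArray::cast(slot)->bit_field3_storage();  // has descriptors
}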
3390 FixedArray* Map::unchecked_prototype_transitions() {
3391 return reinterpret_cast<FixedArray*>(
3392 READ_FIELD(this, kPrototypeTransitionsOffset));
3396 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3397 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3398 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3400 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3401 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3402 ACCESSORS(JSFunction,
3403 next_function_link,
3404 Object,
3405 kNextFunctionLinkOffset)
3407 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3408 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3409 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3411 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3413 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3414 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3415 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3416 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3417 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3419 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3420 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3422 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3423 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3424 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3426 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3427 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3428 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3429 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3430 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3431 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3433 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3434 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3436 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3437 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3439 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3440 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3441 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3442 kPropertyAccessorsOffset)
3443 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3444 kPrototypeTemplateOffset)
3445 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3446 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3447 kNamedPropertyHandlerOffset)
3448 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3449 kIndexedPropertyHandlerOffset)
3450 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3451 kInstanceTemplateOffset)
3452 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3453 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3454 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3455 kInstanceCallHandlerOffset)
3456 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3457 kAccessCheckInfoOffset)
3458 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3460 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3461 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3462 kInternalFieldCountOffset)
3464 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3465 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3467 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3469 ACCESSORS(Script, source, Object, kSourceOffset)
3470 ACCESSORS(Script, name, Object, kNameOffset)
3471 ACCESSORS(Script, id, Object, kIdOffset)
3472 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3473 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3474 ACCESSORS(Script, data, Object, kDataOffset)
3475 ACCESSORS(Script, context_data, Object, kContextOffset)
3476 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3477 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3478 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3479 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3480 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3481 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3482 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
3483 kEvalFrominstructionsOffsetOffset)
3485 #ifdef ENABLE_DEBUGGER_SUPPORT
3486 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3487 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3488 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3489 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3491 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3492 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3493 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3494 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3495 #endif
3497 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3498 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3499 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3500 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3501 kInstanceClassNameOffset)
3502 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3503 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3504 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3505 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3506 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3507 kThisPropertyAssignmentsOffset)
3508 SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
3511 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3512 kHiddenPrototypeBit)
3513 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3514 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3515 kNeedsAccessCheckBit)
3516 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3517 kReadOnlyPrototypeBit)
3518 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3519 kIsExpressionBit)
3520 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3521 kIsTopLevelBit)
3522 BOOL_GETTER(SharedFunctionInfo,
3523 compiler_hints,
3524 has_only_simple_this_property_assignments,
3525 kHasOnlySimpleThisPropertyAssignments)
3526 BOOL_ACCESSORS(SharedFunctionInfo,
3527 compiler_hints,
3528 allows_lazy_compilation,
3529 kAllowLazyCompilation)
3530 BOOL_ACCESSORS(SharedFunctionInfo,
3531 compiler_hints,
3532 uses_arguments,
3533 kUsesArguments)
3534 BOOL_ACCESSORS(SharedFunctionInfo,
3535 compiler_hints,
3536 has_duplicate_parameters,
3537 kHasDuplicateParameters)
3540 #if V8_HOST_ARCH_32_BIT
3541 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3542 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3543 kFormalParameterCountOffset)
3544 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3545 kExpectedNofPropertiesOffset)
3546 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3547 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3548 kStartPositionAndTypeOffset)
3549 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3550 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3551 kFunctionTokenPositionOffset)
3552 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3553 kCompilerHintsOffset)
3554 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3555 kThisPropertyAssignmentsCountOffset)
3556 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3557 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3558 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3560 #else
3561 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3562 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3563 int holder::name() { \
3564 int value = READ_INT_FIELD(this, offset); \
3565 ASSERT(kHeapObjectTag == 1); \
3566 ASSERT((value & kHeapObjectTag) == 0); \
3567 return value >> 1; \
3569 void holder::set_##name(int value) { \
3570 ASSERT(kHeapObjectTag == 1); \
3571 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3572 (value & 0xC0000000) == 0x000000000); \
3573 WRITE_INT_FIELD(this, \
3574 offset, \
3575 (value << 1) & ~kHeapObjectTag); \
3576 }
3578 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3579 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3580 INT_ACCESSORS(holder, name, offset)
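// Editor's illustrative sketch (not part of the original header): on 64-bit
// hosts two ints share one pointer-sized slot. The low (LO) half is stored
// shifted left by one so the heap-object tag bit of the whole word stays
// clear, while the high (HI) half is a plain int; the getters undo this.
static int SketchEncodePseudoSmiLo(int value) {
  return (value << 1) & ~1;  // what PSEUDO_SMI_ACCESSORS_LO's setter writes
}

static int SketchDecodePseudoSmiLo(int stored) {
  return stored >> 1;        // what the corresponding getter returns
}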
3583 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3584 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3585 formal_parameter_count,
3586 kFormalParameterCountOffset)
3588 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3589 expected_nof_properties,
3590 kExpectedNofPropertiesOffset)
3591 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3593 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3594 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3595 start_position_and_type,
3596 kStartPositionAndTypeOffset)
3598 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3599 function_token_position,
3600 kFunctionTokenPositionOffset)
3601 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3602 compiler_hints,
3603 kCompilerHintsOffset)
3605 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3606 this_property_assignments_count,
3607 kThisPropertyAssignmentsCountOffset)
3608 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3610 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3611 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3612 #endif
3615 int SharedFunctionInfo::construction_count() {
3616 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3620 void SharedFunctionInfo::set_construction_count(int value) {
3621 ASSERT(0 <= value && value < 256);
3622 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3626 BOOL_ACCESSORS(SharedFunctionInfo,
3627 compiler_hints,
3628 live_objects_may_exist,
3629 kLiveObjectsMayExist)
3632 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3633 return initial_map() != GetHeap()->undefined_value();
3637 BOOL_GETTER(SharedFunctionInfo,
3638 compiler_hints,
3639 optimization_disabled,
3640 kOptimizationDisabled)
3643 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3644 set_compiler_hints(BooleanBit::set(compiler_hints(),
3645 kOptimizationDisabled,
3646 disable));
3647 // If disabling optimizations we reflect that in the code object so
3648 // it will not be counted as optimizable code.
3649 if ((code()->kind() == Code::FUNCTION) && disable) {
3650 code()->set_optimizable(false);
3651 }
3652 }
3655 LanguageMode SharedFunctionInfo::language_mode() {
3656 int hints = compiler_hints();
3657 if (BooleanBit::get(hints, kExtendedModeFunction)) {
3658 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
3659 return EXTENDED_MODE;
3660 }
3661 return BooleanBit::get(hints, kStrictModeFunction)
3662 ? STRICT_MODE : CLASSIC_MODE;
3663 }
3666 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
3667 // We only allow language mode transitions that set the same language mode
3668 // again or go up in the chain:
3669 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3670 ASSERT(this->language_mode() == CLASSIC_MODE ||
3671 this->language_mode() == language_mode ||
3672 language_mode == EXTENDED_MODE);
3673 int hints = compiler_hints();
3674 hints = BooleanBit::set(
3675 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3676 hints = BooleanBit::set(
3677 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3678 set_compiler_hints(hints);
3679 }
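// Editor's illustrative sketch (not part of the original header): the language
// mode is derived from two compiler-hint bits, kStrictModeFunction and
// kExtendedModeFunction, where the extended bit is only ever set together with
// the strict bit.
static LanguageMode SketchDecodeLanguageMode(bool strict_bit,
                                             bool extended_bit) {
  if (extended_bit) {
    ASSERT(strict_bit);  // EXTENDED_MODE implies STRICT_MODE
    return EXTENDED_MODE;
  }
  return strict_bit ? STRICT_MODE : CLASSIC_MODE;
}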
3682 bool SharedFunctionInfo::is_classic_mode() {
3683 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
3686 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3687 kExtendedModeFunction)
3688 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3689 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3690 name_should_print_as_anonymous,
3691 kNameShouldPrintAsAnonymous)
3692 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3693 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3694 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3695 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
3696 kDontOptimize)
3697 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3699 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3700 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3702 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3704 bool Script::HasValidSource() {
3705 Object* src = this->source();
3706 if (!src->IsString()) return true;
3707 String* src_str = String::cast(src);
3708 if (!StringShape(src_str).IsExternal()) return true;
3709 if (src_str->IsAsciiRepresentation()) {
3710 return ExternalAsciiString::cast(src)->resource() != NULL;
3711 } else if (src_str->IsTwoByteRepresentation()) {
3712 return ExternalTwoByteString::cast(src)->resource() != NULL;
3713 }
3714 return true;
3715 }
3718 void SharedFunctionInfo::DontAdaptArguments() {
3719 ASSERT(code()->kind() == Code::BUILTIN);
3720 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3724 int SharedFunctionInfo::start_position() {
3725 return start_position_and_type() >> kStartPositionShift;
3729 void SharedFunctionInfo::set_start_position(int start_position) {
3730 set_start_position_and_type((start_position << kStartPositionShift)
3731 | (start_position_and_type() & ~kStartPositionMask));
3735 Code* SharedFunctionInfo::code() {
3736 return Code::cast(READ_FIELD(this, kCodeOffset));
3740 Code* SharedFunctionInfo::unchecked_code() {
3741 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3745 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3746 WRITE_FIELD(this, kCodeOffset, value);
3747 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3751 ScopeInfo* SharedFunctionInfo::scope_info() {
3752 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3756 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3757 WriteBarrierMode mode) {
3758 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3759 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3760 this,
3761 kScopeInfoOffset,
3762 reinterpret_cast<Object*>(value),
3763 mode);
3764 }
3767 bool SharedFunctionInfo::is_compiled() {
3768 return code() !=
3769 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3770 }
3773 bool SharedFunctionInfo::IsApiFunction() {
3774 return function_data()->IsFunctionTemplateInfo();
3778 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3779 ASSERT(IsApiFunction());
3780 return FunctionTemplateInfo::cast(function_data());
3784 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3785 return function_data()->IsSmi();
3789 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3790 ASSERT(HasBuiltinFunctionId());
3791 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3795 int SharedFunctionInfo::code_age() {
3796 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3800 void SharedFunctionInfo::set_code_age(int code_age) {
3801 int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3802 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3806 bool SharedFunctionInfo::has_deoptimization_support() {
3807 Code* code = this->code();
3808 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3812 bool JSFunction::IsBuiltin() {
3813 return context()->global()->IsJSBuiltinsObject();
3817 bool JSFunction::NeedsArgumentsAdaption() {
3818 return shared()->formal_parameter_count() !=
3819 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3823 bool JSFunction::IsOptimized() {
3824 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3828 bool JSFunction::IsOptimizable() {
3829 return code()->kind() == Code::FUNCTION && code()->optimizable();
3833 bool JSFunction::IsMarkedForLazyRecompilation() {
3834 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3838 Code* JSFunction::code() {
3839 return Code::cast(unchecked_code());
3843 Code* JSFunction::unchecked_code() {
3844 return reinterpret_cast<Code*>(
3845 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3849 void JSFunction::set_code(Code* value) {
3850 ASSERT(!HEAP->InNewSpace(value));
3851 Address entry = value->entry();
3852 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3853 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3854 this,
3855 HeapObject::RawField(this, kCodeEntryOffset),
3856 value);
3857 }
3860 void JSFunction::ReplaceCode(Code* code) {
3861 bool was_optimized = IsOptimized();
3862 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3863
3864 set_code(code);
3865
3866 // Add/remove the function from the list of optimized functions for this
3867 // context based on the state change.
3868 if (!was_optimized && is_optimized) {
3869 context()->global_context()->AddOptimizedFunction(this);
3870 }
3871 if (was_optimized && !is_optimized) {
3872 context()->global_context()->RemoveOptimizedFunction(this);
3873 }
3874 }
3877 Context* JSFunction::context() {
3878 return Context::cast(READ_FIELD(this, kContextOffset));
3882 Object* JSFunction::unchecked_context() {
3883 return READ_FIELD(this, kContextOffset);
3887 SharedFunctionInfo* JSFunction::unchecked_shared() {
3888 return reinterpret_cast<SharedFunctionInfo*>(
3889 READ_FIELD(this, kSharedFunctionInfoOffset));
3893 void JSFunction::set_context(Object* value) {
3894 ASSERT(value->IsUndefined() || value->IsContext());
3895 WRITE_FIELD(this, kContextOffset, value);
3896 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3899 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3900 kPrototypeOrInitialMapOffset)
3903 Map* JSFunction::initial_map() {
3904 return Map::cast(prototype_or_initial_map());
3908 void JSFunction::set_initial_map(Map* value) {
3909 set_prototype_or_initial_map(value);
3913 MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
3914 Map* initial_map) {
3915 Context* global_context = context()->global_context();
3916 Object* array_function =
3917 global_context->get(Context::ARRAY_FUNCTION_INDEX);
3918 if (array_function->IsJSFunction() &&
3919 this == JSFunction::cast(array_function)) {
3920 ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
3922 MaybeObject* maybe_map = initial_map->CopyDropTransitions();
3923 Map* new_double_map = NULL;
3924 if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
3925 new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
3926 maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
3927 new_double_map);
3928 if (maybe_map->IsFailure()) return maybe_map;
3930 maybe_map = new_double_map->CopyDropTransitions();
3931 Map* new_object_map = NULL;
3932 if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
3933 new_object_map->set_elements_kind(FAST_ELEMENTS);
3934 maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
3935 new_object_map);
3936 if (maybe_map->IsFailure()) return maybe_map;
3938 global_context->set_smi_js_array_map(initial_map);
3939 global_context->set_double_js_array_map(new_double_map);
3940 global_context->set_object_js_array_map(new_object_map);
3941 }
3942 set_initial_map(initial_map);
3943 return this;
3944 }
3947 bool JSFunction::has_initial_map() {
3948 return prototype_or_initial_map()->IsMap();
3952 bool JSFunction::has_instance_prototype() {
3953 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3957 bool JSFunction::has_prototype() {
3958 return map()->has_non_instance_prototype() || has_instance_prototype();
3962 Object* JSFunction::instance_prototype() {
3963 ASSERT(has_instance_prototype());
3964 if (has_initial_map()) return initial_map()->prototype();
3965 // When there is no initial map and the prototype is a JSObject, the
3966 // initial map field is used for the prototype field.
3967 return prototype_or_initial_map();
3971 Object* JSFunction::prototype() {
3972 ASSERT(has_prototype());
3973 // If the function's prototype property has been set to a non-JSObject
3974 // value, that value is stored in the constructor field of the map.
3975 if (map()->has_non_instance_prototype()) return map()->constructor();
3976 return instance_prototype();
3979 bool JSFunction::should_have_prototype() {
3980 return map()->function_with_prototype();
3984 bool JSFunction::is_compiled() {
3985 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
3989 FixedArray* JSFunction::literals() {
3990 ASSERT(!shared()->bound());
3991 return literals_or_bindings();
3995 void JSFunction::set_literals(FixedArray* literals) {
3996 ASSERT(!shared()->bound());
3997 set_literals_or_bindings(literals);
4001 FixedArray* JSFunction::function_bindings() {
4002 ASSERT(shared()->bound());
4003 return literals_or_bindings();
4007 void JSFunction::set_function_bindings(FixedArray* bindings) {
4008 ASSERT(shared()->bound());
4009 // Bound function literal may be initialized to the empty fixed array
4010 // before the bindings are set.
4011 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
4012 bindings->map() == GetHeap()->fixed_cow_array_map());
4013 set_literals_or_bindings(bindings);
4017 int JSFunction::NumberOfLiterals() {
4018 ASSERT(!shared()->bound());
4019 return literals()->length();
4023 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
4024 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4025 return READ_FIELD(this, OffsetOfFunctionWithId(id));
4029 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
4030 Object* value) {
4031 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4032 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
4033 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
4037 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
4038 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4039 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
4043 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
4044 Code* value) {
4045 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4046 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
4047 ASSERT(!HEAP->InNewSpace(value));
4051 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
4052 ACCESSORS(JSProxy, hash, Object, kHashOffset)
4053 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
4054 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
4057 void JSProxy::InitializeBody(int object_size, Object* value) {
4058 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
4059 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
4060 WRITE_FIELD(this, offset, value);
4065 ACCESSORS(JSSet, table, Object, kTableOffset)
4066 ACCESSORS(JSMap, table, Object, kTableOffset)
4067 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
4068 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4071 Address Foreign::foreign_address() {
4072 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
4076 void Foreign::set_foreign_address(Address value) {
4077 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
4081 ACCESSORS(JSValue, value, Object, kValueOffset)
4084 JSValue* JSValue::cast(Object* obj) {
4085 ASSERT(obj->IsJSValue());
4086 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
4087 return reinterpret_cast<JSValue*>(obj);
4091 ACCESSORS(JSDate, value, Object, kValueOffset)
4092 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4093 ACCESSORS(JSDate, year, Object, kYearOffset)
4094 ACCESSORS(JSDate, month, Object, kMonthOffset)
4095 ACCESSORS(JSDate, day, Object, kDayOffset)
4096 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4097 ACCESSORS(JSDate, hour, Object, kHourOffset)
4098 ACCESSORS(JSDate, min, Object, kMinOffset)
4099 ACCESSORS(JSDate, sec, Object, kSecOffset)
4102 JSDate* JSDate::cast(Object* obj) {
4103 ASSERT(obj->IsJSDate());
4104 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
4105 return reinterpret_cast<JSDate*>(obj);
4109 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4110 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
4111 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4112 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4113 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4114 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4115 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4118 JSMessageObject* JSMessageObject::cast(Object* obj) {
4119 ASSERT(obj->IsJSMessageObject());
4120 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
4121 return reinterpret_cast<JSMessageObject*>(obj);
4125 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4126 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
4127 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4128 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4129 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4130 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4131 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
4133 byte* Code::instruction_start() {
4134 return FIELD_ADDR(this, kHeaderSize);
4138 byte* Code::instruction_end() {
4139 return instruction_start() + instruction_size();
4143 int Code::body_size() {
4144 return RoundUp(instruction_size(), kObjectAlignment);
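// Worked example for the rounding above (illustrative only; the actual
// kObjectAlignment is platform-defined, 8 bytes is assumed here purely for
// the arithmetic): an instruction_size() of 37 would give
// RoundUp(37, 8) == 40, so body_size() reports a 40-byte, alignment-safe body.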
4148 FixedArray* Code::unchecked_deoptimization_data() {
4149 return reinterpret_cast<FixedArray*>(
4150 READ_FIELD(this, kDeoptimizationDataOffset));
4154 ByteArray* Code::unchecked_relocation_info() {
4155 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
4159 byte* Code::relocation_start() {
4160 return unchecked_relocation_info()->GetDataStartAddress();
4164 int Code::relocation_size() {
4165 return unchecked_relocation_info()->length();
4169 byte* Code::entry() {
4170 return instruction_start();
4174 bool Code::contains(byte* inner_pointer) {
4175 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4179 ACCESSORS(JSArray, length, Object, kLengthOffset)
4182 ACCESSORS(JSRegExp, data, Object, kDataOffset)
4185 JSRegExp::Type JSRegExp::TypeTag() {
4186 Object* data = this->data();
4187 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
4188 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4189 return static_cast<JSRegExp::Type>(smi->value());
4193 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
4194 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4195 return static_cast<JSRegExp::Type>(smi->value());
4199 int JSRegExp::CaptureCount() {
4200 switch (TypeTag()) {
4201 case ATOM:
4202 return 0;
4203 case IRREGEXP:
4204 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4205 default:
4206 UNREACHABLE();
4207 return -1;
4208 }
4209 }
4212 JSRegExp::Flags JSRegExp::GetFlags() {
4213 ASSERT(this->data()->IsFixedArray());
4214 Object* data = this->data();
4215 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4216 return Flags(smi->value());
4220 String* JSRegExp::Pattern() {
4221 ASSERT(this->data()->IsFixedArray());
4222 Object* data = this->data();
4223 String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
4224 return pattern;
4225 }
4228 Object* JSRegExp::DataAt(int index) {
4229 ASSERT(TypeTag() != NOT_COMPILED);
4230 return FixedArray::cast(data())->get(index);
4234 Object* JSRegExp::DataAtUnchecked(int index) {
4235 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4236 int offset = FixedArray::kHeaderSize + index * kPointerSize;
4237 return READ_FIELD(fa, offset);
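// Illustrative arithmetic for the raw read above (assuming a 32-bit build
// where kPointerSize == 4 and the FixedArray header, map pointer plus length
// field, is 8 bytes): index 2 yields offset == 8 + 2 * 4 == 16, i.e. the
// third data slot of the backing array.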
4241 void JSRegExp::SetDataAt(int index, Object* value) {
4242 ASSERT(TypeTag() != NOT_COMPILED);
4243 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4244 FixedArray::cast(data())->set(index, value);
4248 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4249 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4250 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4251 if (value->IsSmi()) {
4252 fa->set_unchecked(index, Smi::cast(value));
4253 } else {
4254 // We only do this during GC, so we don't need to notify the write barrier.
4255 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4260 ElementsKind JSObject::GetElementsKind() {
4261 ElementsKind kind = map()->elements_kind();
4262 #if DEBUG
4263 FixedArrayBase* fixed_array =
4264 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4265 Map* map = fixed_array->map();
4266 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4267 (map == GetHeap()->fixed_array_map() ||
4268 map == GetHeap()->fixed_cow_array_map())) ||
4269 (kind == FAST_DOUBLE_ELEMENTS &&
4270 (fixed_array->IsFixedDoubleArray() ||
4271 fixed_array == GetHeap()->empty_fixed_array())) ||
4272 (kind == DICTIONARY_ELEMENTS &&
4273 fixed_array->IsFixedArray() &&
4274 fixed_array->IsDictionary()) ||
4275 (kind > DICTIONARY_ELEMENTS));
4276 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4277 (elements()->IsFixedArray() && elements()->length() >= 2));
4278 #endif
4279 return kind;
4280 }
4283 ElementsAccessor* JSObject::GetElementsAccessor() {
4284 return ElementsAccessor::ForKind(GetElementsKind());
4288 bool JSObject::HasFastElements() {
4289 return GetElementsKind() == FAST_ELEMENTS;
4293 bool JSObject::HasFastSmiOnlyElements() {
4294 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4298 bool JSObject::HasFastTypeElements() {
4299 ElementsKind elements_kind = GetElementsKind();
4300 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4301 elements_kind == FAST_ELEMENTS;
4305 bool JSObject::HasFastDoubleElements() {
4306 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4310 bool JSObject::HasDictionaryElements() {
4311 return GetElementsKind() == DICTIONARY_ELEMENTS;
4315 bool JSObject::HasNonStrictArgumentsElements() {
4316 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4320 bool JSObject::HasExternalArrayElements() {
4321 HeapObject* array = elements();
4322 ASSERT(array != NULL);
4323 return array->IsExternalArray();
4327 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4328 bool JSObject::HasExternal##name##Elements() { \
4329 HeapObject* array = elements(); \
4330 ASSERT(array != NULL); \
4331 if (!array->IsHeapObject()) \
4332 return false; \
4333 return array->map()->instance_type() == type; \
4337 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
4338 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
4339 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
4340 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4341 EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
4342 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
4343 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4344 EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
4345 EXTERNAL_ELEMENTS_CHECK(Float,
4346 EXTERNAL_FLOAT_ARRAY_TYPE)
4347 EXTERNAL_ELEMENTS_CHECK(Double,
4348 EXTERNAL_DOUBLE_ARRAY_TYPE)
4349 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4352 bool JSObject::HasNamedInterceptor() {
4353 return map()->has_named_interceptor();
4357 bool JSObject::HasIndexedInterceptor() {
4358 return map()->has_indexed_interceptor();
4362 MaybeObject* JSObject::EnsureWritableFastElements() {
4363 ASSERT(HasFastTypeElements());
4364 FixedArray* elems = FixedArray::cast(elements());
4365 Isolate* isolate = GetIsolate();
4366 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4367 Object* writable_elems;
4368 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4369 elems, isolate->heap()->fixed_array_map());
4370 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4371 return maybe_writable_elems;
4374 set_elements(FixedArray::cast(writable_elems));
4375 isolate->counters()->cow_arrays_converted()->Increment();
4376 return writable_elems;
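// Minimal usage sketch for the copy-on-write handling above (hypothetical
// call site, not code from this file): a caller about to store into the
// backing array would typically do
//   MaybeObject* maybe = object->EnsureWritableFastElements();
//   if (maybe->IsFailure()) return maybe;
// so a shared fixed_cow_array_map backing store is replaced by a private,
// writable copy at most once before the in-place write.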
4380 StringDictionary* JSObject::property_dictionary() {
4381 ASSERT(!HasFastProperties());
4382 return StringDictionary::cast(properties());
4386 SeededNumberDictionary* JSObject::element_dictionary() {
4387 ASSERT(HasDictionaryElements());
4388 return SeededNumberDictionary::cast(elements());
4392 bool String::IsHashFieldComputed(uint32_t field) {
4393 return (field & kHashNotComputedMask) == 0;
4397 bool String::HasHashCode() {
4398 return IsHashFieldComputed(hash_field());
4402 uint32_t String::Hash() {
4403 // Fast case: has hash code already been computed?
4404 uint32_t field = hash_field();
4405 if (IsHashFieldComputed(field)) return field >> kHashShift;
4406 // Slow case: compute hash code and set it.
4407 return ComputeAndSetHash();
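// Cost sketch for the two paths above: the fast path is one hash_field()
// load plus a shift (field >> kHashShift), while the slow path scans the
// characters once through the StringHasher code below and caches the result
// in hash_field(), so repeated Hash() calls on the same string stay O(1).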
4411 StringHasher::StringHasher(int length, uint32_t seed)
4412 : length_(length),
4413 raw_running_hash_(seed),
4414 array_index_(0),
4415 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4416 is_first_char_(true),
4417 is_valid_(true) {
4418 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
4422 bool StringHasher::has_trivial_hash() {
4423 return length_ > String::kMaxHashCalcLength;
4427 void StringHasher::AddCharacter(uint32_t c) {
4428 if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4429 AddSurrogatePair(c); // Not inlined.
4430 return;
4431 }
4432 // Use the Jenkins one-at-a-time hash function to update the hash
4433 // for the given character.
4434 raw_running_hash_ += c;
4435 raw_running_hash_ += (raw_running_hash_ << 10);
4436 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4437 // Incremental array index computation.
4438 if (is_array_index_) {
4439 if (c < '0' || c > '9') {
4440 is_array_index_ = false;
4441 } else {
4442 int d = c - '0';
4443 if (is_first_char_) {
4444 is_first_char_ = false;
4445 if (c == '0' && length_ > 1) {
4446 is_array_index_ = false;
4447 return;
4448 }
4449 }
4450 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4451 is_array_index_ = false;
4452 } else {
4453 array_index_ = array_index_ * 10 + d;
4454 }
4455 }
4456 }
4457 }
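// Notes on the index accumulation above (illustrative reasoning, not taken
// from the original comments): 429496729 is 2^32 / 10 rounded down, and
// ((d + 2) >> 3) is 1 exactly when the incoming digit d is 6..9, so the test
// conservatively leaves array-index mode whenever array_index_ * 10 + d
// could no longer fit in 32 bits. For example, with array_index_ ==
// 429496729 a trailing digit of 5 is still accepted, while a 6 would
// overflow (4294967296) and switches is_array_index_ off.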
4460 void StringHasher::AddCharacterNoIndex(uint32_t c) {
4461 ASSERT(!is_array_index());
4462 if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4463 AddSurrogatePairNoIndex(c); // Not inlined.
4464 return;
4465 }
4466 raw_running_hash_ += c;
4467 raw_running_hash_ += (raw_running_hash_ << 10);
4468 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4472 uint32_t StringHasher::GetHash() {
4473 // Get the calculated raw hash value and do some more bit ops to distribute
4474 // the hash further. Ensure that we never return zero as the hash value.
4475 uint32_t result = raw_running_hash_;
4476 result += (result << 3);
4477 result ^= (result >> 11);
4478 result += (result << 15);
4479 if ((result & String::kHashBitMask) == 0) {
4480 result = 27;
4481 }
4482 return result;
4483 }
4486 template <typename schar>
4487 uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
4488 StringHasher hasher(length, seed);
4489 if (!hasher.has_trivial_hash()) {
4490 int i;
4491 for (i = 0; hasher.is_array_index() && (i < length); i++) {
4492 hasher.AddCharacter(chars[i]);
4493 }
4494 for (; i < length; i++) {
4495 hasher.AddCharacterNoIndex(chars[i]);
4496 }
4497 }
4498 return hasher.GetHashField();
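// Usage sketch for the two-pass loop above (hypothetical call; some_seed
// stands in for whatever uint32_t seed the embedder's heap provides):
//   const char chars[] = {'1', '2', '3'};
//   uint32_t field = HashSequentialString(chars, 3, some_seed);
// Every character of "123" is a digit, so is_array_index() stays true and
// the returned hash field encodes both the string hash and the numeric
// index 123, which String::AsArrayIndex() below relies on.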
4502 bool String::AsArrayIndex(uint32_t* index) {
4503 uint32_t field = hash_field();
4504 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
4505 return false;
4506 }
4507 return SlowAsArrayIndex(index);
4511 Object* JSReceiver::GetPrototype() {
4512 return HeapObject::cast(this)->map()->prototype();
4516 bool JSReceiver::HasProperty(String* name) {
4517 if (IsJSProxy()) {
4518 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4519 }
4520 return GetPropertyAttribute(name) != ABSENT;
4524 bool JSReceiver::HasLocalProperty(String* name) {
4525 if (IsJSProxy()) {
4526 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4527 }
4528 return GetLocalPropertyAttribute(name) != ABSENT;
4532 PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
4533 return GetPropertyAttributeWithReceiver(this, key);
4536 // TODO(504): this may be useful in other places too where JSGlobalProxy
4537 // is used.
4538 Object* JSObject::BypassGlobalProxy() {
4539 if (IsJSGlobalProxy()) {
4540 Object* proto = GetPrototype();
4541 if (proto->IsNull()) return GetHeap()->undefined_value();
4542 ASSERT(proto->IsJSGlobalObject());
4543 return proto;
4544 }
4545 return this;
4546 }
4549 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
4550 return IsJSProxy()
4551 ? JSProxy::cast(this)->GetIdentityHash(flag)
4552 : JSObject::cast(this)->GetIdentityHash(flag);
4556 bool JSReceiver::HasElement(uint32_t index) {
4557 if (IsJSProxy()) {
4558 return JSProxy::cast(this)->HasElementWithHandler(index);
4559 }
4560 return JSObject::cast(this)->HasElementWithReceiver(this, index);
4564 bool AccessorInfo::all_can_read() {
4565 return BooleanBit::get(flag(), kAllCanReadBit);
4569 void AccessorInfo::set_all_can_read(bool value) {
4570 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4574 bool AccessorInfo::all_can_write() {
4575 return BooleanBit::get(flag(), kAllCanWriteBit);
4579 void AccessorInfo::set_all_can_write(bool value) {
4580 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4584 bool AccessorInfo::prohibits_overwriting() {
4585 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4589 void AccessorInfo::set_prohibits_overwriting(bool value) {
4590 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4594 PropertyAttributes AccessorInfo::property_attributes() {
4595 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4599 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4600 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
4604 template<typename Shape, typename Key>
4605 void Dictionary<Shape, Key>::SetEntry(int entry,
4606 Object* key,
4607 Object* value) {
4608 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4612 template<typename Shape, typename Key>
4613 void Dictionary<Shape, Key>::SetEntry(int entry,
4614 Object* key,
4615 Object* value,
4616 PropertyDetails details) {
4617 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
4618 int index = HashTable<Shape, Key>::EntryToIndex(entry);
4619 AssertNoAllocation no_gc;
4620 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
4621 FixedArray::set(index, key, mode);
4622 FixedArray::set(index+1, value, mode);
4623 FixedArray::set(index+2, details.AsSmi());
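// Layout implied by the three writes above (a sketch for these dictionaries,
// which use three slots per entry): starting at EntryToIndex(entry) the
// backing FixedArray holds [key, value, details-as-Smi], so the details Smi
// always sits two slots after the key for the same entry.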
4627 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4628 ASSERT(other->IsNumber());
4629 return key == static_cast<uint32_t>(other->Number());
4633 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
4634 return ComputeIntegerHash(key, 0);
4638 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
4639 Object* other) {
4640 ASSERT(other->IsNumber());
4641 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
4644 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
4645 return ComputeIntegerHash(key, seed);
4648 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
4649 uint32_t seed,
4650 Object* other) {
4651 ASSERT(other->IsNumber());
4652 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
4655 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4656 return Isolate::Current()->heap()->NumberFromUint32(key);
4660 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4661 // We know that all entries in a hash table had their hash keys created.
4662 // Use that knowledge to have fast failure.
4663 if (key->Hash() != String::cast(other)->Hash()) return false;
4664 return key->Equals(String::cast(other));
4668 uint32_t StringDictionaryShape::Hash(String* key) {
4669 return key->Hash();
4670 }
4673 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4674 return String::cast(other)->Hash();
4678 MaybeObject* StringDictionaryShape::AsObject(String* key) {
4679 return key;
4680 }
4683 template <int entrysize>
4684 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
4685 return key->SameValue(other);
4689 template <int entrysize>
4690 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
4691 MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
4692 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4696 template <int entrysize>
4697 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
4698 Object* other) {
4699 MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
4700 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4704 template <int entrysize>
4705 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
4706 return key;
4707 }
4710 void Map::ClearCodeCache(Heap* heap) {
4711 // No write barrier is needed since empty_fixed_array is not in new space.
4712 // Please note this function is used during marking:
4713 // - MarkCompactCollector::MarkUnmarkedObject
4714 // - IncrementalMarking::Step
4715 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4716 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4720 void JSArray::EnsureSize(int required_size) {
4721 ASSERT(HasFastTypeElements());
4722 FixedArray* elts = FixedArray::cast(elements());
4723 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4724 if (elts->length() < required_size) {
4725 // Doubling in size would be overkill, but leave some slack to avoid
4726 // constantly growing.
4727 Expand(required_size + (required_size >> 3));
4728 // It's a performance benefit to keep a frequently used array in new-space.
4729 } else if (!GetHeap()->new_space()->Contains(elts) &&
4730 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4731 // Expand will allocate a new backing store in new space even if the size
4732 // we asked for isn't larger than what we had before.
4733 Expand(required_size);
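// Worked example of the slack policy above (illustrative numbers only): a
// request for 64 elements against a shorter backing store grows it to
// 64 + (64 >> 3) == 72 elements, i.e. 12.5% headroom instead of doubling,
// while a small array whose backing store has left new space is simply
// rebuilt there at the requested size.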
4738 void JSArray::set_length(Smi* length) {
4739 // Don't need a write barrier for a Smi.
4740 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4744 bool JSArray::AllowsSetElementsLength() {
4745 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
4746 ASSERT(result == !HasExternalArrayElements());
4747 return result;
4748 }
4751 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
4752 MaybeObject* maybe_result = EnsureCanContainElements(
4753 storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
4754 if (maybe_result->IsFailure()) return maybe_result;
4755 ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
4756 GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
4757 ((storage->map() != GetHeap()->fixed_double_array_map()) &&
4758 ((GetElementsKind() == FAST_ELEMENTS) ||
4759 (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
4760 FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
4761 set_elements(storage);
4762 set_length(Smi::FromInt(storage->length()));
4763 return this;
4764 }
4767 MaybeObject* FixedArray::Copy() {
4768 if (length() == 0) return this;
4769 return GetHeap()->CopyFixedArray(this);
4773 MaybeObject* FixedDoubleArray::Copy() {
4774 if (length() == 0) return this;
4775 return GetHeap()->CopyFixedDoubleArray(this);
4779 void TypeFeedbackCells::SetAstId(int index, Smi* id) {
4780 set(1 + index * 2, id);
4784 Smi* TypeFeedbackCells::AstId(int index) {
4785 return Smi::cast(get(1 + index * 2));
4789 void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
4790 set(index * 2, cell);
4794 JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
4795 return JSGlobalPropertyCell::cast(get(index * 2));
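// The four accessors above imply an interleaved pair layout (sketch): slot
// 2 * i holds the JSGlobalPropertyCell for entry i and slot 2 * i + 1 holds
// its AST id Smi, so entry 0 lives in slots {0, 1} and entry 1 in {2, 3}.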
4799 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
4800 return isolate->factory()->the_hole_value();
4804 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
4805 return isolate->factory()->undefined_value();
4809 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
4810 return heap->raw_unchecked_the_hole_value();
4814 SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
4815 SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
4816 kIcWithTypeinfoCountOffset)
4817 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
4818 kTypeFeedbackCellsOffset)
4821 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
4824 Relocatable::Relocatable(Isolate* isolate) {
4825 ASSERT(isolate == Isolate::Current());
4826 isolate_ = isolate;
4827 prev_ = isolate->relocatable_top();
4828 isolate->set_relocatable_top(this);
4832 Relocatable::~Relocatable() {
4833 ASSERT(isolate_ == Isolate::Current());
4834 ASSERT_EQ(isolate_->relocatable_top(), this);
4835 isolate_->set_relocatable_top(prev_);
4839 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
4840 return map->instance_size();
4844 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4845 v->VisitExternalReference(
4846 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4850 template<typename StaticVisitor>
4851 void Foreign::ForeignIterateBody() {
4852 StaticVisitor::VisitExternalReference(
4853 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4857 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4858 typedef v8::String::ExternalAsciiStringResource Resource;
4859 v->VisitExternalAsciiString(
4860 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4864 template<typename StaticVisitor>
4865 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4866 typedef v8::String::ExternalAsciiStringResource Resource;
4867 StaticVisitor::VisitExternalAsciiString(
4868 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4872 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4873 typedef v8::String::ExternalStringResource Resource;
4874 v->VisitExternalTwoByteString(
4875 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4879 template<typename StaticVisitor>
4880 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4881 typedef v8::String::ExternalStringResource Resource;
4882 StaticVisitor::VisitExternalTwoByteString(
4883 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4886 #define SLOT_ADDR(obj, offset) \
4887 reinterpret_cast<Object**>((obj)->address() + offset)
4889 template<int start_offset, int end_offset, int size>
4890 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
4891 HeapObject* obj,
4892 ObjectVisitor* v) {
4893 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
4897 template<int start_offset>
4898 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
4899 int object_size,
4900 ObjectVisitor* v) {
4901 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
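// Expansion sketch for the macro used above: for an object whose address()
// is A, SLOT_ADDR(obj, start_offset) is just
// reinterpret_cast<Object**>(A + start_offset), so each IterateBody() hands
// the visitor the contiguous range of tagged slots between the two offsets.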
4907 #undef CAST_ACCESSOR
4908 #undef INT_ACCESSORS
4909 #undef ACCESSORS
4910 #undef ACCESSORS_TO_SMI
4911 #undef SMI_ACCESSORS
4912 #undef BOOL_GETTER
4913 #undef BOOL_ACCESSORS
4914 #undef FIELD_ADDR
4915 #undef READ_FIELD
4916 #undef WRITE_FIELD
4917 #undef WRITE_BARRIER
4918 #undef CONDITIONAL_WRITE_BARRIER
4919 #undef READ_DOUBLE_FIELD
4920 #undef WRITE_DOUBLE_FIELD
4921 #undef READ_INT_FIELD
4922 #undef WRITE_INT_FIELD
4923 #undef READ_INTPTR_FIELD
4924 #undef WRITE_INTPTR_FIELD
4925 #undef READ_UINT32_FIELD
4926 #undef WRITE_UINT32_FIELD
4927 #undef READ_SHORT_FIELD
4928 #undef WRITE_SHORT_FIELD
4929 #undef READ_BYTE_FIELD
4930 #undef WRITE_BYTE_FIELD
4933 } } // namespace v8::internal
4935 #endif // V8_OBJECTS_INL_H_