// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }


#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


#define INT_ACCESSORS(holder, name, offset)                     \
  int holder::name() { return READ_INT_FIELD(this, offset); }   \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
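// Usage sketch: each accessor macro is expanded once per field to define the
// inline accessors declared on the holder class in objects.h. For example,
//   ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
// (used further down in this file) defines JSObject::properties() and
// JSObject::set_properties(FixedArray* value, WriteBarrierMode mode).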
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }
// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


#define BOOL_GETTER(holder, field, name, offset)        \
  bool holder::name() {                                 \
    return BooleanBit::get(field(), offset);            \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
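// Note: unlike the field accessors above, BOOL_GETTER/BOOL_ACCESSORS do not
// take a raw byte offset into the object; here |offset| is a bit index within
// an existing field reached through field()/set_##field(), and the BooleanBit
// helpers pack the flag into that single bit.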
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
                                         ElementsKind to_kind) {
  if (to_kind == FAST_ELEMENTS) {
    return from_kind == FAST_SMI_ONLY_ELEMENTS ||
        from_kind == FAST_DOUBLE_ELEMENTS;
  } else {
    return to_kind == FAST_DOUBLE_ELEMENTS &&
        from_kind == FAST_SMI_ONLY_ELEMENTS;
  }
}
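// Put differently, generalization only moves along the chain
// FAST_SMI_ONLY_ELEMENTS -> FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS; any other
// from/to pair (including equal kinds) is not a generalizing transition.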
143 bool Object::IsFixedArrayBase() {
144 return IsFixedArray() || IsFixedDoubleArray();
bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}
167 bool Object::IsSmi() {
168 return HAS_SMI_TAG(this);
172 bool Object::IsHeapObject() {
173 return Internals::HasHeapObjectTag(this);
177 bool Object::NonFailureIsHeapObject() {
178 ASSERT(!this->IsFailure());
179 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
183 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
186 bool Object::IsString() {
187 return Object::IsHeapObject()
188 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
192 bool Object::IsSpecObject() {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
198 bool Object::IsSpecFunction() {
199 if (!Object::IsHeapObject()) return false;
200 InstanceType type = HeapObject::cast(this)->map()->instance_type();
201 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
205 bool Object::IsSymbol() {
206 if (!this->IsHeapObject()) return false;
207 uint32_t type = HeapObject::cast(this)->map()->instance_type();
208 // Because the symbol tag is non-zero and no non-string types have the
209 // symbol bit set we can test for symbols with a very simple test
211 STATIC_ASSERT(kSymbolTag != 0);
212 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
213 return (type & kIsSymbolMask) != 0;
217 bool Object::IsConsString() {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();
223 bool Object::IsSlicedString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();
229 bool Object::IsSeqString() {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();
235 bool Object::IsSeqAsciiString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsAsciiRepresentation();
242 bool Object::IsSeqTwoByteString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();
249 bool Object::IsExternalString() {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();
255 bool Object::IsExternalAsciiString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsAsciiRepresentation();
262 bool Object::IsExternalTwoByteString() {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
273 StringShape::StringShape(String* str)
274 : type_(str->map()->instance_type()) {
276 ASSERT((type_ & kIsNotStringMask) == kStringTag);
280 StringShape::StringShape(Map* map)
281 : type_(map->instance_type()) {
283 ASSERT((type_ & kIsNotStringMask) == kStringTag);
287 StringShape::StringShape(InstanceType t)
288 : type_(static_cast<uint32_t>(t)) {
290 ASSERT((type_ & kIsNotStringMask) == kStringTag);
294 bool StringShape::IsSymbol() {
296 STATIC_ASSERT(kSymbolTag != 0);
297 return (type_ & kIsSymbolMask) != 0;
301 bool String::IsAsciiRepresentation() {
302 uint32_t type = map()->instance_type();
303 return (type & kStringEncodingMask) == kAsciiStringTag;
307 bool String::IsTwoByteRepresentation() {
308 uint32_t type = map()->instance_type();
309 return (type & kStringEncodingMask) == kTwoByteStringTag;
313 bool String::IsAsciiRepresentationUnderneath() {
314 uint32_t type = map()->instance_type();
315 STATIC_ASSERT(kIsIndirectStringTag != 0);
316 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
318 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
319 case kAsciiStringTag:
321 case kTwoByteStringTag:
323 default: // Cons or sliced string. Need to go deeper.
324 return GetUnderlying()->IsAsciiRepresentation();
329 bool String::IsTwoByteRepresentationUnderneath() {
330 uint32_t type = map()->instance_type();
331 STATIC_ASSERT(kIsIndirectStringTag != 0);
332 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
334 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
335 case kAsciiStringTag:
337 case kTwoByteStringTag:
339 default: // Cons or sliced string. Need to go deeper.
340 return GetUnderlying()->IsTwoByteRepresentation();
345 bool String::HasOnlyAsciiChars() {
346 uint32_t type = map()->instance_type();
347 return (type & kStringEncodingMask) == kAsciiStringTag ||
348 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
352 bool StringShape::IsCons() {
353 return (type_ & kStringRepresentationMask) == kConsStringTag;
357 bool StringShape::IsSliced() {
358 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
362 bool StringShape::IsIndirect() {
363 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
367 bool StringShape::IsExternal() {
368 return (type_ & kStringRepresentationMask) == kExternalStringTag;
372 bool StringShape::IsSequential() {
373 return (type_ & kStringRepresentationMask) == kSeqStringTag;
377 StringRepresentationTag StringShape::representation_tag() {
378 uint32_t tag = (type_ & kStringRepresentationMask);
379 return static_cast<StringRepresentationTag>(tag);
383 uint32_t StringShape::encoding_tag() {
384 return type_ & kStringEncodingMask;
388 uint32_t StringShape::full_representation_tag() {
389 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
393 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
394 Internals::kFullStringRepresentationMask);
397 bool StringShape::IsSequentialAscii() {
398 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
402 bool StringShape::IsSequentialTwoByte() {
403 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
407 bool StringShape::IsExternalAscii() {
408 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
412 bool StringShape::IsExternalTwoByte() {
413 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
417 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
418 Internals::kExternalTwoByteRepresentationTag);
uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
431 bool Object::IsNumber() {
432 return IsSmi() || IsHeapNumber();
436 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
437 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
440 bool Object::IsFiller() {
441 if (!Object::IsHeapObject()) return false;
442 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
443 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
447 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
450 bool Object::IsExternalArray() {
451 if (!Object::IsHeapObject())
453 InstanceType instance_type =
454 HeapObject::cast(this)->map()->instance_type();
455 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
456 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
460 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
461 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
462 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
463 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
464 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
465 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
466 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
467 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
470 bool MaybeObject::IsFailure() {
471 return HAS_FAILURE_TAG(this);
475 bool MaybeObject::IsRetryAfterGC() {
476 return HAS_FAILURE_TAG(this)
477 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
481 bool MaybeObject::IsOutOfMemory() {
482 return HAS_FAILURE_TAG(this)
483 && Failure::cast(this)->IsOutOfMemoryException();
487 bool MaybeObject::IsException() {
488 return this == Failure::Exception();
492 bool MaybeObject::IsTheHole() {
493 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
497 Failure* Failure::cast(MaybeObject* obj) {
498 ASSERT(HAS_FAILURE_TAG(obj));
499 return reinterpret_cast<Failure*>(obj);
503 bool Object::IsJSReceiver() {
504 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
505 return IsHeapObject() &&
506 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
510 bool Object::IsJSObject() {
511 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
512 return IsHeapObject() &&
513 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
517 bool Object::IsJSProxy() {
518 if (!Object::IsHeapObject()) return false;
519 InstanceType type = HeapObject::cast(this)->map()->instance_type();
520 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
524 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
525 TYPE_CHECKER(JSSet, JS_SET_TYPE)
526 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
527 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
528 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
529 TYPE_CHECKER(Map, MAP_TYPE)
530 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
531 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
534 bool Object::IsDescriptorArray() {
535 return IsFixedArray();
539 bool Object::IsDeoptimizationInputData() {
540 // Must be a fixed array.
541 if (!IsFixedArray()) return false;
543 // There's no sure way to detect the difference between a fixed array and
544 // a deoptimization data array. Since this is used for asserts we can
545 // check that the length is zero or else the fixed size plus a multiple of
547 int length = FixedArray::cast(this)->length();
548 if (length == 0) return true;
550 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
551 return length >= 0 &&
552 length % DeoptimizationInputData::kDeoptEntrySize == 0;
556 bool Object::IsDeoptimizationOutputData() {
557 if (!IsFixedArray()) return false;
558 // There's actually no way to see the difference between a fixed array and
559 // a deoptimization data array. Since this is used for asserts we can check
560 // that the length is plausible though.
561 if (FixedArray::cast(this)->length() % 2 != 0) return false;
566 bool Object::IsTypeFeedbackCells() {
567 if (!IsFixedArray()) return false;
568 // There's actually no way to see the difference between a fixed array and
569 // a cache cells array. Since this is used for asserts we can check that
570 // the length is plausible though.
571 if (FixedArray::cast(this)->length() % 2 != 0) return false;
576 bool Object::IsContext() {
577 if (Object::IsHeapObject()) {
578 Map* map = HeapObject::cast(this)->map();
579 Heap* heap = map->GetHeap();
580 return (map == heap->function_context_map() ||
581 map == heap->catch_context_map() ||
582 map == heap->with_context_map() ||
583 map == heap->global_context_map() ||
584 map == heap->block_context_map() ||
585 map == heap->module_context_map());
591 bool Object::IsGlobalContext() {
592 return Object::IsHeapObject() &&
593 HeapObject::cast(this)->map() ==
594 HeapObject::cast(this)->GetHeap()->global_context_map();
598 bool Object::IsModuleContext() {
599 return Object::IsHeapObject() &&
600 HeapObject::cast(this)->map() ==
601 HeapObject::cast(this)->GetHeap()->module_context_map();
605 bool Object::IsScopeInfo() {
606 return Object::IsHeapObject() &&
607 HeapObject::cast(this)->map() ==
608 HeapObject::cast(this)->GetHeap()->scope_info_map();
612 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
615 template <> inline bool Is<JSFunction>(Object* obj) {
616 return obj->IsJSFunction();
620 TYPE_CHECKER(Code, CODE_TYPE)
621 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
622 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
623 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
624 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
625 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
626 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
627 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
630 bool Object::IsStringWrapper() {
631 return IsJSValue() && JSValue::cast(this)->value()->IsString();
635 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
638 bool Object::IsBoolean() {
639 return IsOddball() &&
640 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
644 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
645 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
648 template <> inline bool Is<JSArray>(Object* obj) {
649 return obj->IsJSArray();
653 bool Object::IsHashTable() {
654 return Object::IsHeapObject() &&
655 HeapObject::cast(this)->map() ==
656 HeapObject::cast(this)->GetHeap()->hash_table_map();
660 bool Object::IsDictionary() {
661 return IsHashTable() &&
662 this != HeapObject::cast(this)->GetHeap()->symbol_table();
666 bool Object::IsSymbolTable() {
667 return IsHashTable() && this ==
668 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
672 bool Object::IsJSFunctionResultCache() {
673 if (!IsFixedArray()) return false;
674 FixedArray* self = FixedArray::cast(this);
675 int length = self->length();
676 if (length < JSFunctionResultCache::kEntriesIndex) return false;
677 if ((length - JSFunctionResultCache::kEntriesIndex)
678 % JSFunctionResultCache::kEntrySize != 0) {
682 if (FLAG_verify_heap) {
683 reinterpret_cast<JSFunctionResultCache*>(this)->
684 JSFunctionResultCacheVerify();
691 bool Object::IsNormalizedMapCache() {
692 if (!IsFixedArray()) return false;
693 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
697 if (FLAG_verify_heap) {
698 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
705 bool Object::IsCompilationCacheTable() {
706 return IsHashTable();
710 bool Object::IsCodeCacheHashTable() {
711 return IsHashTable();
715 bool Object::IsPolymorphicCodeCacheHashTable() {
716 return IsHashTable();
720 bool Object::IsMapCache() {
721 return IsHashTable();
725 bool Object::IsPrimitive() {
726 return IsOddball() || IsNumber() || IsString();
730 bool Object::IsJSGlobalProxy() {
731 bool result = IsHeapObject() &&
732 (HeapObject::cast(this)->map()->instance_type() ==
733 JS_GLOBAL_PROXY_TYPE);
734 ASSERT(!result || IsAccessCheckNeeded());
739 bool Object::IsGlobalObject() {
740 if (!IsHeapObject()) return false;
742 InstanceType type = HeapObject::cast(this)->map()->instance_type();
743 return type == JS_GLOBAL_OBJECT_TYPE ||
744 type == JS_BUILTINS_OBJECT_TYPE;
748 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
749 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
752 bool Object::IsUndetectableObject() {
753 return IsHeapObject()
754 && HeapObject::cast(this)->map()->is_undetectable();
758 bool Object::IsAccessCheckNeeded() {
759 return IsHeapObject()
760 && HeapObject::cast(this)->map()->is_access_check_needed();
764 bool Object::IsStruct() {
765 if (!IsHeapObject()) return false;
766 switch (HeapObject::cast(this)->map()->instance_type()) {
767 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
768 STRUCT_LIST(MAKE_STRUCT_CASE)
769 #undef MAKE_STRUCT_CASE
770 default: return false;
775 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
776 bool Object::Is##Name() { \
777 return Object::IsHeapObject() \
778 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
780 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
781 #undef MAKE_STRUCT_PREDICATE
784 bool Object::IsUndefined() {
785 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
789 bool Object::IsNull() {
790 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
794 bool Object::IsTheHole() {
795 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
799 bool Object::IsTrue() {
800 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
804 bool Object::IsFalse() {
805 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
809 bool Object::IsArgumentsMarker() {
810 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
}


MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}
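// Illustration: a HeapNumber holding 3.0 round-trips through FastD2I/FastI2D
// unchanged and lies in Smi range, so it converts to Smi 3; 3.5 or a value
// outside the Smi range fails the check and falls through to
// Failure::Exception().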
840 bool Object::HasSpecificClassOf(String* name) {
841 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
845 MaybeObject* Object::GetElement(uint32_t index) {
846 // GetElement can trigger a getter which can cause allocation.
847 // This was not always the case. This ASSERT is here to catch
848 // leftover incorrect uses.
849 ASSERT(HEAP->IsAllocationAllowed());
850 return GetElementWithReceiver(this, index);
854 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
855 MaybeObject* maybe = GetElementWithReceiver(this, index);
856 ASSERT(!maybe->IsFailure());
857 Object* result = NULL; // Initialization to please compiler.
858 maybe->ToObject(&result);
863 MaybeObject* Object::GetProperty(String* key) {
864 PropertyAttributes attributes;
865 return GetPropertyWithReceiver(this, key, &attributes);
869 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
870 return GetPropertyWithReceiver(this, key, attributes);
874 #define FIELD_ADDR(p, offset) \
875 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
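// Note: tagged heap object pointers carry kHeapObjectTag in their low bits, so
// FIELD_ADDR subtracts the tag before adding the field offset to form the real
// address. All READ_*/WRITE_* field macros below are built on top of it.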
877 #define READ_FIELD(p, offset) \
878 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
880 #define WRITE_FIELD(p, offset, value) \
881 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }
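// Both barrier macros do two jobs: they report the store to incremental
// marking (RecordWrite on the marking state) and, if the stored value lives in
// new space, they record the slot so the next scavenge can find the
// old-to-new pointer. CONDITIONAL_WRITE_BARRIER only does this when the caller
// passes UPDATE_WRITE_BARRIER; callers passing SKIP_WRITE_BARRIER must know
// the barrier is unnecessary (see HeapObject::GetWriteBarrierMode below).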
899 #ifndef V8_TARGET_ARCH_MIPS
900 #define READ_DOUBLE_FIELD(p, offset) \
901 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
902 #else // V8_TARGET_ARCH_MIPS
903 // Prevent gcc from using load-double (mips ldc1) on (possibly)
904 // non-64-bit aligned HeapNumber::value.
static inline double read_double_field(void* p, int offset) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
  return c.d;
}

#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
915 #endif // V8_TARGET_ARCH_MIPS
917 #ifndef V8_TARGET_ARCH_MIPS
918 #define WRITE_DOUBLE_FIELD(p, offset, value) \
919 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
920 #else // V8_TARGET_ARCH_MIPS
921 // Prevent gcc from using store-double (mips sdc1) on (possibly)
922 // non-64-bit aligned HeapNumber::value.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
935 #endif // V8_TARGET_ARCH_MIPS
938 #define READ_INT_FIELD(p, offset) \
939 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
941 #define WRITE_INT_FIELD(p, offset, value) \
942 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
944 #define READ_INTPTR_FIELD(p, offset) \
945 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
947 #define WRITE_INTPTR_FIELD(p, offset, value) \
948 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
950 #define READ_UINT32_FIELD(p, offset) \
951 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
953 #define WRITE_UINT32_FIELD(p, offset, value) \
954 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
956 #define READ_INT64_FIELD(p, offset) \
957 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
959 #define WRITE_INT64_FIELD(p, offset, value) \
960 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
962 #define READ_SHORT_FIELD(p, offset) \
963 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
965 #define WRITE_SHORT_FIELD(p, offset, value) \
966 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
968 #define READ_BYTE_FIELD(p, offset) \
969 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
971 #define WRITE_BYTE_FIELD(p, offset, value) \
972 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
975 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
976 return &READ_FIELD(obj, byte_offset);
981 return Internals::SmiValue(this);
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
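// Sketch of the encoding (assuming the default 32-bit configuration, where
// kSmiTagSize == 1 and kSmiShiftSize == 0): FromInt(5) produces the tagged
// word (5 << 1) | kSmiTag == 10, and Smi::value() undoes the shift. On 64-bit
// targets kSmiShiftSize is 31, so the payload lives in the upper half of the
// word.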
1001 Failure::Type Failure::type() const {
1002 return static_cast<Type>(value() & kFailureTypeTagMask);
1006 bool Failure::IsInternalError() const {
1007 return type() == INTERNAL_ERROR;
1011 bool Failure::IsOutOfMemoryException() const {
1012 return type() == OUT_OF_MEMORY_EXCEPTION;
1016 AllocationSpace Failure::allocation_space() const {
1017 ASSERT_EQ(RETRY_AFTER_GC, type());
1018 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1023 Failure* Failure::InternalError() {
1024 return Construct(INTERNAL_ERROR);
1028 Failure* Failure::Exception() {
1029 return Construct(EXCEPTION);
1033 Failure* Failure::OutOfMemoryException() {
1034 return Construct(OUT_OF_MEMORY_EXCEPTION);
1038 intptr_t Failure::value() const {
1039 return static_cast<intptr_t>(
1040 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1044 Failure* Failure::RetryAfterGC() {
1045 return RetryAfterGC(NEW_SPACE);
1049 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1050 ASSERT((space & ~kSpaceTagMask) == 0);
1051 return Construct(RETRY_AFTER_GC, space);
1055 Failure* Failure::Construct(Type type, intptr_t value) {
1057 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1058 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1059 return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as a tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0.
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
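// During evacuation the map slot of a moved object is overwritten with a
// forwarding pointer. Because FromForwardingAddress strips kHeapObjectTag, the
// resulting word looks like a Smi, which is exactly what IsForwardingAddress
// tests for; a real Map pointer keeps its heap object tag.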
1118 void HeapObject::VerifyObjectField(int offset) {
1119 VerifyPointer(READ_FIELD(this, offset));
1122 void HeapObject::VerifySmiField(int offset) {
1123 ASSERT(READ_FIELD(this, offset)->IsSmi());
Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}
1137 Isolate* HeapObject::GetIsolate() {
1138 return GetHeap()->isolate();
1142 Map* HeapObject::map() {
1143 return map_word().ToMap();
1147 void HeapObject::set_map(Map* value) {
1148 set_map_word(MapWord::FromMap(value));
1149 if (value != NULL) {
1150 // TODO(1600) We are passing NULL as a slot because maps can never be on
1151 // evacuation candidate.
1152 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1157 // Unsafe accessor omitting write barrier.
1158 void HeapObject::set_map_no_write_barrier(Map* value) {
1159 set_map_word(MapWord::FromMap(value));
1163 MapWord HeapObject::map_word() {
1164 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1168 void HeapObject::set_map_word(MapWord map_word) {
1169 // WRITE_FIELD does not invoke write barrier, but there is no need
1171 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1175 HeapObject* HeapObject::FromAddress(Address address) {
1176 ASSERT_TAG_ALIGNED(address);
1177 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1181 Address HeapObject::address() {
1182 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1186 int HeapObject::Size() {
1187 return SizeFromMap(map());
1191 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1192 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1193 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1197 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1198 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1202 double HeapNumber::value() {
1203 return READ_DOUBLE_FIELD(this, kValueOffset);
1207 void HeapNumber::set_value(double value) {
1208 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1212 int HeapNumber::get_exponent() {
1213 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1214 kExponentShift) - kExponentBias;
1218 int HeapNumber::get_sign() {
1219 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1223 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1226 Object** FixedArray::GetFirstElementAddress() {
1227 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1231 bool FixedArray::ContainsOnlySmisOrHoles() {
1232 Object* the_hole = GetHeap()->the_hole_value();
1233 Object** current = GetFirstElementAddress();
1234 for (int i = 0; i < length(); ++i) {
1235 Object* candidate = *current++;
1236 if (!candidate->IsSmi() && candidate != the_hole) return false;
1242 FixedArrayBase* JSObject::elements() {
1243 Object* array = READ_FIELD(this, kElementsOffset);
1244 return static_cast<FixedArrayBase*>(array);
1247 void JSObject::ValidateSmiOnlyElements() {
1249 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1250 Heap* heap = GetHeap();
1251 // Don't use elements, since integrity checks will fail if there
1252 // are filler pointers in the array.
1253 FixedArray* fixed_array =
1254 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1255 Map* map = fixed_array->map();
1256 // Arrays that have been shifted in place can't be verified.
1257 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1258 map != heap->raw_unchecked_two_pointer_filler_map() &&
1259 map != heap->free_space_map()) {
1260 for (int i = 0; i < fixed_array->length(); i++) {
1261 Object* current = fixed_array->get(i);
1262 ASSERT(current->IsSmi() || current->IsTheHole());
1270 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
1272 ValidateSmiOnlyElements();
1274 if ((map()->elements_kind() != FAST_ELEMENTS)) {
1275 return TransitionElementsKind(FAST_ELEMENTS);
1281 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1283 EnsureElementsMode mode) {
1284 ElementsKind current_kind = map()->elements_kind();
1285 ElementsKind target_kind = current_kind;
1286 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1287 if (current_kind == FAST_ELEMENTS) return this;
1289 Heap* heap = GetHeap();
1290 Object* the_hole = heap->the_hole_value();
1291 Object* heap_number_map = heap->heap_number_map();
1292 for (uint32_t i = 0; i < count; ++i) {
1293 Object* current = *objects++;
1294 if (!current->IsSmi() && current != the_hole) {
1295 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
1296 HeapObject::cast(current)->map() == heap_number_map) {
1297 target_kind = FAST_DOUBLE_ELEMENTS;
1299 target_kind = FAST_ELEMENTS;
1305 if (target_kind != current_kind) {
1306 return TransitionElementsKind(target_kind);
1312 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1313 EnsureElementsMode mode) {
1314 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1315 ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1316 elements->map() == GetHeap()->fixed_cow_array_map());
1317 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1318 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1320 Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1321 return EnsureCanContainElements(objects, elements->length(), mode);
1324 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1325 if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
1326 return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
1333 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1334 ElementsKind to_kind) {
1335 Map* current_map = map();
1336 ElementsKind from_kind = current_map->elements_kind();
1338 if (from_kind == to_kind) return current_map;
1340 Context* global_context = isolate->context()->global_context();
1341 if (current_map == global_context->smi_js_array_map()) {
1342 if (to_kind == FAST_ELEMENTS) {
1343 return global_context->object_js_array_map();
1345 if (to_kind == FAST_DOUBLE_ELEMENTS) {
1346 return global_context->double_js_array_map();
1348 ASSERT(to_kind == DICTIONARY_ELEMENTS);
1352 return GetElementsTransitionMapSlow(to_kind);
1356 void JSObject::set_map_and_elements(Map* new_map,
1357 FixedArrayBase* value,
1358 WriteBarrierMode mode) {
1359 ASSERT(value->HasValidElements());
1361 ValidateSmiOnlyElements();
1363 if (new_map != NULL) {
1364 if (mode == UPDATE_WRITE_BARRIER) {
1367 ASSERT(mode == SKIP_WRITE_BARRIER);
1368 set_map_no_write_barrier(new_map);
1371 ASSERT((map()->has_fast_elements() ||
1372 map()->has_fast_smi_only_elements() ||
1373 (value == GetHeap()->empty_fixed_array())) ==
1374 (value->map() == GetHeap()->fixed_array_map() ||
1375 value->map() == GetHeap()->fixed_cow_array_map()));
1376 ASSERT((value == GetHeap()->empty_fixed_array()) ||
1377 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1378 WRITE_FIELD(this, kElementsOffset, value);
1379 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1383 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1384 set_map_and_elements(NULL, value, mode);
1388 void JSObject::initialize_properties() {
1389 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1390 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1394 void JSObject::initialize_elements() {
1395 ASSERT(map()->has_fast_elements() ||
1396 map()->has_fast_smi_only_elements() ||
1397 map()->has_fast_double_elements());
1398 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1399 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1403 MaybeObject* JSObject::ResetElements() {
1405 ElementsKind elements_kind = FLAG_smi_only_arrays
1406 ? FAST_SMI_ONLY_ELEMENTS
1408 MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
1410 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1411 set_map(Map::cast(obj));
1412 initialize_elements();
1417 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1418 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1421 byte Oddball::kind() {
1422 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1426 void Oddball::set_kind(byte value) {
1427 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1431 Object* JSGlobalPropertyCell::value() {
1432 return READ_FIELD(this, kValueOffset);
1436 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1437 // The write barrier is not used for global property cells.
1438 ASSERT(!val->IsJSGlobalPropertyCell());
1439 WRITE_FIELD(this, kValueOffset, val);
1443 int JSObject::GetHeaderSize() {
1444 InstanceType type = map()->instance_type();
1445 // Check for the most common kind of JavaScript object before
1446 // falling into the generic switch. This speeds up the internal
1447 // field operations considerably on average.
1448 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1450 case JS_MODULE_TYPE:
1451 return JSModule::kSize;
1452 case JS_GLOBAL_PROXY_TYPE:
1453 return JSGlobalProxy::kSize;
1454 case JS_GLOBAL_OBJECT_TYPE:
1455 return JSGlobalObject::kSize;
1456 case JS_BUILTINS_OBJECT_TYPE:
1457 return JSBuiltinsObject::kSize;
1458 case JS_FUNCTION_TYPE:
1459 return JSFunction::kSize;
1461 return JSValue::kSize;
1463 return JSDate::kSize;
1465 return JSArray::kSize;
1466 case JS_WEAK_MAP_TYPE:
1467 return JSWeakMap::kSize;
1468 case JS_REGEXP_TYPE:
1469 return JSRegExp::kSize;
1470 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1471 return JSObject::kHeaderSize;
1472 case JS_MESSAGE_OBJECT_TYPE:
1473 return JSMessageObject::kSize;
1481 int JSObject::GetInternalFieldCount() {
1482 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1483 // Make sure to adjust for the number of in-object properties. These
1484 // properties do contribute to the size, but are not internal fields.
1485 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1486 map()->inobject_properties() - (map()->has_external_resource()?1:0);
1490 int JSObject::GetInternalFieldOffset(int index) {
1491 ASSERT(index < GetInternalFieldCount() && index >= 0);
1492 return GetHeaderSize() + (kPointerSize * index);
1496 Object* JSObject::GetInternalField(int index) {
1497 ASSERT(index < GetInternalFieldCount() && index >= 0);
1498 // Internal objects do follow immediately after the header, whereas in-object
1499 // properties are at the end of the object. Therefore there is no need
1500 // to adjust the index here.
1501 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1505 void JSObject::SetInternalField(int index, Object* value) {
1506 ASSERT(index < GetInternalFieldCount() && index >= 0);
1507 // Internal objects do follow immediately after the header, whereas in-object
1508 // properties are at the end of the object. Therefore there is no need
1509 // to adjust the index here.
1510 int offset = GetHeaderSize() + (kPointerSize * index);
1511 WRITE_FIELD(this, offset, value);
1512 WRITE_BARRIER(GetHeap(), this, offset, value);
1516 void JSObject::SetInternalField(int index, Smi* value) {
1517 ASSERT(index < GetInternalFieldCount() && index >= 0);
1518 // Internal objects do follow immediately after the header, whereas in-object
1519 // properties are at the end of the object. Therefore there is no need
1520 // to adjust the index here.
1521 int offset = GetHeaderSize() + (kPointerSize * index);
1522 WRITE_FIELD(this, offset, value);
1526 void JSObject::SetExternalResourceObject(Object *value) {
1527 ASSERT(map()->has_external_resource());
1528 int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
1529 WRITE_FIELD(this, offset, value);
1530 WRITE_BARRIER(GetHeap(), this, offset, value);
1534 Object *JSObject::GetExternalResourceObject() {
1535 if (map()->has_external_resource()) {
1536 return READ_FIELD(this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
1538 return GetHeap()->undefined_value();
1543 // Access fast-case object properties at index. The use of these routines
1544 // is needed to correctly distinguish between properties stored in-object and
1545 // properties stored in the properties array.
1546 Object* JSObject::FastPropertyAt(int index) {
1547 // Adjust for the number of properties stored in the object.
1548 index -= map()->inobject_properties();
1550 int offset = map()->instance_size() + (index * kPointerSize);
1551 return READ_FIELD(this, offset);
1553 ASSERT(index < properties()->length());
1554 return properties()->get(index);
1559 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1560 // Adjust for the number of properties stored in the object.
1561 index -= map()->inobject_properties();
1563 int offset = map()->instance_size() + (index * kPointerSize);
1564 WRITE_FIELD(this, offset, value);
1565 WRITE_BARRIER(GetHeap(), this, offset, value);
1567 ASSERT(index < properties()->length());
1568 properties()->set(index, value);
1574 int JSObject::GetInObjectPropertyOffset(int index) {
1575 // Adjust for the number of properties stored in the object.
1576 index -= map()->inobject_properties();
1578 return map()->instance_size() + (index * kPointerSize);
1582 Object* JSObject::InObjectPropertyAt(int index) {
1583 // Adjust for the number of properties stored in the object.
1584 index -= map()->inobject_properties();
1586 int offset = map()->instance_size() + (index * kPointerSize);
1587 return READ_FIELD(this, offset);
1591 Object* JSObject::InObjectPropertyAtPut(int index,
1593 WriteBarrierMode mode) {
1594 // Adjust for the number of properties stored in the object.
1595 index -= map()->inobject_properties();
1597 int offset = map()->instance_size() + (index * kPointerSize);
1598 WRITE_FIELD(this, offset, value);
1599 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1605 void JSObject::InitializeBody(Map* map,
1606 Object* pre_allocated_value,
1607 Object* filler_value) {
1608 ASSERT(!filler_value->IsHeapObject() ||
1609 !GetHeap()->InNewSpace(filler_value));
1610 ASSERT(!pre_allocated_value->IsHeapObject() ||
1611 !GetHeap()->InNewSpace(pre_allocated_value));
1612 int size = map->instance_size();
1613 int offset = kHeaderSize;
1614 if (filler_value != pre_allocated_value) {
1615 int pre_allocated = map->pre_allocated_property_fields();
1616 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1617 for (int i = 0; i < pre_allocated; i++) {
1618 WRITE_FIELD(this, offset, pre_allocated_value);
1619 offset += kPointerSize;
1622 while (offset < size) {
1623 WRITE_FIELD(this, offset, filler_value);
1624 offset += kPointerSize;
1629 bool JSObject::HasFastProperties() {
1630 return !properties()->IsDictionary();
1634 int JSObject::MaxFastProperties() {
1635 // Allow extra fast properties if the object has more than
1636 // kMaxFastProperties in-object properties. When this is the case,
1637 // it is very unlikely that the object is being used as a dictionary
1638 // and there is a good chance that allowing more map transitions
1639 // will be worth it.
1640 return Max(map()->inobject_properties(), kMaxFastProperties);
1644 void Struct::InitializeBody(int object_size) {
1645 Object* value = GetHeap()->undefined_value();
1646 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1647 WRITE_FIELD(this, offset, value);
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
1671 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1672 if (!this->IsJSValue()) return false;
1674 JSValue* js_value = JSValue::cast(this);
1675 if (!js_value->value()->IsString()) return false;
1677 String* str = String::cast(js_value->value());
1678 if (index >= (uint32_t)str->length()) return false;
1684 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1685 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1686 return reinterpret_cast<FixedArrayBase*>(object);
1690 Object* FixedArray::get(int index) {
1691 ASSERT(index >= 0 && index < this->length());
1692 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1696 void FixedArray::set(int index, Smi* value) {
1697 ASSERT(map() != HEAP->fixed_cow_array_map());
1698 ASSERT(index >= 0 && index < this->length());
1699 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1700 int offset = kHeaderSize + index * kPointerSize;
1701 WRITE_FIELD(this, offset, value);
1705 void FixedArray::set(int index, Object* value) {
1706 ASSERT(map() != HEAP->fixed_cow_array_map());
1707 ASSERT(index >= 0 && index < this->length());
1708 int offset = kHeaderSize + index * kPointerSize;
1709 WRITE_FIELD(this, offset, value);
1710 WRITE_BARRIER(GetHeap(), this, offset, value);
1714 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1715 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1719 inline double FixedDoubleArray::hole_nan_as_double() {
1720 return BitCast<double, uint64_t>(kHoleNanInt64);
1724 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1725 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1726 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1727 return OS::nan_value();
1731 double FixedDoubleArray::get_scalar(int index) {
1732 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1733 map() != HEAP->fixed_array_map());
1734 ASSERT(index >= 0 && index < this->length());
1735 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1736 ASSERT(!is_the_hole_nan(result));
1740 int64_t FixedDoubleArray::get_representation(int index) {
1741 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1742 map() != HEAP->fixed_array_map());
1743 ASSERT(index >= 0 && index < this->length());
1744 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
1747 MaybeObject* FixedDoubleArray::get(int index) {
1748 if (is_the_hole(index)) {
1749 return GetHeap()->the_hole_value();
1751 return GetHeap()->NumberFromDouble(get_scalar(index));
1756 void FixedDoubleArray::set(int index, double value) {
1757 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1758 map() != HEAP->fixed_array_map());
1759 int offset = kHeaderSize + index * kDoubleSize;
1760 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1761 WRITE_DOUBLE_FIELD(this, offset, value);
1765 void FixedDoubleArray::set_the_hole(int index) {
1766 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1767 map() != HEAP->fixed_array_map());
1768 int offset = kHeaderSize + index * kDoubleSize;
1769 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1773 bool FixedDoubleArray::is_the_hole(int index) {
1774 int offset = kHeaderSize + index * kDoubleSize;
1775 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
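// Rationale: a store into a new-space object can never create an old-to-new
// pointer that would have to be remembered, so the barrier can be skipped.
// The exception is incremental marking: while the marker is running, every
// store must still be reported, hence the first check above.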
1787 void FixedArray::set(int index,
1789 WriteBarrierMode mode) {
1790 ASSERT(map() != HEAP->fixed_cow_array_map());
1791 ASSERT(index >= 0 && index < this->length());
1792 int offset = kHeaderSize + index * kPointerSize;
1793 WRITE_FIELD(this, offset, value);
1794 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1798 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
1801 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1802 ASSERT(index >= 0 && index < array->length());
1803 int offset = kHeaderSize + index * kPointerSize;
1804 WRITE_FIELD(array, offset, value);
1805 Heap* heap = array->GetHeap();
1806 if (heap->InNewSpace(value)) {
1807 heap->RecordWrite(array->address(), offset);
1812 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1815 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1816 ASSERT(index >= 0 && index < array->length());
1817 ASSERT(!HEAP->InNewSpace(value));
1818 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1822 void FixedArray::set_undefined(int index) {
1823 ASSERT(map() != HEAP->fixed_cow_array_map());
1824 set_undefined(GetHeap(), index);
1828 void FixedArray::set_undefined(Heap* heap, int index) {
1829 ASSERT(index >= 0 && index < this->length());
1830 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1831 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1832 heap->undefined_value());
1836 void FixedArray::set_null(int index) {
1837 set_null(GetHeap(), index);
1841 void FixedArray::set_null(Heap* heap, int index) {
1842 ASSERT(index >= 0 && index < this->length());
1843 ASSERT(!heap->InNewSpace(heap->null_value()));
1844 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1848 void FixedArray::set_the_hole(int index) {
1849 ASSERT(map() != HEAP->fixed_cow_array_map());
1850 ASSERT(index >= 0 && index < this->length());
1851 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1853 kHeaderSize + index * kPointerSize,
1854 GetHeap()->the_hole_value());
1858 void FixedArray::set_unchecked(int index, Smi* value) {
1859 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1860 int offset = kHeaderSize + index * kPointerSize;
1861 WRITE_FIELD(this, offset, value);
1865 void FixedArray::set_unchecked(Heap* heap,
1868 WriteBarrierMode mode) {
1869 int offset = kHeaderSize + index * kPointerSize;
1870 WRITE_FIELD(this, offset, value);
1871 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1875 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1876 ASSERT(index >= 0 && index < this->length());
1877 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1878 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1882 Object** FixedArray::data_start() {
1883 return HeapObject::RawField(this, kHeaderSize);
1887 bool DescriptorArray::IsEmpty() {
1888 ASSERT(this->IsSmi() ||
1889 this->length() > kFirstIndex ||
1890 this == HEAP->empty_descriptor_array());
1891 return this->IsSmi() || length() <= kFirstIndex;
1895 int DescriptorArray::bit_field3_storage() {
1896 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1897 return Smi::cast(storage)->value();
1900 void DescriptorArray::set_bit_field3_storage(int value) {
1902 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
1906 void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
1909 Object* tmp = array->get(first);
1910 NoIncrementalWriteBarrierSet(array, first, array->get(second));
1911 NoIncrementalWriteBarrierSet(array, second, tmp);
1915 int DescriptorArray::Search(String* name) {
1916 SLOW_ASSERT(IsSortedNoDuplicates());
1918 // Check for empty descriptor array.
1919 int nof = number_of_descriptors();
1920 if (nof == 0) return kNotFound;
1922 // Fast case: do linear search for small arrays.
1923 const int kMaxElementsForLinearSearch = 8;
1924 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1925 return LinearSearch(name, nof);
1928 // Slow case: perform binary search.
1929 return BinarySearch(name, 0, nof - 1);
1933 int DescriptorArray::SearchWithCache(String* name) {
1934 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1935 if (number == DescriptorLookupCache::kAbsent) {
1936 number = Search(name);
1937 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1943 String* DescriptorArray::GetKey(int descriptor_number) {
1944 ASSERT(descriptor_number < number_of_descriptors());
1945 return String::cast(get(ToKeyIndex(descriptor_number)));
1949 Object* DescriptorArray::GetValue(int descriptor_number) {
1950 ASSERT(descriptor_number < number_of_descriptors());
1951 return GetContentArray()->get(ToValueIndex(descriptor_number));
1955 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
1956 ASSERT(descriptor_number < number_of_descriptors());
1957 Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
1958 return PropertyDetails(Smi::cast(details));
1962 PropertyType DescriptorArray::GetType(int descriptor_number) {
1963 return GetDetails(descriptor_number).type();
1967 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1968 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1972 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1973 return JSFunction::cast(GetValue(descriptor_number));
1977 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1978 ASSERT(GetType(descriptor_number) == CALLBACKS);
1979 return GetValue(descriptor_number);
1983 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1984 ASSERT(GetType(descriptor_number) == CALLBACKS);
1985 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1986 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
1990 bool DescriptorArray::IsProperty(int descriptor_number) {
1991 Entry entry(this, descriptor_number);
1992 return IsPropertyDescriptor(&entry);
1996 bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
1997 switch (GetType(descriptor_number)) {
1998 case MAP_TRANSITION:
1999 case CONSTANT_TRANSITION:
2000 case ELEMENTS_TRANSITION:
2003 Object* value = GetValue(descriptor_number);
2004 if (!value->IsAccessorPair()) return false;
2005 AccessorPair* accessors = AccessorPair::cast(value);
2006 return accessors->getter()->IsMap() && accessors->setter()->IsMap();
2010 case CONSTANT_FUNCTION:
2013 case NULL_DESCRIPTOR:
2016 UNREACHABLE(); // Keep the compiler happy.
2021 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
2022 return GetType(descriptor_number) == NULL_DESCRIPTOR;
2026 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2027 desc->Init(GetKey(descriptor_number),
2028 GetValue(descriptor_number),
2029 GetDetails(descriptor_number));
2033 void DescriptorArray::Set(int descriptor_number,
2035 const WhitenessWitness&) {
2037 ASSERT(descriptor_number < number_of_descriptors());
2039 NoIncrementalWriteBarrierSet(this,
2040 ToKeyIndex(descriptor_number),
2042 FixedArray* content_array = GetContentArray();
2043 NoIncrementalWriteBarrierSet(content_array,
2044 ToValueIndex(descriptor_number),
2046 NoIncrementalWriteBarrierSet(content_array,
2047 ToDetailsIndex(descriptor_number),
2048 desc->GetDetails().AsSmi());
2052 void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
2053 int first, int second) {
2054 NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
2055 FixedArray* content_array = GetContentArray();
2056 NoIncrementalWriteBarrierSwap(content_array,
2057 ToValueIndex(first),
2058 ToValueIndex(second));
2059 NoIncrementalWriteBarrierSwap(content_array,
2060 ToDetailsIndex(first),
2061 ToDetailsIndex(second));
2065 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2066 : marking_(array->GetHeap()->incremental_marking()) {
2067 marking_->EnterNoMarkingScope();
2068 if (array->number_of_descriptors() > 0) {
2069 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2070 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
2075 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2076 marking_->LeaveNoMarkingScope();
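// WhitenessWitness is an RAII guard: constructing one suppresses incremental
// marking for its lifetime and (for non-empty arrays) asserts that the
// descriptor array and its content array are still white, i.e. not yet seen
// by the incremental marker. While the witness is alive, Set() above may
// safely skip the incremental write barrier, the idea being that a
// still-white object will be scanned in full when it is eventually marked.
// Typical shape of a caller (sketch only):
//
//   DescriptorArray::WhitenessWitness witness(descriptors);
//   for (int i = 0; i < nof; i++) descriptors->Set(i, &desc[i], witness);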
2080 template<typename Shape, typename Key>
2081 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2082 const int kMinCapacity = 32;
2083 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2084 if (capacity < kMinCapacity) {
2085 capacity = kMinCapacity; // Guarantee min capacity.
2086 }
2087 return capacity;
2091 template<typename Shape, typename Key>
2092 int HashTable<Shape, Key>::FindEntry(Key key) {
2093 return FindEntry(GetIsolate(), key);
2097 // Find entry for key otherwise return kNotFound.
2098 template<typename Shape, typename Key>
2099 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2100 uint32_t capacity = Capacity();
2101 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2102 uint32_t count = 1;
2103 // EnsureCapacity will guarantee the hash table is never full.
2104 while (true) {
2105 Object* element = KeyAt(entry);
2106 // Empty entry.
2107 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2108 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2109 Shape::IsMatch(key, element)) return entry;
2110 entry = NextProbe(entry, count++, capacity);
2111 }
2112 return kNotFound;
2113 }
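// The loop above is open addressing: FirstProbe() hashes the key into the
// table and NextProbe() advances by a growing step (count), wrapping via the
// capacity mask, so every slot is eventually visited. Assuming the usual
// quadratic-style probing helpers, the probe sequence is equivalent to:
//
//   entry(0) = hash & (capacity - 1)
//   entry(n) = (entry(n - 1) + n) & (capacity - 1)
//
// which is why EnsureCapacity keeping the table under-full guarantees
// termination: an undefined (never-used) slot is always reachable.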
2116 bool SeededNumberDictionary::requires_slow_elements() {
2117 Object* max_index_object = get(kMaxNumberKeyIndex);
2118 if (!max_index_object->IsSmi()) return false;
2119 return 0 !=
2120 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2123 uint32_t SeededNumberDictionary::max_number_key() {
2124 ASSERT(!requires_slow_elements());
2125 Object* max_index_object = get(kMaxNumberKeyIndex);
2126 if (!max_index_object->IsSmi()) return 0;
2127 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2128 return value >> kRequiresSlowElementsTagSize;
2131 void SeededNumberDictionary::set_requires_slow_elements() {
2132 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
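// The kMaxNumberKeyIndex slot packs two things into one smi: the largest
// number key added so far, shifted left by kRequiresSlowElementsTagSize, and
// a low "requires slow elements" bit. That is why max_number_key() shifts
// the raw value right and why set_requires_slow_elements() simply stores the
// mask, clobbering the recorded maximum (it is no longer needed once the
// elements go slow).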
2136 // ------------------------------------
2137 // Cast operations
2140 CAST_ACCESSOR(FixedArray)
2141 CAST_ACCESSOR(FixedDoubleArray)
2142 CAST_ACCESSOR(DescriptorArray)
2143 CAST_ACCESSOR(DeoptimizationInputData)
2144 CAST_ACCESSOR(DeoptimizationOutputData)
2145 CAST_ACCESSOR(TypeFeedbackCells)
2146 CAST_ACCESSOR(SymbolTable)
2147 CAST_ACCESSOR(JSFunctionResultCache)
2148 CAST_ACCESSOR(NormalizedMapCache)
2149 CAST_ACCESSOR(ScopeInfo)
2150 CAST_ACCESSOR(CompilationCacheTable)
2151 CAST_ACCESSOR(CodeCacheHashTable)
2152 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2153 CAST_ACCESSOR(MapCache)
2154 CAST_ACCESSOR(String)
2155 CAST_ACCESSOR(SeqString)
2156 CAST_ACCESSOR(SeqAsciiString)
2157 CAST_ACCESSOR(SeqTwoByteString)
2158 CAST_ACCESSOR(SlicedString)
2159 CAST_ACCESSOR(ConsString)
2160 CAST_ACCESSOR(ExternalString)
2161 CAST_ACCESSOR(ExternalAsciiString)
2162 CAST_ACCESSOR(ExternalTwoByteString)
2163 CAST_ACCESSOR(JSReceiver)
2164 CAST_ACCESSOR(JSObject)
2166 CAST_ACCESSOR(HeapObject)
2167 CAST_ACCESSOR(HeapNumber)
2168 CAST_ACCESSOR(Oddball)
2169 CAST_ACCESSOR(JSGlobalPropertyCell)
2170 CAST_ACCESSOR(SharedFunctionInfo)
2172 CAST_ACCESSOR(JSFunction)
2173 CAST_ACCESSOR(GlobalObject)
2174 CAST_ACCESSOR(JSGlobalProxy)
2175 CAST_ACCESSOR(JSGlobalObject)
2176 CAST_ACCESSOR(JSBuiltinsObject)
2178 CAST_ACCESSOR(JSArray)
2179 CAST_ACCESSOR(JSRegExp)
2180 CAST_ACCESSOR(JSProxy)
2181 CAST_ACCESSOR(JSFunctionProxy)
2182 CAST_ACCESSOR(JSSet)
2183 CAST_ACCESSOR(JSMap)
2184 CAST_ACCESSOR(JSWeakMap)
2185 CAST_ACCESSOR(Foreign)
2186 CAST_ACCESSOR(ByteArray)
2187 CAST_ACCESSOR(FreeSpace)
2188 CAST_ACCESSOR(ExternalArray)
2189 CAST_ACCESSOR(ExternalByteArray)
2190 CAST_ACCESSOR(ExternalUnsignedByteArray)
2191 CAST_ACCESSOR(ExternalShortArray)
2192 CAST_ACCESSOR(ExternalUnsignedShortArray)
2193 CAST_ACCESSOR(ExternalIntArray)
2194 CAST_ACCESSOR(ExternalUnsignedIntArray)
2195 CAST_ACCESSOR(ExternalFloatArray)
2196 CAST_ACCESSOR(ExternalDoubleArray)
2197 CAST_ACCESSOR(ExternalPixelArray)
2198 CAST_ACCESSOR(Struct)
2201 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2202 STRUCT_LIST(MAKE_STRUCT_CAST)
2203 #undef MAKE_STRUCT_CAST
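// Each CAST_ACCESSOR expansion above defines a checked downcast of the form
// Type* Type::cast(Object* obj): debug builds assert obj->Is##Type(), and all
// builds simply reinterpret the tagged pointer. Sketch of the intended usage
// pattern (illustrative only):
//
//   Object* obj = ...;  // some tagged value
//   if (obj->IsByteArray()) {
//     ByteArray* bytes = ByteArray::cast(obj);
//     int n = bytes->length();
//     // use n / bytes ...
//   }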
2206 template <typename Shape, typename Key>
2207 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2208 ASSERT(obj->IsHashTable());
2209 return reinterpret_cast<HashTable*>(obj);
2213 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2214 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2216 SMI_ACCESSORS(String, length, kLengthOffset)
2217 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2220 uint32_t String::hash_field() {
2221 return READ_UINT32_FIELD(this, kHashFieldOffset);
2225 void String::set_hash_field(uint32_t value) {
2226 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2227 #if V8_HOST_ARCH_64_BIT
2228 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2229 #endif
2230 }
2233 bool String::Equals(String* other) {
2234 if (other == this) return true;
2235 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2236 return false;
2237 }
2238 return SlowEquals(other);
2242 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2243 if (!StringShape(this).IsCons()) return this;
2244 ConsString* cons = ConsString::cast(this);
2245 if (cons->IsFlat()) return cons->first();
2246 return SlowTryFlatten(pretenure);
2250 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2251 MaybeObject* flat = TryFlatten(pretenure);
2252 Object* successfully_flattened;
2253 if (!flat->ToObject(&successfully_flattened)) return this;
2254 return String::cast(successfully_flattened);
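// Flattening: a ConsString whose second part is empty is considered flat,
// and first() is the flat payload. TryFlatten() may allocate (hence the
// MaybeObject result); TryFlattenGetString() is the convenience wrapper that
// falls back to the original receiver when allocation fails, so callers that
// can tolerate an unflattened string may use it directly (sketch, assuming a
// String* named str):
//
//   String* flat = str->TryFlattenGetString();
//   // flat is either a flattened string or str itself.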
2258 uint16_t String::Get(int index) {
2259 ASSERT(index >= 0 && index < length());
2260 switch (StringShape(this).full_representation_tag()) {
2261 case kSeqStringTag | kAsciiStringTag:
2262 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2263 case kSeqStringTag | kTwoByteStringTag:
2264 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2265 case kConsStringTag | kAsciiStringTag:
2266 case kConsStringTag | kTwoByteStringTag:
2267 return ConsString::cast(this)->ConsStringGet(index);
2268 case kExternalStringTag | kAsciiStringTag:
2269 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2270 case kExternalStringTag | kTwoByteStringTag:
2271 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2272 case kSlicedStringTag | kAsciiStringTag:
2273 case kSlicedStringTag | kTwoByteStringTag:
2274 return SlicedString::cast(this)->SlicedStringGet(index);
2284 void String::Set(int index, uint16_t value) {
2285 ASSERT(index >= 0 && index < length());
2286 ASSERT(StringShape(this).IsSequential());
2288 return this->IsAsciiRepresentation()
2289 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2290 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2294 bool String::IsFlat() {
2295 if (!StringShape(this).IsCons()) return true;
2296 return ConsString::cast(this)->second()->length() == 0;
2300 String* String::GetUnderlying() {
2301 // Giving direct access to the underlying string only makes sense if the
2302 // wrapping string is already flattened.
2303 ASSERT(this->IsFlat());
2304 ASSERT(StringShape(this).IsIndirect());
2305 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2306 const int kUnderlyingOffset = SlicedString::kParentOffset;
2307 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2311 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2312 ASSERT(index >= 0 && index < length());
2313 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2317 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2318 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2319 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2320 static_cast<byte>(value));
2324 Address SeqAsciiString::GetCharsAddress() {
2325 return FIELD_ADDR(this, kHeaderSize);
2329 char* SeqAsciiString::GetChars() {
2330 return reinterpret_cast<char*>(GetCharsAddress());
2334 Address SeqTwoByteString::GetCharsAddress() {
2335 return FIELD_ADDR(this, kHeaderSize);
2339 uc16* SeqTwoByteString::GetChars() {
2340 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2344 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2345 ASSERT(index >= 0 && index < length());
2346 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2350 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2351 ASSERT(index >= 0 && index < length());
2352 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2356 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2357 return SizeFor(length());
2361 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2362 return SizeFor(length());
2366 String* SlicedString::parent() {
2367 return String::cast(READ_FIELD(this, kParentOffset));
2371 void SlicedString::set_parent(String* parent) {
2372 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2373 WRITE_FIELD(this, kParentOffset, parent);
2377 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2380 String* ConsString::first() {
2381 return String::cast(READ_FIELD(this, kFirstOffset));
2385 Object* ConsString::unchecked_first() {
2386 return READ_FIELD(this, kFirstOffset);
2390 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2391 WRITE_FIELD(this, kFirstOffset, value);
2392 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2396 String* ConsString::second() {
2397 return String::cast(READ_FIELD(this, kSecondOffset));
2401 Object* ConsString::unchecked_second() {
2402 return READ_FIELD(this, kSecondOffset);
2406 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2407 WRITE_FIELD(this, kSecondOffset, value);
2408 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2412 bool ExternalString::is_short() {
2413 InstanceType type = map()->instance_type();
2414 return (type & kShortExternalStringMask) == kShortExternalStringTag;
2418 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2419 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2423 void ExternalAsciiString::update_data_cache() {
2424 if (is_short()) return;
2425 const char** data_field =
2426 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2427 *data_field = resource()->data();
2431 void ExternalAsciiString::set_resource(
2432 const ExternalAsciiString::Resource* resource) {
2433 *reinterpret_cast<const Resource**>(
2434 FIELD_ADDR(this, kResourceOffset)) = resource;
2435 if (resource != NULL) update_data_cache();
2439 const char* ExternalAsciiString::GetChars() {
2440 return resource()->data();
2444 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
2445 ASSERT(index >= 0 && index < length());
2446 return GetChars()[index];
2450 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2451 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2455 void ExternalTwoByteString::update_data_cache() {
2456 if (is_short()) return;
2457 const uint16_t** data_field =
2458 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2459 *data_field = resource()->data();
2463 void ExternalTwoByteString::set_resource(
2464 const ExternalTwoByteString::Resource* resource) {
2465 *reinterpret_cast<const Resource**>(
2466 FIELD_ADDR(this, kResourceOffset)) = resource;
2467 if (resource != NULL) update_data_cache();
2471 const uint16_t* ExternalTwoByteString::GetChars() {
2472 return resource()->data();
2476 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
2477 ASSERT(index >= 0 && index < length());
2478 return GetChars()[index];
2482 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
2483 int start) {
2484 return GetChars() + start;
2488 void JSFunctionResultCache::MakeZeroSize() {
2489 set_finger_index(kEntriesIndex);
2490 set_size(kEntriesIndex);
2494 void JSFunctionResultCache::Clear() {
2495 int cache_size = size();
2496 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2497 MemsetPointer(entries_start,
2498 GetHeap()->the_hole_value(),
2499 cache_size - kEntriesIndex);
2504 int JSFunctionResultCache::size() {
2505 return Smi::cast(get(kCacheSizeIndex))->value();
2509 void JSFunctionResultCache::set_size(int size) {
2510 set(kCacheSizeIndex, Smi::FromInt(size));
2514 int JSFunctionResultCache::finger_index() {
2515 return Smi::cast(get(kFingerIndex))->value();
2519 void JSFunctionResultCache::set_finger_index(int finger_index) {
2520 set(kFingerIndex, Smi::FromInt(finger_index));
2524 byte ByteArray::get(int index) {
2525 ASSERT(index >= 0 && index < this->length());
2526 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2530 void ByteArray::set(int index, byte value) {
2531 ASSERT(index >= 0 && index < this->length());
2532 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2536 int ByteArray::get_int(int index) {
2537 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2538 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2542 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2543 ASSERT_TAG_ALIGNED(address);
2544 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2548 Address ByteArray::GetDataStartAddress() {
2549 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
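// GetDataStartAddress() and FromDataStartAddress() are inverses: a ByteArray
// pointer is tagged with kHeapObjectTag, so the first data byte sits at
// (this - kHeapObjectTag) + kHeaderSize, and subtracting kHeaderSize and
// re-adding the tag recovers the ByteArray from a raw data pointer. In other
// words, for any ByteArray* a the following should hold:
//
//   ByteArray::FromDataStartAddress(a->GetDataStartAddress()) == a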
2553 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2554 return reinterpret_cast<uint8_t*>(external_pointer());
2558 uint8_t ExternalPixelArray::get_scalar(int index) {
2559 ASSERT((index >= 0) && (index < this->length()));
2560 uint8_t* ptr = external_pixel_pointer();
2565 MaybeObject* ExternalPixelArray::get(int index) {
2566 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2570 void ExternalPixelArray::set(int index, uint8_t value) {
2571 ASSERT((index >= 0) && (index < this->length()));
2572 uint8_t* ptr = external_pixel_pointer();
2577 void* ExternalArray::external_pointer() {
2578 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2579 return reinterpret_cast<void*>(ptr);
2583 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2584 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2585 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2589 int8_t ExternalByteArray::get_scalar(int index) {
2590 ASSERT((index >= 0) && (index < this->length()));
2591 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2596 MaybeObject* ExternalByteArray::get(int index) {
2597 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2601 void ExternalByteArray::set(int index, int8_t value) {
2602 ASSERT((index >= 0) && (index < this->length()));
2603 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2608 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2609 ASSERT((index >= 0) && (index < this->length()));
2610 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2615 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2616 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2620 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2621 ASSERT((index >= 0) && (index < this->length()));
2622 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2627 int16_t ExternalShortArray::get_scalar(int index) {
2628 ASSERT((index >= 0) && (index < this->length()));
2629 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2634 MaybeObject* ExternalShortArray::get(int index) {
2635 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2639 void ExternalShortArray::set(int index, int16_t value) {
2640 ASSERT((index >= 0) && (index < this->length()));
2641 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2646 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2647 ASSERT((index >= 0) && (index < this->length()));
2648 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2653 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2654 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2658 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2659 ASSERT((index >= 0) && (index < this->length()));
2660 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2665 int32_t ExternalIntArray::get_scalar(int index) {
2666 ASSERT((index >= 0) && (index < this->length()));
2667 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2672 MaybeObject* ExternalIntArray::get(int index) {
2673 return GetHeap()->NumberFromInt32(get_scalar(index));
2677 void ExternalIntArray::set(int index, int32_t value) {
2678 ASSERT((index >= 0) && (index < this->length()));
2679 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2684 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2685 ASSERT((index >= 0) && (index < this->length()));
2686 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2691 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2692 return GetHeap()->NumberFromUint32(get_scalar(index));
2696 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2697 ASSERT((index >= 0) && (index < this->length()));
2698 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2703 float ExternalFloatArray::get_scalar(int index) {
2704 ASSERT((index >= 0) && (index < this->length()));
2705 float* ptr = static_cast<float*>(external_pointer());
2710 MaybeObject* ExternalFloatArray::get(int index) {
2711 return GetHeap()->NumberFromDouble(get_scalar(index));
2715 void ExternalFloatArray::set(int index, float value) {
2716 ASSERT((index >= 0) && (index < this->length()));
2717 float* ptr = static_cast<float*>(external_pointer());
2722 double ExternalDoubleArray::get_scalar(int index) {
2723 ASSERT((index >= 0) && (index < this->length()));
2724 double* ptr = static_cast<double*>(external_pointer());
2729 MaybeObject* ExternalDoubleArray::get(int index) {
2730 return GetHeap()->NumberFromDouble(get_scalar(index));
2734 void ExternalDoubleArray::set(int index, double value) {
2735 ASSERT((index >= 0) && (index < this->length()));
2736 double* ptr = static_cast<double*>(external_pointer());
2741 int Map::visitor_id() {
2742 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2746 void Map::set_visitor_id(int id) {
2747 ASSERT(0 <= id && id < 256);
2748 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2752 int Map::instance_size() {
2753 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2757 int Map::inobject_properties() {
2758 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2762 int Map::pre_allocated_property_fields() {
2763 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2767 int HeapObject::SizeFromMap(Map* map) {
2768 int instance_size = map->instance_size();
2769 if (instance_size != kVariableSizeSentinel) return instance_size;
2770 // We can ignore the "symbol" bit because it is only set for symbols
2771 // and implies a string type.
2772 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2773 // Only inline the most frequent cases.
2774 if (instance_type == FIXED_ARRAY_TYPE) {
2775 return FixedArray::BodyDescriptor::SizeOf(map, this);
2777 if (instance_type == ASCII_STRING_TYPE) {
2778 return SeqAsciiString::SizeFor(
2779 reinterpret_cast<SeqAsciiString*>(this)->length());
2781 if (instance_type == BYTE_ARRAY_TYPE) {
2782 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2784 if (instance_type == FREE_SPACE_TYPE) {
2785 return reinterpret_cast<FreeSpace*>(this)->size();
2787 if (instance_type == STRING_TYPE) {
2788 return SeqTwoByteString::SizeFor(
2789 reinterpret_cast<SeqTwoByteString*>(this)->length());
2791 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2792 return FixedDoubleArray::SizeFor(
2793 reinterpret_cast<FixedDoubleArray*>(this)->length());
2795 ASSERT(instance_type == CODE_TYPE);
2796 return reinterpret_cast<Code*>(this)->CodeSize();
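// SizeFromMap() is the fast path used when walking the heap linearly:
// fixed-size instance types answer from the map alone, while the
// variable-size types listed above (fixed arrays, sequential strings, byte
// arrays, free space, fixed double arrays, code) read their own length
// field. The explicit if-chain, rather than a switch, keeps the common
// FIXED_ARRAY_TYPE check first; any type not handled here is expected to be
// CODE_TYPE, which the trailing ASSERT documents.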
2800 void Map::set_instance_size(int value) {
2801 ASSERT_EQ(0, value & (kPointerSize - 1));
2802 value >>= kPointerSizeLog2;
2803 ASSERT(0 <= value && value < 256);
2804 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2808 void Map::set_inobject_properties(int value) {
2809 ASSERT(0 <= value && value < 256);
2810 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2814 void Map::set_pre_allocated_property_fields(int value) {
2815 ASSERT(0 <= value && value < 256);
2816 WRITE_BYTE_FIELD(this,
2817 kPreAllocatedPropertyFieldsOffset,
2818 static_cast<byte>(value));
2822 InstanceType Map::instance_type() {
2823 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2827 void Map::set_instance_type(InstanceType value) {
2828 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2832 int Map::unused_property_fields() {
2833 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2837 void Map::set_unused_property_fields(int value) {
2838 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2842 byte Map::bit_field() {
2843 return READ_BYTE_FIELD(this, kBitFieldOffset);
2847 void Map::set_bit_field(byte value) {
2848 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2852 byte Map::bit_field2() {
2853 return READ_BYTE_FIELD(this, kBitField2Offset);
2857 void Map::set_bit_field2(byte value) {
2858 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2862 void Map::set_non_instance_prototype(bool value) {
2863 if (value) {
2864 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2865 } else {
2866 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2867 }
2871 bool Map::has_non_instance_prototype() {
2872 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2876 void Map::set_function_with_prototype(bool value) {
2877 if (value) {
2878 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2879 } else {
2880 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2881 }
2885 bool Map::function_with_prototype() {
2886 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2890 void Map::set_is_access_check_needed(bool access_check_needed) {
2891 if (access_check_needed) {
2892 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2893 } else {
2894 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2895 }
2899 bool Map::is_access_check_needed() {
2900 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2904 void Map::set_is_extensible(bool value) {
2905 if (value) {
2906 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2907 } else {
2908 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2909 }
2912 bool Map::is_extensible() {
2913 return ((1 << kIsExtensible) & bit_field2()) != 0;
2917 void Map::set_attached_to_shared_function_info(bool value) {
2918 if (value) {
2919 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2920 } else {
2921 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2922 }
2925 bool Map::attached_to_shared_function_info() {
2926 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2930 void Map::set_is_shared(bool value) {
2931 if (value) {
2932 set_bit_field3(bit_field3() | (1 << kIsShared));
2933 } else {
2934 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2935 }
2938 bool Map::is_shared() {
2939 return ((1 << kIsShared) & bit_field3()) != 0;
2942 void Map::set_has_external_resource(bool value) {
2943 if (value) {
2944 set_bit_field(bit_field() | (1 << kHasExternalResource));
2945 } else {
2946 set_bit_field(bit_field() & ~(1 << kHasExternalResource));
2947 }
2950 bool Map::has_external_resource()
2951 {
2952 return ((1 << kHasExternalResource) & bit_field()) != 0;
2956 void Map::set_named_interceptor_is_fallback(bool value) {
2957 if (value) {
2958 set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
2959 } else {
2960 set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
2961 }
2964 bool Map::named_interceptor_is_fallback() {
2965 return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
2969 JSFunction* Map::unchecked_constructor() {
2970 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2974 Code::Flags Code::flags() {
2975 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2979 void Code::set_flags(Code::Flags flags) {
2980 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2981 // Make sure that all call stubs have an arguments count.
2982 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2983 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2984 ExtractArgumentsCountFromFlags(flags) >= 0);
2985 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2989 Code::Kind Code::kind() {
2990 return ExtractKindFromFlags(flags());
2994 InlineCacheState Code::ic_state() {
2995 InlineCacheState result = ExtractICStateFromFlags(flags());
2996 // Only allow uninitialized or debugger states for non-IC code
2997 // objects. This is used in the debugger to determine whether or not
2998 // a call to a code object has been replaced with a debug break call.
2999 ASSERT(is_inline_cache_stub() ||
3000 result == UNINITIALIZED ||
3001 result == DEBUG_BREAK ||
3002 result == DEBUG_PREPARE_STEP_IN);
3003 return result;
3007 Code::ExtraICState Code::extra_ic_state() {
3008 ASSERT(is_inline_cache_stub());
3009 return ExtractExtraICStateFromFlags(flags());
3013 PropertyType Code::type() {
3014 return ExtractTypeFromFlags(flags());
3018 int Code::arguments_count() {
3019 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3020 return ExtractArgumentsCountFromFlags(flags());
3024 int Code::major_key() {
3025 ASSERT(kind() == STUB ||
3026 kind() == UNARY_OP_IC ||
3027 kind() == BINARY_OP_IC ||
3028 kind() == COMPARE_IC ||
3029 kind() == TO_BOOLEAN_IC);
3030 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
3034 void Code::set_major_key(int major) {
3035 ASSERT(kind() == STUB ||
3036 kind() == UNARY_OP_IC ||
3037 kind() == BINARY_OP_IC ||
3038 kind() == COMPARE_IC ||
3039 kind() == TO_BOOLEAN_IC);
3040 ASSERT(0 <= major && major < 256);
3041 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
3045 bool Code::is_pregenerated() {
3046 return kind() == STUB && IsPregeneratedField::decode(flags());
3050 void Code::set_is_pregenerated(bool value) {
3051 ASSERT(kind() == STUB);
3052 Flags f = flags();
3053 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3054 set_flags(f);
3058 bool Code::optimizable() {
3059 ASSERT_EQ(FUNCTION, kind());
3060 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3064 void Code::set_optimizable(bool value) {
3065 ASSERT_EQ(FUNCTION, kind());
3066 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3070 bool Code::has_deoptimization_support() {
3071 ASSERT_EQ(FUNCTION, kind());
3072 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3073 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3077 void Code::set_has_deoptimization_support(bool value) {
3078 ASSERT_EQ(FUNCTION, kind());
3079 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3080 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3081 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3085 bool Code::has_debug_break_slots() {
3086 ASSERT_EQ(FUNCTION, kind());
3087 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3088 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3092 void Code::set_has_debug_break_slots(bool value) {
3093 ASSERT_EQ(FUNCTION, kind());
3094 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3095 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3096 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3100 bool Code::is_compiled_optimizable() {
3101 ASSERT_EQ(FUNCTION, kind());
3102 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3103 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3107 void Code::set_compiled_optimizable(bool value) {
3108 ASSERT_EQ(FUNCTION, kind());
3109 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3110 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3111 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3115 int Code::allow_osr_at_loop_nesting_level() {
3116 ASSERT_EQ(FUNCTION, kind());
3117 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3121 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3122 ASSERT_EQ(FUNCTION, kind());
3123 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3124 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3128 int Code::profiler_ticks() {
3129 ASSERT_EQ(FUNCTION, kind());
3130 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3134 void Code::set_profiler_ticks(int ticks) {
3135 ASSERT_EQ(FUNCTION, kind());
3136 ASSERT(ticks < 256);
3137 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3141 unsigned Code::stack_slots() {
3142 ASSERT(kind() == OPTIMIZED_FUNCTION);
3143 return READ_UINT32_FIELD(this, kStackSlotsOffset);
3147 void Code::set_stack_slots(unsigned slots) {
3148 ASSERT(kind() == OPTIMIZED_FUNCTION);
3149 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3153 unsigned Code::safepoint_table_offset() {
3154 ASSERT(kind() == OPTIMIZED_FUNCTION);
3155 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3159 void Code::set_safepoint_table_offset(unsigned offset) {
3160 ASSERT(kind() == OPTIMIZED_FUNCTION);
3161 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3162 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3166 unsigned Code::stack_check_table_offset() {
3167 ASSERT_EQ(FUNCTION, kind());
3168 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3172 void Code::set_stack_check_table_offset(unsigned offset) {
3173 ASSERT_EQ(FUNCTION, kind());
3174 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3175 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3179 CheckType Code::check_type() {
3180 ASSERT(is_call_stub() || is_keyed_call_stub());
3181 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3182 return static_cast<CheckType>(type);
3186 void Code::set_check_type(CheckType value) {
3187 ASSERT(is_call_stub() || is_keyed_call_stub());
3188 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3192 byte Code::unary_op_type() {
3193 ASSERT(is_unary_op_stub());
3194 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3198 void Code::set_unary_op_type(byte value) {
3199 ASSERT(is_unary_op_stub());
3200 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3204 byte Code::binary_op_type() {
3205 ASSERT(is_binary_op_stub());
3206 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3210 void Code::set_binary_op_type(byte value) {
3211 ASSERT(is_binary_op_stub());
3212 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3216 byte Code::binary_op_result_type() {
3217 ASSERT(is_binary_op_stub());
3218 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3222 void Code::set_binary_op_result_type(byte value) {
3223 ASSERT(is_binary_op_stub());
3224 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3228 byte Code::compare_state() {
3229 ASSERT(is_compare_ic_stub());
3230 return READ_BYTE_FIELD(this, kCompareStateOffset);
3234 void Code::set_compare_state(byte value) {
3235 ASSERT(is_compare_ic_stub());
3236 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3240 byte Code::compare_operation() {
3241 ASSERT(is_compare_ic_stub());
3242 return READ_BYTE_FIELD(this, kCompareOperationOffset);
3246 void Code::set_compare_operation(byte value) {
3247 ASSERT(is_compare_ic_stub());
3248 WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
3252 byte Code::to_boolean_state() {
3253 ASSERT(is_to_boolean_ic_stub());
3254 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3258 void Code::set_to_boolean_state(byte value) {
3259 ASSERT(is_to_boolean_ic_stub());
3260 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3264 bool Code::has_function_cache() {
3265 ASSERT(kind() == STUB);
3266 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3270 void Code::set_has_function_cache(bool flag) {
3271 ASSERT(kind() == STUB);
3272 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3276 bool Code::is_inline_cache_stub() {
3277 Kind kind = this->kind();
3278 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3282 Code::Flags Code::ComputeFlags(Kind kind,
3283 InlineCacheState ic_state,
3284 ExtraICState extra_ic_state,
3285 PropertyType type,
3286 int argc,
3287 InlineCacheHolderFlag holder) {
3288 // Extra IC state is only allowed for call IC stubs or for store IC
3289 // stubs.
3290 ASSERT(extra_ic_state == kNoExtraICState ||
3291 kind == CALL_IC ||
3292 kind == STORE_IC ||
3293 kind == KEYED_STORE_IC);
3294 // Compute the bit mask.
3295 int bits = KindField::encode(kind)
3296 | ICStateField::encode(ic_state)
3297 | TypeField::encode(type)
3298 | ExtraICStateField::encode(extra_ic_state)
3299 | (argc << kArgumentsCountShift)
3300 | CacheHolderField::encode(holder);
3301 return static_cast<Flags>(bits);
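// Code::Flags is a 32-bit bitfield assembled from the BitField helpers
// (KindField, ICStateField, TypeField, ExtraICStateField, CacheHolderField)
// plus a plain argument count in the upper bits. The ExtractXFromFlags()
// functions below are the matching decoders, so the round trip looks like
// this (sketch, assuming the default arguments declared for ComputeFlags in
// objects.h):
//
//   Code::Flags f = Code::ComputeFlags(Code::STUB);
//   ASSERT(Code::ExtractKindFromFlags(f) == Code::STUB);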
3305 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3306 PropertyType type,
3307 ExtraICState extra_ic_state,
3308 InlineCacheHolderFlag holder,
3309 int argc) {
3310 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3314 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3315 return KindField::decode(flags);
3319 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3320 return ICStateField::decode(flags);
3324 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3325 return ExtraICStateField::decode(flags);
3329 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3330 return TypeField::decode(flags);
3334 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3335 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3339 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3340 return CacheHolderField::decode(flags);
3344 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3345 int bits = flags & ~TypeField::kMask;
3346 return static_cast<Flags>(bits);
3350 Code* Code::GetCodeFromTargetAddress(Address address) {
3351 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3352 // GetCodeFromTargetAddress might be called when marking objects during mark
3353 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3354 // Code::cast. Code::cast does not work when the object's map is
3355 // marked.
3356 Code* result = reinterpret_cast<Code*>(code);
3357 return result;
3361 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3362 return HeapObject::
3363 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3367 Object* Map::prototype() {
3368 return READ_FIELD(this, kPrototypeOffset);
3372 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3373 ASSERT(value->IsNull() || value->IsJSReceiver());
3374 WRITE_FIELD(this, kPrototypeOffset, value);
3375 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3379 DescriptorArray* Map::instance_descriptors() {
3380 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3381 if (object->IsSmi()) {
3382 return GetHeap()->empty_descriptor_array();
3384 return DescriptorArray::cast(object);
3389 void Map::init_instance_descriptors() {
3390 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3394 void Map::clear_instance_descriptors() {
3395 Object* object = READ_FIELD(this,
3396 kInstanceDescriptorsOrBitField3Offset);
3397 if (!object->IsSmi()) {
3398 #ifdef DEBUG
3399 ZapInstanceDescriptors();
3400 #endif
3401 WRITE_FIELD(
3402 this,
3403 kInstanceDescriptorsOrBitField3Offset,
3404 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3405 }
3406 }
3409 void Map::set_instance_descriptors(DescriptorArray* value,
3410 WriteBarrierMode mode) {
3411 Object* object = READ_FIELD(this,
3412 kInstanceDescriptorsOrBitField3Offset);
3413 Heap* heap = GetHeap();
3414 if (value == heap->empty_descriptor_array()) {
3415 clear_instance_descriptors();
3416 return;
3417 } else {
3418 if (object->IsSmi()) {
3419 value->set_bit_field3_storage(Smi::cast(object)->value());
3420 } else {
3421 value->set_bit_field3_storage(
3422 DescriptorArray::cast(object)->bit_field3_storage());
3423 }
3424 }
3425 ASSERT(!is_shared());
3426 #ifdef DEBUG
3427 if (value != instance_descriptors()) {
3428 ZapInstanceDescriptors();
3429 }
3430 #endif
3431 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3432 CONDITIONAL_WRITE_BARRIER(
3433 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3437 int Map::bit_field3() {
3438 Object* object = READ_FIELD(this,
3439 kInstanceDescriptorsOrBitField3Offset);
3440 if (object->IsSmi()) {
3441 return Smi::cast(object)->value();
3443 return DescriptorArray::cast(object)->bit_field3_storage();
3448 void Map::set_bit_field3(int value) {
3449 ASSERT(Smi::IsValid(value));
3450 Object* object = READ_FIELD(this,
3451 kInstanceDescriptorsOrBitField3Offset);
3452 if (object->IsSmi()) {
3453 WRITE_FIELD(this,
3454 kInstanceDescriptorsOrBitField3Offset,
3455 Smi::FromInt(value));
3456 } else {
3457 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3458 }
3459 }
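// The kInstanceDescriptorsOrBitField3Offset slot is overloaded: when a map
// has no descriptors the slot holds bit_field3 directly as a smi, and once a
// DescriptorArray is installed the same bits move into the array's
// bit_field3_storage(). The accessors above (instance_descriptors,
// clear_instance_descriptors, set_instance_descriptors, bit_field3,
// set_bit_field3) all dispatch on IsSmi() to hide this overloading from
// callers.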
3462 Object* Map::GetBackPointer() {
3463 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3464 if (object->IsFixedArray()) {
3465 return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
3466 } else {
3467 return object;
3468 }
3469 }
3472 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
3473 Heap* heap = GetHeap();
3474 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
3475 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3476 (value->IsMap() && GetBackPointer()->IsUndefined()));
3477 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3478 if (object->IsFixedArray()) {
3479 FixedArray::cast(object)->set(
3480 kProtoTransitionBackPointerOffset, value, mode);
3481 } else {
3482 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3483 CONDITIONAL_WRITE_BARRIER(
3484 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3489 FixedArray* Map::prototype_transitions() {
3490 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3491 if (object->IsFixedArray()) {
3492 return FixedArray::cast(object);
3493 } else {
3494 return GetHeap()->empty_fixed_array();
3495 }
3499 void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
3500 Heap* heap = GetHeap();
3501 ASSERT(value != heap->empty_fixed_array());
3502 value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
3503 #ifdef DEBUG
3504 if (value != prototype_transitions()) {
3505 ZapPrototypeTransitions();
3506 }
3507 #endif
3508 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3509 CONDITIONAL_WRITE_BARRIER(
3510 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3514 void Map::init_prototype_transitions(Object* undefined) {
3515 ASSERT(undefined->IsUndefined());
3516 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
3520 HeapObject* Map::unchecked_prototype_transitions() {
3521 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3522 return reinterpret_cast<HeapObject*>(object);
3526 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3527 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3529 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3530 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3531 ACCESSORS(JSFunction,
3532 next_function_link,
3533 Object,
3534 kNextFunctionLinkOffset)
3536 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3537 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3538 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3540 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3542 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3543 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3544 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3545 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3546 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3548 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3549 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3551 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3552 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3553 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3555 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3556 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3557 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3558 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3559 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3560 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3561 ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
3563 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3564 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3566 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3567 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3569 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3570 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3571 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3572 kPropertyAccessorsOffset)
3573 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3574 kPrototypeTemplateOffset)
3575 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3576 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3577 kNamedPropertyHandlerOffset)
3578 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3579 kIndexedPropertyHandlerOffset)
3580 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3581 kInstanceTemplateOffset)
3582 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3583 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3584 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3585 kInstanceCallHandlerOffset)
3586 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3587 kAccessCheckInfoOffset)
3588 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3590 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3591 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3592 kInternalFieldCountOffset)
3593 ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
3594 kHasExternalResourceOffset)
3596 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3597 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3599 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3601 ACCESSORS(Script, source, Object, kSourceOffset)
3602 ACCESSORS(Script, name, Object, kNameOffset)
3603 ACCESSORS(Script, id, Object, kIdOffset)
3604 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3605 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3606 ACCESSORS(Script, data, Object, kDataOffset)
3607 ACCESSORS(Script, context_data, Object, kContextOffset)
3608 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3609 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3610 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3611 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3612 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3613 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3614 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
3615 kEvalFrominstructionsOffsetOffset)
3617 #ifdef ENABLE_DEBUGGER_SUPPORT
3618 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3619 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3620 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3621 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3623 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3624 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3625 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3626 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3627 #endif
3629 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3630 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3631 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3632 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3633 kInstanceClassNameOffset)
3634 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3635 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3636 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3637 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3638 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3639 kThisPropertyAssignmentsOffset)
3640 SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
3643 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3644 kHiddenPrototypeBit)
3645 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3646 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3647 kNeedsAccessCheckBit)
3648 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3649 kReadOnlyPrototypeBit)
3650 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3651 kIsExpressionBit)
3652 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3653 kIsTopLevelBit)
3654 BOOL_GETTER(SharedFunctionInfo,
3655 compiler_hints,
3656 has_only_simple_this_property_assignments,
3657 kHasOnlySimpleThisPropertyAssignments)
3658 BOOL_ACCESSORS(SharedFunctionInfo,
3659 compiler_hints,
3660 allows_lazy_compilation,
3661 kAllowLazyCompilation)
3662 BOOL_ACCESSORS(SharedFunctionInfo,
3663 compiler_hints,
3664 uses_arguments,
3665 kUsesArguments)
3666 BOOL_ACCESSORS(SharedFunctionInfo,
3667 compiler_hints,
3668 has_duplicate_parameters,
3669 kHasDuplicateParameters)
3672 #if V8_HOST_ARCH_32_BIT
3673 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3674 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3675 kFormalParameterCountOffset)
3676 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3677 kExpectedNofPropertiesOffset)
3678 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3679 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3680 kStartPositionAndTypeOffset)
3681 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3682 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3683 kFunctionTokenPositionOffset)
3684 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3685 kCompilerHintsOffset)
3686 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3687 kThisPropertyAssignmentsCountOffset)
3688 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3689 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3690 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3693 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3694 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3695 int holder::name() { \
3696 int value = READ_INT_FIELD(this, offset); \
3697 ASSERT(kHeapObjectTag == 1); \
3698 ASSERT((value & kHeapObjectTag) == 0); \
3699 return value >> 1; \
3700 } \
3701 void holder::set_##name(int value) { \
3702 ASSERT(kHeapObjectTag == 1); \
3703 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3704 (value & 0xC0000000) == 0x00000000); \
3705 WRITE_INT_FIELD(this, \
3706 offset, \
3707 (value << 1) & ~kHeapObjectTag); \
3708 }
3710 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3711 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3712 INT_ACCESSORS(holder, name, offset)
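// On 64-bit hosts two 32-bit fields share one pointer-sized slot in
// SharedFunctionInfo. The LO half overlaps the byte the GC inspects for a
// heap-object tag, so its value is stored shifted left by one with the tag
// bit cleared (the word then looks like a smi to the collector), while the
// HI half is stored as a plain int. Decoding is therefore just (names here
// are illustrative, not part of the macros):
//
//   int lo_value = READ_INT_FIELD(obj, offset) >> 1;          // what name() does
//   int hi_value = READ_INT_FIELD(obj, offset + kIntSize);    // plain int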
3715 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3716 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3717 formal_parameter_count,
3718 kFormalParameterCountOffset)
3720 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3721 expected_nof_properties,
3722 kExpectedNofPropertiesOffset)
3723 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3725 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3726 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3727 start_position_and_type,
3728 kStartPositionAndTypeOffset)
3730 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3731 function_token_position,
3732 kFunctionTokenPositionOffset)
3733 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3734 compiler_hints,
3735 kCompilerHintsOffset)
3737 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3738 this_property_assignments_count,
3739 kThisPropertyAssignmentsCountOffset)
3740 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3742 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3743 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3747 int SharedFunctionInfo::construction_count() {
3748 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3752 void SharedFunctionInfo::set_construction_count(int value) {
3753 ASSERT(0 <= value && value < 256);
3754 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3758 BOOL_ACCESSORS(SharedFunctionInfo,
3760 live_objects_may_exist,
3761 kLiveObjectsMayExist)
3764 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3765 return initial_map() != GetHeap()->undefined_value();
3769 BOOL_GETTER(SharedFunctionInfo,
3771 optimization_disabled,
3772 kOptimizationDisabled)
3775 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3776 set_compiler_hints(BooleanBit::set(compiler_hints(),
3777 kOptimizationDisabled,
3778 disable));
3779 // If disabling optimizations we reflect that in the code object so
3780 // it will not be counted as optimizable code.
3781 if ((code()->kind() == Code::FUNCTION) && disable) {
3782 code()->set_optimizable(false);
3787 int SharedFunctionInfo::profiler_ticks() {
3788 if (code()->kind() != Code::FUNCTION) return 0;
3789 return code()->profiler_ticks();
3793 LanguageMode SharedFunctionInfo::language_mode() {
3794 int hints = compiler_hints();
3795 if (BooleanBit::get(hints, kExtendedModeFunction)) {
3796 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
3797 return EXTENDED_MODE;
3799 return BooleanBit::get(hints, kStrictModeFunction)
3800 ? STRICT_MODE : CLASSIC_MODE;
3804 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
3805 // We only allow language mode transitions that either set the same
3806 // language mode again or go up in the chain:
3807 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3808 ASSERT(this->language_mode() == CLASSIC_MODE ||
3809 this->language_mode() == language_mode ||
3810 language_mode == EXTENDED_MODE);
3811 int hints = compiler_hints();
3812 hints = BooleanBit::set(
3813 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3814 hints = BooleanBit::set(
3815 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3816 set_compiler_hints(hints);
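// Language mode is encoded with two compiler-hint bits rather than an enum
// field: kStrictModeFunction is set for both STRICT_MODE and EXTENDED_MODE,
// and kExtendedModeFunction is additionally set for EXTENDED_MODE only.
// That makes CLASSIC_MODE the all-clear state and keeps the invariant
// checked in language_mode(): the extended bit never appears without the
// strict bit.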
3820 bool SharedFunctionInfo::is_classic_mode() {
3821 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
3824 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3825 kExtendedModeFunction)
3826 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, qml_mode,
3828 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3829 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3830 name_should_print_as_anonymous,
3831 kNameShouldPrintAsAnonymous)
3832 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3833 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3834 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3835 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
3837 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3839 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3840 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3842 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3844 bool Script::HasValidSource() {
3845 Object* src = this->source();
3846 if (!src->IsString()) return true;
3847 String* src_str = String::cast(src);
3848 if (!StringShape(src_str).IsExternal()) return true;
3849 if (src_str->IsAsciiRepresentation()) {
3850 return ExternalAsciiString::cast(src)->resource() != NULL;
3851 } else if (src_str->IsTwoByteRepresentation()) {
3852 return ExternalTwoByteString::cast(src)->resource() != NULL;
3853 }
3854 return true;
3858 void SharedFunctionInfo::DontAdaptArguments() {
3859 ASSERT(code()->kind() == Code::BUILTIN);
3860 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3864 int SharedFunctionInfo::start_position() {
3865 return start_position_and_type() >> kStartPositionShift;
3869 void SharedFunctionInfo::set_start_position(int start_position) {
3870 set_start_position_and_type((start_position << kStartPositionShift)
3871 | (start_position_and_type() & ~kStartPositionMask));
3875 Code* SharedFunctionInfo::code() {
3876 return Code::cast(READ_FIELD(this, kCodeOffset));
3880 Code* SharedFunctionInfo::unchecked_code() {
3881 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3885 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3886 WRITE_FIELD(this, kCodeOffset, value);
3887 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3891 ScopeInfo* SharedFunctionInfo::scope_info() {
3892 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3896 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3897 WriteBarrierMode mode) {
3898 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3899 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3902 reinterpret_cast<Object*>(value),
3907 bool SharedFunctionInfo::is_compiled() {
3908 return code() !=
3909 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3913 bool SharedFunctionInfo::IsApiFunction() {
3914 return function_data()->IsFunctionTemplateInfo();
3918 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3919 ASSERT(IsApiFunction());
3920 return FunctionTemplateInfo::cast(function_data());
3924 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3925 return function_data()->IsSmi();
3929 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3930 ASSERT(HasBuiltinFunctionId());
3931 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3935 int SharedFunctionInfo::code_age() {
3936 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3940 void SharedFunctionInfo::set_code_age(int code_age) {
3941 int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3942 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3946 bool SharedFunctionInfo::has_deoptimization_support() {
3947 Code* code = this->code();
3948 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3952 bool JSFunction::IsBuiltin() {
3953 return context()->global()->IsJSBuiltinsObject();
3957 bool JSFunction::NeedsArgumentsAdaption() {
3958 return shared()->formal_parameter_count() !=
3959 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3963 bool JSFunction::IsOptimized() {
3964 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3968 bool JSFunction::IsOptimizable() {
3969 return code()->kind() == Code::FUNCTION && code()->optimizable();
3973 bool JSFunction::IsMarkedForLazyRecompilation() {
3974 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3978 Code* JSFunction::code() {
3979 return Code::cast(unchecked_code());
3983 Code* JSFunction::unchecked_code() {
3984 return reinterpret_cast<Code*>(
3985 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3989 void JSFunction::set_code(Code* value) {
3990 ASSERT(!HEAP->InNewSpace(value));
3991 Address entry = value->entry();
3992 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3993 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3994 this,
3995 HeapObject::RawField(this, kCodeEntryOffset),
3996 value);
4000 void JSFunction::ReplaceCode(Code* code) {
4001 bool was_optimized = IsOptimized();
4002 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
4004 set_code(code);
4006 // Add/remove the function from the list of optimized functions for this
4007 // context based on the state change.
4008 if (!was_optimized && is_optimized) {
4009 context()->global_context()->AddOptimizedFunction(this);
4011 if (was_optimized && !is_optimized) {
4012 context()->global_context()->RemoveOptimizedFunction(this);
4017 Context* JSFunction::context() {
4018 return Context::cast(READ_FIELD(this, kContextOffset));
4022 Object* JSFunction::unchecked_context() {
4023 return READ_FIELD(this, kContextOffset);
4027 SharedFunctionInfo* JSFunction::unchecked_shared() {
4028 return reinterpret_cast<SharedFunctionInfo*>(
4029 READ_FIELD(this, kSharedFunctionInfoOffset));
4033 void JSFunction::set_context(Object* value) {
4034 ASSERT(value->IsUndefined() || value->IsContext());
4035 WRITE_FIELD(this, kContextOffset, value);
4036 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
4039 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
4040 kPrototypeOrInitialMapOffset)
4043 Map* JSFunction::initial_map() {
4044 return Map::cast(prototype_or_initial_map());
4048 void JSFunction::set_initial_map(Map* value) {
4049 set_prototype_or_initial_map(value);
4053 MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
4054 Map* initial_map) {
4055 Context* global_context = context()->global_context();
4056 Object* array_function =
4057 global_context->get(Context::ARRAY_FUNCTION_INDEX);
4058 if (array_function->IsJSFunction() &&
4059 this == JSFunction::cast(array_function)) {
4060 ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
4062 MaybeObject* maybe_map = initial_map->CopyDropTransitions();
4063 Map* new_double_map = NULL;
4064 if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
4065 new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
4066 maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
4068 if (maybe_map->IsFailure()) return maybe_map;
4070 maybe_map = new_double_map->CopyDropTransitions();
4071 Map* new_object_map = NULL;
4072 if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
4073 new_object_map->set_elements_kind(FAST_ELEMENTS);
4074 maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
4076 if (maybe_map->IsFailure()) return maybe_map;
4078 global_context->set_smi_js_array_map(initial_map);
4079 global_context->set_double_js_array_map(new_double_map);
4080 global_context->set_object_js_array_map(new_object_map);
4082 set_initial_map(initial_map);
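// Sketch of what the code above caches (illustrative, not part of the
// original header): for the global Array function the initial map starts
// out with FAST_SMI_ONLY_ELEMENTS and two copies are linked in as
// elements-kind transitions, so
//
//   var a = [1, 2, 3];   // uses the cached FAST_SMI_ONLY map
//   a[0] = 1.5;          // transitions to the cached FAST_DOUBLE map
//   a[1] = "x";          // transitions to the cached FAST (tagged) map
//
// All three maps are remembered on the global context so later arrays can
// share them instead of allocating fresh maps.
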
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}

bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}

bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}

Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}

Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

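// Illustrative example (not part of the original header): after
//   function f() {}
//   f.prototype = 42;
// the number cannot serve as an instance prototype, so the map is flagged
// has_non_instance_prototype(), 42 is parked in the map's constructor field,
// and prototype() above reports 42, while instance_prototype() keeps handing
// out a real object for instances created with 'new f()'.
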
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}

bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}

FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}

FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}

int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}

Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}

void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}

Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}

void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}

ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)

void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, next, Object, kNextOffset)

Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}

void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}

ACCESSORS(JSModule, context, Object, kContextOffset)

JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}

ACCESSORS(JSValue, value, Object, kValueOffset)

JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}

ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)

JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}

ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)

JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)

byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}

byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}

int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}

FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}

ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}

int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}

byte* Code::entry() {
  return instruction_start();
}

bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

ACCESSORS(JSArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)

JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}

JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}

int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}

JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}

String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}

Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}

Object* JSRegExp::DataAtUnchecked(int index) {
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  int offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(fa, offset);
}

void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}

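// Illustrative sketch of the data() FixedArray consumed by the accessors
// above (slot names are the k*Index constants already used here; their
// numeric values are not restated):
//
//   data()->get(kTagIndex)     // Smi: ATOM or IRREGEXP
//   data()->get(kSourceIndex)  // String: the pattern source
//   data()->get(kFlagsIndex)   // Smi: encoded flags
//   data()->get(kDataIndex)... // implementation slots; the only ones
//                              // SetDataAt()/SetDataAtUnchecked() may touch
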
void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    fa->set_unchecked(index, Smi::cast(value));
  } else {
    // We only do this during GC, so we don't need to notify the write barrier.
    fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
  }
}

ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
  Map* map = fixed_array->map();
  ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
          (map == GetHeap()->fixed_array_map() ||
           map == GetHeap()->fixed_cow_array_map())) ||
         (kind == FAST_DOUBLE_ELEMENTS &&
          (fixed_array->IsFixedDoubleArray() ||
           fixed_array == GetHeap()->empty_fixed_array())) ||
         (kind == DICTIONARY_ELEMENTS &&
          fixed_array->IsFixedArray() &&
          fixed_array->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
         (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
  return kind;
}

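// Illustrative summary of the invariant the ASSERTs above check (expected
// backing store per elements kind; not part of the original header):
//
//   FAST_SMI_ONLY_ELEMENTS / FAST_ELEMENTS -> FixedArray (possibly COW)
//   FAST_DOUBLE_ELEMENTS                   -> FixedDoubleArray (or empty)
//   DICTIONARY_ELEMENTS                    -> a number dictionary
//   NON_STRICT_ARGUMENTS_ELEMENTS          -> FixedArray of length >= 2
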
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}

bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}

bool JSObject::HasFastSmiOnlyElements() {
  return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
}

bool JSObject::HasFastTypeElements() {
  ElementsKind elements_kind = GetElementsKind();
  return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
         elements_kind == FAST_ELEMENTS;
}

bool JSObject::HasFastDoubleElements() {
  return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
}

bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}

bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}

bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}

#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}

EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)

bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}

bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}

MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastTypeElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
        elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}

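// Usage sketch (hypothetical call site, not part of the original header):
// code that is about to write into fast elements calls the function above
// first, so a shared copy-on-write backing store is replaced by a private
// copy before the store happens.
//
//   MaybeObject* maybe = object->EnsureWritableFastElements();
//   if (maybe->IsFailure()) return maybe;  // copying may fail to allocate
//   FixedArray::cast(object->elements())->set(i, value);  // safe to write now
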
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}

SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}

bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}

bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}

uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}

StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}

bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}

void StringHasher::AddCharacter(uint32_t c) {
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePair(c);  // Not inlined.
    return;
  }
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}

void StringHasher::AddCharacterNoIndex(uint32_t c) {
  ASSERT(!is_array_index());
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePairNoIndex(c);  // Not inlined.
    return;
  }
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}

uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if ((result & String::kHashBitMask) == 0) {
    result = 27;
  }
  return result;
}

template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) {
    int i;
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    }
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
    }
  }
  return hasher.GetHashField();
}

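// Usage sketch (illustrative only; the seed argument is assumed to be the
// heap's hash seed): hashing a sequential ASCII buffer with the template
// above.  The first loop feeds characters through AddCharacter() while the
// string still looks like an array index; once is_array_index() drops to
// false, the cheaper AddCharacterNoIndex() path takes over.
//
//   const char* chars = "12345";
//   uint32_t field = HashSequentialString(chars, 5, seed);
//   // 'field' packs both the hash bits and the array-index information that
//   // String::Hash() and String::AsArrayIndex() decode later.
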
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}

Object* JSReceiver::GetPrototype() {
  return HeapObject::cast(this)->map()->prototype();
}

bool JSReceiver::HasProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}

bool JSReceiver::HasLocalProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}

PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}

// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}

MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}

bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
}

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}

void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}

bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}

void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}

bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}

void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}

PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}

void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}

template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}

template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::set(index+2, details.AsSmi());
}

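// Illustrative layout of one dictionary entry as written by SetEntry() above
// (not part of the original header): each entry occupies three consecutive
// FixedArray slots starting at EntryToIndex(entry):
//
//   [index + 0]  key
//   [index + 1]  value
//   [index + 2]  PropertyDetails, encoded as a Smi
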
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}

uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}

uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}

bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}

uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}

uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}

MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}

template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}

template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}

template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}

template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
  return key;
}

void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}

void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastTypeElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}

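// Worked example of the growth policy above (illustrative, not part of the
// original header): EnsureSize(100) on a backing store of 64 elements calls
// Expand(100 + (100 >> 3)) == Expand(112), i.e. roughly 12.5% slack instead
// of doubling; a small array that already fits but no longer lives in new
// space is re-expanded at its requested size so the fresh backing store lands
// in new space again.
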
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}

bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}

MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          ((GetElementsKind() == FAST_ELEMENTS) ||
           (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}

MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}

MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}

void TypeFeedbackCells::SetAstId(int index, Smi* id) {
  set(1 + index * 2, id);
}

Smi* TypeFeedbackCells::AstId(int index) {
  return Smi::cast(get(1 + index * 2));
}

void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
  set(index * 2, cell);
}

JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
  return JSGlobalPropertyCell::cast(get(index * 2));
}

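// Illustrative layout of the cells array accessed above (not part of the
// original header): entries are stored as pairs, so logical entry i uses
//
//   get(i * 2)      // the JSGlobalPropertyCell holding the recorded feedback
//   get(i * 2 + 1)  // the Smi-encoded AST id the cell belongs to
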
Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}

Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}

Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->raw_unchecked_the_hole_value();
}

SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
              kIcWithTypeinfoCountOffset)
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)

SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)

Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}

Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}

int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}

void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}

template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}

void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}

template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR

#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_