// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "elements.h"
#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
#include "heap.h"
#include "isolate.h"
#include "property.h"
#include "spaces.h"
#include "store-buffer.h"
#include "v8memory.h"
#include "factory.h"
#include "incremental-marking.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}


#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }

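// For illustration only, TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) expands
// to the following predicate:
//
//   bool Object::IsHeapNumber() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
//   }
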
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


#define BOOL_GETTER(holder, field, name, offset)        \
  bool holder::name() {                                 \
    return BooleanBit::get(field(), offset);            \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }

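// To see what these accessor macros buy us, here is the code a hypothetical
// SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) line would generate
// (shown for illustration; the real uses live next to each class):
//
//   int FixedArrayBase::length() {
//     Object* value = READ_FIELD(this, kLengthOffset);
//     return Smi::cast(value)->value();
//   }
//   void FixedArrayBase::set_length(int value) {
//     WRITE_FIELD(this, kLengthOffset, Smi::FromInt(value));
//   }
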
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
                                         ElementsKind to_kind) {
  if (to_kind == FAST_ELEMENTS) {
    return from_kind == FAST_SMI_ONLY_ELEMENTS ||
        from_kind == FAST_DOUBLE_ELEMENTS;
  } else {
    return to_kind == FAST_DOUBLE_ELEMENTS &&
        from_kind == FAST_SMI_ONLY_ELEMENTS;
  }
}


bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray();
}


bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}

TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)


bool Object::IsString() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsSymbol() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
  STATIC_ASSERT(kSymbolTag != 0);
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  return (type & kIsSymbolMask) != 0;
}

bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
}

StringShape::StringShape(String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsSymbol() {
  ASSERT(valid());
  STATIC_ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}

bool String::IsAsciiRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsAsciiRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsAsciiRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyAsciiChars() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag ||
         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
}

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}


bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)

bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)


bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}

bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)

bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsTypeFeedbackCells() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a cache cells array. Since this is used for asserts we can check that
  // the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool Object::IsContext() {
  if (Object::IsHeapObject()) {
    Map* map = HeapObject::cast(this)->map();
    Heap* heap = map->GetHeap();
    return (map == heap->function_context_map() ||
            map == heap->catch_context_map() ||
            map == heap->with_context_map() ||
            map == heap->global_context_map() ||
            map == heap->block_context_map() ||
            map == heap->module_context_map());
  }
  return false;
}


bool Object::IsGlobalContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->global_context_map();
}


bool Object::IsModuleContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->module_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}

TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}

bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->symbol_table();
}


bool Object::IsSymbolTable() {
  return IsHashTable() && this ==
      HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}


bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}


bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}

bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}


bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)

bool Object::IsUndetectableObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() {                                             \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}


double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
}

MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(HEAP->IsAllocationAllowed());
  return GetElementWithReceiver(this, index);
}


Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


MaybeObject* Object::GetProperty(String* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}

#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

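// Heap object pointers carry kHeapObjectTag in their low bits (see
// HeapObject::FromAddress below), so FIELD_ADDR subtracts the tag to recover
// a raw, word-aligned address. Worked example: for an object whose untagged
// address is 0x1000, 'p' is 0x1001 and FIELD_ADDR(p, 8) is
// 0x1001 + 8 - 1 == 0x1008, the raw address of the field at offset 8.
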
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

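// Illustrative expansion: with a compile-time constant mode of
// SKIP_WRITE_BARRIER the 'if' in CONDITIONAL_WRITE_BARRIER is dead code and
// no barrier runs; with UPDATE_WRITE_BARRIER the slot is reported to the
// incremental marker and, if the value is in new space, to the store buffer.
// Callers usually obtain the mode from HeapObject::GetWriteBarrierMode
// (defined later in this file).
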
#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}

Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}

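// A worked example of the tagging, assuming a 32-bit target (kSmiTag == 0,
// kSmiTagSize == 1, kSmiShiftSize == 0): FromInt(42) shifts the payload left
// by one bit and ors in the tag, giving the word 84 (binary 1010100);
// Internals::SmiValue simply shifts it back. On x64, kSmiShiftSize is 31 and
// the payload lives in the upper 32 bits of the word.
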
Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}

Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}

Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}

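// Failures are the third kind of tagged word besides smis and heap object
// pointers: the payload sits above kFailureTagSize low bits that hold
// kFailureTag, so HAS_FAILURE_TAG can classify a MaybeObject* with a single
// mask. Assuming the usual encoding (kFailureTag == 3, kFailureTagSize == 2),
// Construct(EXCEPTION) produces a word whose two low bits are 11.
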
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as a tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0.
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}

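// The unsigned trick above can be checked by hand: valid 32-bit smi payloads
// occupy [-0x40000000, 0x3fffffff], and adding 0x40000000 shifts that
// interval to [0, 0x7fffffff], so the unsigned comparison against 0x80000000
// succeeds exactly for in-range values and never overflows a signed integer.
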
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}

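// Note how the encoding works: FromForwardingAddress strips kHeapObjectTag
// from the target location, leaving a word whose low bits look like a smi
// tag. A real map pointer is always tagged, so IsForwardingAddress can tell
// the two states of the map word apart with the smi test above.
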
#ifdef DEBUG
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}

Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke the write barrier, but there is no need
  // for one here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}

HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)

Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}

void JSObject::ValidateSmiOnlyElements() {
#if DEBUG
  if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
    Heap* heap = GetHeap();
    // Don't use elements, since integrity checks will fail if there
    // are filler pointers in the array.
    FixedArray* fixed_array =
        reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
    Map* map = fixed_array->map();
    // Arrays that have been shifted in place can't be verified.
    if (map != heap->raw_unchecked_one_pointer_filler_map() &&
        map != heap->raw_unchecked_two_pointer_filler_map() &&
        map != heap->free_space_map()) {
      for (int i = 0; i < fixed_array->length(); i++) {
        Object* current = fixed_array->get(i);
        ASSERT(current->IsSmi() || current->IsTheHole());
      }
    }
  }
#endif
}


MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
#if DEBUG
  ValidateSmiOnlyElements();
#endif
  if ((map()->elements_kind() != FAST_ELEMENTS)) {
    return TransitionElementsKind(FAST_ELEMENTS);
  }
  return this;
}

MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count,
                                                EnsureElementsMode mode) {
  ElementsKind current_kind = map()->elements_kind();
  ElementsKind target_kind = current_kind;
  ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (current_kind == FAST_ELEMENTS) return this;

  Heap* heap = GetHeap();
  Object* the_hole = heap->the_hole_value();
  Object* heap_number_map = heap->heap_number_map();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (!current->IsSmi() && current != the_hole) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
          HeapObject::cast(current)->map() == heap_number_map) {
        target_kind = FAST_DOUBLE_ELEMENTS;
      } else {
        target_kind = FAST_ELEMENTS;
        break;
      }
    }
  }

  if (target_kind != current_kind) {
    return TransitionElementsKind(target_kind);
  }
  return this;
}


MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
                                                EnsureElementsMode mode) {
  if (elements->map() != GetHeap()->fixed_double_array_map()) {
    ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
           elements->map() == GetHeap()->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
    return EnsureCanContainElements(objects, elements->length(), mode);
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
    return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
  }
  return this;
}

MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
                                                ElementsKind to_kind) {
  Map* current_map = map();
  ElementsKind from_kind = current_map->elements_kind();

  if (from_kind == to_kind) return current_map;

  Context* global_context = isolate->context()->global_context();
  if (current_map == global_context->smi_js_array_map()) {
    if (to_kind == FAST_ELEMENTS) {
      return global_context->object_js_array_map();
    }
    if (to_kind == FAST_DOUBLE_ELEMENTS) {
      return global_context->double_js_array_map();
    }
    ASSERT(to_kind == DICTIONARY_ELEMENTS);
  }
  return GetElementsTransitionMapSlow(to_kind);
}

void JSObject::set_map_and_elements(Map* new_map,
                                    FixedArrayBase* value,
                                    WriteBarrierMode mode) {
  ASSERT(value->HasValidElements());
#if DEBUG
  ValidateSmiOnlyElements();
#endif
  if (new_map != NULL) {
    if (mode == UPDATE_WRITE_BARRIER) {
      set_map(new_map);
    } else {
      ASSERT(mode == SKIP_WRITE_BARRIER);
      set_map_no_write_barrier(new_map);
    }
  }
  ASSERT((map()->has_fast_elements() ||
          map()->has_fast_smi_only_elements() ||
          (value == GetHeap()->empty_fixed_array())) ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT((value == GetHeap()->empty_fixed_array()) ||
         (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  set_map_and_elements(NULL, value, mode);
}

void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements() ||
         map()->has_fast_smi_only_elements() ||
         map()->has_fast_double_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}


MaybeObject* JSObject::ResetElements() {
  Object* obj;
  ElementsKind elements_kind = FLAG_smi_only_arrays
      ? FAST_SMI_ONLY_ELEMENTS
      : FAST_ELEMENTS;
  MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
                                                    elements_kind);
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}


ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)

byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}

int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}

int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties() -
         (map()->has_external_resource() ? 1 : 0);
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}

Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}


void JSObject::SetExternalResourceObject(Object* value) {
  ASSERT(map()->has_external_resource());
  int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


Object* JSObject::GetExternalResourceObject() {
  if (map()->has_external_resource()) {
    return READ_FIELD(
        this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
  } else {
    return GetHeap()->undefined_value();
  }
}

// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}

void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}


bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}


void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}


bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}

FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}

inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}

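// The hole is therefore one specific NaN bit pattern (kHoleNanInt64), and it
// must be compared by representation rather than by value, since every NaN
// compares unequal to itself. The canonicalization asserted above guarantees
// that an ordinary computed NaN can never alias the hole pattern.
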
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}


int64_t FixedDoubleArray::get_representation(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}


MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}

WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}

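// The intended usage pattern, sketched: compute the mode once while
// allocation is provably disabled, then pass it to the raw setters.
//
//   AssertNoAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   for (int i = 0; i < n; i++) array->set(i, value, mode);
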
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}


void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}

void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}


void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
}


void FixedArray::set_null_unchecked(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}

Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


bool DescriptorArray::IsEmpty() {
  ASSERT(this->IsSmi() ||
         this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return this->IsSmi() || length() <= kFirstIndex;
}


int DescriptorArray::bit_field3_storage() {
  Object* storage = READ_FIELD(this, kBitField3StorageOffset);
  return Smi::cast(storage)->value();
}


void DescriptorArray::set_bit_field3_storage(int value) {
  ASSERT(this->length() > kBitField3StorageIndex);
  WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}


void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
                                                    int first,
                                                    int second) {
  Object* tmp = array->get(first);
  NoIncrementalWriteBarrierSet(array, first, array->get(second));
  NoIncrementalWriteBarrierSet(array, second, tmp);
}

int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}


int DescriptorArray::SearchWithCache(String* name) {
  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name);
    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
  }
  return number;
}

String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}


JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


bool DescriptorArray::IsProperty(int descriptor_number) {
  Entry entry(this, descriptor_number);
  return IsPropertyDescriptor(&entry);
}

bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
  switch (GetType(descriptor_number)) {
    case MAP_TRANSITION:
    case CONSTANT_TRANSITION:
    case ELEMENTS_TRANSITION:
      return true;
    case CALLBACKS: {
      Object* value = GetValue(descriptor_number);
      if (!value->IsAccessorPair()) return false;
      AccessorPair* accessors = AccessorPair::cast(value);
      return accessors->getter()->IsMap() && accessors->setter()->IsMap();
    }
    case NORMAL:
    case FIELD:
    case CONSTANT_FUNCTION:
    case HANDLER:
    case INTERCEPTOR:
    case NULL_DESCRIPTOR:
      return false;
  }
  UNREACHABLE();  // Keep the compiler happy.
  return false;
}


bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}

void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}


void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               desc->GetKey());
  FixedArray* content_array = GetContentArray();
  NoIncrementalWriteBarrierSet(content_array,
                               ToValueIndex(descriptor_number),
                               desc->GetValue());
  NoIncrementalWriteBarrierSet(content_array,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
    int first, int second) {
  NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  NoIncrementalWriteBarrierSwap(content_array,
                                ToValueIndex(first),
                                ToValueIndex(second));
  NoIncrementalWriteBarrierSwap(content_array,
                                ToDetailsIndex(first),
                                ToDetailsIndex(second));
}

2065 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2066 : marking_(array->GetHeap()->incremental_marking()) {
2067 marking_->EnterNoMarkingScope();
2068 if (array->number_of_descriptors() > 0) {
2069 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2070 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
2075 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2076 marking_->LeaveNoMarkingScope();
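// A freshly allocated DescriptorArray is white under incremental marking, so
// raw writes into it cannot hide objects from the marker. Constructing a
// WhitenessWitness checks that invariant and enters a no-marking scope, which
// is what makes the NoIncrementalWriteBarrierSet calls in Set() safe. A
// minimal usage sketch (allocation elided):
//
//   DescriptorArray* array = ...;  // freshly allocated, still white
//   DescriptorArray::WhitenessWitness witness(array);
//   array->Set(0, &descriptor, witness);  // no incremental write barrier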
2080 template<typename Shape, typename Key>
2081 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2082 const int kMinCapacity = 32;
2083 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2084 if (capacity < kMinCapacity) {
2085 capacity = kMinCapacity; // Guarantee min capacity.
2086 }
2087 return capacity;
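// Worked example: at_least_space_for == 20 is doubled to 40 and rounded up
// to the next power of two, giving a capacity of 64; small requests (8 or
// fewer entries) are clamped up to the 32-entry minimum.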
2091 template<typename Shape, typename Key>
2092 int HashTable<Shape, Key>::FindEntry(Key key) {
2093 return FindEntry(GetIsolate(), key);
2097 // Find entry for key otherwise return kNotFound.
2098 template<typename Shape, typename Key>
2099 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2100 uint32_t capacity = Capacity();
2101 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2102 uint32_t count = 1;
2103 // EnsureCapacity will guarantee the hash table is never full.
2104 while (true) {
2105 Object* element = KeyAt(entry);
2106 // Empty entry.
2107 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2108 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2109 Shape::IsMatch(key, element)) return entry;
2110 entry = NextProbe(entry, count++, capacity);
2111 }
2112 return kNotFound;
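// The probe sequence is quadratic probing over a power-of-two table: the step
// grows by one on every collision, so the offsets from the first probe are
// the triangular numbers 1, 3, 6, 10, ... A sketch of the arithmetic,
// assuming FirstProbe/NextProbe mask with (capacity - 1):
//
//   entry = hash & (capacity - 1);               // FirstProbe
//   entry = (entry + count++) & (capacity - 1);  // NextProbe, count = 1, 2, ...
//
// Because capacity is a power of two, this sequence eventually visits every
// slot, so the loop terminates as long as EnsureCapacity keeps a free slot.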
2116 bool SeededNumberDictionary::requires_slow_elements() {
2117 Object* max_index_object = get(kMaxNumberKeyIndex);
2118 if (!max_index_object->IsSmi()) return false;
2119 return 0 !=
2120 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2123 uint32_t SeededNumberDictionary::max_number_key() {
2124 ASSERT(!requires_slow_elements());
2125 Object* max_index_object = get(kMaxNumberKeyIndex);
2126 if (!max_index_object->IsSmi()) return 0;
2127 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2128 return value >> kRequiresSlowElementsTagSize;
2131 void SeededNumberDictionary::set_requires_slow_elements() {
2132 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2136 // ------------------------------------
2137 // Cast operations
2140 CAST_ACCESSOR(FixedArray)
2141 CAST_ACCESSOR(FixedDoubleArray)
2142 CAST_ACCESSOR(DescriptorArray)
2143 CAST_ACCESSOR(DeoptimizationInputData)
2144 CAST_ACCESSOR(DeoptimizationOutputData)
2145 CAST_ACCESSOR(TypeFeedbackCells)
2146 CAST_ACCESSOR(SymbolTable)
2147 CAST_ACCESSOR(JSFunctionResultCache)
2148 CAST_ACCESSOR(NormalizedMapCache)
2149 CAST_ACCESSOR(ScopeInfo)
2150 CAST_ACCESSOR(CompilationCacheTable)
2151 CAST_ACCESSOR(CodeCacheHashTable)
2152 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2153 CAST_ACCESSOR(MapCache)
2154 CAST_ACCESSOR(String)
2155 CAST_ACCESSOR(SeqString)
2156 CAST_ACCESSOR(SeqAsciiString)
2157 CAST_ACCESSOR(SeqTwoByteString)
2158 CAST_ACCESSOR(SlicedString)
2159 CAST_ACCESSOR(ConsString)
2160 CAST_ACCESSOR(ExternalString)
2161 CAST_ACCESSOR(ExternalAsciiString)
2162 CAST_ACCESSOR(ExternalTwoByteString)
2163 CAST_ACCESSOR(JSReceiver)
2164 CAST_ACCESSOR(JSObject)
2166 CAST_ACCESSOR(HeapObject)
2167 CAST_ACCESSOR(HeapNumber)
2168 CAST_ACCESSOR(Oddball)
2169 CAST_ACCESSOR(JSGlobalPropertyCell)
2170 CAST_ACCESSOR(SharedFunctionInfo)
2172 CAST_ACCESSOR(JSFunction)
2173 CAST_ACCESSOR(GlobalObject)
2174 CAST_ACCESSOR(JSGlobalProxy)
2175 CAST_ACCESSOR(JSGlobalObject)
2176 CAST_ACCESSOR(JSBuiltinsObject)
2178 CAST_ACCESSOR(JSArray)
2179 CAST_ACCESSOR(JSRegExp)
2180 CAST_ACCESSOR(JSProxy)
2181 CAST_ACCESSOR(JSFunctionProxy)
2182 CAST_ACCESSOR(JSSet)
2183 CAST_ACCESSOR(JSMap)
2184 CAST_ACCESSOR(JSWeakMap)
2185 CAST_ACCESSOR(Foreign)
2186 CAST_ACCESSOR(ByteArray)
2187 CAST_ACCESSOR(FreeSpace)
2188 CAST_ACCESSOR(ExternalArray)
2189 CAST_ACCESSOR(ExternalByteArray)
2190 CAST_ACCESSOR(ExternalUnsignedByteArray)
2191 CAST_ACCESSOR(ExternalShortArray)
2192 CAST_ACCESSOR(ExternalUnsignedShortArray)
2193 CAST_ACCESSOR(ExternalIntArray)
2194 CAST_ACCESSOR(ExternalUnsignedIntArray)
2195 CAST_ACCESSOR(ExternalFloatArray)
2196 CAST_ACCESSOR(ExternalDoubleArray)
2197 CAST_ACCESSOR(ExternalPixelArray)
2198 CAST_ACCESSOR(Struct)
2201 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2202 STRUCT_LIST(MAKE_STRUCT_CAST)
2203 #undef MAKE_STRUCT_CAST
2206 template <typename Shape, typename Key>
2207 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2208 ASSERT(obj->IsHashTable());
2209 return reinterpret_cast<HashTable*>(obj);
2213 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2214 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2216 SMI_ACCESSORS(String, length, kLengthOffset)
2217 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2220 uint32_t String::hash_field() {
2221 return READ_UINT32_FIELD(this, kHashFieldOffset);
2225 void String::set_hash_field(uint32_t value) {
2226 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2227 #if V8_HOST_ARCH_64_BIT
2228 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2229 #endif
2233 bool String::Equals(String* other) {
2234 if (other == this) return true;
2235 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2236 return false;
2237 }
2238 return SlowEquals(other);
2242 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2243 if (!StringShape(this).IsCons()) return this;
2244 ConsString* cons = ConsString::cast(this);
2245 if (cons->IsFlat()) return cons->first();
2246 return SlowTryFlatten(pretenure);
2250 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2251 MaybeObject* flat = TryFlatten(pretenure);
2252 Object* successfully_flattened;
2253 if (!flat->ToObject(&successfully_flattened)) return this;
2254 return String::cast(successfully_flattened);
2258 uint16_t String::Get(int index) {
2259 ASSERT(index >= 0 && index < length());
2260 switch (StringShape(this).full_representation_tag()) {
2261 case kSeqStringTag | kAsciiStringTag:
2262 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2263 case kSeqStringTag | kTwoByteStringTag:
2264 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2265 case kConsStringTag | kAsciiStringTag:
2266 case kConsStringTag | kTwoByteStringTag:
2267 return ConsString::cast(this)->ConsStringGet(index);
2268 case kExternalStringTag | kAsciiStringTag:
2269 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2270 case kExternalStringTag | kTwoByteStringTag:
2271 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2272 case kSlicedStringTag | kAsciiStringTag:
2273 case kSlicedStringTag | kTwoByteStringTag:
2274 return SlicedString::cast(this)->SlicedStringGet(index);
2275 default:
2276 break;
2277 }
2279 UNREACHABLE();
2280 return 0;
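// full_representation_tag() combines the string representation (sequential,
// cons, external or sliced) with the ASCII/two-byte encoding bit, so this one
// switch dispatches on both axes at once. For example, reading index 3 of a
// two-byte cons string matches kConsStringTag | kTwoByteStringTag and defers
// to ConsString::ConsStringGet, which walks the cons tree down to the leaf
// that actually holds character 3.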
2284 void String::Set(int index, uint16_t value) {
2285 ASSERT(index >= 0 && index < length());
2286 ASSERT(StringShape(this).IsSequential());
2288 return this->IsAsciiRepresentation()
2289 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2290 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2294 bool String::IsFlat() {
2295 if (!StringShape(this).IsCons()) return true;
2296 return ConsString::cast(this)->second()->length() == 0;
2300 String* String::GetUnderlying() {
2301 // Giving direct access to underlying string only makes sense if the
2302 // wrapping string is already flattened.
2303 ASSERT(this->IsFlat());
2304 ASSERT(StringShape(this).IsIndirect());
2305 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2306 const int kUnderlyingOffset = SlicedString::kParentOffset;
2307 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2311 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2312 ASSERT(index >= 0 && index < length());
2313 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2317 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2318 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2319 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2320 static_cast<byte>(value));
2324 Address SeqAsciiString::GetCharsAddress() {
2325 return FIELD_ADDR(this, kHeaderSize);
2329 char* SeqAsciiString::GetChars() {
2330 return reinterpret_cast<char*>(GetCharsAddress());
2334 Address SeqTwoByteString::GetCharsAddress() {
2335 return FIELD_ADDR(this, kHeaderSize);
2339 uc16* SeqTwoByteString::GetChars() {
2340 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2344 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2345 ASSERT(index >= 0 && index < length());
2346 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2350 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2351 ASSERT(index >= 0 && index < length());
2352 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2356 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2357 return SizeFor(length());
2361 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2362 return SizeFor(length());
2366 String* SlicedString::parent() {
2367 return String::cast(READ_FIELD(this, kParentOffset));
2371 void SlicedString::set_parent(String* parent) {
2372 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2373 WRITE_FIELD(this, kParentOffset, parent);
2377 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2380 String* ConsString::first() {
2381 return String::cast(READ_FIELD(this, kFirstOffset));
2385 Object* ConsString::unchecked_first() {
2386 return READ_FIELD(this, kFirstOffset);
2390 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2391 WRITE_FIELD(this, kFirstOffset, value);
2392 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2396 String* ConsString::second() {
2397 return String::cast(READ_FIELD(this, kSecondOffset));
2401 Object* ConsString::unchecked_second() {
2402 return READ_FIELD(this, kSecondOffset);
2406 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2407 WRITE_FIELD(this, kSecondOffset, value);
2408 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2412 bool ExternalString::is_short() {
2413 InstanceType type = map()->instance_type();
2414 return (type & kShortExternalStringMask) == kShortExternalStringTag;
2418 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2419 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2423 void ExternalAsciiString::update_data_cache() {
2424 if (is_short()) return;
2425 const char** data_field =
2426 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2427 *data_field = resource()->data();
2431 void ExternalAsciiString::set_resource(
2432 const ExternalAsciiString::Resource* resource) {
2433 *reinterpret_cast<const Resource**>(
2434 FIELD_ADDR(this, kResourceOffset)) = resource;
2435 if (resource != NULL) update_data_cache();
2439 const char* ExternalAsciiString::GetChars() {
2440 return resource()->data();
2444 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
2445 ASSERT(index >= 0 && index < length());
2446 return GetChars()[index];
2450 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2451 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2455 void ExternalTwoByteString::update_data_cache() {
2456 if (is_short()) return;
2457 const uint16_t** data_field =
2458 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2459 *data_field = resource()->data();
2463 void ExternalTwoByteString::set_resource(
2464 const ExternalTwoByteString::Resource* resource) {
2465 *reinterpret_cast<const Resource**>(
2466 FIELD_ADDR(this, kResourceOffset)) = resource;
2467 if (resource != NULL) update_data_cache();
2471 const uint16_t* ExternalTwoByteString::GetChars() {
2472 return resource()->data();
2476 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
2477 ASSERT(index >= 0 && index < length());
2478 return GetChars()[index];
2482 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
2483 int start) {
2484 return GetChars() + start;
2488 void JSFunctionResultCache::MakeZeroSize() {
2489 set_finger_index(kEntriesIndex);
2490 set_size(kEntriesIndex);
2494 void JSFunctionResultCache::Clear() {
2495 int cache_size = size();
2496 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2497 MemsetPointer(entries_start,
2498 GetHeap()->the_hole_value(),
2499 cache_size - kEntriesIndex);
2504 int JSFunctionResultCache::size() {
2505 return Smi::cast(get(kCacheSizeIndex))->value();
2509 void JSFunctionResultCache::set_size(int size) {
2510 set(kCacheSizeIndex, Smi::FromInt(size));
2514 int JSFunctionResultCache::finger_index() {
2515 return Smi::cast(get(kFingerIndex))->value();
2519 void JSFunctionResultCache::set_finger_index(int finger_index) {
2520 set(kFingerIndex, Smi::FromInt(finger_index));
2524 byte ByteArray::get(int index) {
2525 ASSERT(index >= 0 && index < this->length());
2526 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2530 void ByteArray::set(int index, byte value) {
2531 ASSERT(index >= 0 && index < this->length());
2532 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2536 int ByteArray::get_int(int index) {
2537 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2538 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2542 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2543 ASSERT_TAG_ALIGNED(address);
2544 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2548 Address ByteArray::GetDataStartAddress() {
2549 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
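// These two helpers are inverses; a round trip through the tag arithmetic,
// assuming kHeapObjectTag == 1:
//
//   Address data = array->GetDataStartAddress();  // array - 1 + kHeaderSize
//   ByteArray* same = ByteArray::FromDataStartAddress(data);
//   ASSERT(same == array);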
2553 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2554 return reinterpret_cast<uint8_t*>(external_pointer());
2558 uint8_t ExternalPixelArray::get_scalar(int index) {
2559 ASSERT((index >= 0) && (index < this->length()));
2560 uint8_t* ptr = external_pixel_pointer();
2561 return ptr[index];
2565 MaybeObject* ExternalPixelArray::get(int index) {
2566 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2570 void ExternalPixelArray::set(int index, uint8_t value) {
2571 ASSERT((index >= 0) && (index < this->length()));
2572 uint8_t* ptr = external_pixel_pointer();
2573 ptr[index] = value;
2577 void* ExternalArray::external_pointer() {
2578 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2579 return reinterpret_cast<void*>(ptr);
2583 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2584 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2585 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2589 int8_t ExternalByteArray::get_scalar(int index) {
2590 ASSERT((index >= 0) && (index < this->length()));
2591 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2592 return ptr[index];
2596 MaybeObject* ExternalByteArray::get(int index) {
2597 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2601 void ExternalByteArray::set(int index, int8_t value) {
2602 ASSERT((index >= 0) && (index < this->length()));
2603 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2604 ptr[index] = value;
2608 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2609 ASSERT((index >= 0) && (index < this->length()));
2610 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2611 return ptr[index];
2615 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2616 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2620 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2621 ASSERT((index >= 0) && (index < this->length()));
2622 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2623 ptr[index] = value;
2627 int16_t ExternalShortArray::get_scalar(int index) {
2628 ASSERT((index >= 0) && (index < this->length()));
2629 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2630 return ptr[index];
2634 MaybeObject* ExternalShortArray::get(int index) {
2635 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2639 void ExternalShortArray::set(int index, int16_t value) {
2640 ASSERT((index >= 0) && (index < this->length()));
2641 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2642 ptr[index] = value;
2646 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2647 ASSERT((index >= 0) && (index < this->length()));
2648 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2649 return ptr[index];
2653 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2654 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2658 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2659 ASSERT((index >= 0) && (index < this->length()));
2660 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2661 ptr[index] = value;
2665 int32_t ExternalIntArray::get_scalar(int index) {
2666 ASSERT((index >= 0) && (index < this->length()));
2667 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2668 return ptr[index];
2672 MaybeObject* ExternalIntArray::get(int index) {
2673 return GetHeap()->NumberFromInt32(get_scalar(index));
2677 void ExternalIntArray::set(int index, int32_t value) {
2678 ASSERT((index >= 0) && (index < this->length()));
2679 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2680 ptr[index] = value;
2684 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2685 ASSERT((index >= 0) && (index < this->length()));
2686 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2687 return ptr[index];
2691 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2692 return GetHeap()->NumberFromUint32(get_scalar(index));
2696 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2697 ASSERT((index >= 0) && (index < this->length()));
2698 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2699 ptr[index] = value;
2703 float ExternalFloatArray::get_scalar(int index) {
2704 ASSERT((index >= 0) && (index < this->length()));
2705 float* ptr = static_cast<float*>(external_pointer());
2706 return ptr[index];
2710 MaybeObject* ExternalFloatArray::get(int index) {
2711 return GetHeap()->NumberFromDouble(get_scalar(index));
2715 void ExternalFloatArray::set(int index, float value) {
2716 ASSERT((index >= 0) && (index < this->length()));
2717 float* ptr = static_cast<float*>(external_pointer());
2718 ptr[index] = value;
2722 double ExternalDoubleArray::get_scalar(int index) {
2723 ASSERT((index >= 0) && (index < this->length()));
2724 double* ptr = static_cast<double*>(external_pointer());
2725 return ptr[index];
2729 MaybeObject* ExternalDoubleArray::get(int index) {
2730 return GetHeap()->NumberFromDouble(get_scalar(index));
2734 void ExternalDoubleArray::set(int index, double value) {
2735 ASSERT((index >= 0) && (index < this->length()));
2736 double* ptr = static_cast<double*>(external_pointer());
2737 ptr[index] = value;
2741 int Map::visitor_id() {
2742 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2746 void Map::set_visitor_id(int id) {
2747 ASSERT(0 <= id && id < 256);
2748 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2752 int Map::instance_size() {
2753 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2757 int Map::inobject_properties() {
2758 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2762 int Map::pre_allocated_property_fields() {
2763 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2767 int HeapObject::SizeFromMap(Map* map) {
2768 int instance_size = map->instance_size();
2769 if (instance_size != kVariableSizeSentinel) return instance_size;
2770 // We can ignore the "symbol" bit becase it is only set for symbols
2771 // and implies a string type.
2772 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2773 // Only inline the most frequent cases.
2774 if (instance_type == FIXED_ARRAY_TYPE) {
2775 return FixedArray::BodyDescriptor::SizeOf(map, this);
2777 if (instance_type == ASCII_STRING_TYPE) {
2778 return SeqAsciiString::SizeFor(
2779 reinterpret_cast<SeqAsciiString*>(this)->length());
2781 if (instance_type == BYTE_ARRAY_TYPE) {
2782 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2784 if (instance_type == FREE_SPACE_TYPE) {
2785 return reinterpret_cast<FreeSpace*>(this)->size();
2787 if (instance_type == STRING_TYPE) {
2788 return SeqTwoByteString::SizeFor(
2789 reinterpret_cast<SeqTwoByteString*>(this)->length());
2791 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2792 return FixedDoubleArray::SizeFor(
2793 reinterpret_cast<FixedDoubleArray*>(this)->length());
2795 ASSERT(instance_type == CODE_TYPE);
2796 return reinterpret_cast<Code*>(this)->CodeSize();
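// Example: an ASCII string's map reports kVariableSizeSentinel, so its size
// comes from the instance itself. In sketch form, SeqAsciiString::SizeFor
// (declared in objects.h) is
//
//   OBJECT_POINTER_ALIGN(kHeaderSize + length * kCharSize)
//
// i.e. one byte per character plus the header, rounded up to a pointer
// multiple so the heap iterator can step to the next object.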
2800 void Map::set_instance_size(int value) {
2801 ASSERT_EQ(0, value & (kPointerSize - 1));
2802 value >>= kPointerSizeLog2;
2803 ASSERT(0 <= value && value < 256);
2804 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2808 void Map::set_inobject_properties(int value) {
2809 ASSERT(0 <= value && value < 256);
2810 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2814 void Map::set_pre_allocated_property_fields(int value) {
2815 ASSERT(0 <= value && value < 256);
2816 WRITE_BYTE_FIELD(this,
2817 kPreAllocatedPropertyFieldsOffset,
2818 static_cast<byte>(value));
2822 InstanceType Map::instance_type() {
2823 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2827 void Map::set_instance_type(InstanceType value) {
2828 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2832 int Map::unused_property_fields() {
2833 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2837 void Map::set_unused_property_fields(int value) {
2838 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2842 byte Map::bit_field() {
2843 return READ_BYTE_FIELD(this, kBitFieldOffset);
2847 void Map::set_bit_field(byte value) {
2848 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2852 byte Map::bit_field2() {
2853 return READ_BYTE_FIELD(this, kBitField2Offset);
2857 void Map::set_bit_field2(byte value) {
2858 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2862 void Map::set_non_instance_prototype(bool value) {
2863 if (value) {
2864 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2865 } else {
2866 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2871 bool Map::has_non_instance_prototype() {
2872 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2876 void Map::set_function_with_prototype(bool value) {
2877 if (value) {
2878 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2879 } else {
2880 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2885 bool Map::function_with_prototype() {
2886 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2890 void Map::set_is_access_check_needed(bool access_check_needed) {
2891 if (access_check_needed) {
2892 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2893 } else {
2894 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2899 bool Map::is_access_check_needed() {
2900 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2904 void Map::set_is_extensible(bool value) {
2905 if (value) {
2906 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2907 } else {
2908 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2912 bool Map::is_extensible() {
2913 return ((1 << kIsExtensible) & bit_field2()) != 0;
2917 void Map::set_attached_to_shared_function_info(bool value) {
2918 if (value) {
2919 set_bit_field3(bit_field3() | (1 << kAttachedToSharedFunctionInfo));
2920 } else {
2921 set_bit_field3(bit_field3() & ~(1 << kAttachedToSharedFunctionInfo));
2925 bool Map::attached_to_shared_function_info() {
2926 return ((1 << kAttachedToSharedFunctionInfo) & bit_field3()) != 0;
2930 void Map::set_is_shared(bool value) {
2931 if (value) {
2932 set_bit_field3(bit_field3() | (1 << kIsShared));
2933 } else {
2934 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2938 bool Map::is_shared() {
2939 return ((1 << kIsShared) & bit_field3()) != 0;
2942 void Map::set_has_external_resource(bool value) {
2943 if (value) {
2944 set_bit_field(bit_field() | (1 << kHasExternalResource));
2945 } else {
2946 set_bit_field(bit_field() & ~(1 << kHasExternalResource));
2950 bool Map::has_external_resource()
2951 {
2952 return ((1 << kHasExternalResource) & bit_field()) != 0;
2956 void Map::set_use_user_object_comparison(bool value) {
2957 if (value) {
2958 set_bit_field2(bit_field2() | (1 << kUseUserObjectComparison));
2959 } else {
2960 set_bit_field2(bit_field2() & ~(1 << kUseUserObjectComparison));
2965 bool Map::use_user_object_comparison() {
2966 return ((1 << kUseUserObjectComparison) & bit_field2()) != 0;
2970 void Map::set_named_interceptor_is_fallback(bool value) {
2971 if (value) {
2972 set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
2973 } else {
2974 set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
2978 bool Map::named_interceptor_is_fallback() {
2979 return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
2983 JSFunction* Map::unchecked_constructor() {
2984 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2988 Code::Flags Code::flags() {
2989 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2993 void Code::set_flags(Code::Flags flags) {
2994 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2995 // Make sure that all call stubs have an arguments count.
2996 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2997 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2998 ExtractArgumentsCountFromFlags(flags) >= 0);
2999 WRITE_INT_FIELD(this, kFlagsOffset, flags);
3003 Code::Kind Code::kind() {
3004 return ExtractKindFromFlags(flags());
3008 InlineCacheState Code::ic_state() {
3009 InlineCacheState result = ExtractICStateFromFlags(flags());
3010 // Only allow uninitialized or debugger states for non-IC code
3011 // objects. This is used in the debugger to determine whether or not
3012 // a call to code object has been replaced with a debug break call.
3013 ASSERT(is_inline_cache_stub() ||
3014 result == UNINITIALIZED ||
3015 result == DEBUG_BREAK ||
3016 result == DEBUG_PREPARE_STEP_IN);
3017 return result;
3021 Code::ExtraICState Code::extra_ic_state() {
3022 ASSERT(is_inline_cache_stub());
3023 return ExtractExtraICStateFromFlags(flags());
3027 PropertyType Code::type() {
3028 return ExtractTypeFromFlags(flags());
3032 int Code::arguments_count() {
3033 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3034 return ExtractArgumentsCountFromFlags(flags());
3038 int Code::major_key() {
3039 ASSERT(kind() == STUB ||
3040 kind() == UNARY_OP_IC ||
3041 kind() == BINARY_OP_IC ||
3042 kind() == COMPARE_IC ||
3043 kind() == TO_BOOLEAN_IC);
3044 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
3048 void Code::set_major_key(int major) {
3049 ASSERT(kind() == STUB ||
3050 kind() == UNARY_OP_IC ||
3051 kind() == BINARY_OP_IC ||
3052 kind() == COMPARE_IC ||
3053 kind() == TO_BOOLEAN_IC);
3054 ASSERT(0 <= major && major < 256);
3055 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
3059 bool Code::is_pregenerated() {
3060 return kind() == STUB && IsPregeneratedField::decode(flags());
3064 void Code::set_is_pregenerated(bool value) {
3065 ASSERT(kind() == STUB);
3066 Flags f = flags();
3067 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3068 set_flags(f);
3072 bool Code::optimizable() {
3073 ASSERT_EQ(FUNCTION, kind());
3074 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3078 void Code::set_optimizable(bool value) {
3079 ASSERT_EQ(FUNCTION, kind());
3080 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3084 bool Code::has_deoptimization_support() {
3085 ASSERT_EQ(FUNCTION, kind());
3086 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3087 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3091 void Code::set_has_deoptimization_support(bool value) {
3092 ASSERT_EQ(FUNCTION, kind());
3093 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3094 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3095 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3099 bool Code::has_debug_break_slots() {
3100 ASSERT_EQ(FUNCTION, kind());
3101 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3102 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3106 void Code::set_has_debug_break_slots(bool value) {
3107 ASSERT_EQ(FUNCTION, kind());
3108 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3109 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3110 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3114 bool Code::is_compiled_optimizable() {
3115 ASSERT_EQ(FUNCTION, kind());
3116 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3117 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3121 void Code::set_compiled_optimizable(bool value) {
3122 ASSERT_EQ(FUNCTION, kind());
3123 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3124 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3125 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3129 int Code::allow_osr_at_loop_nesting_level() {
3130 ASSERT_EQ(FUNCTION, kind());
3131 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3135 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3136 ASSERT_EQ(FUNCTION, kind());
3137 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3138 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3142 int Code::profiler_ticks() {
3143 ASSERT_EQ(FUNCTION, kind());
3144 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3148 void Code::set_profiler_ticks(int ticks) {
3149 ASSERT_EQ(FUNCTION, kind());
3150 ASSERT(ticks < 256);
3151 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3155 unsigned Code::stack_slots() {
3156 ASSERT(kind() == OPTIMIZED_FUNCTION);
3157 return READ_UINT32_FIELD(this, kStackSlotsOffset);
3161 void Code::set_stack_slots(unsigned slots) {
3162 ASSERT(kind() == OPTIMIZED_FUNCTION);
3163 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3167 unsigned Code::safepoint_table_offset() {
3168 ASSERT(kind() == OPTIMIZED_FUNCTION);
3169 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3173 void Code::set_safepoint_table_offset(unsigned offset) {
3174 ASSERT(kind() == OPTIMIZED_FUNCTION);
3175 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3176 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3180 unsigned Code::stack_check_table_offset() {
3181 ASSERT_EQ(FUNCTION, kind());
3182 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3186 void Code::set_stack_check_table_offset(unsigned offset) {
3187 ASSERT_EQ(FUNCTION, kind());
3188 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3189 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3193 CheckType Code::check_type() {
3194 ASSERT(is_call_stub() || is_keyed_call_stub());
3195 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3196 return static_cast<CheckType>(type);
3200 void Code::set_check_type(CheckType value) {
3201 ASSERT(is_call_stub() || is_keyed_call_stub());
3202 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3206 byte Code::unary_op_type() {
3207 ASSERT(is_unary_op_stub());
3208 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3212 void Code::set_unary_op_type(byte value) {
3213 ASSERT(is_unary_op_stub());
3214 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3218 byte Code::binary_op_type() {
3219 ASSERT(is_binary_op_stub());
3220 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3224 void Code::set_binary_op_type(byte value) {
3225 ASSERT(is_binary_op_stub());
3226 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3230 byte Code::binary_op_result_type() {
3231 ASSERT(is_binary_op_stub());
3232 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3236 void Code::set_binary_op_result_type(byte value) {
3237 ASSERT(is_binary_op_stub());
3238 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3242 byte Code::compare_state() {
3243 ASSERT(is_compare_ic_stub());
3244 return READ_BYTE_FIELD(this, kCompareStateOffset);
3248 void Code::set_compare_state(byte value) {
3249 ASSERT(is_compare_ic_stub());
3250 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3254 byte Code::compare_operation() {
3255 ASSERT(is_compare_ic_stub());
3256 return READ_BYTE_FIELD(this, kCompareOperationOffset);
3260 void Code::set_compare_operation(byte value) {
3261 ASSERT(is_compare_ic_stub());
3262 WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
3266 byte Code::to_boolean_state() {
3267 ASSERT(is_to_boolean_ic_stub());
3268 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3272 void Code::set_to_boolean_state(byte value) {
3273 ASSERT(is_to_boolean_ic_stub());
3274 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3278 bool Code::has_function_cache() {
3279 ASSERT(kind() == STUB);
3280 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3284 void Code::set_has_function_cache(bool flag) {
3285 ASSERT(kind() == STUB);
3286 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3290 bool Code::is_inline_cache_stub() {
3291 Kind kind = this->kind();
3292 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3296 Code::Flags Code::ComputeFlags(Kind kind,
3297 InlineCacheState ic_state,
3298 ExtraICState extra_ic_state,
3299 PropertyType type,
3300 int argc,
3301 InlineCacheHolderFlag holder) {
3302 // Extra IC state is only allowed for call IC stubs or for store IC
3303 // stubs.
3304 ASSERT(extra_ic_state == kNoExtraICState ||
3305 kind == CALL_IC ||
3306 kind == STORE_IC ||
3307 kind == KEYED_STORE_IC);
3308 // Compute the bit mask.
3309 int bits = KindField::encode(kind)
3310 | ICStateField::encode(ic_state)
3311 | TypeField::encode(type)
3312 | ExtraICStateField::encode(extra_ic_state)
3313 | (argc << kArgumentsCountShift)
3314 | CacheHolderField::encode(holder);
3315 return static_cast<Flags>(bits);
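// The Flags value is a plain int packing KindField, ICStateField, TypeField,
// ExtraICStateField, CacheHolderField and the argument count. A round-trip
// sketch, assuming the default arguments that objects.h declares for
// ComputeFlags (UNINITIALIZED ic state, NORMAL type, argc -1, OWN_MAP holder):
//
//   Code::Flags f = Code::ComputeFlags(Code::STUB);
//   ASSERT(Code::ExtractKindFromFlags(f) == Code::STUB);
//   ASSERT(Code::ExtractICStateFromFlags(f) == UNINITIALIZED);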
3319 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3320 PropertyType type,
3321 ExtraICState extra_ic_state,
3322 InlineCacheHolderFlag holder,
3323 int argc) {
3324 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3328 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3329 return KindField::decode(flags);
3333 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3334 return ICStateField::decode(flags);
3338 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3339 return ExtraICStateField::decode(flags);
3343 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3344 return TypeField::decode(flags);
3348 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3349 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3353 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3354 return CacheHolderField::decode(flags);
3358 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3359 int bits = flags & ~TypeField::kMask;
3360 return static_cast<Flags>(bits);
3364 Code* Code::GetCodeFromTargetAddress(Address address) {
3365 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3366 // GetCodeFromTargetAddress might be called when marking objects during mark
3367 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3368 // Code::cast. Code::cast does not work when the object's map is
3369 // marked.
3370 Code* result = reinterpret_cast<Code*>(code);
3371 return result;
3375 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3376 return HeapObject::
3377 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3381 Object* Map::prototype() {
3382 return READ_FIELD(this, kPrototypeOffset);
3386 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3387 ASSERT(value->IsNull() || value->IsJSReceiver());
3388 WRITE_FIELD(this, kPrototypeOffset, value);
3389 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3393 DescriptorArray* Map::instance_descriptors() {
3394 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3395 if (object->IsSmi()) {
3396 return GetHeap()->empty_descriptor_array();
3397 } else {
3398 return DescriptorArray::cast(object);
3403 void Map::init_instance_descriptors() {
3404 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3408 void Map::clear_instance_descriptors() {
3409 Object* object = READ_FIELD(this,
3410 kInstanceDescriptorsOrBitField3Offset);
3411 if (!object->IsSmi()) {
3412 #ifdef DEBUG
3413 ZapInstanceDescriptors();
3414 #endif
3415 WRITE_FIELD(
3416 this,
3417 kInstanceDescriptorsOrBitField3Offset,
3418 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3423 void Map::set_instance_descriptors(DescriptorArray* value,
3424 WriteBarrierMode mode) {
3425 Object* object = READ_FIELD(this,
3426 kInstanceDescriptorsOrBitField3Offset);
3427 Heap* heap = GetHeap();
3428 if (value == heap->empty_descriptor_array()) {
3429 clear_instance_descriptors();
3430 return;
3431 } else {
3432 if (object->IsSmi()) {
3433 value->set_bit_field3_storage(Smi::cast(object)->value());
3434 } else {
3435 value->set_bit_field3_storage(
3436 DescriptorArray::cast(object)->bit_field3_storage());
3437 }
3438 }
3439 ASSERT(!is_shared());
3440 #ifdef DEBUG
3441 if (value != instance_descriptors()) {
3442 ZapInstanceDescriptors();
3443 }
3444 #endif
3445 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3446 CONDITIONAL_WRITE_BARRIER(
3447 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3451 int Map::bit_field3() {
3452 Object* object = READ_FIELD(this,
3453 kInstanceDescriptorsOrBitField3Offset);
3454 if (object->IsSmi()) {
3455 return Smi::cast(object)->value();
3456 } else {
3457 return DescriptorArray::cast(object)->bit_field3_storage();
3462 void Map::set_bit_field3(int value) {
3463 ASSERT(Smi::IsValid(value));
3464 Object* object = READ_FIELD(this,
3465 kInstanceDescriptorsOrBitField3Offset);
3466 if (object->IsSmi()) {
3467 WRITE_FIELD(this,
3468 kInstanceDescriptorsOrBitField3Offset,
3469 Smi::FromInt(value));
3470 } else {
3471 DescriptorArray::cast(object)->set_bit_field3_storage(value);
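// The kInstanceDescriptorsOrBitField3Offset slot is overloaded: while a map
// has no descriptors the slot holds bit_field3 directly as a smi, and once a
// DescriptorArray is attached, bit_field3 migrates into the array's
// bit_field3_storage() and the slot holds the array pointer instead. The
// IsSmi() checks above select the active representation on every access.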
3476 Object* Map::GetBackPointer() {
3477 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3478 if (object->IsFixedArray()) {
3479 return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
3480 } else {
3481 return object;
3486 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
3487 Heap* heap = GetHeap();
3488 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
3489 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3490 (value->IsMap() && GetBackPointer()->IsUndefined()));
3491 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3492 if (object->IsFixedArray()) {
3493 FixedArray::cast(object)->set(
3494 kProtoTransitionBackPointerOffset, value, mode);
3495 } else {
3496 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3497 CONDITIONAL_WRITE_BARRIER(
3498 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3503 FixedArray* Map::prototype_transitions() {
3504 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3505 if (object->IsFixedArray()) {
3506 return FixedArray::cast(object);
3507 } else {
3508 return GetHeap()->empty_fixed_array();
3513 void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
3514 Heap* heap = GetHeap();
3515 ASSERT(value != heap->empty_fixed_array());
3516 value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
3517 #ifdef DEBUG
3518 if (value != prototype_transitions()) {
3519 ZapPrototypeTransitions();
3520 }
3521 #endif
3522 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3523 CONDITIONAL_WRITE_BARRIER(
3524 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3528 void Map::init_prototype_transitions(Object* undefined) {
3529 ASSERT(undefined->IsUndefined());
3530 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
3534 HeapObject* Map::unchecked_prototype_transitions() {
3535 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3536 return reinterpret_cast<HeapObject*>(object);
3540 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3541 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3543 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3544 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3545 ACCESSORS(JSFunction,
3546 next_function_link,
3547 Object,
3548 kNextFunctionLinkOffset)
3550 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3551 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3552 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3554 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3556 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3557 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3558 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3559 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3560 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3562 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3563 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3565 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3566 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3567 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3569 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3570 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3571 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3572 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3573 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3574 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3575 ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
3577 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3578 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3580 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3581 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3583 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3584 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3585 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3586 kPropertyAccessorsOffset)
3587 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3588 kPrototypeTemplateOffset)
3589 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3590 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3591 kNamedPropertyHandlerOffset)
3592 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3593 kIndexedPropertyHandlerOffset)
3594 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3595 kInstanceTemplateOffset)
3596 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3597 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3598 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3599 kInstanceCallHandlerOffset)
3600 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3601 kAccessCheckInfoOffset)
3602 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3604 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3605 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3606 kInternalFieldCountOffset)
3607 ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
3608 kHasExternalResourceOffset)
3609 ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
3610 kUseUserObjectComparisonOffset)
3612 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3613 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3615 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3617 ACCESSORS(Script, source, Object, kSourceOffset)
3618 ACCESSORS(Script, name, Object, kNameOffset)
3619 ACCESSORS(Script, id, Object, kIdOffset)
3620 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3621 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3622 ACCESSORS(Script, data, Object, kDataOffset)
3623 ACCESSORS(Script, context_data, Object, kContextOffset)
3624 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3625 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3626 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3627 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3628 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3629 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3630 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
3631 kEvalFrominstructionsOffsetOffset)
3633 #ifdef ENABLE_DEBUGGER_SUPPORT
3634 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3635 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3636 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3637 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3639 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3640 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3641 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3642 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3643 #endif
3645 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3646 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3647 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3648 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3649 kInstanceClassNameOffset)
3650 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3651 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3652 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3653 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3654 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3655 kThisPropertyAssignmentsOffset)
3656 SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
3659 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3660 kHiddenPrototypeBit)
3661 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3662 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3663 kNeedsAccessCheckBit)
3664 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3665 kReadOnlyPrototypeBit)
3666 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3667 kIsExpressionBit)
3668 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3669 kIsTopLevelBit)
3670 BOOL_GETTER(SharedFunctionInfo,
3671 compiler_hints,
3672 has_only_simple_this_property_assignments,
3673 kHasOnlySimpleThisPropertyAssignments)
3674 BOOL_ACCESSORS(SharedFunctionInfo,
3675 compiler_hints,
3676 allows_lazy_compilation,
3677 kAllowLazyCompilation)
3678 BOOL_ACCESSORS(SharedFunctionInfo,
3679 compiler_hints,
3680 uses_arguments,
3681 kUsesArguments)
3682 BOOL_ACCESSORS(SharedFunctionInfo,
3683 compiler_hints,
3684 has_duplicate_parameters,
3685 kHasDuplicateParameters)
3688 #if V8_HOST_ARCH_32_BIT
3689 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3690 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3691 kFormalParameterCountOffset)
3692 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3693 kExpectedNofPropertiesOffset)
3694 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3695 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3696 kStartPositionAndTypeOffset)
3697 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3698 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3699 kFunctionTokenPositionOffset)
3700 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3701 kCompilerHintsOffset)
3702 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3703 kThisPropertyAssignmentsCountOffset)
3704 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3705 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3706 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3707 #else
3709 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3710 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3711 int holder::name() { \
3712 int value = READ_INT_FIELD(this, offset); \
3713 ASSERT(kHeapObjectTag == 1); \
3714 ASSERT((value & kHeapObjectTag) == 0); \
3715 return value >> 1; \
3716 } \
3717 void holder::set_##name(int value) { \
3718 ASSERT(kHeapObjectTag == 1); \
3719 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3720 (value & 0xC0000000) == 0x000000000); \
3721 WRITE_INT_FIELD(this, \
3722 offset, \
3723 (value << 1) & ~kHeapObjectTag); \
3724 }
3726 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3727 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3728 INT_ACCESSORS(holder, name, offset)
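// On 64-bit hosts two int fields share one pointer-sized slot, so they cannot
// be stored as real smis. The LO half mimics a smi by storing (value << 1)
// with the heap-object tag bit clear, so the GC never mistakes it for a
// pointer, while the HI half is a plain int living at the odd kIntSize
// offset within the slot. Example, assuming kHeapObjectTag == 1:
//
//   shared->set_length(10);  // stores 10 << 1 == 20 in the field
//   shared->length();        // reads 20 and returns 20 >> 1 == 10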
3731 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3732 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3733 formal_parameter_count,
3734 kFormalParameterCountOffset)
3736 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3737 expected_nof_properties,
3738 kExpectedNofPropertiesOffset)
3739 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3741 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3742 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3743 start_position_and_type,
3744 kStartPositionAndTypeOffset)
3746 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3747 function_token_position,
3748 kFunctionTokenPositionOffset)
3749 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3750 compiler_hints,
3751 kCompilerHintsOffset)
3753 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3754 this_property_assignments_count,
3755 kThisPropertyAssignmentsCountOffset)
3756 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3758 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3759 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3760 #endif
3763 int SharedFunctionInfo::construction_count() {
3764 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3768 void SharedFunctionInfo::set_construction_count(int value) {
3769 ASSERT(0 <= value && value < 256);
3770 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3774 BOOL_ACCESSORS(SharedFunctionInfo,
3775 compiler_hints,
3776 live_objects_may_exist,
3777 kLiveObjectsMayExist)
3780 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3781 return initial_map() != GetHeap()->undefined_value();
3785 BOOL_GETTER(SharedFunctionInfo,
3786 compiler_hints,
3787 optimization_disabled,
3788 kOptimizationDisabled)
3791 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3792 set_compiler_hints(BooleanBit::set(compiler_hints(),
3793 kOptimizationDisabled,
3794 disable));
3795 // If disabling optimizations we reflect that in the code object so
3796 // it will not be counted as optimizable code.
3797 if ((code()->kind() == Code::FUNCTION) && disable) {
3798 code()->set_optimizable(false);
3803 int SharedFunctionInfo::profiler_ticks() {
3804 if (code()->kind() != Code::FUNCTION) return 0;
3805 return code()->profiler_ticks();
3809 LanguageMode SharedFunctionInfo::language_mode() {
3810 int hints = compiler_hints();
3811 if (BooleanBit::get(hints, kExtendedModeFunction)) {
3812 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
3813 return EXTENDED_MODE;
3815 return BooleanBit::get(hints, kStrictModeFunction)
3816 ? STRICT_MODE : CLASSIC_MODE;
3820 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
3821 // We only allow language mode transitions that set the same language mode
3822 // again or go up in the chain:
3823 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3824 ASSERT(this->language_mode() == CLASSIC_MODE ||
3825 this->language_mode() == language_mode ||
3826 language_mode == EXTENDED_MODE);
3827 int hints = compiler_hints();
3828 hints = BooleanBit::set(
3829 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3830 hints = BooleanBit::set(
3831 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3832 set_compiler_hints(hints);
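// Example of the permitted transitions: a function parsed in CLASSIC_MODE may
// later be moved to STRICT_MODE or EXTENDED_MODE, and a STRICT_MODE function
// may be re-set or raised to EXTENDED_MODE, but nothing may ever lower the
// mode again; the ASSERT above enforces exactly this one-way chain.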
3836 bool SharedFunctionInfo::is_classic_mode() {
3837 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
3840 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3841 kExtendedModeFunction)
3842 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, qml_mode,
3843 kQmlModeFunction)
3844 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3845 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3846 name_should_print_as_anonymous,
3847 kNameShouldPrintAsAnonymous)
3848 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3849 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3850 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3851 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
3852 kDontOptimize)
3853 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3855 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3856 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3858 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3860 bool Script::HasValidSource() {
3861 Object* src = this->source();
3862 if (!src->IsString()) return true;
3863 String* src_str = String::cast(src);
3864 if (!StringShape(src_str).IsExternal()) return true;
3865 if (src_str->IsAsciiRepresentation()) {
3866 return ExternalAsciiString::cast(src)->resource() != NULL;
3867 } else if (src_str->IsTwoByteRepresentation()) {
3868 return ExternalTwoByteString::cast(src)->resource() != NULL;
3869 }
3870 return true;
3874 void SharedFunctionInfo::DontAdaptArguments() {
3875 ASSERT(code()->kind() == Code::BUILTIN);
3876 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3880 int SharedFunctionInfo::start_position() {
3881 return start_position_and_type() >> kStartPositionShift;
3885 void SharedFunctionInfo::set_start_position(int start_position) {
3886 set_start_position_and_type((start_position << kStartPositionShift)
3887 | (start_position_and_type() & ~kStartPositionMask));
3891 Code* SharedFunctionInfo::code() {
3892 return Code::cast(READ_FIELD(this, kCodeOffset));
3896 Code* SharedFunctionInfo::unchecked_code() {
3897 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3901 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3902 WRITE_FIELD(this, kCodeOffset, value);
3903 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3907 ScopeInfo* SharedFunctionInfo::scope_info() {
3908 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3912 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3913 WriteBarrierMode mode) {
3914 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3915 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3916 this,
3917 kScopeInfoOffset,
3918 reinterpret_cast<Object*>(value),
3919 mode);
3923 bool SharedFunctionInfo::is_compiled() {
3924 return code() !=
3925 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3929 bool SharedFunctionInfo::IsApiFunction() {
3930 return function_data()->IsFunctionTemplateInfo();
3934 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3935 ASSERT(IsApiFunction());
3936 return FunctionTemplateInfo::cast(function_data());
3940 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3941 return function_data()->IsSmi();
3945 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3946 ASSERT(HasBuiltinFunctionId());
3947 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3951 int SharedFunctionInfo::code_age() {
3952 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3956 void SharedFunctionInfo::set_code_age(int code_age) {
3957 int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3958 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3962 bool SharedFunctionInfo::has_deoptimization_support() {
3963 Code* code = this->code();
3964 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3968 bool JSFunction::IsBuiltin() {
3969 return context()->global()->IsJSBuiltinsObject();
3973 bool JSFunction::NeedsArgumentsAdaption() {
3974 return shared()->formal_parameter_count() !=
3975 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3979 bool JSFunction::IsOptimized() {
3980 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3984 bool JSFunction::IsOptimizable() {
3985 return code()->kind() == Code::FUNCTION && code()->optimizable();
3989 bool JSFunction::IsMarkedForLazyRecompilation() {
3990 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3994 Code* JSFunction::code() {
3995 return Code::cast(unchecked_code());
3999 Code* JSFunction::unchecked_code() {
4000 return reinterpret_cast<Code*>(
4001 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
4005 void JSFunction::set_code(Code* value) {
4006 ASSERT(!HEAP->InNewSpace(value));
4007 Address entry = value->entry();
4008 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
4009 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
4010 this,
4011 HeapObject::RawField(this, kCodeEntryOffset),
4012 value);
4016 void JSFunction::ReplaceCode(Code* code) {
4017 bool was_optimized = IsOptimized();
4018 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
4020 set_code(code);
4022 // Add/remove the function from the list of optimized functions for this
4023 // context based on the state change.
4024 if (!was_optimized && is_optimized) {
4025 context()->global_context()->AddOptimizedFunction(this);
4027 if (was_optimized && !is_optimized) {
4028 context()->global_context()->RemoveOptimizedFunction(this);
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}


ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)

Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
    Map* initial_map) {
  Context* global_context = context()->global_context();
  Object* array_function =
      global_context->get(Context::ARRAY_FUNCTION_INDEX);
  if (array_function->IsJSFunction() &&
      this == JSFunction::cast(array_function)) {
    ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);

    MaybeObject* maybe_map = initial_map->CopyDropTransitions();
    Map* new_double_map = NULL;
    if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
    new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
    maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
                                                   new_double_map);
    if (maybe_map->IsFailure()) return maybe_map;

    maybe_map = new_double_map->CopyDropTransitions();
    Map* new_object_map = NULL;
    if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
    new_object_map->set_elements_kind(FAST_ELEMENTS);
    maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
                                                      new_object_map);
    if (maybe_map->IsFailure()) return maybe_map;

    global_context->set_smi_js_array_map(initial_map);
    global_context->set_double_js_array_map(new_double_map);
    global_context->set_object_js_array_map(new_object_map);
  }
  set_initial_map(initial_map);
  return this;
}

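// The net effect of the caching above, sketched out: for the Array function,
// the initial map and its two copies form a one-way elements-kind transition
// chain that newly created JSArrays can walk without allocating fresh maps:
//
//   initial_map (FAST_SMI_ONLY_ELEMENTS)
//       --> new_double_map (FAST_DOUBLE_ELEMENTS)
//           --> new_object_map (FAST_ELEMENTS)
//
// and all three maps are additionally cached on the global context.
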
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}

Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}

ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)

void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, next, Object, kNextOffset)

Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSModule, context, Object, kContextOffset)


JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}


ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}

ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)

JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}

ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)

JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)

byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

ACCESSORS(JSArray, length, Object, kLengthOffset)


ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}

int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}

Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


Object* JSRegExp::DataAtUnchecked(int index) {
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  int offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(fa, offset);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    fa->set_unchecked(index, Smi::cast(value));
  } else {
    // We only do this during GC, so we don't need to notify the write barrier.
    fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
  }
}

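// A sketch of the layout the accessors above assume: once a regexp is
// compiled, data() holds a FixedArray whose leading slots are bookkeeping
// (the type tag at kTagIndex, the source pattern at kSourceIndex, the flags
// Smi at kFlagsIndex), and engine-internal slots such as
// kIrregexpCaptureCountIndex start at kDataIndex; only those implementation
// slots may be written through SetDataAt / SetDataAtUnchecked.
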
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
  Map* map = fixed_array->map();
  ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
          (map == GetHeap()->fixed_array_map() ||
           map == GetHeap()->fixed_cow_array_map())) ||
         (kind == FAST_DOUBLE_ELEMENTS &&
          (fixed_array->IsFixedDoubleArray() ||
           fixed_array == GetHeap()->empty_fixed_array())) ||
         (kind == DICTIONARY_ELEMENTS &&
          fixed_array->IsFixedArray() &&
          fixed_array->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
         (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
  return kind;
}

ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}


bool JSObject::HasFastSmiOnlyElements() {
  return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
}


bool JSObject::HasFastTypeElements() {
  ElementsKind elements_kind = GetElementsKind();
  return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
      elements_kind == FAST_ELEMENTS;
}


bool JSObject::HasFastDoubleElements() {
  return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}

#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}

EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)

bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastTypeElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
        elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}

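// A usage sketch (hypothetical caller, not part of this file): code that is
// about to store into a fast-elements backing store in place must first
// un-share a copy-on-write array, along the lines of
//
//   MaybeObject* maybe = object->EnsureWritableFastElements();
//   if (maybe->IsFailure()) return maybe;
//   FixedArray::cast(object->elements())->set(i, value);
//
// If the elements were not copy-on-write, the call returns them unchanged.
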
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Fast case: the hash code has already been computed.
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute the hash code and set it.
  return ComputeAndSetHash();
}

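// A note on the encoding assumed above: hash_field() packs flag bits
// (including kHashNotComputedMask and kIsNotArrayIndexMask) into the low
// bits and the hash proper above them, which is why Hash() shifts by
// kHashShift once the fast-path check succeeds.
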
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}

void StringHasher::AddCharacter(uint32_t c) {
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePair(c);  // Not inlined.
    return;
  }
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}

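// The overflow guard above, worked through: appending digit d to the running
// index computes array_index_ * 10 + d, which must stay below 2^32 to fit in
// a uint32. 429496729 is floor(2^32 / 10), and the (d + 2) >> 3 term (1 for
// d >= 6, otherwise 0) tightens the bound for large digits: the largest value
// the guard admits is 429496729 * 10 + 5 = 4294967295 = 2^32 - 1, so the
// multiply-add can never wrap.
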
void StringHasher::AddCharacterNoIndex(uint32_t c) {
  ASSERT(!is_array_index());
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePairNoIndex(c);  // Not inlined.
    return;
  }
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}


uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if ((result & String::kHashBitMask) == 0) {
    result = 27;
  }
  return result;
}

template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) {
    int i;
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    }
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
    }
  }
  return hasher.GetHashField();
}

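// A usage sketch (hypothetical caller, not part of this file): hashing a
// sequential ASCII string with the isolate's hash seed and testing whether
// the result encodes an array index:
//
//   uint32_t field = HashSequentialString<char>(chars, length, seed);
//   bool is_index = (field & String::kIsNotArrayIndexMask) == 0;
//
// The two loops above switch from AddCharacter to the cheaper
// AddCharacterNoIndex as soon as the prefix can no longer be an array index.
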
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}

Object* JSReceiver::GetPrototype() {
  return HeapObject::cast(this)->map()->prototype();
}


bool JSReceiver::HasProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}

// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
}

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}

template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index + 1, value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}

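// A sketch of the storage layout the writes above assume: each dictionary
// entry occupies three consecutive FixedArray slots starting at
// EntryToIndex(entry) -- the key, the value, and the PropertyDetails encoded
// as a Smi -- so updating an entry is three adjacent stores performed under a
// single write-barrier mode while allocation is forbidden.
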
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}


uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}


uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}


MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}

bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}


uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}


uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}


MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}

template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}

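// A note on the contract assumed above: passing OMIT_CREATION means GetHash
// never allocates, so the MaybeObject cannot be a retry-after-GC failure and
// ToObjectChecked() is safe to call; callers must ensure the key already has
// an identity hash, since Smi::cast would not tolerate a missing one.
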
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
  return key;
}

void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}

void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastTypeElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}

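// The growth arithmetic above, worked through: required_size >> 3 adds 12.5%
// slack, so a request for 64 elements allocates 64 + 8 = 72 slots and a
// request for 128 allocates 128 + 16 = 144 -- enough headroom to absorb
// repeated small appends without the worst-case memory overhead of a
// doubling policy.
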
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          ((GetElementsKind() == FAST_ELEMENTS) ||
           (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}

MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}


void TypeFeedbackCells::SetAstId(int index, Smi* id) {
  set(1 + index * 2, id);
}


Smi* TypeFeedbackCells::AstId(int index) {
  return Smi::cast(get(1 + index * 2));
}


void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
  set(index * 2, cell);
}


JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
  return JSGlobalPropertyCell::cast(get(index * 2));
}

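// A sketch of the layout the four accessors above imply: entry i occupies a
// pair of consecutive array slots, with the JSGlobalPropertyCell at 2 * i
// and the matching AST id Smi at 2 * i + 1, so a cell and its id are always
// read and written together.
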
Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}


Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}


Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->raw_unchecked_the_hole_value();
}

SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
              kIcWithTypeinfoCountOffset)
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)

Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}

void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR

#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_