1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
97 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
98 #define ACCESSORS_TO_SMI(holder, name, offset) \
99 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
100 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
101 WRITE_FIELD(this, offset, value); \
105 // Getter that returns a Smi as an int and writes an int as a Smi.
106 #define SMI_ACCESSORS(holder, name, offset) \
107 int holder::name() { \
108 Object* value = READ_FIELD(this, offset); \
109 return Smi::cast(value)->value(); \
111 void holder::set_##name(int value) { \
112 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define BOOL_GETTER(holder, field, name, offset) \
117 bool holder::name() { \
118 return BooleanBit::get(field(), offset); \
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123 bool holder::name() { \
124 return BooleanBit::get(field(), offset); \
126 void holder::set_##name(bool value) { \
127 set_##field(BooleanBit::set(field(), offset, value)); \
131 bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
132 ElementsKind to_kind) {
133 if (to_kind == FAST_ELEMENTS) {
134 return from_kind == FAST_SMI_ONLY_ELEMENTS ||
135 from_kind == FAST_DOUBLE_ELEMENTS;
137 return to_kind == FAST_DOUBLE_ELEMENTS &&
138 from_kind == FAST_SMI_ONLY_ELEMENTS;
143 bool Object::IsFixedArrayBase() {
144 return IsFixedArray() || IsFixedDoubleArray();
148 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
149 // There is a constraint on the object; check.
150 if (!this->IsJSObject()) return false;
151 // Fetch the constructor function of the object.
152 Object* cons_obj = JSObject::cast(this)->map()->constructor();
153 if (!cons_obj->IsJSFunction()) return false;
154 JSFunction* fun = JSFunction::cast(cons_obj);
155 // Iterate through the chain of inheriting function templates to
156 // see if the required one occurs.
157 for (Object* type = fun->shared()->function_data();
158 type->IsFunctionTemplateInfo();
159 type = FunctionTemplateInfo::cast(type)->parent_template()) {
160 if (type == expected) return true;
162 // Didn't find the required type in the inheritance chain.
167 bool Object::IsSmi() {
168 return HAS_SMI_TAG(this);
172 bool Object::IsHeapObject() {
173 return Internals::HasHeapObjectTag(this);
177 bool Object::NonFailureIsHeapObject() {
178 ASSERT(!this->IsFailure());
179 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
183 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
186 bool Object::IsString() {
187 return Object::IsHeapObject()
188 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
192 bool Object::IsSpecObject() {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
198 bool Object::IsSpecFunction() {
199 if (!Object::IsHeapObject()) return false;
200 InstanceType type = HeapObject::cast(this)->map()->instance_type();
201 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
205 bool Object::IsSymbol() {
206 if (!this->IsHeapObject()) return false;
207 uint32_t type = HeapObject::cast(this)->map()->instance_type();
208 // Because the symbol tag is non-zero and no non-string types have the
209 // symbol bit set we can test for symbols with a very simple test
211 STATIC_ASSERT(kSymbolTag != 0);
212 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
213 return (type & kIsSymbolMask) != 0;
217 bool Object::IsConsString() {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();
223 bool Object::IsSlicedString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();
229 bool Object::IsSeqString() {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();
235 bool Object::IsSeqAsciiString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsAsciiRepresentation();
242 bool Object::IsSeqTwoByteString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();
249 bool Object::IsExternalString() {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();
255 bool Object::IsExternalAsciiString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsAsciiRepresentation();
262 bool Object::IsExternalTwoByteString() {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
273 StringShape::StringShape(String* str)
274 : type_(str->map()->instance_type()) {
276 ASSERT((type_ & kIsNotStringMask) == kStringTag);
280 StringShape::StringShape(Map* map)
281 : type_(map->instance_type()) {
283 ASSERT((type_ & kIsNotStringMask) == kStringTag);
287 StringShape::StringShape(InstanceType t)
288 : type_(static_cast<uint32_t>(t)) {
290 ASSERT((type_ & kIsNotStringMask) == kStringTag);
294 bool StringShape::IsSymbol() {
296 STATIC_ASSERT(kSymbolTag != 0);
297 return (type_ & kIsSymbolMask) != 0;
301 bool String::IsAsciiRepresentation() {
302 uint32_t type = map()->instance_type();
303 return (type & kStringEncodingMask) == kAsciiStringTag;
307 bool String::IsTwoByteRepresentation() {
308 uint32_t type = map()->instance_type();
309 return (type & kStringEncodingMask) == kTwoByteStringTag;
313 bool String::IsAsciiRepresentationUnderneath() {
314 uint32_t type = map()->instance_type();
315 STATIC_ASSERT(kIsIndirectStringTag != 0);
316 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
318 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
319 case kAsciiStringTag:
321 case kTwoByteStringTag:
323 default: // Cons or sliced string. Need to go deeper.
324 return GetUnderlying()->IsAsciiRepresentation();
329 bool String::IsTwoByteRepresentationUnderneath() {
330 uint32_t type = map()->instance_type();
331 STATIC_ASSERT(kIsIndirectStringTag != 0);
332 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
334 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
335 case kAsciiStringTag:
337 case kTwoByteStringTag:
339 default: // Cons or sliced string. Need to go deeper.
340 return GetUnderlying()->IsTwoByteRepresentation();
345 bool String::HasOnlyAsciiChars() {
346 uint32_t type = map()->instance_type();
347 return (type & kStringEncodingMask) == kAsciiStringTag ||
348 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
352 bool StringShape::IsCons() {
353 return (type_ & kStringRepresentationMask) == kConsStringTag;
357 bool StringShape::IsSliced() {
358 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
362 bool StringShape::IsIndirect() {
363 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
367 bool StringShape::IsExternal() {
368 return (type_ & kStringRepresentationMask) == kExternalStringTag;
372 bool StringShape::IsSequential() {
373 return (type_ & kStringRepresentationMask) == kSeqStringTag;
377 StringRepresentationTag StringShape::representation_tag() {
378 uint32_t tag = (type_ & kStringRepresentationMask);
379 return static_cast<StringRepresentationTag>(tag);
383 uint32_t StringShape::encoding_tag() {
384 return type_ & kStringEncodingMask;
388 uint32_t StringShape::full_representation_tag() {
389 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
393 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
394 Internals::kFullStringRepresentationMask);
397 bool StringShape::IsSequentialAscii() {
398 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
402 bool StringShape::IsSequentialTwoByte() {
403 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
407 bool StringShape::IsExternalAscii() {
408 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
412 bool StringShape::IsExternalTwoByte() {
413 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
417 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
418 Internals::kExternalTwoByteRepresentationTag);
421 uc32 FlatStringReader::Get(int index) {
422 ASSERT(0 <= index && index <= length_);
424 return static_cast<const byte*>(start_)[index];
426 return static_cast<const uc16*>(start_)[index];
431 bool Object::IsNumber() {
432 return IsSmi() || IsHeapNumber();
436 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
437 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
440 bool Object::IsFiller() {
441 if (!Object::IsHeapObject()) return false;
442 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
443 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
447 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
450 bool Object::IsExternalArray() {
451 if (!Object::IsHeapObject())
453 InstanceType instance_type =
454 HeapObject::cast(this)->map()->instance_type();
455 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
456 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
460 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
461 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
462 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
463 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
464 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
465 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
466 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
467 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
470 bool MaybeObject::IsFailure() {
471 return HAS_FAILURE_TAG(this);
475 bool MaybeObject::IsRetryAfterGC() {
476 return HAS_FAILURE_TAG(this)
477 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
481 bool MaybeObject::IsOutOfMemory() {
482 return HAS_FAILURE_TAG(this)
483 && Failure::cast(this)->IsOutOfMemoryException();
487 bool MaybeObject::IsException() {
488 return this == Failure::Exception();
492 bool MaybeObject::IsTheHole() {
493 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
497 Failure* Failure::cast(MaybeObject* obj) {
498 ASSERT(HAS_FAILURE_TAG(obj));
499 return reinterpret_cast<Failure*>(obj);
503 bool Object::IsJSReceiver() {
504 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
505 return IsHeapObject() &&
506 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
510 bool Object::IsJSObject() {
511 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
512 return IsHeapObject() &&
513 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
517 bool Object::IsJSProxy() {
518 if (!Object::IsHeapObject()) return false;
519 InstanceType type = HeapObject::cast(this)->map()->instance_type();
520 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
524 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
525 TYPE_CHECKER(JSSet, JS_SET_TYPE)
526 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
527 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
528 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
529 TYPE_CHECKER(Map, MAP_TYPE)
530 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
531 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
534 bool Object::IsDescriptorArray() {
535 return IsFixedArray();
539 bool Object::IsDeoptimizationInputData() {
540 // Must be a fixed array.
541 if (!IsFixedArray()) return false;
543 // There's no sure way to detect the difference between a fixed array and
544 // a deoptimization data array. Since this is used for asserts we can
545 // check that the length is zero or else the fixed size plus a multiple of
547 int length = FixedArray::cast(this)->length();
548 if (length == 0) return true;
550 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
551 return length >= 0 &&
552 length % DeoptimizationInputData::kDeoptEntrySize == 0;
556 bool Object::IsDeoptimizationOutputData() {
557 if (!IsFixedArray()) return false;
558 // There's actually no way to see the difference between a fixed array and
559 // a deoptimization data array. Since this is used for asserts we can check
560 // that the length is plausible though.
561 if (FixedArray::cast(this)->length() % 2 != 0) return false;
566 bool Object::IsTypeFeedbackCells() {
567 if (!IsFixedArray()) return false;
568 // There's actually no way to see the difference between a fixed array and
569 // a cache cells array. Since this is used for asserts we can check that
570 // the length is plausible though.
571 if (FixedArray::cast(this)->length() % 2 != 0) return false;
576 bool Object::IsContext() {
577 if (Object::IsHeapObject()) {
578 Map* map = HeapObject::cast(this)->map();
579 Heap* heap = map->GetHeap();
580 return (map == heap->function_context_map() ||
581 map == heap->catch_context_map() ||
582 map == heap->with_context_map() ||
583 map == heap->global_context_map() ||
584 map == heap->block_context_map() ||
585 map == heap->module_context_map());
591 bool Object::IsGlobalContext() {
592 return Object::IsHeapObject() &&
593 HeapObject::cast(this)->map() ==
594 HeapObject::cast(this)->GetHeap()->global_context_map();
598 bool Object::IsModuleContext() {
599 return Object::IsHeapObject() &&
600 HeapObject::cast(this)->map() ==
601 HeapObject::cast(this)->GetHeap()->module_context_map();
605 bool Object::IsScopeInfo() {
606 return Object::IsHeapObject() &&
607 HeapObject::cast(this)->map() ==
608 HeapObject::cast(this)->GetHeap()->scope_info_map();
612 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
615 template <> inline bool Is<JSFunction>(Object* obj) {
616 return obj->IsJSFunction();
620 TYPE_CHECKER(Code, CODE_TYPE)
621 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
622 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
623 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
624 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
625 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
626 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
627 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
630 bool Object::IsStringWrapper() {
631 return IsJSValue() && JSValue::cast(this)->value()->IsString();
635 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
638 bool Object::IsBoolean() {
639 return IsOddball() &&
640 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
644 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
645 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
648 template <> inline bool Is<JSArray>(Object* obj) {
649 return obj->IsJSArray();
653 bool Object::IsHashTable() {
654 return Object::IsHeapObject() &&
655 HeapObject::cast(this)->map() ==
656 HeapObject::cast(this)->GetHeap()->hash_table_map();
660 bool Object::IsDictionary() {
661 return IsHashTable() &&
662 this != HeapObject::cast(this)->GetHeap()->symbol_table();
666 bool Object::IsSymbolTable() {
667 return IsHashTable() && this ==
668 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
672 bool Object::IsJSFunctionResultCache() {
673 if (!IsFixedArray()) return false;
674 FixedArray* self = FixedArray::cast(this);
675 int length = self->length();
676 if (length < JSFunctionResultCache::kEntriesIndex) return false;
677 if ((length - JSFunctionResultCache::kEntriesIndex)
678 % JSFunctionResultCache::kEntrySize != 0) {
682 if (FLAG_verify_heap) {
683 reinterpret_cast<JSFunctionResultCache*>(this)->
684 JSFunctionResultCacheVerify();
691 bool Object::IsNormalizedMapCache() {
692 if (!IsFixedArray()) return false;
693 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
697 if (FLAG_verify_heap) {
698 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
705 bool Object::IsCompilationCacheTable() {
706 return IsHashTable();
710 bool Object::IsCodeCacheHashTable() {
711 return IsHashTable();
715 bool Object::IsPolymorphicCodeCacheHashTable() {
716 return IsHashTable();
720 bool Object::IsMapCache() {
721 return IsHashTable();
725 bool Object::IsPrimitive() {
726 return IsOddball() || IsNumber() || IsString();
730 bool Object::IsJSGlobalProxy() {
731 bool result = IsHeapObject() &&
732 (HeapObject::cast(this)->map()->instance_type() ==
733 JS_GLOBAL_PROXY_TYPE);
734 ASSERT(!result || IsAccessCheckNeeded());
739 bool Object::IsGlobalObject() {
740 if (!IsHeapObject()) return false;
742 InstanceType type = HeapObject::cast(this)->map()->instance_type();
743 return type == JS_GLOBAL_OBJECT_TYPE ||
744 type == JS_BUILTINS_OBJECT_TYPE;
748 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
749 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
752 bool Object::IsUndetectableObject() {
753 return IsHeapObject()
754 && HeapObject::cast(this)->map()->is_undetectable();
758 bool Object::IsAccessCheckNeeded() {
759 return IsHeapObject()
760 && HeapObject::cast(this)->map()->is_access_check_needed();
764 bool Object::IsStruct() {
765 if (!IsHeapObject()) return false;
766 switch (HeapObject::cast(this)->map()->instance_type()) {
767 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
768 STRUCT_LIST(MAKE_STRUCT_CASE)
769 #undef MAKE_STRUCT_CASE
770 default: return false;
775 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
776 bool Object::Is##Name() { \
777 return Object::IsHeapObject() \
778 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
780 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
781 #undef MAKE_STRUCT_PREDICATE
784 bool Object::IsUndefined() {
785 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
789 bool Object::IsNull() {
790 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
794 bool Object::IsTheHole() {
795 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
799 bool Object::IsTrue() {
800 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
804 bool Object::IsFalse() {
805 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
809 bool Object::IsArgumentsMarker() {
810 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
814 double Object::Number() {
817 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
818 : reinterpret_cast<HeapNumber*>(this)->value();
822 bool Object::IsNaN() {
823 return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
827 MaybeObject* Object::ToSmi() {
828 if (IsSmi()) return this;
829 if (IsHeapNumber()) {
830 double value = HeapNumber::cast(this)->value();
831 int int_value = FastD2I(value);
832 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
833 return Smi::FromInt(int_value);
836 return Failure::Exception();
840 bool Object::HasSpecificClassOf(String* name) {
841 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
845 MaybeObject* Object::GetElement(uint32_t index) {
846 // GetElement can trigger a getter which can cause allocation.
847 // This was not always the case. This ASSERT is here to catch
848 // leftover incorrect uses.
849 ASSERT(HEAP->IsAllocationAllowed());
850 return GetElementWithReceiver(this, index);
854 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
855 MaybeObject* maybe = GetElementWithReceiver(this, index);
856 ASSERT(!maybe->IsFailure());
857 Object* result = NULL; // Initialization to please compiler.
858 maybe->ToObject(&result);
863 MaybeObject* Object::GetProperty(String* key) {
864 PropertyAttributes attributes;
865 return GetPropertyWithReceiver(this, key, &attributes);
869 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
870 return GetPropertyWithReceiver(this, key, attributes);
874 #define FIELD_ADDR(p, offset) \
875 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
877 #define READ_FIELD(p, offset) \
878 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
880 #define WRITE_FIELD(p, offset, value) \
881 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
883 #define WRITE_BARRIER(heap, object, offset, value) \
884 heap->incremental_marking()->RecordWrite( \
885 object, HeapObject::RawField(object, offset), value); \
886 if (heap->InNewSpace(value)) { \
887 heap->RecordWrite(object->address(), offset); \
890 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
891 if (mode == UPDATE_WRITE_BARRIER) { \
892 heap->incremental_marking()->RecordWrite( \
893 object, HeapObject::RawField(object, offset), value); \
894 if (heap->InNewSpace(value)) { \
895 heap->RecordWrite(object->address(), offset); \
899 #ifndef V8_TARGET_ARCH_MIPS
900 #define READ_DOUBLE_FIELD(p, offset) \
901 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
902 #else // V8_TARGET_ARCH_MIPS
903 // Prevent gcc from using load-double (mips ldc1) on (possibly)
904 // non-64-bit aligned HeapNumber::value.
905 static inline double read_double_field(void* p, int offset) {
910 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
911 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
914 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
915 #endif // V8_TARGET_ARCH_MIPS
917 #ifndef V8_TARGET_ARCH_MIPS
918 #define WRITE_DOUBLE_FIELD(p, offset, value) \
919 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
920 #else // V8_TARGET_ARCH_MIPS
921 // Prevent gcc from using store-double (mips sdc1) on (possibly)
922 // non-64-bit aligned HeapNumber::value.
923 static inline void write_double_field(void* p, int offset,
930 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
931 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
933 #define WRITE_DOUBLE_FIELD(p, offset, value) \
934 write_double_field(p, offset, value)
935 #endif // V8_TARGET_ARCH_MIPS
938 #define READ_INT_FIELD(p, offset) \
939 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
941 #define WRITE_INT_FIELD(p, offset, value) \
942 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
944 #define READ_INTPTR_FIELD(p, offset) \
945 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
947 #define WRITE_INTPTR_FIELD(p, offset, value) \
948 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
950 #define READ_UINT32_FIELD(p, offset) \
951 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
953 #define WRITE_UINT32_FIELD(p, offset, value) \
954 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
956 #define READ_INT64_FIELD(p, offset) \
957 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
959 #define WRITE_INT64_FIELD(p, offset, value) \
960 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
962 #define READ_SHORT_FIELD(p, offset) \
963 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
965 #define WRITE_SHORT_FIELD(p, offset, value) \
966 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
968 #define READ_BYTE_FIELD(p, offset) \
969 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
971 #define WRITE_BYTE_FIELD(p, offset, value) \
972 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
975 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
976 return &READ_FIELD(obj, byte_offset);
981 return Internals::SmiValue(this);
985 Smi* Smi::FromInt(int value) {
986 ASSERT(Smi::IsValid(value));
987 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
988 intptr_t tagged_value =
989 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
990 return reinterpret_cast<Smi*>(tagged_value);
994 Smi* Smi::FromIntptr(intptr_t value) {
995 ASSERT(Smi::IsValid(value));
996 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
997 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1001 Failure::Type Failure::type() const {
1002 return static_cast<Type>(value() & kFailureTypeTagMask);
1006 bool Failure::IsInternalError() const {
1007 return type() == INTERNAL_ERROR;
1011 bool Failure::IsOutOfMemoryException() const {
1012 return type() == OUT_OF_MEMORY_EXCEPTION;
1016 AllocationSpace Failure::allocation_space() const {
1017 ASSERT_EQ(RETRY_AFTER_GC, type());
1018 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1023 Failure* Failure::InternalError() {
1024 return Construct(INTERNAL_ERROR);
1028 Failure* Failure::Exception() {
1029 return Construct(EXCEPTION);
1033 Failure* Failure::OutOfMemoryException() {
1034 return Construct(OUT_OF_MEMORY_EXCEPTION);
1038 intptr_t Failure::value() const {
1039 return static_cast<intptr_t>(
1040 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1044 Failure* Failure::RetryAfterGC() {
1045 return RetryAfterGC(NEW_SPACE);
1049 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1050 ASSERT((space & ~kSpaceTagMask) == 0);
1051 return Construct(RETRY_AFTER_GC, space);
1055 Failure* Failure::Construct(Type type, intptr_t value) {
1057 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1058 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1059 return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
1063 bool Smi::IsValid(intptr_t value) {
1065 bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1068 #ifdef V8_TARGET_ARCH_X64
1069 // To be representable as a long smi, the value must be a 32-bit integer.
1070 bool result = (value == static_cast<int32_t>(value));
1072 // To be representable as an tagged small integer, the two
1073 // most-significant bits of 'value' must be either 00 or 11 due to
1074 // sign-extension. To check this we add 01 to the two
1075 // most-significant bits, and check if the most-significant bit is 0
1077 // CAUTION: The original code below:
1078 // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1079 // may lead to incorrect results according to the C language spec, and
1080 // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1081 // compiler may produce undefined results in case of signed integer
1082 // overflow. The computation must be done w/ unsigned ints.
1083 bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1085 ASSERT(result == in_range);
1090 MapWord MapWord::FromMap(Map* map) {
1091 return MapWord(reinterpret_cast<uintptr_t>(map));
1095 Map* MapWord::ToMap() {
1096 return reinterpret_cast<Map*>(value_);
1100 bool MapWord::IsForwardingAddress() {
1101 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1105 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1106 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1107 return MapWord(reinterpret_cast<uintptr_t>(raw));
1111 HeapObject* MapWord::ToForwardingAddress() {
1112 ASSERT(IsForwardingAddress());
1113 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1118 void HeapObject::VerifyObjectField(int offset) {
1119 VerifyPointer(READ_FIELD(this, offset));
1122 void HeapObject::VerifySmiField(int offset) {
1123 ASSERT(READ_FIELD(this, offset)->IsSmi());
1128 Heap* HeapObject::GetHeap() {
1130 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1131 ASSERT(heap != NULL);
1132 ASSERT(heap->isolate() == Isolate::Current());
1137 Isolate* HeapObject::GetIsolate() {
1138 return GetHeap()->isolate();
1142 Map* HeapObject::map() {
1143 return map_word().ToMap();
1147 void HeapObject::set_map(Map* value) {
1148 set_map_word(MapWord::FromMap(value));
1149 if (value != NULL) {
1150 // TODO(1600) We are passing NULL as a slot because maps can never be on
1151 // evacuation candidate.
1152 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1157 // Unsafe accessor omitting write barrier.
1158 void HeapObject::set_map_no_write_barrier(Map* value) {
1159 set_map_word(MapWord::FromMap(value));
1163 MapWord HeapObject::map_word() {
1164 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1168 void HeapObject::set_map_word(MapWord map_word) {
1169 // WRITE_FIELD does not invoke write barrier, but there is no need
1171 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1175 HeapObject* HeapObject::FromAddress(Address address) {
1176 ASSERT_TAG_ALIGNED(address);
1177 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1181 Address HeapObject::address() {
1182 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1186 int HeapObject::Size() {
1187 return SizeFromMap(map());
1191 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1192 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1193 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1197 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1198 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1202 double HeapNumber::value() {
1203 return READ_DOUBLE_FIELD(this, kValueOffset);
1207 void HeapNumber::set_value(double value) {
1208 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1212 int HeapNumber::get_exponent() {
1213 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1214 kExponentShift) - kExponentBias;
1218 int HeapNumber::get_sign() {
1219 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1223 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1226 Object** FixedArray::GetFirstElementAddress() {
1227 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1231 bool FixedArray::ContainsOnlySmisOrHoles() {
1232 Object* the_hole = GetHeap()->the_hole_value();
1233 Object** current = GetFirstElementAddress();
1234 for (int i = 0; i < length(); ++i) {
1235 Object* candidate = *current++;
1236 if (!candidate->IsSmi() && candidate != the_hole) return false;
1242 FixedArrayBase* JSObject::elements() {
1243 Object* array = READ_FIELD(this, kElementsOffset);
1244 return static_cast<FixedArrayBase*>(array);
1247 void JSObject::ValidateSmiOnlyElements() {
1249 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1250 Heap* heap = GetHeap();
1251 // Don't use elements, since integrity checks will fail if there
1252 // are filler pointers in the array.
1253 FixedArray* fixed_array =
1254 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1255 Map* map = fixed_array->map();
1256 // Arrays that have been shifted in place can't be verified.
1257 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1258 map != heap->raw_unchecked_two_pointer_filler_map() &&
1259 map != heap->free_space_map()) {
1260 for (int i = 0; i < fixed_array->length(); i++) {
1261 Object* current = fixed_array->get(i);
1262 ASSERT(current->IsSmi() || current->IsTheHole());
1270 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
1272 ValidateSmiOnlyElements();
1274 if ((map()->elements_kind() != FAST_ELEMENTS)) {
1275 return TransitionElementsKind(FAST_ELEMENTS);
1281 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1283 EnsureElementsMode mode) {
1284 ElementsKind current_kind = map()->elements_kind();
1285 ElementsKind target_kind = current_kind;
1286 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1287 if (current_kind == FAST_ELEMENTS) return this;
1289 Heap* heap = GetHeap();
1290 Object* the_hole = heap->the_hole_value();
1291 Object* heap_number_map = heap->heap_number_map();
1292 for (uint32_t i = 0; i < count; ++i) {
1293 Object* current = *objects++;
1294 if (!current->IsSmi() && current != the_hole) {
1295 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
1296 HeapObject::cast(current)->map() == heap_number_map) {
1297 target_kind = FAST_DOUBLE_ELEMENTS;
1299 target_kind = FAST_ELEMENTS;
1305 if (target_kind != current_kind) {
1306 return TransitionElementsKind(target_kind);
1312 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1313 EnsureElementsMode mode) {
1314 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1315 ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1316 elements->map() == GetHeap()->fixed_cow_array_map());
1317 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1318 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1320 Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1321 return EnsureCanContainElements(objects, elements->length(), mode);
1324 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1325 if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
1326 return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
1333 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1334 ElementsKind to_kind) {
1335 Map* current_map = map();
1336 ElementsKind from_kind = current_map->elements_kind();
1338 if (from_kind == to_kind) return current_map;
1340 Context* global_context = isolate->context()->global_context();
1341 if (current_map == global_context->smi_js_array_map()) {
1342 if (to_kind == FAST_ELEMENTS) {
1343 return global_context->object_js_array_map();
1345 if (to_kind == FAST_DOUBLE_ELEMENTS) {
1346 return global_context->double_js_array_map();
1348 ASSERT(to_kind == DICTIONARY_ELEMENTS);
1352 return GetElementsTransitionMapSlow(to_kind);
1356 void JSObject::set_map_and_elements(Map* new_map,
1357 FixedArrayBase* value,
1358 WriteBarrierMode mode) {
1359 ASSERT(value->HasValidElements());
1361 ValidateSmiOnlyElements();
1363 if (new_map != NULL) {
1364 if (mode == UPDATE_WRITE_BARRIER) {
1367 ASSERT(mode == SKIP_WRITE_BARRIER);
1368 set_map_no_write_barrier(new_map);
1371 ASSERT((map()->has_fast_elements() ||
1372 map()->has_fast_smi_only_elements() ||
1373 (value == GetHeap()->empty_fixed_array())) ==
1374 (value->map() == GetHeap()->fixed_array_map() ||
1375 value->map() == GetHeap()->fixed_cow_array_map()));
1376 ASSERT((value == GetHeap()->empty_fixed_array()) ||
1377 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1378 WRITE_FIELD(this, kElementsOffset, value);
1379 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1383 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1384 set_map_and_elements(NULL, value, mode);
1388 void JSObject::initialize_properties() {
1389 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1390 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1394 void JSObject::initialize_elements() {
1395 ASSERT(map()->has_fast_elements() ||
1396 map()->has_fast_smi_only_elements() ||
1397 map()->has_fast_double_elements());
1398 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1399 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1403 MaybeObject* JSObject::ResetElements() {
1405 ElementsKind elements_kind = FLAG_smi_only_arrays
1406 ? FAST_SMI_ONLY_ELEMENTS
1408 MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
1410 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1411 set_map(Map::cast(obj));
1412 initialize_elements();
1417 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1418 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1421 byte Oddball::kind() {
1422 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1426 void Oddball::set_kind(byte value) {
1427 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1431 Object* JSGlobalPropertyCell::value() {
1432 return READ_FIELD(this, kValueOffset);
1436 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1437 // The write barrier is not used for global property cells.
1438 ASSERT(!val->IsJSGlobalPropertyCell());
1439 WRITE_FIELD(this, kValueOffset, val);
1443 int JSObject::GetHeaderSize() {
1444 InstanceType type = map()->instance_type();
1445 // Check for the most common kind of JavaScript object before
1446 // falling into the generic switch. This speeds up the internal
1447 // field operations considerably on average.
1448 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1450 case JS_MODULE_TYPE:
1451 return JSModule::kSize;
1452 case JS_GLOBAL_PROXY_TYPE:
1453 return JSGlobalProxy::kSize;
1454 case JS_GLOBAL_OBJECT_TYPE:
1455 return JSGlobalObject::kSize;
1456 case JS_BUILTINS_OBJECT_TYPE:
1457 return JSBuiltinsObject::kSize;
1458 case JS_FUNCTION_TYPE:
1459 return JSFunction::kSize;
1461 return JSValue::kSize;
1463 return JSDate::kSize;
1465 return JSArray::kSize;
1466 case JS_WEAK_MAP_TYPE:
1467 return JSWeakMap::kSize;
1468 case JS_REGEXP_TYPE:
1469 return JSRegExp::kSize;
1470 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1471 return JSObject::kHeaderSize;
1472 case JS_MESSAGE_OBJECT_TYPE:
1473 return JSMessageObject::kSize;
1481 int JSObject::GetInternalFieldCount() {
1482 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1483 // Make sure to adjust for the number of in-object properties. These
1484 // properties do contribute to the size, but are not internal fields.
1485 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1486 map()->inobject_properties();
1490 int JSObject::GetInternalFieldOffset(int index) {
1491 ASSERT(index < GetInternalFieldCount() && index >= 0);
1492 return GetHeaderSize() + (kPointerSize * index);
1496 Object* JSObject::GetInternalField(int index) {
1497 ASSERT(index < GetInternalFieldCount() && index >= 0);
1498 // Internal objects do follow immediately after the header, whereas in-object
1499 // properties are at the end of the object. Therefore there is no need
1500 // to adjust the index here.
1501 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1505 void JSObject::SetInternalField(int index, Object* value) {
1506 ASSERT(index < GetInternalFieldCount() && index >= 0);
1507 // Internal objects do follow immediately after the header, whereas in-object
1508 // properties are at the end of the object. Therefore there is no need
1509 // to adjust the index here.
1510 int offset = GetHeaderSize() + (kPointerSize * index);
1511 WRITE_FIELD(this, offset, value);
1512 WRITE_BARRIER(GetHeap(), this, offset, value);
1516 void JSObject::SetInternalField(int index, Smi* value) {
1517 ASSERT(index < GetInternalFieldCount() && index >= 0);
1518 // Internal objects do follow immediately after the header, whereas in-object
1519 // properties are at the end of the object. Therefore there is no need
1520 // to adjust the index here.
1521 int offset = GetHeaderSize() + (kPointerSize * index);
1522 WRITE_FIELD(this, offset, value);
1526 // Access fast-case object properties at index. The use of these routines
1527 // is needed to correctly distinguish between properties stored in-object and
1528 // properties stored in the properties array.
1529 Object* JSObject::FastPropertyAt(int index) {
1530 // Adjust for the number of properties stored in the object.
1531 index -= map()->inobject_properties();
1533 int offset = map()->instance_size() + (index * kPointerSize);
1534 return READ_FIELD(this, offset);
1536 ASSERT(index < properties()->length());
1537 return properties()->get(index);
1542 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1543 // Adjust for the number of properties stored in the object.
1544 index -= map()->inobject_properties();
1546 int offset = map()->instance_size() + (index * kPointerSize);
1547 WRITE_FIELD(this, offset, value);
1548 WRITE_BARRIER(GetHeap(), this, offset, value);
1550 ASSERT(index < properties()->length());
1551 properties()->set(index, value);
1557 int JSObject::GetInObjectPropertyOffset(int index) {
1558 // Adjust for the number of properties stored in the object.
1559 index -= map()->inobject_properties();
1561 return map()->instance_size() + (index * kPointerSize);
1565 Object* JSObject::InObjectPropertyAt(int index) {
1566 // Adjust for the number of properties stored in the object.
1567 index -= map()->inobject_properties();
1569 int offset = map()->instance_size() + (index * kPointerSize);
1570 return READ_FIELD(this, offset);
1574 Object* JSObject::InObjectPropertyAtPut(int index,
1576 WriteBarrierMode mode) {
1577 // Adjust for the number of properties stored in the object.
1578 index -= map()->inobject_properties();
1580 int offset = map()->instance_size() + (index * kPointerSize);
1581 WRITE_FIELD(this, offset, value);
1582 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1588 void JSObject::InitializeBody(Map* map,
1589 Object* pre_allocated_value,
1590 Object* filler_value) {
1591 ASSERT(!filler_value->IsHeapObject() ||
1592 !GetHeap()->InNewSpace(filler_value));
1593 ASSERT(!pre_allocated_value->IsHeapObject() ||
1594 !GetHeap()->InNewSpace(pre_allocated_value));
1595 int size = map->instance_size();
1596 int offset = kHeaderSize;
1597 if (filler_value != pre_allocated_value) {
1598 int pre_allocated = map->pre_allocated_property_fields();
1599 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1600 for (int i = 0; i < pre_allocated; i++) {
1601 WRITE_FIELD(this, offset, pre_allocated_value);
1602 offset += kPointerSize;
1605 while (offset < size) {
1606 WRITE_FIELD(this, offset, filler_value);
1607 offset += kPointerSize;
1612 bool JSObject::HasFastProperties() {
1613 return !properties()->IsDictionary();
1617 int JSObject::MaxFastProperties() {
1618 // Allow extra fast properties if the object has more than
1619 // kMaxFastProperties in-object properties. When this is the case,
1620 // it is very unlikely that the object is being used as a dictionary
1621 // and there is a good chance that allowing more map transitions
1622 // will be worth it.
1623 return Max(map()->inobject_properties(), kMaxFastProperties);
1627 void Struct::InitializeBody(int object_size) {
1628 Object* value = GetHeap()->undefined_value();
1629 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1630 WRITE_FIELD(this, offset, value);
1635 bool Object::ToArrayIndex(uint32_t* index) {
1637 int value = Smi::cast(this)->value();
1638 if (value < 0) return false;
1642 if (IsHeapNumber()) {
1643 double value = HeapNumber::cast(this)->value();
1644 uint32_t uint_value = static_cast<uint32_t>(value);
1645 if (value == static_cast<double>(uint_value)) {
1646 *index = uint_value;
1654 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1655 if (!this->IsJSValue()) return false;
1657 JSValue* js_value = JSValue::cast(this);
1658 if (!js_value->value()->IsString()) return false;
1660 String* str = String::cast(js_value->value());
1661 if (index >= (uint32_t)str->length()) return false;
1667 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1668 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1669 return reinterpret_cast<FixedArrayBase*>(object);
1673 Object* FixedArray::get(int index) {
1674 ASSERT(index >= 0 && index < this->length());
1675 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1679 void FixedArray::set(int index, Smi* value) {
1680 ASSERT(map() != HEAP->fixed_cow_array_map());
1681 ASSERT(index >= 0 && index < this->length());
1682 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1683 int offset = kHeaderSize + index * kPointerSize;
1684 WRITE_FIELD(this, offset, value);
1688 void FixedArray::set(int index, Object* value) {
1689 ASSERT(map() != HEAP->fixed_cow_array_map());
1690 ASSERT(index >= 0 && index < this->length());
1691 int offset = kHeaderSize + index * kPointerSize;
1692 WRITE_FIELD(this, offset, value);
1693 WRITE_BARRIER(GetHeap(), this, offset, value);
1697 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1698 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1702 inline double FixedDoubleArray::hole_nan_as_double() {
1703 return BitCast<double, uint64_t>(kHoleNanInt64);
1707 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1708 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1709 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1710 return OS::nan_value();
1714 double FixedDoubleArray::get_scalar(int index) {
1715 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1716 map() != HEAP->fixed_array_map());
1717 ASSERT(index >= 0 && index < this->length());
1718 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1719 ASSERT(!is_the_hole_nan(result));
1723 int64_t FixedDoubleArray::get_representation(int index) {
1724 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1725 map() != HEAP->fixed_array_map());
1726 ASSERT(index >= 0 && index < this->length());
1727 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
1730 MaybeObject* FixedDoubleArray::get(int index) {
1731 if (is_the_hole(index)) {
1732 return GetHeap()->the_hole_value();
1734 return GetHeap()->NumberFromDouble(get_scalar(index));
1739 void FixedDoubleArray::set(int index, double value) {
1740 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1741 map() != HEAP->fixed_array_map());
1742 int offset = kHeaderSize + index * kDoubleSize;
1743 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1744 WRITE_DOUBLE_FIELD(this, offset, value);
1748 void FixedDoubleArray::set_the_hole(int index) {
1749 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1750 map() != HEAP->fixed_array_map());
1751 int offset = kHeaderSize + index * kDoubleSize;
1752 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1756 bool FixedDoubleArray::is_the_hole(int index) {
1757 int offset = kHeaderSize + index * kDoubleSize;
1758 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
1762 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1763 Heap* heap = GetHeap();
1764 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1765 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1766 return UPDATE_WRITE_BARRIER;
1770 void FixedArray::set(int index,
1772 WriteBarrierMode mode) {
1773 ASSERT(map() != HEAP->fixed_cow_array_map());
1774 ASSERT(index >= 0 && index < this->length());
1775 int offset = kHeaderSize + index * kPointerSize;
1776 WRITE_FIELD(this, offset, value);
1777 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1781 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
1784 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1785 ASSERT(index >= 0 && index < array->length());
1786 int offset = kHeaderSize + index * kPointerSize;
1787 WRITE_FIELD(array, offset, value);
1788 Heap* heap = array->GetHeap();
1789 if (heap->InNewSpace(value)) {
1790 heap->RecordWrite(array->address(), offset);
1795 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1798 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1799 ASSERT(index >= 0 && index < array->length());
1800 ASSERT(!HEAP->InNewSpace(value));
1801 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1805 void FixedArray::set_undefined(int index) {
1806 ASSERT(map() != HEAP->fixed_cow_array_map());
1807 set_undefined(GetHeap(), index);
1811 void FixedArray::set_undefined(Heap* heap, int index) {
1812 ASSERT(index >= 0 && index < this->length());
1813 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1814 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1815 heap->undefined_value());
1819 void FixedArray::set_null(int index) {
1820 set_null(GetHeap(), index);
1824 void FixedArray::set_null(Heap* heap, int index) {
1825 ASSERT(index >= 0 && index < this->length());
1826 ASSERT(!heap->InNewSpace(heap->null_value()));
1827 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1831 void FixedArray::set_the_hole(int index) {
1832 ASSERT(map() != HEAP->fixed_cow_array_map());
1833 ASSERT(index >= 0 && index < this->length());
1834 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1836 kHeaderSize + index * kPointerSize,
1837 GetHeap()->the_hole_value());
1841 void FixedArray::set_unchecked(int index, Smi* value) {
1842 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1843 int offset = kHeaderSize + index * kPointerSize;
1844 WRITE_FIELD(this, offset, value);
1848 void FixedArray::set_unchecked(Heap* heap,
1851 WriteBarrierMode mode) {
1852 int offset = kHeaderSize + index * kPointerSize;
1853 WRITE_FIELD(this, offset, value);
1854 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1858 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1859 ASSERT(index >= 0 && index < this->length());
1860 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1861 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1865 Object** FixedArray::data_start() {
1866 return HeapObject::RawField(this, kHeaderSize);
1870 bool DescriptorArray::IsEmpty() {
1871 ASSERT(this->IsSmi() ||
1872 this->length() > kFirstIndex ||
1873 this == HEAP->empty_descriptor_array());
1874 return this->IsSmi() || length() <= kFirstIndex;
1878 int DescriptorArray::bit_field3_storage() {
1879 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1880 return Smi::cast(storage)->value();
1883 void DescriptorArray::set_bit_field3_storage(int value) {
1885 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
1889 void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
1892 Object* tmp = array->get(first);
1893 NoIncrementalWriteBarrierSet(array, first, array->get(second));
1894 NoIncrementalWriteBarrierSet(array, second, tmp);
1898 int DescriptorArray::Search(String* name) {
1899 SLOW_ASSERT(IsSortedNoDuplicates());
1901 // Check for empty descriptor array.
1902 int nof = number_of_descriptors();
1903 if (nof == 0) return kNotFound;
1905 // Fast case: do linear search for small arrays.
1906 const int kMaxElementsForLinearSearch = 8;
1907 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1908 return LinearSearch(name, nof);
1911 // Slow case: perform binary search.
1912 return BinarySearch(name, 0, nof - 1);
1916 int DescriptorArray::SearchWithCache(String* name) {
1917 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1918 if (number == DescriptorLookupCache::kAbsent) {
1919 number = Search(name);
1920 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1926 String* DescriptorArray::GetKey(int descriptor_number) {
1927 ASSERT(descriptor_number < number_of_descriptors());
1928 return String::cast(get(ToKeyIndex(descriptor_number)));
1932 Object* DescriptorArray::GetValue(int descriptor_number) {
1933 ASSERT(descriptor_number < number_of_descriptors());
1934 return GetContentArray()->get(ToValueIndex(descriptor_number));
1938 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
1939 ASSERT(descriptor_number < number_of_descriptors());
1940 Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
1941 return PropertyDetails(Smi::cast(details));
1945 PropertyType DescriptorArray::GetType(int descriptor_number) {
1946 return GetDetails(descriptor_number).type();
1950 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1951 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1955 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1956 return JSFunction::cast(GetValue(descriptor_number));
1960 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1961 ASSERT(GetType(descriptor_number) == CALLBACKS);
1962 return GetValue(descriptor_number);
1966 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1967 ASSERT(GetType(descriptor_number) == CALLBACKS);
1968 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1969 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
1973 bool DescriptorArray::IsProperty(int descriptor_number) {
1974 Entry entry(this, descriptor_number);
1975 return IsPropertyDescriptor(&entry);
1979 bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
1980 switch (GetType(descriptor_number)) {
1981 case MAP_TRANSITION:
1982 case CONSTANT_TRANSITION:
1983 case ELEMENTS_TRANSITION:
1986 Object* value = GetValue(descriptor_number);
1987 if (!value->IsAccessorPair()) return false;
1988 AccessorPair* accessors = AccessorPair::cast(value);
1989 return accessors->getter()->IsMap() && accessors->setter()->IsMap();
1993 case CONSTANT_FUNCTION:
1996 case NULL_DESCRIPTOR:
1999 UNREACHABLE(); // Keep the compiler happy.
2004 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
2005 return GetType(descriptor_number) == NULL_DESCRIPTOR;
2009 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2010 desc->Init(GetKey(descriptor_number),
2011 GetValue(descriptor_number),
2012 GetDetails(descriptor_number));
2016 void DescriptorArray::Set(int descriptor_number,
2018 const WhitenessWitness&) {
2020 ASSERT(descriptor_number < number_of_descriptors());
2022 NoIncrementalWriteBarrierSet(this,
2023 ToKeyIndex(descriptor_number),
2025 FixedArray* content_array = GetContentArray();
2026 NoIncrementalWriteBarrierSet(content_array,
2027 ToValueIndex(descriptor_number),
2029 NoIncrementalWriteBarrierSet(content_array,
2030 ToDetailsIndex(descriptor_number),
2031 desc->GetDetails().AsSmi());
2035 void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
2036 int first, int second) {
2037 NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
2038 FixedArray* content_array = GetContentArray();
2039 NoIncrementalWriteBarrierSwap(content_array,
2040 ToValueIndex(first),
2041 ToValueIndex(second));
2042 NoIncrementalWriteBarrierSwap(content_array,
2043 ToDetailsIndex(first),
2044 ToDetailsIndex(second));
2048 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2049 : marking_(array->GetHeap()->incremental_marking()) {
2050 marking_->EnterNoMarkingScope();
2051 if (array->number_of_descriptors() > 0) {
2052 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2053 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
2058 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2059 marking_->LeaveNoMarkingScope();
2063 template<typename Shape, typename Key>
2064 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2065 const int kMinCapacity = 32;
2066 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2067 if (capacity < kMinCapacity) {
2068 capacity = kMinCapacity; // Guarantee min capacity.
2074 template<typename Shape, typename Key>
2075 int HashTable<Shape, Key>::FindEntry(Key key) {
2076 return FindEntry(GetIsolate(), key);
2080 // Find entry for key otherwise return kNotFound.
2081 template<typename Shape, typename Key>
2082 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2083 uint32_t capacity = Capacity();
2084 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2086 // EnsureCapacity will guarantee the hash table is never full.
2088 Object* element = KeyAt(entry);
2090 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2091 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2092 Shape::IsMatch(key, element)) return entry;
2093 entry = NextProbe(entry, count++, capacity);
2099 bool SeededNumberDictionary::requires_slow_elements() {
2100 Object* max_index_object = get(kMaxNumberKeyIndex);
2101 if (!max_index_object->IsSmi()) return false;
2103 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2106 uint32_t SeededNumberDictionary::max_number_key() {
2107 ASSERT(!requires_slow_elements());
2108 Object* max_index_object = get(kMaxNumberKeyIndex);
2109 if (!max_index_object->IsSmi()) return 0;
2110 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2111 return value >> kRequiresSlowElementsTagSize;
2114 void SeededNumberDictionary::set_requires_slow_elements() {
2115 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2119 // ------------------------------------
2123 CAST_ACCESSOR(FixedArray)
2124 CAST_ACCESSOR(FixedDoubleArray)
2125 CAST_ACCESSOR(DescriptorArray)
2126 CAST_ACCESSOR(DeoptimizationInputData)
2127 CAST_ACCESSOR(DeoptimizationOutputData)
2128 CAST_ACCESSOR(TypeFeedbackCells)
2129 CAST_ACCESSOR(SymbolTable)
2130 CAST_ACCESSOR(JSFunctionResultCache)
2131 CAST_ACCESSOR(NormalizedMapCache)
2132 CAST_ACCESSOR(ScopeInfo)
2133 CAST_ACCESSOR(CompilationCacheTable)
2134 CAST_ACCESSOR(CodeCacheHashTable)
2135 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2136 CAST_ACCESSOR(MapCache)
2137 CAST_ACCESSOR(String)
2138 CAST_ACCESSOR(SeqString)
2139 CAST_ACCESSOR(SeqAsciiString)
2140 CAST_ACCESSOR(SeqTwoByteString)
2141 CAST_ACCESSOR(SlicedString)
2142 CAST_ACCESSOR(ConsString)
2143 CAST_ACCESSOR(ExternalString)
2144 CAST_ACCESSOR(ExternalAsciiString)
2145 CAST_ACCESSOR(ExternalTwoByteString)
2146 CAST_ACCESSOR(JSReceiver)
2147 CAST_ACCESSOR(JSObject)
2149 CAST_ACCESSOR(HeapObject)
2150 CAST_ACCESSOR(HeapNumber)
2151 CAST_ACCESSOR(Oddball)
2152 CAST_ACCESSOR(JSGlobalPropertyCell)
2153 CAST_ACCESSOR(SharedFunctionInfo)
2155 CAST_ACCESSOR(JSFunction)
2156 CAST_ACCESSOR(GlobalObject)
2157 CAST_ACCESSOR(JSGlobalProxy)
2158 CAST_ACCESSOR(JSGlobalObject)
2159 CAST_ACCESSOR(JSBuiltinsObject)
2161 CAST_ACCESSOR(JSArray)
2162 CAST_ACCESSOR(JSRegExp)
2163 CAST_ACCESSOR(JSProxy)
2164 CAST_ACCESSOR(JSFunctionProxy)
2165 CAST_ACCESSOR(JSSet)
2166 CAST_ACCESSOR(JSMap)
2167 CAST_ACCESSOR(JSWeakMap)
2168 CAST_ACCESSOR(Foreign)
2169 CAST_ACCESSOR(ByteArray)
2170 CAST_ACCESSOR(FreeSpace)
2171 CAST_ACCESSOR(ExternalArray)
2172 CAST_ACCESSOR(ExternalByteArray)
2173 CAST_ACCESSOR(ExternalUnsignedByteArray)
2174 CAST_ACCESSOR(ExternalShortArray)
2175 CAST_ACCESSOR(ExternalUnsignedShortArray)
2176 CAST_ACCESSOR(ExternalIntArray)
2177 CAST_ACCESSOR(ExternalUnsignedIntArray)
2178 CAST_ACCESSOR(ExternalFloatArray)
2179 CAST_ACCESSOR(ExternalDoubleArray)
2180 CAST_ACCESSOR(ExternalPixelArray)
2181 CAST_ACCESSOR(Struct)
2184 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2185 STRUCT_LIST(MAKE_STRUCT_CAST)
2186 #undef MAKE_STRUCT_CAST
2189 template <typename Shape, typename Key>
2190 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2191 ASSERT(obj->IsHashTable());
2192 return reinterpret_cast<HashTable*>(obj);
2196 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2197 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2199 SMI_ACCESSORS(String, length, kLengthOffset)
2200 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2203 uint32_t String::hash_field() {
2204 return READ_UINT32_FIELD(this, kHashFieldOffset);
2208 void String::set_hash_field(uint32_t value) {
2209 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2210 #if V8_HOST_ARCH_64_BIT
2211 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2216 bool String::Equals(String* other) {
2217 if (other == this) return true;
2218 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2221 return SlowEquals(other);
2225 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2226 if (!StringShape(this).IsCons()) return this;
2227 ConsString* cons = ConsString::cast(this);
2228 if (cons->IsFlat()) return cons->first();
2229 return SlowTryFlatten(pretenure);
2233 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2234 MaybeObject* flat = TryFlatten(pretenure);
2235 Object* successfully_flattened;
2236 if (!flat->ToObject(&successfully_flattened)) return this;
2237 return String::cast(successfully_flattened);
2241 uint16_t String::Get(int index) {
2242 ASSERT(index >= 0 && index < length());
2243 switch (StringShape(this).full_representation_tag()) {
2244 case kSeqStringTag | kAsciiStringTag:
2245 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2246 case kSeqStringTag | kTwoByteStringTag:
2247 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2248 case kConsStringTag | kAsciiStringTag:
2249 case kConsStringTag | kTwoByteStringTag:
2250 return ConsString::cast(this)->ConsStringGet(index);
2251 case kExternalStringTag | kAsciiStringTag:
2252 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2253 case kExternalStringTag | kTwoByteStringTag:
2254 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2255 case kSlicedStringTag | kAsciiStringTag:
2256 case kSlicedStringTag | kTwoByteStringTag:
2257 return SlicedString::cast(this)->SlicedStringGet(index);
2267 void String::Set(int index, uint16_t value) {
2268 ASSERT(index >= 0 && index < length());
2269 ASSERT(StringShape(this).IsSequential());
2271 return this->IsAsciiRepresentation()
2272 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2273 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2277 bool String::IsFlat() {
2278 if (!StringShape(this).IsCons()) return true;
2279 return ConsString::cast(this)->second()->length() == 0;
2283 String* String::GetUnderlying() {
2284 // Giving direct access to underlying string only makes sense if the
2285 // wrapping string is already flattened.
2286 ASSERT(this->IsFlat());
2287 ASSERT(StringShape(this).IsIndirect());
2288 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2289 const int kUnderlyingOffset = SlicedString::kParentOffset;
2290 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2294 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2295 ASSERT(index >= 0 && index < length());
2296 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2300 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2301 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2302 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2303 static_cast<byte>(value));
2307 Address SeqAsciiString::GetCharsAddress() {
2308 return FIELD_ADDR(this, kHeaderSize);
2312 char* SeqAsciiString::GetChars() {
2313 return reinterpret_cast<char*>(GetCharsAddress());
2317 Address SeqTwoByteString::GetCharsAddress() {
2318 return FIELD_ADDR(this, kHeaderSize);
2322 uc16* SeqTwoByteString::GetChars() {
2323 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2327 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2328 ASSERT(index >= 0 && index < length());
2329 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2333 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2334 ASSERT(index >= 0 && index < length());
2335 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2339 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2340 return SizeFor(length());
2344 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2345 return SizeFor(length());
2349 String* SlicedString::parent() {
2350 return String::cast(READ_FIELD(this, kParentOffset));
2354 void SlicedString::set_parent(String* parent) {
2355 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2356 WRITE_FIELD(this, kParentOffset, parent);
2360 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2363 String* ConsString::first() {
2364 return String::cast(READ_FIELD(this, kFirstOffset));
2368 Object* ConsString::unchecked_first() {
2369 return READ_FIELD(this, kFirstOffset);
2373 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2374 WRITE_FIELD(this, kFirstOffset, value);
2375 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2379 String* ConsString::second() {
2380 return String::cast(READ_FIELD(this, kSecondOffset));
2384 Object* ConsString::unchecked_second() {
2385 return READ_FIELD(this, kSecondOffset);
2389 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2390 WRITE_FIELD(this, kSecondOffset, value);
2391 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2395 bool ExternalString::is_short() {
2396 InstanceType type = map()->instance_type();
2397 return (type & kShortExternalStringMask) == kShortExternalStringTag;
2401 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2402 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2406 void ExternalAsciiString::update_data_cache() {
2407 if (is_short()) return;
2408 const char** data_field =
2409 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2410 *data_field = resource()->data();
2414 void ExternalAsciiString::set_resource(
2415 const ExternalAsciiString::Resource* resource) {
2416 *reinterpret_cast<const Resource**>(
2417 FIELD_ADDR(this, kResourceOffset)) = resource;
2418 if (resource != NULL) update_data_cache();
2422 const char* ExternalAsciiString::GetChars() {
2423 return resource()->data();
2427 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
2428 ASSERT(index >= 0 && index < length());
2429 return GetChars()[index];
2433 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2434 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2438 void ExternalTwoByteString::update_data_cache() {
2439 if (is_short()) return;
2440 const uint16_t** data_field =
2441 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2442 *data_field = resource()->data();
2446 void ExternalTwoByteString::set_resource(
2447 const ExternalTwoByteString::Resource* resource) {
2448 *reinterpret_cast<const Resource**>(
2449 FIELD_ADDR(this, kResourceOffset)) = resource;
2450 if (resource != NULL) update_data_cache();
2454 const uint16_t* ExternalTwoByteString::GetChars() {
2455 return resource()->data();
2459 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
2460 ASSERT(index >= 0 && index < length());
2461 return GetChars()[index];
2465 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
2467 return GetChars() + start;
2471 void JSFunctionResultCache::MakeZeroSize() {
2472 set_finger_index(kEntriesIndex);
2473 set_size(kEntriesIndex);
2477 void JSFunctionResultCache::Clear() {
2478 int cache_size = size();
2479 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2480 MemsetPointer(entries_start,
2481 GetHeap()->the_hole_value(),
2482 cache_size - kEntriesIndex);
2487 int JSFunctionResultCache::size() {
2488 return Smi::cast(get(kCacheSizeIndex))->value();
2492 void JSFunctionResultCache::set_size(int size) {
2493 set(kCacheSizeIndex, Smi::FromInt(size));
2497 int JSFunctionResultCache::finger_index() {
2498 return Smi::cast(get(kFingerIndex))->value();
2502 void JSFunctionResultCache::set_finger_index(int finger_index) {
2503 set(kFingerIndex, Smi::FromInt(finger_index));
2507 byte ByteArray::get(int index) {
2508 ASSERT(index >= 0 && index < this->length());
2509 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2513 void ByteArray::set(int index, byte value) {
2514 ASSERT(index >= 0 && index < this->length());
2515 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2519 int ByteArray::get_int(int index) {
2520 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2521 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2525 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2526 ASSERT_TAG_ALIGNED(address);
2527 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2531 Address ByteArray::GetDataStartAddress() {
2532 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2536 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2537 return reinterpret_cast<uint8_t*>(external_pointer());
2541 uint8_t ExternalPixelArray::get_scalar(int index) {
2542 ASSERT((index >= 0) && (index < this->length()));
2543 uint8_t* ptr = external_pixel_pointer();
2548 MaybeObject* ExternalPixelArray::get(int index) {
2549 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2553 void ExternalPixelArray::set(int index, uint8_t value) {
2554 ASSERT((index >= 0) && (index < this->length()));
2555 uint8_t* ptr = external_pixel_pointer();
2560 void* ExternalArray::external_pointer() {
2561 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2562 return reinterpret_cast<void*>(ptr);
2566 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2567 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2568 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2572 int8_t ExternalByteArray::get_scalar(int index) {
2573 ASSERT((index >= 0) && (index < this->length()));
2574 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2579 MaybeObject* ExternalByteArray::get(int index) {
2580 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2584 void ExternalByteArray::set(int index, int8_t value) {
2585 ASSERT((index >= 0) && (index < this->length()));
2586 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2591 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2592 ASSERT((index >= 0) && (index < this->length()));
2593 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2598 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2599 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2603 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2604 ASSERT((index >= 0) && (index < this->length()));
2605 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2610 int16_t ExternalShortArray::get_scalar(int index) {
2611 ASSERT((index >= 0) && (index < this->length()));
2612 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2617 MaybeObject* ExternalShortArray::get(int index) {
2618 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2622 void ExternalShortArray::set(int index, int16_t value) {
2623 ASSERT((index >= 0) && (index < this->length()));
2624 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2629 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2630 ASSERT((index >= 0) && (index < this->length()));
2631 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2636 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2637 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2641 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2642 ASSERT((index >= 0) && (index < this->length()));
2643 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2648 int32_t ExternalIntArray::get_scalar(int index) {
2649 ASSERT((index >= 0) && (index < this->length()));
2650 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2655 MaybeObject* ExternalIntArray::get(int index) {
2656 return GetHeap()->NumberFromInt32(get_scalar(index));
2660 void ExternalIntArray::set(int index, int32_t value) {
2661 ASSERT((index >= 0) && (index < this->length()));
2662 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2667 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2668 ASSERT((index >= 0) && (index < this->length()));
2669 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2674 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2675 return GetHeap()->NumberFromUint32(get_scalar(index));
2679 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2680 ASSERT((index >= 0) && (index < this->length()));
2681 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2686 float ExternalFloatArray::get_scalar(int index) {
2687 ASSERT((index >= 0) && (index < this->length()));
2688 float* ptr = static_cast<float*>(external_pointer());
2693 MaybeObject* ExternalFloatArray::get(int index) {
2694 return GetHeap()->NumberFromDouble(get_scalar(index));
2698 void ExternalFloatArray::set(int index, float value) {
2699 ASSERT((index >= 0) && (index < this->length()));
2700 float* ptr = static_cast<float*>(external_pointer());
2705 double ExternalDoubleArray::get_scalar(int index) {
2706 ASSERT((index >= 0) && (index < this->length()));
2707 double* ptr = static_cast<double*>(external_pointer());
2712 MaybeObject* ExternalDoubleArray::get(int index) {
2713 return GetHeap()->NumberFromDouble(get_scalar(index));
2717 void ExternalDoubleArray::set(int index, double value) {
2718 ASSERT((index >= 0) && (index < this->length()));
2719 double* ptr = static_cast<double*>(external_pointer());
2724 int Map::visitor_id() {
2725 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2729 void Map::set_visitor_id(int id) {
2730 ASSERT(0 <= id && id < 256);
2731 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2735 int Map::instance_size() {
2736 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2740 int Map::inobject_properties() {
2741 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2745 int Map::pre_allocated_property_fields() {
2746 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2750 int HeapObject::SizeFromMap(Map* map) {
2751 int instance_size = map->instance_size();
2752 if (instance_size != kVariableSizeSentinel) return instance_size;
2753 // We can ignore the "symbol" bit becase it is only set for symbols
2754 // and implies a string type.
2755 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2756 // Only inline the most frequent cases.
2757 if (instance_type == FIXED_ARRAY_TYPE) {
2758 return FixedArray::BodyDescriptor::SizeOf(map, this);
2760 if (instance_type == ASCII_STRING_TYPE) {
2761 return SeqAsciiString::SizeFor(
2762 reinterpret_cast<SeqAsciiString*>(this)->length());
2764 if (instance_type == BYTE_ARRAY_TYPE) {
2765 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2767 if (instance_type == FREE_SPACE_TYPE) {
2768 return reinterpret_cast<FreeSpace*>(this)->size();
2770 if (instance_type == STRING_TYPE) {
2771 return SeqTwoByteString::SizeFor(
2772 reinterpret_cast<SeqTwoByteString*>(this)->length());
2774 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2775 return FixedDoubleArray::SizeFor(
2776 reinterpret_cast<FixedDoubleArray*>(this)->length());
2778 ASSERT(instance_type == CODE_TYPE);
2779 return reinterpret_cast<Code*>(this)->CodeSize();
2783 void Map::set_instance_size(int value) {
2784 ASSERT_EQ(0, value & (kPointerSize - 1));
2785 value >>= kPointerSizeLog2;
2786 ASSERT(0 <= value && value < 256);
2787 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2791 void Map::set_inobject_properties(int value) {
2792 ASSERT(0 <= value && value < 256);
2793 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2797 void Map::set_pre_allocated_property_fields(int value) {
2798 ASSERT(0 <= value && value < 256);
2799 WRITE_BYTE_FIELD(this,
2800 kPreAllocatedPropertyFieldsOffset,
2801 static_cast<byte>(value));
2805 InstanceType Map::instance_type() {
2806 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2810 void Map::set_instance_type(InstanceType value) {
2811 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2815 int Map::unused_property_fields() {
2816 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2820 void Map::set_unused_property_fields(int value) {
2821 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2825 byte Map::bit_field() {
2826 return READ_BYTE_FIELD(this, kBitFieldOffset);
2830 void Map::set_bit_field(byte value) {
2831 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2835 byte Map::bit_field2() {
2836 return READ_BYTE_FIELD(this, kBitField2Offset);
2840 void Map::set_bit_field2(byte value) {
2841 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2845 void Map::set_non_instance_prototype(bool value) {
2847 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2849 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2854 bool Map::has_non_instance_prototype() {
2855 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2859 void Map::set_function_with_prototype(bool value) {
2861 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2863 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2868 bool Map::function_with_prototype() {
2869 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2873 void Map::set_is_access_check_needed(bool access_check_needed) {
2874 if (access_check_needed) {
2875 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2877 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2882 bool Map::is_access_check_needed() {
2883 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2887 void Map::set_is_extensible(bool value) {
2889 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2891 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2895 bool Map::is_extensible() {
2896 return ((1 << kIsExtensible) & bit_field2()) != 0;
2900 void Map::set_attached_to_shared_function_info(bool value) {
2902 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2904 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2908 bool Map::attached_to_shared_function_info() {
2909 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2913 void Map::set_is_shared(bool value) {
2915 set_bit_field3(bit_field3() | (1 << kIsShared));
2917 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2921 bool Map::is_shared() {
2922 return ((1 << kIsShared) & bit_field3()) != 0;
2926 JSFunction* Map::unchecked_constructor() {
2927 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2931 Code::Flags Code::flags() {
2932 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2936 void Code::set_flags(Code::Flags flags) {
2937 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2938 // Make sure that all call stubs have an arguments count.
2939 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2940 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2941 ExtractArgumentsCountFromFlags(flags) >= 0);
2942 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2946 Code::Kind Code::kind() {
2947 return ExtractKindFromFlags(flags());
2951 InlineCacheState Code::ic_state() {
2952 InlineCacheState result = ExtractICStateFromFlags(flags());
2953 // Only allow uninitialized or debugger states for non-IC code
2954 // objects. This is used in the debugger to determine whether or not
2955 // a call to code object has been replaced with a debug break call.
2956 ASSERT(is_inline_cache_stub() ||
2957 result == UNINITIALIZED ||
2958 result == DEBUG_BREAK ||
2959 result == DEBUG_PREPARE_STEP_IN);
2964 Code::ExtraICState Code::extra_ic_state() {
2965 ASSERT(is_inline_cache_stub());
2966 return ExtractExtraICStateFromFlags(flags());
2970 PropertyType Code::type() {
2971 return ExtractTypeFromFlags(flags());
2975 int Code::arguments_count() {
2976 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2977 return ExtractArgumentsCountFromFlags(flags());
2981 int Code::major_key() {
2982 ASSERT(kind() == STUB ||
2983 kind() == UNARY_OP_IC ||
2984 kind() == BINARY_OP_IC ||
2985 kind() == COMPARE_IC ||
2986 kind() == TO_BOOLEAN_IC);
2987 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2991 void Code::set_major_key(int major) {
2992 ASSERT(kind() == STUB ||
2993 kind() == UNARY_OP_IC ||
2994 kind() == BINARY_OP_IC ||
2995 kind() == COMPARE_IC ||
2996 kind() == TO_BOOLEAN_IC);
2997 ASSERT(0 <= major && major < 256);
2998 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
3002 bool Code::is_pregenerated() {
3003 return kind() == STUB && IsPregeneratedField::decode(flags());
3007 void Code::set_is_pregenerated(bool value) {
3008 ASSERT(kind() == STUB);
3010 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3015 bool Code::optimizable() {
3016 ASSERT_EQ(FUNCTION, kind());
3017 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3021 void Code::set_optimizable(bool value) {
3022 ASSERT_EQ(FUNCTION, kind());
3023 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3027 bool Code::has_deoptimization_support() {
3028 ASSERT_EQ(FUNCTION, kind());
3029 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3030 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3034 void Code::set_has_deoptimization_support(bool value) {
3035 ASSERT_EQ(FUNCTION, kind());
3036 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3037 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3038 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3042 bool Code::has_debug_break_slots() {
3043 ASSERT_EQ(FUNCTION, kind());
3044 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3045 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3049 void Code::set_has_debug_break_slots(bool value) {
3050 ASSERT_EQ(FUNCTION, kind());
3051 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3052 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3053 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3057 bool Code::is_compiled_optimizable() {
3058 ASSERT_EQ(FUNCTION, kind());
3059 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3060 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3064 void Code::set_compiled_optimizable(bool value) {
3065 ASSERT_EQ(FUNCTION, kind());
3066 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3067 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3068 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3072 int Code::allow_osr_at_loop_nesting_level() {
3073 ASSERT_EQ(FUNCTION, kind());
3074 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3078 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3079 ASSERT_EQ(FUNCTION, kind());
3080 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3081 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3085 int Code::profiler_ticks() {
3086 ASSERT_EQ(FUNCTION, kind());
3087 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3091 void Code::set_profiler_ticks(int ticks) {
3092 ASSERT_EQ(FUNCTION, kind());
3093 ASSERT(ticks < 256);
3094 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3098 unsigned Code::stack_slots() {
3099 ASSERT(kind() == OPTIMIZED_FUNCTION);
3100 return READ_UINT32_FIELD(this, kStackSlotsOffset);
3104 void Code::set_stack_slots(unsigned slots) {
3105 ASSERT(kind() == OPTIMIZED_FUNCTION);
3106 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3110 unsigned Code::safepoint_table_offset() {
3111 ASSERT(kind() == OPTIMIZED_FUNCTION);
3112 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3116 void Code::set_safepoint_table_offset(unsigned offset) {
3117 ASSERT(kind() == OPTIMIZED_FUNCTION);
3118 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3119 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3123 unsigned Code::stack_check_table_offset() {
3124 ASSERT_EQ(FUNCTION, kind());
3125 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3129 void Code::set_stack_check_table_offset(unsigned offset) {
3130 ASSERT_EQ(FUNCTION, kind());
3131 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3132 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3136 CheckType Code::check_type() {
3137 ASSERT(is_call_stub() || is_keyed_call_stub());
3138 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3139 return static_cast<CheckType>(type);
3143 void Code::set_check_type(CheckType value) {
3144 ASSERT(is_call_stub() || is_keyed_call_stub());
3145 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3149 byte Code::unary_op_type() {
3150 ASSERT(is_unary_op_stub());
3151 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3155 void Code::set_unary_op_type(byte value) {
3156 ASSERT(is_unary_op_stub());
3157 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3161 byte Code::binary_op_type() {
3162 ASSERT(is_binary_op_stub());
3163 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3167 void Code::set_binary_op_type(byte value) {
3168 ASSERT(is_binary_op_stub());
3169 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3173 byte Code::binary_op_result_type() {
3174 ASSERT(is_binary_op_stub());
3175 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3179 void Code::set_binary_op_result_type(byte value) {
3180 ASSERT(is_binary_op_stub());
3181 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3185 byte Code::compare_state() {
3186 ASSERT(is_compare_ic_stub());
3187 return READ_BYTE_FIELD(this, kCompareStateOffset);
3191 void Code::set_compare_state(byte value) {
3192 ASSERT(is_compare_ic_stub());
3193 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3197 byte Code::compare_operation() {
3198 ASSERT(is_compare_ic_stub());
3199 return READ_BYTE_FIELD(this, kCompareOperationOffset);
3203 void Code::set_compare_operation(byte value) {
3204 ASSERT(is_compare_ic_stub());
3205 WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
3209 byte Code::to_boolean_state() {
3210 ASSERT(is_to_boolean_ic_stub());
3211 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3215 void Code::set_to_boolean_state(byte value) {
3216 ASSERT(is_to_boolean_ic_stub());
3217 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3221 bool Code::has_function_cache() {
3222 ASSERT(kind() == STUB);
3223 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3227 void Code::set_has_function_cache(bool flag) {
3228 ASSERT(kind() == STUB);
3229 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3233 bool Code::is_inline_cache_stub() {
3234 Kind kind = this->kind();
3235 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3239 Code::Flags Code::ComputeFlags(Kind kind,
3240 InlineCacheState ic_state,
3241 ExtraICState extra_ic_state,
3244 InlineCacheHolderFlag holder) {
3245 // Extra IC state is only allowed for call IC stubs or for store IC
3247 ASSERT(extra_ic_state == kNoExtraICState ||
3250 kind == KEYED_STORE_IC);
3251 // Compute the bit mask.
3252 int bits = KindField::encode(kind)
3253 | ICStateField::encode(ic_state)
3254 | TypeField::encode(type)
3255 | ExtraICStateField::encode(extra_ic_state)
3256 | (argc << kArgumentsCountShift)
3257 | CacheHolderField::encode(holder);
3258 return static_cast<Flags>(bits);
3262 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3264 ExtraICState extra_ic_state,
3265 InlineCacheHolderFlag holder,
3267 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3271 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3272 return KindField::decode(flags);
3276 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3277 return ICStateField::decode(flags);
3281 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3282 return ExtraICStateField::decode(flags);
3286 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3287 return TypeField::decode(flags);
3291 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3292 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3296 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3297 return CacheHolderField::decode(flags);
3301 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3302 int bits = flags & ~TypeField::kMask;
3303 return static_cast<Flags>(bits);
3307 Code* Code::GetCodeFromTargetAddress(Address address) {
3308 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3309 // GetCodeFromTargetAddress might be called when marking objects during mark
3310 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3311 // Code::cast. Code::cast does not work when the object's map is
3313 Code* result = reinterpret_cast<Code*>(code);
3318 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3320 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3324 Object* Map::prototype() {
3325 return READ_FIELD(this, kPrototypeOffset);
3329 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3330 ASSERT(value->IsNull() || value->IsJSReceiver());
3331 WRITE_FIELD(this, kPrototypeOffset, value);
3332 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3336 DescriptorArray* Map::instance_descriptors() {
3337 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3338 if (object->IsSmi()) {
3339 return GetHeap()->empty_descriptor_array();
3341 return DescriptorArray::cast(object);
3346 void Map::init_instance_descriptors() {
3347 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3351 void Map::clear_instance_descriptors() {
3352 Object* object = READ_FIELD(this,
3353 kInstanceDescriptorsOrBitField3Offset);
3354 if (!object->IsSmi()) {
3356 ZapInstanceDescriptors();
3360 kInstanceDescriptorsOrBitField3Offset,
3361 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3366 void Map::set_instance_descriptors(DescriptorArray* value,
3367 WriteBarrierMode mode) {
3368 Object* object = READ_FIELD(this,
3369 kInstanceDescriptorsOrBitField3Offset);
3370 Heap* heap = GetHeap();
3371 if (value == heap->empty_descriptor_array()) {
3372 clear_instance_descriptors();
3375 if (object->IsSmi()) {
3376 value->set_bit_field3_storage(Smi::cast(object)->value());
3378 value->set_bit_field3_storage(
3379 DescriptorArray::cast(object)->bit_field3_storage());
3382 ASSERT(!is_shared());
3384 if (value != instance_descriptors()) {
3385 ZapInstanceDescriptors();
3388 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3389 CONDITIONAL_WRITE_BARRIER(
3390 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3394 int Map::bit_field3() {
3395 Object* object = READ_FIELD(this,
3396 kInstanceDescriptorsOrBitField3Offset);
3397 if (object->IsSmi()) {
3398 return Smi::cast(object)->value();
3400 return DescriptorArray::cast(object)->bit_field3_storage();
3405 void Map::set_bit_field3(int value) {
3406 ASSERT(Smi::IsValid(value));
3407 Object* object = READ_FIELD(this,
3408 kInstanceDescriptorsOrBitField3Offset);
3409 if (object->IsSmi()) {
3411 kInstanceDescriptorsOrBitField3Offset,
3412 Smi::FromInt(value));
3414 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3419 Object* Map::GetBackPointer() {
3420 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3421 if (object->IsFixedArray()) {
3422 return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
3429 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
3430 Heap* heap = GetHeap();
3431 ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
3432 ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3433 (value->IsMap() && GetBackPointer()->IsUndefined()));
3434 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3435 if (object->IsFixedArray()) {
3436 FixedArray::cast(object)->set(
3437 kProtoTransitionBackPointerOffset, value, mode);
3439 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3440 CONDITIONAL_WRITE_BARRIER(
3441 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3446 FixedArray* Map::prototype_transitions() {
3447 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3448 if (object->IsFixedArray()) {
3449 return FixedArray::cast(object);
3451 return GetHeap()->empty_fixed_array();
3456 void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
3457 Heap* heap = GetHeap();
3458 ASSERT(value != heap->empty_fixed_array());
3459 value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
3461 if (value != prototype_transitions()) {
3462 ZapPrototypeTransitions();
3465 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3466 CONDITIONAL_WRITE_BARRIER(
3467 heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3471 void Map::init_prototype_transitions(Object* undefined) {
3472 ASSERT(undefined->IsUndefined());
3473 WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
3477 HeapObject* Map::unchecked_prototype_transitions() {
3478 Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3479 return reinterpret_cast<HeapObject*>(object);
3483 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3484 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3486 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3487 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3488 ACCESSORS(JSFunction,
3491 kNextFunctionLinkOffset)
3493 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3494 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3495 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3497 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3499 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3500 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3501 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3502 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3503 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3505 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3506 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3508 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3509 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3510 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3512 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3513 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3514 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3515 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3516 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3517 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3519 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3520 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3522 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3523 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3525 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3526 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3527 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3528 kPropertyAccessorsOffset)
3529 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3530 kPrototypeTemplateOffset)
3531 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3532 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3533 kNamedPropertyHandlerOffset)
3534 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3535 kIndexedPropertyHandlerOffset)
3536 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3537 kInstanceTemplateOffset)
3538 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3539 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3540 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3541 kInstanceCallHandlerOffset)
3542 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3543 kAccessCheckInfoOffset)
3544 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3546 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3547 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3548 kInternalFieldCountOffset)
3550 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3551 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3553 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3555 ACCESSORS(Script, source, Object, kSourceOffset)
3556 ACCESSORS(Script, name, Object, kNameOffset)
3557 ACCESSORS(Script, id, Object, kIdOffset)
3558 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3559 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3560 ACCESSORS(Script, data, Object, kDataOffset)
3561 ACCESSORS(Script, context_data, Object, kContextOffset)
3562 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3563 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3564 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3565 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3566 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3567 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3568 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
3569 kEvalFrominstructionsOffsetOffset)
3571 #ifdef ENABLE_DEBUGGER_SUPPORT
3572 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3573 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3574 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3575 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3577 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3578 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3579 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3580 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3583 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3584 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3585 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3586 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3587 kInstanceClassNameOffset)
3588 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3589 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3590 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3591 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3592 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3593 kThisPropertyAssignmentsOffset)
3594 SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
3597 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3598 kHiddenPrototypeBit)
3599 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3600 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3601 kNeedsAccessCheckBit)
3602 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3603 kReadOnlyPrototypeBit)
3604 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3606 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3608 BOOL_GETTER(SharedFunctionInfo,
3610 has_only_simple_this_property_assignments,
3611 kHasOnlySimpleThisPropertyAssignments)
3612 BOOL_ACCESSORS(SharedFunctionInfo,
3614 allows_lazy_compilation,
3615 kAllowLazyCompilation)
3616 BOOL_ACCESSORS(SharedFunctionInfo,
3620 BOOL_ACCESSORS(SharedFunctionInfo,
3622 has_duplicate_parameters,
3623 kHasDuplicateParameters)
3626 #if V8_HOST_ARCH_32_BIT
3627 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3628 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3629 kFormalParameterCountOffset)
3630 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3631 kExpectedNofPropertiesOffset)
3632 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3633 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3634 kStartPositionAndTypeOffset)
3635 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3636 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3637 kFunctionTokenPositionOffset)
3638 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3639 kCompilerHintsOffset)
3640 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3641 kThisPropertyAssignmentsCountOffset)
3642 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3643 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3644 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3647 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3648 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3649 int holder::name() { \
3650 int value = READ_INT_FIELD(this, offset); \
3651 ASSERT(kHeapObjectTag == 1); \
3652 ASSERT((value & kHeapObjectTag) == 0); \
3653 return value >> 1; \
3655 void holder::set_##name(int value) { \
3656 ASSERT(kHeapObjectTag == 1); \
3657 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3658 (value & 0xC0000000) == 0x000000000); \
3659 WRITE_INT_FIELD(this, \
3661 (value << 1) & ~kHeapObjectTag); \
3664 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3665 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3666 INT_ACCESSORS(holder, name, offset)
3669 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3670 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3671 formal_parameter_count,
3672 kFormalParameterCountOffset)
3674 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3675 expected_nof_properties,
3676 kExpectedNofPropertiesOffset)
3677 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3679 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3680 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3681 start_position_and_type,
3682 kStartPositionAndTypeOffset)
3684 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3685 function_token_position,
3686 kFunctionTokenPositionOffset)
3687 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3689 kCompilerHintsOffset)
3691 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3692 this_property_assignments_count,
3693 kThisPropertyAssignmentsCountOffset)
3694 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3696 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3697 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
3701 int SharedFunctionInfo::construction_count() {
3702 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3706 void SharedFunctionInfo::set_construction_count(int value) {
3707 ASSERT(0 <= value && value < 256);
3708 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3712 BOOL_ACCESSORS(SharedFunctionInfo,
3714 live_objects_may_exist,
3715 kLiveObjectsMayExist)
3718 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3719 return initial_map() != GetHeap()->undefined_value();
3723 BOOL_GETTER(SharedFunctionInfo,
3725 optimization_disabled,
3726 kOptimizationDisabled)
3729 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3730 set_compiler_hints(BooleanBit::set(compiler_hints(),
3731 kOptimizationDisabled,
3733 // If disabling optimizations we reflect that in the code object so
3734 // it will not be counted as optimizable code.
3735 if ((code()->kind() == Code::FUNCTION) && disable) {
3736 code()->set_optimizable(false);
3741 int SharedFunctionInfo::profiler_ticks() {
3742 if (code()->kind() != Code::FUNCTION) return 0;
3743 return code()->profiler_ticks();
3747 LanguageMode SharedFunctionInfo::language_mode() {
3748 int hints = compiler_hints();
3749 if (BooleanBit::get(hints, kExtendedModeFunction)) {
3750 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
3751 return EXTENDED_MODE;
3753 return BooleanBit::get(hints, kStrictModeFunction)
3754 ? STRICT_MODE : CLASSIC_MODE;
3758 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
3759 // We only allow language mode transitions that go set the same language mode
3760 // again or go up in the chain:
3761 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3762 ASSERT(this->language_mode() == CLASSIC_MODE ||
3763 this->language_mode() == language_mode ||
3764 language_mode == EXTENDED_MODE);
3765 int hints = compiler_hints();
3766 hints = BooleanBit::set(
3767 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3768 hints = BooleanBit::set(
3769 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3770 set_compiler_hints(hints);
3774 bool SharedFunctionInfo::is_classic_mode() {
3775 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
3778 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3779 kExtendedModeFunction)
3780 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3781 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3782 name_should_print_as_anonymous,
3783 kNameShouldPrintAsAnonymous)
3784 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3785 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3786 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3787 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
3789 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3791 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3792 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3794 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3796 bool Script::HasValidSource() {
3797 Object* src = this->source();
3798 if (!src->IsString()) return true;
3799 String* src_str = String::cast(src);
3800 if (!StringShape(src_str).IsExternal()) return true;
3801 if (src_str->IsAsciiRepresentation()) {
3802 return ExternalAsciiString::cast(src)->resource() != NULL;
3803 } else if (src_str->IsTwoByteRepresentation()) {
3804 return ExternalTwoByteString::cast(src)->resource() != NULL;
3810 void SharedFunctionInfo::DontAdaptArguments() {
3811 ASSERT(code()->kind() == Code::BUILTIN);
3812 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3816 int SharedFunctionInfo::start_position() {
3817 return start_position_and_type() >> kStartPositionShift;
3821 void SharedFunctionInfo::set_start_position(int start_position) {
3822 set_start_position_and_type((start_position << kStartPositionShift)
3823 | (start_position_and_type() & ~kStartPositionMask));
3827 Code* SharedFunctionInfo::code() {
3828 return Code::cast(READ_FIELD(this, kCodeOffset));
3832 Code* SharedFunctionInfo::unchecked_code() {
3833 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3837 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3838 WRITE_FIELD(this, kCodeOffset, value);
3839 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3843 ScopeInfo* SharedFunctionInfo::scope_info() {
3844 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3848 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3849 WriteBarrierMode mode) {
3850 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3851 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3854 reinterpret_cast<Object*>(value),
3859 bool SharedFunctionInfo::is_compiled() {
3861 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3865 bool SharedFunctionInfo::IsApiFunction() {
3866 return function_data()->IsFunctionTemplateInfo();
3870 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3871 ASSERT(IsApiFunction());
3872 return FunctionTemplateInfo::cast(function_data());
3876 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3877 return function_data()->IsSmi();
3881 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3882 ASSERT(HasBuiltinFunctionId());
3883 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3887 int SharedFunctionInfo::code_age() {
3888 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3892 void SharedFunctionInfo::set_code_age(int code_age) {
3893 int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3894 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3898 bool SharedFunctionInfo::has_deoptimization_support() {
3899 Code* code = this->code();
3900 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3904 bool JSFunction::IsBuiltin() {
3905 return context()->global()->IsJSBuiltinsObject();
3909 bool JSFunction::NeedsArgumentsAdaption() {
3910 return shared()->formal_parameter_count() !=
3911 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3915 bool JSFunction::IsOptimized() {
3916 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3920 bool JSFunction::IsOptimizable() {
3921 return code()->kind() == Code::FUNCTION && code()->optimizable();
3925 bool JSFunction::IsMarkedForLazyRecompilation() {
3926 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3930 Code* JSFunction::code() {
3931 return Code::cast(unchecked_code());
3935 Code* JSFunction::unchecked_code() {
3936 return reinterpret_cast<Code*>(
3937 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3941 void JSFunction::set_code(Code* value) {
3942 ASSERT(!HEAP->InNewSpace(value));
3943 Address entry = value->entry();
3944 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3945 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3947 HeapObject::RawField(this, kCodeEntryOffset),
3952 void JSFunction::ReplaceCode(Code* code) {
3953 bool was_optimized = IsOptimized();
3954 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3958 // Add/remove the function from the list of optimized functions for this
3959 // context based on the state change.
3960 if (!was_optimized && is_optimized) {
3961 context()->global_context()->AddOptimizedFunction(this);
3963 if (was_optimized && !is_optimized) {
3964 context()->global_context()->RemoveOptimizedFunction(this);
3969 Context* JSFunction::context() {
3970 return Context::cast(READ_FIELD(this, kContextOffset));
3974 Object* JSFunction::unchecked_context() {
3975 return READ_FIELD(this, kContextOffset);
3979 SharedFunctionInfo* JSFunction::unchecked_shared() {
3980 return reinterpret_cast<SharedFunctionInfo*>(
3981 READ_FIELD(this, kSharedFunctionInfoOffset));
3985 void JSFunction::set_context(Object* value) {
3986 ASSERT(value->IsUndefined() || value->IsContext());
3987 WRITE_FIELD(this, kContextOffset, value);
3988 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3991 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3992 kPrototypeOrInitialMapOffset)
3995 Map* JSFunction::initial_map() {
3996 return Map::cast(prototype_or_initial_map());
4000 void JSFunction::set_initial_map(Map* value) {
4001 set_prototype_or_initial_map(value);
4005 MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
4007 Context* global_context = context()->global_context();
4008 Object* array_function =
4009 global_context->get(Context::ARRAY_FUNCTION_INDEX);
4010 if (array_function->IsJSFunction() &&
4011 this == JSFunction::cast(array_function)) {
4012 ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
4014 MaybeObject* maybe_map = initial_map->CopyDropTransitions();
4015 Map* new_double_map = NULL;
4016 if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
4017 new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
4018 maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
4020 if (maybe_map->IsFailure()) return maybe_map;
4022 maybe_map = new_double_map->CopyDropTransitions();
4023 Map* new_object_map = NULL;
4024 if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
4025 new_object_map->set_elements_kind(FAST_ELEMENTS);
4026 maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
4028 if (maybe_map->IsFailure()) return maybe_map;
4030 global_context->set_smi_js_array_map(initial_map);
4031 global_context->set_double_js_array_map(new_double_map);
4032 global_context->set_object_js_array_map(new_object_map);
4034 set_initial_map(initial_map);
4039 bool JSFunction::has_initial_map() {
4040 return prototype_or_initial_map()->IsMap();
4044 bool JSFunction::has_instance_prototype() {
4045 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
4049 bool JSFunction::has_prototype() {
4050 return map()->has_non_instance_prototype() || has_instance_prototype();
4054 Object* JSFunction::instance_prototype() {
4055 ASSERT(has_instance_prototype());
4056 if (has_initial_map()) return initial_map()->prototype();
4057 // When there is no initial map and the prototype is a JSObject, the
4058 // initial map field is used for the prototype field.
4059 return prototype_or_initial_map();
4063 Object* JSFunction::prototype() {
4064 ASSERT(has_prototype());
4065 // If the function's prototype property has been set to a non-JSObject
4066 // value, that value is stored in the constructor field of the map.
4067 if (map()->has_non_instance_prototype()) return map()->constructor();
4068 return instance_prototype();
4071 bool JSFunction::should_have_prototype() {
4072 return map()->function_with_prototype();
4076 bool JSFunction::is_compiled() {
4077 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
4081 FixedArray* JSFunction::literals() {
4082 ASSERT(!shared()->bound());
4083 return literals_or_bindings();
4087 void JSFunction::set_literals(FixedArray* literals) {
4088 ASSERT(!shared()->bound());
4089 set_literals_or_bindings(literals);
4093 FixedArray* JSFunction::function_bindings() {
4094 ASSERT(shared()->bound());
4095 return literals_or_bindings();
4099 void JSFunction::set_function_bindings(FixedArray* bindings) {
4100 ASSERT(shared()->bound());
4101 // Bound function literal may be initialized to the empty fixed array
4102 // before the bindings are set.
4103 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
4104 bindings->map() == GetHeap()->fixed_cow_array_map());
4105 set_literals_or_bindings(bindings);
4109 int JSFunction::NumberOfLiterals() {
4110 ASSERT(!shared()->bound());
4111 return literals()->length();
4115 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
4116 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4117 return READ_FIELD(this, OffsetOfFunctionWithId(id));
4121 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
4123 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4124 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
4125 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
4129 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
4130 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4131 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
4135 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
4137 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4138 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
4139 ASSERT(!HEAP->InNewSpace(value));
4143 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
4144 ACCESSORS(JSProxy, hash, Object, kHashOffset)
4145 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
4146 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
4149 void JSProxy::InitializeBody(int object_size, Object* value) {
4150 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
4151 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
4152 WRITE_FIELD(this, offset, value);
4157 ACCESSORS(JSSet, table, Object, kTableOffset)
4158 ACCESSORS(JSMap, table, Object, kTableOffset)
4159 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
4160 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4163 Address Foreign::foreign_address() {
4164 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
4168 void Foreign::set_foreign_address(Address value) {
4169 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
4173 ACCESSORS(JSModule, context, Object, kContextOffset)
4176 JSModule* JSModule::cast(Object* obj) {
4177 ASSERT(obj->IsJSModule());
4178 ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
4179 return reinterpret_cast<JSModule*>(obj);
4183 ACCESSORS(JSValue, value, Object, kValueOffset)
4186 JSValue* JSValue::cast(Object* obj) {
4187 ASSERT(obj->IsJSValue());
4188 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
4189 return reinterpret_cast<JSValue*>(obj);
4193 ACCESSORS(JSDate, value, Object, kValueOffset)
4194 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4195 ACCESSORS(JSDate, year, Object, kYearOffset)
4196 ACCESSORS(JSDate, month, Object, kMonthOffset)
4197 ACCESSORS(JSDate, day, Object, kDayOffset)
4198 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4199 ACCESSORS(JSDate, hour, Object, kHourOffset)
4200 ACCESSORS(JSDate, min, Object, kMinOffset)
4201 ACCESSORS(JSDate, sec, Object, kSecOffset)
4204 JSDate* JSDate::cast(Object* obj) {
4205 ASSERT(obj->IsJSDate());
4206 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
4207 return reinterpret_cast<JSDate*>(obj);
4211 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4212 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
4213 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4214 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4215 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4216 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4217 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4220 JSMessageObject* JSMessageObject::cast(Object* obj) {
4221 ASSERT(obj->IsJSMessageObject());
4222 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
4223 return reinterpret_cast<JSMessageObject*>(obj);
4227 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4228 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
4229 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4230 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4231 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4232 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4233 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
4235 byte* Code::instruction_start() {
4236 return FIELD_ADDR(this, kHeaderSize);
4240 byte* Code::instruction_end() {
4241 return instruction_start() + instruction_size();
4245 int Code::body_size() {
4246 return RoundUp(instruction_size(), kObjectAlignment);
4250 FixedArray* Code::unchecked_deoptimization_data() {
4251 return reinterpret_cast<FixedArray*>(
4252 READ_FIELD(this, kDeoptimizationDataOffset));
4256 ByteArray* Code::unchecked_relocation_info() {
4257 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
4261 byte* Code::relocation_start() {
4262 return unchecked_relocation_info()->GetDataStartAddress();
4266 int Code::relocation_size() {
4267 return unchecked_relocation_info()->length();
4271 byte* Code::entry() {
4272 return instruction_start();
4276 bool Code::contains(byte* inner_pointer) {
4277 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4281 ACCESSORS(JSArray, length, Object, kLengthOffset)
4284 ACCESSORS(JSRegExp, data, Object, kDataOffset)
4287 JSRegExp::Type JSRegExp::TypeTag() {
4288 Object* data = this->data();
4289 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
4290 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4291 return static_cast<JSRegExp::Type>(smi->value());
4295 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
4296 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4297 return static_cast<JSRegExp::Type>(smi->value());
4301 int JSRegExp::CaptureCount() {
4302 switch (TypeTag()) {
4306 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4314 JSRegExp::Flags JSRegExp::GetFlags() {
4315 ASSERT(this->data()->IsFixedArray());
4316 Object* data = this->data();
4317 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4318 return Flags(smi->value());
4322 String* JSRegExp::Pattern() {
4323 ASSERT(this->data()->IsFixedArray());
4324 Object* data = this->data();
4325 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
4330 Object* JSRegExp::DataAt(int index) {
4331 ASSERT(TypeTag() != NOT_COMPILED);
4332 return FixedArray::cast(data())->get(index);
4336 Object* JSRegExp::DataAtUnchecked(int index) {
4337 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4338 int offset = FixedArray::kHeaderSize + index * kPointerSize;
4339 return READ_FIELD(fa, offset);
4343 void JSRegExp::SetDataAt(int index, Object* value) {
4344 ASSERT(TypeTag() != NOT_COMPILED);
4345 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4346 FixedArray::cast(data())->set(index, value);
4350 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4351 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4352 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4353 if (value->IsSmi()) {
4354 fa->set_unchecked(index, Smi::cast(value));
4356 // We only do this during GC, so we don't need to notify the write barrier.
4357 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4362 ElementsKind JSObject::GetElementsKind() {
4363 ElementsKind kind = map()->elements_kind();
4365 FixedArrayBase* fixed_array =
4366 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4367 Map* map = fixed_array->map();
4368 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4369 (map == GetHeap()->fixed_array_map() ||
4370 map == GetHeap()->fixed_cow_array_map())) ||
4371 (kind == FAST_DOUBLE_ELEMENTS &&
4372 (fixed_array->IsFixedDoubleArray() ||
4373 fixed_array == GetHeap()->empty_fixed_array())) ||
4374 (kind == DICTIONARY_ELEMENTS &&
4375 fixed_array->IsFixedArray() &&
4376 fixed_array->IsDictionary()) ||
4377 (kind > DICTIONARY_ELEMENTS));
4378 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4379 (elements()->IsFixedArray() && elements()->length() >= 2));
4385 ElementsAccessor* JSObject::GetElementsAccessor() {
4386 return ElementsAccessor::ForKind(GetElementsKind());
4390 bool JSObject::HasFastElements() {
4391 return GetElementsKind() == FAST_ELEMENTS;
4395 bool JSObject::HasFastSmiOnlyElements() {
4396 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4400 bool JSObject::HasFastTypeElements() {
4401 ElementsKind elements_kind = GetElementsKind();
4402 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4403 elements_kind == FAST_ELEMENTS;
4407 bool JSObject::HasFastDoubleElements() {
4408 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4412 bool JSObject::HasDictionaryElements() {
4413 return GetElementsKind() == DICTIONARY_ELEMENTS;
4417 bool JSObject::HasNonStrictArgumentsElements() {
4418 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4422 bool JSObject::HasExternalArrayElements() {
4423 HeapObject* array = elements();
4424 ASSERT(array != NULL);
4425 return array->IsExternalArray();
4429 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4430 bool JSObject::HasExternal##name##Elements() { \
4431 HeapObject* array = elements(); \
4432 ASSERT(array != NULL); \
4433 if (!array->IsHeapObject()) \
4435 return array->map()->instance_type() == type; \
4439 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
4440 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
4441 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
4442 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4443 EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
4444 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
4445 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4446 EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
4447 EXTERNAL_ELEMENTS_CHECK(Float,
4448 EXTERNAL_FLOAT_ARRAY_TYPE)
4449 EXTERNAL_ELEMENTS_CHECK(Double,
4450 EXTERNAL_DOUBLE_ARRAY_TYPE)
4451 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4454 bool JSObject::HasNamedInterceptor() {
4455 return map()->has_named_interceptor();
4459 bool JSObject::HasIndexedInterceptor() {
4460 return map()->has_indexed_interceptor();
4464 MaybeObject* JSObject::EnsureWritableFastElements() {
4465 ASSERT(HasFastTypeElements());
4466 FixedArray* elems = FixedArray::cast(elements());
4467 Isolate* isolate = GetIsolate();
4468 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4469 Object* writable_elems;
4470 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4471 elems, isolate->heap()->fixed_array_map());
4472 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4473 return maybe_writable_elems;
4476 set_elements(FixedArray::cast(writable_elems));
4477 isolate->counters()->cow_arrays_converted()->Increment();
4478 return writable_elems;
4482 StringDictionary* JSObject::property_dictionary() {
4483 ASSERT(!HasFastProperties());
4484 return StringDictionary::cast(properties());
4488 SeededNumberDictionary* JSObject::element_dictionary() {
4489 ASSERT(HasDictionaryElements());
4490 return SeededNumberDictionary::cast(elements());
4494 bool String::IsHashFieldComputed(uint32_t field) {
4495 return (field & kHashNotComputedMask) == 0;
4499 bool String::HasHashCode() {
4500 return IsHashFieldComputed(hash_field());
4504 uint32_t String::Hash() {
4505 // Fast case: has hash code already been computed?
4506 uint32_t field = hash_field();
4507 if (IsHashFieldComputed(field)) return field >> kHashShift;
4508 // Slow case: compute hash code and set it.
4509 return ComputeAndSetHash();
4513 StringHasher::StringHasher(int length, uint32_t seed)
4515 raw_running_hash_(seed),
4517 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4518 is_first_char_(true),
4520 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
4524 bool StringHasher::has_trivial_hash() {
4525 return length_ > String::kMaxHashCalcLength;
4529 void StringHasher::AddCharacter(uint32_t c) {
4530 if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4531 AddSurrogatePair(c); // Not inlined.
4534 // Use the Jenkins one-at-a-time hash function to update the hash
4535 // for the given character.
4536 raw_running_hash_ += c;
4537 raw_running_hash_ += (raw_running_hash_ << 10);
4538 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4539 // Incremental array index computation.
4540 if (is_array_index_) {
4541 if (c < '0' || c > '9') {
4542 is_array_index_ = false;
4545 if (is_first_char_) {
4546 is_first_char_ = false;
4547 if (c == '0' && length_ > 1) {
4548 is_array_index_ = false;
4552 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4553 is_array_index_ = false;
4555 array_index_ = array_index_ * 10 + d;
4562 void StringHasher::AddCharacterNoIndex(uint32_t c) {
4563 ASSERT(!is_array_index());
4564 if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4565 AddSurrogatePairNoIndex(c); // Not inlined.
4568 raw_running_hash_ += c;
4569 raw_running_hash_ += (raw_running_hash_ << 10);
4570 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4574 uint32_t StringHasher::GetHash() {
4575 // Get the calculated raw hash value and do some more bit ops to distribute
4576 // the hash further. Ensure that we never return zero as the hash value.
4577 uint32_t result = raw_running_hash_;
4578 result += (result << 3);
4579 result ^= (result >> 11);
4580 result += (result << 15);
4581 if ((result & String::kHashBitMask) == 0) {
4588 template <typename schar>
4589 uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
4590 StringHasher hasher(length, seed);
4591 if (!hasher.has_trivial_hash()) {
4593 for (i = 0; hasher.is_array_index() && (i < length); i++) {
4594 hasher.AddCharacter(chars[i]);
4596 for (; i < length; i++) {
4597 hasher.AddCharacterNoIndex(chars[i]);
4600 return hasher.GetHashField();
4604 bool String::AsArrayIndex(uint32_t* index) {
4605 uint32_t field = hash_field();
4606 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
4609 return SlowAsArrayIndex(index);
4613 Object* JSReceiver::GetPrototype() {
4614 return HeapObject::cast(this)->map()->prototype();
4618 bool JSReceiver::HasProperty(String* name) {
4620 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4622 return GetPropertyAttribute(name) != ABSENT;
4626 bool JSReceiver::HasLocalProperty(String* name) {
4628 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4630 return GetLocalPropertyAttribute(name) != ABSENT;
4634 PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
4635 return GetPropertyAttributeWithReceiver(this, key);
4638 // TODO(504): this may be useful in other places too where JSGlobalProxy
4640 Object* JSObject::BypassGlobalProxy() {
4641 if (IsJSGlobalProxy()) {
4642 Object* proto = GetPrototype();
4643 if (proto->IsNull()) return GetHeap()->undefined_value();
4644 ASSERT(proto->IsJSGlobalObject());
4651 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
4653 ? JSProxy::cast(this)->GetIdentityHash(flag)
4654 : JSObject::cast(this)->GetIdentityHash(flag);
4658 bool JSReceiver::HasElement(uint32_t index) {
4660 return JSProxy::cast(this)->HasElementWithHandler(index);
4662 return JSObject::cast(this)->HasElementWithReceiver(this, index);
4666 bool AccessorInfo::all_can_read() {
4667 return BooleanBit::get(flag(), kAllCanReadBit);
4671 void AccessorInfo::set_all_can_read(bool value) {
4672 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4676 bool AccessorInfo::all_can_write() {
4677 return BooleanBit::get(flag(), kAllCanWriteBit);
4681 void AccessorInfo::set_all_can_write(bool value) {
4682 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4686 bool AccessorInfo::prohibits_overwriting() {
4687 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4691 void AccessorInfo::set_prohibits_overwriting(bool value) {
4692 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4696 PropertyAttributes AccessorInfo::property_attributes() {
4697 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4701 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4702 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
4706 template<typename Shape, typename Key>
4707 void Dictionary<Shape, Key>::SetEntry(int entry,
4710 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4714 template<typename Shape, typename Key>
4715 void Dictionary<Shape, Key>::SetEntry(int entry,
4718 PropertyDetails details) {
4719 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
4720 int index = HashTable<Shape, Key>::EntryToIndex(entry);
4721 AssertNoAllocation no_gc;
4722 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
4723 FixedArray::set(index, key, mode);
4724 FixedArray::set(index+1, value, mode);
4725 FixedArray::set(index+2, details.AsSmi());
4729 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4730 ASSERT(other->IsNumber());
4731 return key == static_cast<uint32_t>(other->Number());
4735 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
4736 return ComputeIntegerHash(key, 0);
4740 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
4742 ASSERT(other->IsNumber());
4743 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
4746 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
4747 return ComputeIntegerHash(key, seed);
4750 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
4753 ASSERT(other->IsNumber());
4754 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
4757 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4758 return Isolate::Current()->heap()->NumberFromUint32(key);
4762 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4763 // We know that all entries in a hash table had their hash keys created.
4764 // Use that knowledge to have fast failure.
4765 if (key->Hash() != String::cast(other)->Hash()) return false;
4766 return key->Equals(String::cast(other));
4770 uint32_t StringDictionaryShape::Hash(String* key) {
4775 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4776 return String::cast(other)->Hash();
4780 MaybeObject* StringDictionaryShape::AsObject(String* key) {
4785 template <int entrysize>
4786 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
4787 return key->SameValue(other);
4791 template <int entrysize>
4792 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
4793 MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
4794 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4798 template <int entrysize>
4799 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
4801 MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
4802 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4806 template <int entrysize>
4807 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
4812 void Map::ClearCodeCache(Heap* heap) {
4813 // No write barrier is needed since empty_fixed_array is not in new space.
4814 // Please note this function is used during marking:
4815 // - MarkCompactCollector::MarkUnmarkedObject
4816 // - IncrementalMarking::Step
4817 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4818 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4822 void JSArray::EnsureSize(int required_size) {
4823 ASSERT(HasFastTypeElements());
4824 FixedArray* elts = FixedArray::cast(elements());
4825 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4826 if (elts->length() < required_size) {
4827 // Doubling in size would be overkill, but leave some slack to avoid
4828 // constantly growing.
4829 Expand(required_size + (required_size >> 3));
4830 // It's a performance benefit to keep a frequently used array in new-space.
4831 } else if (!GetHeap()->new_space()->Contains(elts) &&
4832 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4833 // Expand will allocate a new backing store in new space even if the size
4834 // we asked for isn't larger than what we had before.
4835 Expand(required_size);
4840 void JSArray::set_length(Smi* length) {
4841 // Don't need a write barrier for a Smi.
4842 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4846 bool JSArray::AllowsSetElementsLength() {
4847 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
4848 ASSERT(result == !HasExternalArrayElements());
4853 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
4854 MaybeObject* maybe_result = EnsureCanContainElements(
4855 storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
4856 if (maybe_result->IsFailure()) return maybe_result;
4857 ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
4858 GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
4859 ((storage->map() != GetHeap()->fixed_double_array_map()) &&
4860 ((GetElementsKind() == FAST_ELEMENTS) ||
4861 (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
4862 FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
4863 set_elements(storage);
4864 set_length(Smi::FromInt(storage->length()));
4869 MaybeObject* FixedArray::Copy() {
4870 if (length() == 0) return this;
4871 return GetHeap()->CopyFixedArray(this);
4875 MaybeObject* FixedDoubleArray::Copy() {
4876 if (length() == 0) return this;
4877 return GetHeap()->CopyFixedDoubleArray(this);
4881 void TypeFeedbackCells::SetAstId(int index, Smi* id) {
4882 set(1 + index * 2, id);
4886 Smi* TypeFeedbackCells::AstId(int index) {
4887 return Smi::cast(get(1 + index * 2));
4891 void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
4892 set(index * 2, cell);
4896 JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
4897 return JSGlobalPropertyCell::cast(get(index * 2));
4901 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
4902 return isolate->factory()->the_hole_value();
4906 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
4907 return isolate->factory()->undefined_value();
4911 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
4912 return heap->raw_unchecked_the_hole_value();
4916 SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
4917 SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
4918 kIcWithTypeinfoCountOffset)
4919 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
4920 kTypeFeedbackCellsOffset)
4923 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
4926 Relocatable::Relocatable(Isolate* isolate) {
4927 ASSERT(isolate == Isolate::Current());
4929 prev_ = isolate->relocatable_top();
4930 isolate->set_relocatable_top(this);
4934 Relocatable::~Relocatable() {
4935 ASSERT(isolate_ == Isolate::Current());
4936 ASSERT_EQ(isolate_->relocatable_top(), this);
4937 isolate_->set_relocatable_top(prev_);
4941 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
4942 return map->instance_size();
4946 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4947 v->VisitExternalReference(
4948 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4952 template<typename StaticVisitor>
4953 void Foreign::ForeignIterateBody() {
4954 StaticVisitor::VisitExternalReference(
4955 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4959 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4960 typedef v8::String::ExternalAsciiStringResource Resource;
4961 v->VisitExternalAsciiString(
4962 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4966 template<typename StaticVisitor>
4967 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4968 typedef v8::String::ExternalAsciiStringResource Resource;
4969 StaticVisitor::VisitExternalAsciiString(
4970 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4974 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4975 typedef v8::String::ExternalStringResource Resource;
4976 v->VisitExternalTwoByteString(
4977 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4981 template<typename StaticVisitor>
4982 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4983 typedef v8::String::ExternalStringResource Resource;
4984 StaticVisitor::VisitExternalTwoByteString(
4985 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4988 #define SLOT_ADDR(obj, offset) \
4989 reinterpret_cast<Object**>((obj)->address() + offset)
4991 template<int start_offset, int end_offset, int size>
4992 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
4995 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
4999 template<int start_offset>
5000 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
5003 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
5009 #undef CAST_ACCESSOR
5010 #undef INT_ACCESSORS
5012 #undef ACCESSORS_TO_SMI
5013 #undef SMI_ACCESSORS
5015 #undef BOOL_ACCESSORS
5019 #undef WRITE_BARRIER
5020 #undef CONDITIONAL_WRITE_BARRIER
5021 #undef READ_DOUBLE_FIELD
5022 #undef WRITE_DOUBLE_FIELD
5023 #undef READ_INT_FIELD
5024 #undef WRITE_INT_FIELD
5025 #undef READ_INTPTR_FIELD
5026 #undef WRITE_INTPTR_FIELD
5027 #undef READ_UINT32_FIELD
5028 #undef WRITE_UINT32_FIELD
5029 #undef READ_SHORT_FIELD
5030 #undef WRITE_SHORT_FIELD
5031 #undef READ_BYTE_FIELD
5032 #undef WRITE_BYTE_FIELD
5035 } } // namespace v8::internal
5037 #endif // V8_OBJECTS_INL_H_