1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
#include "incremental-marking.h"

namespace v8 {
namespace internal {
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
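
// TYPE_CHECKER defines Object::Is##type() as a comparison of the instance
// type stored in the object's map against a single expected instance type.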
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
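
// ACCESSORS defines a typed getter for a tagged field together with a setter
// that performs the raw store and then a conditional write barrier.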
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
97 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
98 #define ACCESSORS_TO_SMI(holder, name, offset) \
99 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
100 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
101 WRITE_FIELD(this, offset, value); \
105 // Getter that returns a Smi as an int and writes an int as a Smi.
106 #define SMI_ACCESSORS(holder, name, offset) \
107 int holder::name() { \
108 Object* value = READ_FIELD(this, offset); \
109 return Smi::cast(value)->value(); \
111 void holder::set_##name(int value) { \
112 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define BOOL_GETTER(holder, field, name, offset) \
117 bool holder::name() { \
118 return BooleanBit::get(field(), offset); \
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123 bool holder::name() { \
124 return BooleanBit::get(field(), offset); \
126 void holder::set_##name(bool value) { \
127 set_##field(BooleanBit::set(field(), offset, value)); \
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
                                         ElementsKind to_kind) {
  if (to_kind == FAST_ELEMENTS) {
    return from_kind == FAST_SMI_ONLY_ELEMENTS ||
        from_kind == FAST_DOUBLE_ELEMENTS;
  }
  return to_kind == FAST_DOUBLE_ELEMENTS &&
      from_kind == FAST_SMI_ONLY_ELEMENTS;
}
143 bool Object::IsFixedArrayBase() {
144 return IsFixedArray() || IsFixedDoubleArray();
148 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
149 // There is a constraint on the object; check.
150 if (!this->IsJSObject()) return false;
151 // Fetch the constructor function of the object.
152 Object* cons_obj = JSObject::cast(this)->map()->constructor();
153 if (!cons_obj->IsJSFunction()) return false;
154 JSFunction* fun = JSFunction::cast(cons_obj);
155 // Iterate through the chain of inheriting function templates to
156 // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}
167 bool Object::IsSmi() {
168 return HAS_SMI_TAG(this);
172 bool Object::IsHeapObject() {
173 return Internals::HasHeapObjectTag(this);
177 bool Object::NonFailureIsHeapObject() {
178 ASSERT(!this->IsFailure());
179 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
183 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
186 bool Object::IsString() {
187 return Object::IsHeapObject()
188 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
192 bool Object::IsSpecObject() {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
198 bool Object::IsSpecFunction() {
199 if (!Object::IsHeapObject()) return false;
200 InstanceType type = HeapObject::cast(this)->map()->instance_type();
201 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
205 bool Object::IsSymbol() {
206 if (!this->IsHeapObject()) return false;
207 uint32_t type = HeapObject::cast(this)->map()->instance_type();
208 // Because the symbol tag is non-zero and no non-string types have the
209 // symbol bit set we can test for symbols with a very simple test
211 STATIC_ASSERT(kSymbolTag != 0);
212 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
213 return (type & kIsSymbolMask) != 0;
217 bool Object::IsConsString() {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();
223 bool Object::IsSlicedString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();
229 bool Object::IsSeqString() {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();
235 bool Object::IsSeqAsciiString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsAsciiRepresentation();
242 bool Object::IsSeqTwoByteString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();
249 bool Object::IsExternalString() {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();
255 bool Object::IsExternalAsciiString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsAsciiRepresentation();
262 bool Object::IsExternalTwoByteString() {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
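
// StringShape caches the relevant instance-type bits of a string's map, so
// repeated representation and encoding checks need not reload the map.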
273 StringShape::StringShape(String* str)
274 : type_(str->map()->instance_type()) {
276 ASSERT((type_ & kIsNotStringMask) == kStringTag);
280 StringShape::StringShape(Map* map)
281 : type_(map->instance_type()) {
283 ASSERT((type_ & kIsNotStringMask) == kStringTag);
287 StringShape::StringShape(InstanceType t)
288 : type_(static_cast<uint32_t>(t)) {
290 ASSERT((type_ & kIsNotStringMask) == kStringTag);
294 bool StringShape::IsSymbol() {
296 STATIC_ASSERT(kSymbolTag != 0);
297 return (type_ & kIsSymbolMask) != 0;
301 bool String::IsAsciiRepresentation() {
302 uint32_t type = map()->instance_type();
303 return (type & kStringEncodingMask) == kAsciiStringTag;
307 bool String::IsTwoByteRepresentation() {
308 uint32_t type = map()->instance_type();
309 return (type & kStringEncodingMask) == kTwoByteStringTag;
313 bool String::IsAsciiRepresentationUnderneath() {
314 uint32_t type = map()->instance_type();
315 STATIC_ASSERT(kIsIndirectStringTag != 0);
316 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsAsciiRepresentation();
  }
}
329 bool String::IsTwoByteRepresentationUnderneath() {
330 uint32_t type = map()->instance_type();
331 STATIC_ASSERT(kIsIndirectStringTag != 0);
332 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
345 bool String::HasOnlyAsciiChars() {
346 uint32_t type = map()->instance_type();
347 return (type & kStringEncodingMask) == kAsciiStringTag ||
348 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
352 bool StringShape::IsCons() {
353 return (type_ & kStringRepresentationMask) == kConsStringTag;
357 bool StringShape::IsSliced() {
358 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
362 bool StringShape::IsIndirect() {
363 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
367 bool StringShape::IsExternal() {
368 return (type_ & kStringRepresentationMask) == kExternalStringTag;
372 bool StringShape::IsSequential() {
373 return (type_ & kStringRepresentationMask) == kSeqStringTag;
377 StringRepresentationTag StringShape::representation_tag() {
378 uint32_t tag = (type_ & kStringRepresentationMask);
379 return static_cast<StringRepresentationTag>(tag);
383 uint32_t StringShape::encoding_tag() {
384 return type_ & kStringEncodingMask;
388 uint32_t StringShape::full_representation_tag() {
389 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
393 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
394 Internals::kFullStringRepresentationMask);
397 bool StringShape::IsSequentialAscii() {
398 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
402 bool StringShape::IsSequentialTwoByte() {
403 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
407 bool StringShape::IsExternalAscii() {
408 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
412 bool StringShape::IsExternalTwoByte() {
413 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
417 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
418 Internals::kExternalTwoByteRepresentationTag);
uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
431 bool Object::IsNumber() {
432 return IsSmi() || IsHeapNumber();
436 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
437 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
440 bool Object::IsFiller() {
441 if (!Object::IsHeapObject()) return false;
442 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
443 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
447 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}
460 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
461 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
462 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
463 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
464 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
465 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
466 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
467 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
470 bool MaybeObject::IsFailure() {
471 return HAS_FAILURE_TAG(this);
475 bool MaybeObject::IsRetryAfterGC() {
476 return HAS_FAILURE_TAG(this)
477 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
481 bool MaybeObject::IsOutOfMemory() {
482 return HAS_FAILURE_TAG(this)
483 && Failure::cast(this)->IsOutOfMemoryException();
487 bool MaybeObject::IsException() {
488 return this == Failure::Exception();
492 bool MaybeObject::IsTheHole() {
493 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
497 Failure* Failure::cast(MaybeObject* obj) {
498 ASSERT(HAS_FAILURE_TAG(obj));
499 return reinterpret_cast<Failure*>(obj);
503 bool Object::IsJSReceiver() {
504 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
505 return IsHeapObject() &&
506 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
510 bool Object::IsJSObject() {
511 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
512 return IsHeapObject() &&
513 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
517 bool Object::IsJSProxy() {
518 if (!Object::IsHeapObject()) return false;
519 InstanceType type = HeapObject::cast(this)->map()->instance_type();
520 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
524 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
525 TYPE_CHECKER(JSSet, JS_SET_TYPE)
526 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
527 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
528 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
529 TYPE_CHECKER(Map, MAP_TYPE)
530 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
531 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
534 bool Object::IsDescriptorArray() {
535 return IsFixedArray();
539 bool Object::IsDeoptimizationInputData() {
540 // Must be a fixed array.
541 if (!IsFixedArray()) return false;
  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
bool Object::IsTypeFeedbackCells() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a cache cells array.  Since this is used for asserts we can check that
  // the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
bool Object::IsContext() {
  if (Object::IsHeapObject()) {
    Map* map = HeapObject::cast(this)->map();
    Heap* heap = map->GetHeap();
    return (map == heap->function_context_map() ||
            map == heap->catch_context_map() ||
            map == heap->with_context_map() ||
            map == heap->global_context_map() ||
            map == heap->block_context_map());
  }
  return false;
}
590 bool Object::IsGlobalContext() {
591 return Object::IsHeapObject() &&
592 HeapObject::cast(this)->map() ==
593 HeapObject::cast(this)->GetHeap()->global_context_map();
597 bool Object::IsScopeInfo() {
598 return Object::IsHeapObject() &&
599 HeapObject::cast(this)->map() ==
600 HeapObject::cast(this)->GetHeap()->scope_info_map();
604 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
607 template <> inline bool Is<JSFunction>(Object* obj) {
608 return obj->IsJSFunction();
612 TYPE_CHECKER(Code, CODE_TYPE)
613 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
614 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
615 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
616 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
617 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
618 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
621 bool Object::IsStringWrapper() {
622 return IsJSValue() && JSValue::cast(this)->value()->IsString();
626 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
629 bool Object::IsBoolean() {
630 return IsOddball() &&
631 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
635 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
636 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
639 template <> inline bool Is<JSArray>(Object* obj) {
640 return obj->IsJSArray();
644 bool Object::IsHashTable() {
645 return Object::IsHeapObject() &&
646 HeapObject::cast(this)->map() ==
647 HeapObject::cast(this)->GetHeap()->hash_table_map();
651 bool Object::IsDictionary() {
652 return IsHashTable() &&
653 this != HeapObject::cast(this)->GetHeap()->symbol_table();
657 bool Object::IsSymbolTable() {
658 return IsHashTable() && this ==
659 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
696 bool Object::IsCompilationCacheTable() {
697 return IsHashTable();
701 bool Object::IsCodeCacheHashTable() {
702 return IsHashTable();
706 bool Object::IsPolymorphicCodeCacheHashTable() {
707 return IsHashTable();
711 bool Object::IsMapCache() {
712 return IsHashTable();
716 bool Object::IsPrimitive() {
717 return IsOddball() || IsNumber() || IsString();
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}
730 bool Object::IsGlobalObject() {
731 if (!IsHeapObject()) return false;
733 InstanceType type = HeapObject::cast(this)->map()->instance_type();
734 return type == JS_GLOBAL_OBJECT_TYPE ||
735 type == JS_BUILTINS_OBJECT_TYPE;
739 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
740 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
743 bool Object::IsUndetectableObject() {
744 return IsHeapObject()
745 && HeapObject::cast(this)->map()->is_undetectable();
749 bool Object::IsAccessCheckNeeded() {
750 return IsHeapObject()
751 && HeapObject::cast(this)->map()->is_access_check_needed();
755 bool Object::IsStruct() {
756 if (!IsHeapObject()) return false;
757 switch (HeapObject::cast(this)->map()->instance_type()) {
758 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
759 STRUCT_LIST(MAKE_STRUCT_CASE)
760 #undef MAKE_STRUCT_CASE
761 default: return false;
766 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
767 bool Object::Is##Name() { \
768 return Object::IsHeapObject() \
769 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
771 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
772 #undef MAKE_STRUCT_PREDICATE
775 bool Object::IsUndefined() {
776 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
780 bool Object::IsNull() {
781 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
785 bool Object::IsTheHole() {
786 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
790 bool Object::IsTrue() {
791 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
795 bool Object::IsFalse() {
796 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
800 bool Object::IsArgumentsMarker() {
801 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}
813 bool Object::IsNaN() {
814 return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
818 MaybeObject* Object::ToSmi() {
819 if (IsSmi()) return this;
820 if (IsHeapNumber()) {
821 double value = HeapNumber::cast(this)->value();
822 int int_value = FastD2I(value);
823 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
824 return Smi::FromInt(int_value);
827 return Failure::Exception();
831 bool Object::HasSpecificClassOf(String* name) {
832 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
836 MaybeObject* Object::GetElement(uint32_t index) {
837 // GetElement can trigger a getter which can cause allocation.
838 // This was not always the case. This ASSERT is here to catch
839 // leftover incorrect uses.
840 ASSERT(HEAP->IsAllocationAllowed());
841 return GetElementWithReceiver(this, index);
Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}
854 MaybeObject* Object::GetProperty(String* key) {
855 PropertyAttributes attributes;
856 return GetPropertyWithReceiver(this, key, &attributes);
860 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
861 return GetPropertyWithReceiver(this, key, attributes);
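
// Raw field access.  FIELD_ADDR turns a tagged HeapObject pointer plus a
// field offset into an untagged address by subtracting the heap-object tag;
// the READ_/WRITE_ macros below perform plain typed loads and stores on the
// resulting address.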
865 #define FIELD_ADDR(p, offset) \
866 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
868 #define READ_FIELD(p, offset) \
869 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
871 #define WRITE_FIELD(p, offset, value) \
872 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
874 #define WRITE_BARRIER(heap, object, offset, value) \
875 heap->incremental_marking()->RecordWrite( \
876 object, HeapObject::RawField(object, offset), value); \
877 if (heap->InNewSpace(value)) { \
878 heap->RecordWrite(object->address(), offset); \
881 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
882 if (mode == UPDATE_WRITE_BARRIER) { \
883 heap->incremental_marking()->RecordWrite( \
884 object, HeapObject::RawField(object, offset), value); \
885 if (heap->InNewSpace(value)) { \
886 heap->RecordWrite(object->address(), offset); \
890 #ifndef V8_TARGET_ARCH_MIPS
891 #define READ_DOUBLE_FIELD(p, offset) \
892 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
893 #else // V8_TARGET_ARCH_MIPS
// Prevent gcc from using load-double (mips ldc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline double read_double_field(void* p, int offset) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
  return c.d;
}
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
906 #endif // V8_TARGET_ARCH_MIPS
908 #ifndef V8_TARGET_ARCH_MIPS
909 #define WRITE_DOUBLE_FIELD(p, offset, value) \
910 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using store-double (mips sdc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
926 #endif // V8_TARGET_ARCH_MIPS
929 #define READ_INT_FIELD(p, offset) \
930 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
932 #define WRITE_INT_FIELD(p, offset, value) \
933 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
935 #define READ_INTPTR_FIELD(p, offset) \
936 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
938 #define WRITE_INTPTR_FIELD(p, offset, value) \
939 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
941 #define READ_UINT32_FIELD(p, offset) \
942 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
944 #define WRITE_UINT32_FIELD(p, offset, value) \
945 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
947 #define READ_INT64_FIELD(p, offset) \
948 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
950 #define WRITE_INT64_FIELD(p, offset, value) \
951 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
953 #define READ_SHORT_FIELD(p, offset) \
954 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
956 #define WRITE_SHORT_FIELD(p, offset, value) \
957 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
959 #define READ_BYTE_FIELD(p, offset) \
960 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
962 #define WRITE_BYTE_FIELD(p, offset, value) \
963 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
966 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
967 return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}
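
// Smi::FromInt tags an integer by shifting it left by the combined Smi tag
// and shift size and or-ing in the Smi tag; Smi::value() above undoes this.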
976 Smi* Smi::FromInt(int value) {
977 ASSERT(Smi::IsValid(value));
978 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
979 intptr_t tagged_value =
980 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
981 return reinterpret_cast<Smi*>(tagged_value);
985 Smi* Smi::FromIntptr(intptr_t value) {
986 ASSERT(Smi::IsValid(value));
987 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
988 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
992 Failure::Type Failure::type() const {
993 return static_cast<Type>(value() & kFailureTypeTagMask);
997 bool Failure::IsInternalError() const {
998 return type() == INTERNAL_ERROR;
1002 bool Failure::IsOutOfMemoryException() const {
1003 return type() == OUT_OF_MEMORY_EXCEPTION;
1007 AllocationSpace Failure::allocation_space() const {
1008 ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}
1014 Failure* Failure::InternalError() {
1015 return Construct(INTERNAL_ERROR);
1019 Failure* Failure::Exception() {
1020 return Construct(EXCEPTION);
1024 Failure* Failure::OutOfMemoryException() {
1025 return Construct(OUT_OF_MEMORY_EXCEPTION);
1029 intptr_t Failure::value() const {
1030 return static_cast<intptr_t>(
1031 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1035 Failure* Failure::RetryAfterGC() {
1036 return RetryAfterGC(NEW_SPACE);
1040 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1041 ASSERT((space & ~kSpaceTagMask) == 0);
1042 return Construct(RETRY_AFTER_GC, space);
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as a tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension.  To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0.
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
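
// A MapWord is the first word of a heap object.  It normally holds the raw
// map pointer, but during GC it may hold a forwarding address instead; the
// encoding below strips the heap-object tag so a forwarding address looks
// like a Smi, which is what IsForwardingAddress() tests for.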
1081 MapWord MapWord::FromMap(Map* map) {
1082 return MapWord(reinterpret_cast<uintptr_t>(map));
1086 Map* MapWord::ToMap() {
1087 return reinterpret_cast<Map*>(value_);
1091 bool MapWord::IsForwardingAddress() {
1092 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1096 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1097 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1098 return MapWord(reinterpret_cast<uintptr_t>(raw));
1102 HeapObject* MapWord::ToForwardingAddress() {
1103 ASSERT(IsForwardingAddress());
1104 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1109 void HeapObject::VerifyObjectField(int offset) {
1110 VerifyPointer(READ_FIELD(this, offset));
1113 void HeapObject::VerifySmiField(int offset) {
1114 ASSERT(READ_FIELD(this, offset)->IsSmi());
Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}
1128 Isolate* HeapObject::GetIsolate() {
1129 return GetHeap()->isolate();
1133 Map* HeapObject::map() {
1134 return map_word().ToMap();
1138 void HeapObject::set_map(Map* value) {
1139 set_map_word(MapWord::FromMap(value));
1140 if (value != NULL) {
1141 // TODO(1600) We are passing NULL as a slot because maps can never be on
1142 // evacuation candidate.
1143 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1148 // Unsafe accessor omitting write barrier.
1149 void HeapObject::set_map_no_write_barrier(Map* value) {
1150 set_map_word(MapWord::FromMap(value));
1154 MapWord HeapObject::map_word() {
1155 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1159 void HeapObject::set_map_word(MapWord map_word) {
1160 // WRITE_FIELD does not invoke write barrier, but there is no need
1162 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1166 HeapObject* HeapObject::FromAddress(Address address) {
1167 ASSERT_TAG_ALIGNED(address);
1168 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1172 Address HeapObject::address() {
1173 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1177 int HeapObject::Size() {
1178 return SizeFromMap(map());
1182 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1183 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1184 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1188 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1189 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1193 double HeapNumber::value() {
1194 return READ_DOUBLE_FIELD(this, kValueOffset);
1198 void HeapNumber::set_value(double value) {
1199 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1203 int HeapNumber::get_exponent() {
1204 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1205 kExponentShift) - kExponentBias;
1209 int HeapNumber::get_sign() {
1210 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1214 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1217 Object** FixedArray::GetFirstElementAddress() {
1218 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}
1233 FixedArrayBase* JSObject::elements() {
1234 Object* array = READ_FIELD(this, kElementsOffset);
1235 return static_cast<FixedArrayBase*>(array);
void JSObject::ValidateSmiOnlyElements() {
#if DEBUG
  if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
    Heap* heap = GetHeap();
    // Don't use elements, since integrity checks will fail if there
    // are filler pointers in the array.
    FixedArray* fixed_array =
        reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
    Map* map = fixed_array->map();
    // Arrays that have been shifted in place can't be verified.
    if (map != heap->raw_unchecked_one_pointer_filler_map() &&
        map != heap->raw_unchecked_two_pointer_filler_map() &&
        map != heap->free_space_map()) {
      for (int i = 0; i < fixed_array->length(); i++) {
        Object* current = fixed_array->get(i);
        ASSERT(current->IsSmi() || current->IsTheHole());
      }
    }
  }
#endif
}
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
#if DEBUG
  ValidateSmiOnlyElements();
#endif
  if ((map()->elements_kind() != FAST_ELEMENTS)) {
    return TransitionElementsKind(FAST_ELEMENTS);
  }
  return this;
}
MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count,
                                                EnsureElementsMode mode) {
1275 ElementsKind current_kind = map()->elements_kind();
1276 ElementsKind target_kind = current_kind;
1277 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1278 if (current_kind == FAST_ELEMENTS) return this;
1280 Heap* heap = GetHeap();
1281 Object* the_hole = heap->the_hole_value();
1282 Object* heap_number_map = heap->heap_number_map();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (!current->IsSmi() && current != the_hole) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
          HeapObject::cast(current)->map() == heap_number_map) {
        target_kind = FAST_DOUBLE_ELEMENTS;
      } else {
        target_kind = FAST_ELEMENTS;
        break;
      }
    }
  }

  if (target_kind != current_kind) {
    return TransitionElementsKind(target_kind);
  }
  return this;
}
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
                                                EnsureElementsMode mode) {
  if (elements->map() != GetHeap()->fixed_double_array_map()) {
    ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
           elements->map() == GetHeap()->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
    return EnsureCanContainElements(objects, elements->length(), mode);
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
    return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
  }
  return this;
}
MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
                                                ElementsKind to_kind) {
  Map* current_map = map();
  ElementsKind from_kind = current_map->elements_kind();

  if (from_kind == to_kind) return current_map;

  Context* global_context = isolate->context()->global_context();
  if (current_map == global_context->smi_js_array_map()) {
    if (to_kind == FAST_ELEMENTS) {
      return global_context->object_js_array_map();
    }
    if (to_kind == FAST_DOUBLE_ELEMENTS) {
      return global_context->double_js_array_map();
    }
    ASSERT(to_kind == DICTIONARY_ELEMENTS);
  }

  return GetElementsTransitionMapSlow(to_kind);
}
void JSObject::set_map_and_elements(Map* new_map,
                                    FixedArrayBase* value,
                                    WriteBarrierMode mode) {
  ASSERT(value->HasValidElements());
#if DEBUG
  ValidateSmiOnlyElements();
#endif
  if (new_map != NULL) {
    if (mode == UPDATE_WRITE_BARRIER) {
      set_map(new_map);
    } else {
      ASSERT(mode == SKIP_WRITE_BARRIER);
      set_map_no_write_barrier(new_map);
    }
  }
  ASSERT((map()->has_fast_elements() ||
          map()->has_fast_smi_only_elements() ||
          (value == GetHeap()->empty_fixed_array())) ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT((value == GetHeap()->empty_fixed_array()) ||
         (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
1374 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1375 set_map_and_elements(NULL, value, mode);
1379 void JSObject::initialize_properties() {
1380 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1381 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1385 void JSObject::initialize_elements() {
1386 ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
1387 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1388 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  ElementsKind elements_kind = FLAG_smi_only_arrays
      ? FAST_SMI_ONLY_ELEMENTS
      : FAST_ELEMENTS;
  MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
                                                    elements_kind);
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1406 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1407 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1410 byte Oddball::kind() {
1411 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1415 void Oddball::set_kind(byte value) {
1416 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1420 Object* JSGlobalPropertyCell::value() {
1421 return READ_FIELD(this, kValueOffset);
1425 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1426 // The write barrier is not used for global property cells.
1427 ASSERT(!val->IsJSGlobalPropertyCell());
1428 WRITE_FIELD(this, kValueOffset, val);
1432 int JSObject::GetHeaderSize() {
1433 InstanceType type = map()->instance_type();
1434 // Check for the most common kind of JavaScript object before
1435 // falling into the generic switch. This speeds up the internal
1436 // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
1468 int JSObject::GetInternalFieldCount() {
1469 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1470 // Make sure to adjust for the number of in-object properties. These
1471 // properties do contribute to the size, but are not internal fields.
1472 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1473 map()->inobject_properties();
1477 int JSObject::GetInternalFieldOffset(int index) {
1478 ASSERT(index < GetInternalFieldCount() && index >= 0);
1479 return GetHeaderSize() + (kPointerSize * index);
1483 Object* JSObject::GetInternalField(int index) {
1484 ASSERT(index < GetInternalFieldCount() && index >= 0);
1485 // Internal objects do follow immediately after the header, whereas in-object
1486 // properties are at the end of the object. Therefore there is no need
1487 // to adjust the index here.
1488 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1492 void JSObject::SetInternalField(int index, Object* value) {
1493 ASSERT(index < GetInternalFieldCount() && index >= 0);
1494 // Internal objects do follow immediately after the header, whereas in-object
1495 // properties are at the end of the object. Therefore there is no need
1496 // to adjust the index here.
1497 int offset = GetHeaderSize() + (kPointerSize * index);
1498 WRITE_FIELD(this, offset, value);
1499 WRITE_BARRIER(GetHeap(), this, offset, value);
1503 void JSObject::SetInternalField(int index, Smi* value) {
1504 ASSERT(index < GetInternalFieldCount() && index >= 0);
1505 // Internal objects do follow immediately after the header, whereas in-object
1506 // properties are at the end of the object. Therefore there is no need
1507 // to adjust the index here.
1508 int offset = GetHeaderSize() + (kPointerSize * index);
1509 WRITE_FIELD(this, offset, value);
1513 // Access fast-case object properties at index. The use of these routines
1514 // is needed to correctly distinguish between properties stored in-object and
1515 // properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}
Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}
1544 int JSObject::GetInObjectPropertyOffset(int index) {
1545 // Adjust for the number of properties stored in the object.
1546 index -= map()->inobject_properties();
1548 return map()->instance_size() + (index * kPointerSize);
1552 Object* JSObject::InObjectPropertyAt(int index) {
1553 // Adjust for the number of properties stored in the object.
1554 index -= map()->inobject_properties();
1556 int offset = map()->instance_size() + (index * kPointerSize);
1557 return READ_FIELD(this, offset);
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
1575 void JSObject::InitializeBody(Map* map,
1576 Object* pre_allocated_value,
1577 Object* filler_value) {
1578 ASSERT(!filler_value->IsHeapObject() ||
1579 !GetHeap()->InNewSpace(filler_value));
1580 ASSERT(!pre_allocated_value->IsHeapObject() ||
1581 !GetHeap()->InNewSpace(pre_allocated_value));
1582 int size = map->instance_size();
1583 int offset = kHeaderSize;
1584 if (filler_value != pre_allocated_value) {
1585 int pre_allocated = map->pre_allocated_property_fields();
1586 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
1593 WRITE_FIELD(this, offset, filler_value);
1594 offset += kPointerSize;
1599 bool JSObject::HasFastProperties() {
1600 return !properties()->IsDictionary();
1604 int JSObject::MaxFastProperties() {
1605 // Allow extra fast properties if the object has more than
1606 // kMaxFastProperties in-object properties. When this is the case,
1607 // it is very unlikely that the object is being used as a dictionary
1608 // and there is a good chance that allowing more map transitions
1609 // will be worth it.
1610 return Max(map()->inobject_properties(), kMaxFastProperties);
1614 void Struct::InitializeBody(int object_size) {
1615 Object* value = GetHeap()->undefined_value();
1616 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1617 WRITE_FIELD(this, offset, value);
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= (uint32_t)str->length()) return false;

  return true;
}
1654 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1655 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1656 return reinterpret_cast<FixedArrayBase*>(object);
1660 Object* FixedArray::get(int index) {
1661 ASSERT(index >= 0 && index < this->length());
1662 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1666 void FixedArray::set(int index, Smi* value) {
1667 ASSERT(map() != HEAP->fixed_cow_array_map());
1668 ASSERT(index >= 0 && index < this->length());
1669 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1670 int offset = kHeaderSize + index * kPointerSize;
1671 WRITE_FIELD(this, offset, value);
1675 void FixedArray::set(int index, Object* value) {
1676 ASSERT(map() != HEAP->fixed_cow_array_map());
1677 ASSERT(index >= 0 && index < this->length());
1678 int offset = kHeaderSize + index * kPointerSize;
1679 WRITE_FIELD(this, offset, value);
1680 WRITE_BARRIER(GetHeap(), this, offset, value);
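
// The hole in a FixedDoubleArray is represented by a single NaN bit pattern
// (kHoleNanInt64), so hole checks compare the raw 64-bit representation
// instead of using floating-point comparison.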
1684 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1685 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1689 inline double FixedDoubleArray::hole_nan_as_double() {
1690 return BitCast<double, uint64_t>(kHoleNanInt64);
1694 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1695 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1696 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1697 return OS::nan_value();
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}
1710 int64_t FixedDoubleArray::get_representation(int index) {
1711 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1712 map() != HEAP->fixed_array_map());
1713 ASSERT(index >= 0 && index < this->length());
1714 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}
1726 void FixedDoubleArray::set(int index, double value) {
1727 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1728 map() != HEAP->fixed_array_map());
1729 int offset = kHeaderSize + index * kDoubleSize;
1730 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1731 WRITE_DOUBLE_FIELD(this, offset, value);
1735 void FixedDoubleArray::set_the_hole(int index) {
1736 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1737 map() != HEAP->fixed_array_map());
1738 int offset = kHeaderSize + index * kDoubleSize;
1739 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1743 bool FixedDoubleArray::is_the_hole(int index) {
1744 int offset = kHeaderSize + index * kDoubleSize;
1745 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
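
// Stores into new-space objects normally need no write barrier, except while
// incremental marking is active; GetWriteBarrierMode captures that decision
// once for callers that perform several stores under AssertNoAllocation.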
1749 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1750 Heap* heap = GetHeap();
1751 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1752 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1753 return UPDATE_WRITE_BARRIER;
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
1760 ASSERT(map() != HEAP->fixed_cow_array_map());
1761 ASSERT(index >= 0 && index < this->length());
1762 int offset = kHeaderSize + index * kPointerSize;
1763 WRITE_FIELD(this, offset, value);
1764 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1772 ASSERT(index >= 0 && index < array->length());
1773 int offset = kHeaderSize + index * kPointerSize;
1774 WRITE_FIELD(array, offset, value);
1775 Heap* heap = array->GetHeap();
1776 if (heap->InNewSpace(value)) {
1777 heap->RecordWrite(array->address(), offset);
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1786 ASSERT(index >= 0 && index < array->length());
1787 ASSERT(!HEAP->InNewSpace(value));
1788 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1792 void FixedArray::set_undefined(int index) {
1793 ASSERT(map() != HEAP->fixed_cow_array_map());
1794 set_undefined(GetHeap(), index);
1798 void FixedArray::set_undefined(Heap* heap, int index) {
1799 ASSERT(index >= 0 && index < this->length());
1800 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1801 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1802 heap->undefined_value());
1806 void FixedArray::set_null(int index) {
1807 set_null(GetHeap(), index);
1811 void FixedArray::set_null(Heap* heap, int index) {
1812 ASSERT(index >= 0 && index < this->length());
1813 ASSERT(!heap->InNewSpace(heap->null_value()));
1814 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
1828 void FixedArray::set_unchecked(int index, Smi* value) {
1829 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1830 int offset = kHeaderSize + index * kPointerSize;
1831 WRITE_FIELD(this, offset, value);
void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
1839 int offset = kHeaderSize + index * kPointerSize;
1840 WRITE_FIELD(this, offset, value);
1841 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1845 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1846 ASSERT(index >= 0 && index < this->length());
1847 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1848 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1852 Object** FixedArray::data_start() {
1853 return HeapObject::RawField(this, kHeaderSize);
1857 bool DescriptorArray::IsEmpty() {
1858 ASSERT(this->IsSmi() ||
1859 this->length() > kFirstIndex ||
1860 this == HEAP->empty_descriptor_array());
1861 return this->IsSmi() || length() <= kFirstIndex;
1865 int DescriptorArray::bit_field3_storage() {
1866 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1867 return Smi::cast(storage)->value();
1870 void DescriptorArray::set_bit_field3_storage(int value) {
1872 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
                                                    int first,
                                                    int second) {
  Object* tmp = array->get(first);
1880 NoIncrementalWriteBarrierSet(array, first, array->get(second));
1881 NoIncrementalWriteBarrierSet(array, second, tmp);
1885 int DescriptorArray::Search(String* name) {
1886 SLOW_ASSERT(IsSortedNoDuplicates());
1888 // Check for empty descriptor array.
1889 int nof = number_of_descriptors();
1890 if (nof == 0) return kNotFound;
1892 // Fast case: do linear search for small arrays.
1893 const int kMaxElementsForLinearSearch = 8;
1894 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1895 return LinearSearch(name, nof);
1898 // Slow case: perform binary search.
1899 return BinarySearch(name, 0, nof - 1);
int DescriptorArray::SearchWithCache(String* name) {
  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name);
    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
  }
  return number;
}
1913 String* DescriptorArray::GetKey(int descriptor_number) {
1914 ASSERT(descriptor_number < number_of_descriptors());
1915 return String::cast(get(ToKeyIndex(descriptor_number)));
1919 Object* DescriptorArray::GetValue(int descriptor_number) {
1920 ASSERT(descriptor_number < number_of_descriptors());
1921 return GetContentArray()->get(ToValueIndex(descriptor_number));
1925 Smi* DescriptorArray::GetDetails(int descriptor_number) {
1926 ASSERT(descriptor_number < number_of_descriptors());
1927 return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
1931 PropertyType DescriptorArray::GetType(int descriptor_number) {
1932 ASSERT(descriptor_number < number_of_descriptors());
1933 return PropertyDetails(GetDetails(descriptor_number)).type();
1937 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1938 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1942 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1943 return JSFunction::cast(GetValue(descriptor_number));
1947 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1948 ASSERT(GetType(descriptor_number) == CALLBACKS);
1949 return GetValue(descriptor_number);
1953 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1954 ASSERT(GetType(descriptor_number) == CALLBACKS);
1955 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1956 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
1960 bool DescriptorArray::IsProperty(int descriptor_number) {
1961 Entry entry(this, descriptor_number);
1962 return IsPropertyDescriptor(&entry);
bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
  switch (GetType(descriptor_number)) {
    case MAP_TRANSITION:
    case CONSTANT_TRANSITION:
    case ELEMENTS_TRANSITION:
      return true;
    case CALLBACKS: {
      Object* value = GetValue(descriptor_number);
      if (!value->IsAccessorPair()) return false;
      AccessorPair* accessors = AccessorPair::cast(value);
      return accessors->getter()->IsMap() && accessors->setter()->IsMap();
    }
    case NORMAL:
    case FIELD:
    case CONSTANT_FUNCTION:
    case HANDLER:
    case INTERCEPTOR:
    case NULL_DESCRIPTOR:
      return false;
  }
  UNREACHABLE();  // Keep the compiler happy.
  return false;
}
1991 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
1992 return GetType(descriptor_number) == NULL_DESCRIPTOR;
1996 bool DescriptorArray::IsDontEnum(int descriptor_number) {
1997 return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
2001 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2002 desc->Init(GetKey(descriptor_number),
2003 GetValue(descriptor_number),
2004 PropertyDetails(GetDetails(descriptor_number)));
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               desc->GetKey());
  FixedArray* content_array = GetContentArray();
  NoIncrementalWriteBarrierSet(content_array,
                               ToValueIndex(descriptor_number),
                               desc->GetValue());
  NoIncrementalWriteBarrierSet(content_array,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}
2027 void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
2028 int first, int second) {
2029 NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
2030 FixedArray* content_array = GetContentArray();
2031 NoIncrementalWriteBarrierSwap(content_array,
2032 ToValueIndex(first),
2033 ToValueIndex(second));
2034 NoIncrementalWriteBarrierSwap(content_array,
2035 ToDetailsIndex(first),
2036 ToDetailsIndex(second));
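
// A WhitenessWitness certifies that a freshly allocated descriptor array is
// still white (unmarked), so the NoIncrementalWriteBarrier* helpers above may
// safely skip the incremental-marking write barrier while it is alive.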
2040 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2041 : marking_(array->GetHeap()->incremental_marking()) {
2042 marking_->EnterNoMarkingScope();
2043 if (array->number_of_descriptors() > 0) {
2044 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2045 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
2050 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2051 marking_->LeaveNoMarkingScope();
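
// ComputeCapacity doubles the requested size and rounds up to a power of two
// (with a minimum of 32).  EnsureCapacity relies on this so the table is
// never completely full, which guarantees that the probe loop in FindEntry
// below terminates at an empty entry.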
template<typename Shape, typename Key>
int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.
  }
  return capacity;
}
2066 template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}
2072 // Find entry for key otherwise return kNotFound.
2073 template<typename Shape, typename Key>
2074 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2075 uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
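// The dictionary's kMaxNumberKeyIndex slot packs two pieces of information
// into one Smi: the low kRequiresSlowElementsTagSize bits hold the
// requires-slow-elements flag, the remaining bits hold the largest numeric
// key seen so far.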
2091 bool SeededNumberDictionary::requires_slow_elements() {
2092 Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
2098 uint32_t SeededNumberDictionary::max_number_key() {
2099 ASSERT(!requires_slow_elements());
2100 Object* max_index_object = get(kMaxNumberKeyIndex);
2101 if (!max_index_object->IsSmi()) return 0;
2102 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2103 return value >> kRequiresSlowElementsTagSize;
2106 void SeededNumberDictionary::set_requires_slow_elements() {
2107 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
// ------------------------------------
// Cast operations
2115 CAST_ACCESSOR(FixedArray)
2116 CAST_ACCESSOR(FixedDoubleArray)
2117 CAST_ACCESSOR(DescriptorArray)
2118 CAST_ACCESSOR(DeoptimizationInputData)
2119 CAST_ACCESSOR(DeoptimizationOutputData)
2120 CAST_ACCESSOR(TypeFeedbackCells)
2121 CAST_ACCESSOR(SymbolTable)
2122 CAST_ACCESSOR(JSFunctionResultCache)
2123 CAST_ACCESSOR(NormalizedMapCache)
2124 CAST_ACCESSOR(ScopeInfo)
2125 CAST_ACCESSOR(CompilationCacheTable)
2126 CAST_ACCESSOR(CodeCacheHashTable)
2127 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2128 CAST_ACCESSOR(MapCache)
2129 CAST_ACCESSOR(String)
2130 CAST_ACCESSOR(SeqString)
2131 CAST_ACCESSOR(SeqAsciiString)
2132 CAST_ACCESSOR(SeqTwoByteString)
2133 CAST_ACCESSOR(SlicedString)
2134 CAST_ACCESSOR(ConsString)
2135 CAST_ACCESSOR(ExternalString)
2136 CAST_ACCESSOR(ExternalAsciiString)
2137 CAST_ACCESSOR(ExternalTwoByteString)
2138 CAST_ACCESSOR(JSReceiver)
2139 CAST_ACCESSOR(JSObject)
2141 CAST_ACCESSOR(HeapObject)
2142 CAST_ACCESSOR(HeapNumber)
2143 CAST_ACCESSOR(Oddball)
2144 CAST_ACCESSOR(JSGlobalPropertyCell)
2145 CAST_ACCESSOR(SharedFunctionInfo)
2147 CAST_ACCESSOR(JSFunction)
2148 CAST_ACCESSOR(GlobalObject)
2149 CAST_ACCESSOR(JSGlobalProxy)
2150 CAST_ACCESSOR(JSGlobalObject)
2151 CAST_ACCESSOR(JSBuiltinsObject)
2153 CAST_ACCESSOR(JSArray)
2154 CAST_ACCESSOR(JSRegExp)
2155 CAST_ACCESSOR(JSProxy)
2156 CAST_ACCESSOR(JSFunctionProxy)
2157 CAST_ACCESSOR(JSSet)
2158 CAST_ACCESSOR(JSMap)
2159 CAST_ACCESSOR(JSWeakMap)
2160 CAST_ACCESSOR(Foreign)
2161 CAST_ACCESSOR(ByteArray)
2162 CAST_ACCESSOR(FreeSpace)
2163 CAST_ACCESSOR(ExternalArray)
2164 CAST_ACCESSOR(ExternalByteArray)
2165 CAST_ACCESSOR(ExternalUnsignedByteArray)
2166 CAST_ACCESSOR(ExternalShortArray)
2167 CAST_ACCESSOR(ExternalUnsignedShortArray)
2168 CAST_ACCESSOR(ExternalIntArray)
2169 CAST_ACCESSOR(ExternalUnsignedIntArray)
2170 CAST_ACCESSOR(ExternalFloatArray)
2171 CAST_ACCESSOR(ExternalDoubleArray)
2172 CAST_ACCESSOR(ExternalPixelArray)
2173 CAST_ACCESSOR(Struct)
2176 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2177 STRUCT_LIST(MAKE_STRUCT_CAST)
2178 #undef MAKE_STRUCT_CAST
2181 template <typename Shape, typename Key>
2182 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2183 ASSERT(obj->IsHashTable());
2184 return reinterpret_cast<HashTable*>(obj);
2188 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2189 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2191 SMI_ACCESSORS(String, length, kLengthOffset)
2192 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2195 uint32_t String::hash_field() {
2196 return READ_UINT32_FIELD(this, kHashFieldOffset);
void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
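// Fast paths: identical pointers are equal, and two distinct symbols can
// never be equal because symbols are unique. Everything else falls through
// to SlowEquals().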
bool String::Equals(String* other) {
  if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}
2217 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2218 if (!StringShape(this).IsCons()) return this;
2219 ConsString* cons = ConsString::cast(this);
2220 if (cons->IsFlat()) return cons->first();
2221 return SlowTryFlatten(pretenure);
2225 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2226 MaybeObject* flat = TryFlatten(pretenure);
2227 Object* successfully_flattened;
2228 if (!flat->ToObject(&successfully_flattened)) return this;
2229 return String::cast(successfully_flattened);
2233 uint16_t String::Get(int index) {
2234 ASSERT(index >= 0 && index < length());
2235 switch (StringShape(this).full_representation_tag()) {
2236 case kSeqStringTag | kAsciiStringTag:
2237 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2238 case kSeqStringTag | kTwoByteStringTag:
2239 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2240 case kConsStringTag | kAsciiStringTag:
2241 case kConsStringTag | kTwoByteStringTag:
2242 return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kAsciiStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
2259 void String::Set(int index, uint16_t value) {
2260 ASSERT(index >= 0 && index < length());
2261 ASSERT(StringShape(this).IsSequential());
2263 return this->IsAsciiRepresentation()
2264 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2265 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2269 bool String::IsFlat() {
2270 if (!StringShape(this).IsCons()) return true;
2271 return ConsString::cast(this)->second()->length() == 0;
2275 String* String::GetUnderlying() {
2276 // Giving direct access to underlying string only makes sense if the
2277 // wrapping string is already flattened.
2278 ASSERT(this->IsFlat());
2279 ASSERT(StringShape(this).IsIndirect());
2280 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2281 const int kUnderlyingOffset = SlicedString::kParentOffset;
2282 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2286 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2287 ASSERT(index >= 0 && index < length());
2288 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2292 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2293 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2294 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2295 static_cast<byte>(value));
2299 Address SeqAsciiString::GetCharsAddress() {
2300 return FIELD_ADDR(this, kHeaderSize);
2304 char* SeqAsciiString::GetChars() {
2305 return reinterpret_cast<char*>(GetCharsAddress());
2309 Address SeqTwoByteString::GetCharsAddress() {
2310 return FIELD_ADDR(this, kHeaderSize);
2314 uc16* SeqTwoByteString::GetChars() {
2315 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2319 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2320 ASSERT(index >= 0 && index < length());
2321 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2325 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2326 ASSERT(index >= 0 && index < length());
2327 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2331 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2332 return SizeFor(length());
2336 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2337 return SizeFor(length());
2341 String* SlicedString::parent() {
2342 return String::cast(READ_FIELD(this, kParentOffset));
2346 void SlicedString::set_parent(String* parent) {
2347 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2348 WRITE_FIELD(this, kParentOffset, parent);
2352 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2355 String* ConsString::first() {
2356 return String::cast(READ_FIELD(this, kFirstOffset));
2360 Object* ConsString::unchecked_first() {
2361 return READ_FIELD(this, kFirstOffset);
2365 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2366 WRITE_FIELD(this, kFirstOffset, value);
2367 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2371 String* ConsString::second() {
2372 return String::cast(READ_FIELD(this, kSecondOffset));
2376 Object* ConsString::unchecked_second() {
2377 return READ_FIELD(this, kSecondOffset);
2381 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2382 WRITE_FIELD(this, kSecondOffset, value);
2383 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2387 bool ExternalString::is_short() {
2388 InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
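// External strings keep a pointer to their Resource. For regular (non-short)
// external strings the character data pointer is additionally cached in the
// object, so update_data_cache() must be called whenever the resource changes.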
2393 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2394 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2398 void ExternalAsciiString::update_data_cache() {
2399 if (is_short()) return;
2400 const char** data_field =
2401 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2402 *data_field = resource()->data();
2406 void ExternalAsciiString::set_resource(
2407 const ExternalAsciiString::Resource* resource) {
2408 *reinterpret_cast<const Resource**>(
2409 FIELD_ADDR(this, kResourceOffset)) = resource;
2410 if (resource != NULL) update_data_cache();
2414 const char* ExternalAsciiString::GetChars() {
2415 return resource()->data();
2419 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
2420 ASSERT(index >= 0 && index < length());
2421 return GetChars()[index];
2425 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2426 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2430 void ExternalTwoByteString::update_data_cache() {
2431 if (is_short()) return;
2432 const uint16_t** data_field =
2433 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2434 *data_field = resource()->data();
2438 void ExternalTwoByteString::set_resource(
2439 const ExternalTwoByteString::Resource* resource) {
2440 *reinterpret_cast<const Resource**>(
2441 FIELD_ADDR(this, kResourceOffset)) = resource;
2442 if (resource != NULL) update_data_cache();
2446 const uint16_t* ExternalTwoByteString::GetChars() {
2447 return resource()->data();
2451 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
2452 ASSERT(index >= 0 && index < length());
2453 return GetChars()[index];
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      int start) {
  return GetChars() + start;
}
2463 void JSFunctionResultCache::MakeZeroSize() {
2464 set_finger_index(kEntriesIndex);
2465 set_size(kEntriesIndex);
2469 void JSFunctionResultCache::Clear() {
2470 int cache_size = size();
2471 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2472 MemsetPointer(entries_start,
2473 GetHeap()->the_hole_value(),
2474 cache_size - kEntriesIndex);
2479 int JSFunctionResultCache::size() {
2480 return Smi::cast(get(kCacheSizeIndex))->value();
2484 void JSFunctionResultCache::set_size(int size) {
2485 set(kCacheSizeIndex, Smi::FromInt(size));
2489 int JSFunctionResultCache::finger_index() {
2490 return Smi::cast(get(kFingerIndex))->value();
2494 void JSFunctionResultCache::set_finger_index(int finger_index) {
2495 set(kFingerIndex, Smi::FromInt(finger_index));
2499 byte ByteArray::get(int index) {
2500 ASSERT(index >= 0 && index < this->length());
2501 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2505 void ByteArray::set(int index, byte value) {
2506 ASSERT(index >= 0 && index < this->length());
2507 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2511 int ByteArray::get_int(int index) {
2512 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2513 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2517 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2518 ASSERT_TAG_ALIGNED(address);
2519 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2523 Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
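// External array accessors follow a common pattern: get_scalar() reads the
// raw element from the backing store, get() boxes it as a Smi or heap number,
// and set() writes the raw value back.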
2528 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2529 return reinterpret_cast<uint8_t*>(external_pointer());
2533 uint8_t ExternalPixelArray::get_scalar(int index) {
2534 ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}
2540 MaybeObject* ExternalPixelArray::get(int index) {
2541 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2545 void ExternalPixelArray::set(int index, uint8_t value) {
2546 ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}
2552 void* ExternalArray::external_pointer() {
2553 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2554 return reinterpret_cast<void*>(ptr);
2558 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2559 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2560 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2564 int8_t ExternalByteArray::get_scalar(int index) {
2565 ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}
2571 MaybeObject* ExternalByteArray::get(int index) {
2572 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2576 void ExternalByteArray::set(int index, int8_t value) {
2577 ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}
2583 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2584 ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}
2590 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2591 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2595 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2596 ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}
2602 int16_t ExternalShortArray::get_scalar(int index) {
2603 ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}
2609 MaybeObject* ExternalShortArray::get(int index) {
2610 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2614 void ExternalShortArray::set(int index, int16_t value) {
2615 ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}
2621 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2622 ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}
2628 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2629 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2633 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2634 ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
2640 int32_t ExternalIntArray::get_scalar(int index) {
2641 ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}
2647 MaybeObject* ExternalIntArray::get(int index) {
2648 return GetHeap()->NumberFromInt32(get_scalar(index));
2652 void ExternalIntArray::set(int index, int32_t value) {
2653 ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}
2659 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2660 ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}
2666 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2667 return GetHeap()->NumberFromUint32(get_scalar(index));
2671 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2672 ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
2678 float ExternalFloatArray::get_scalar(int index) {
2679 ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}
2685 MaybeObject* ExternalFloatArray::get(int index) {
2686 return GetHeap()->NumberFromDouble(get_scalar(index));
2690 void ExternalFloatArray::set(int index, float value) {
2691 ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
2697 double ExternalDoubleArray::get_scalar(int index) {
2698 ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}
2704 MaybeObject* ExternalDoubleArray::get(int index) {
2705 return GetHeap()->NumberFromDouble(get_scalar(index));
2709 void ExternalDoubleArray::set(int index, double value) {
2710 ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
2716 int Map::visitor_id() {
2717 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2721 void Map::set_visitor_id(int id) {
2722 ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
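// The instance size is stored in words, so it is scaled by the pointer size
// when read here and divided back down in set_instance_size().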
2727 int Map::instance_size() {
2728 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2732 int Map::inobject_properties() {
2733 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2737 int Map::pre_allocated_property_fields() {
2738 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2742 int HeapObject::SizeFromMap(Map* map) {
2743 int instance_size = map->instance_size();
2744 if (instance_size != kVariableSizeSentinel) return instance_size;
2745 // We can ignore the "symbol" bit becase it is only set for symbols
2746 // and implies a string type.
2747 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2748 // Only inline the most frequent cases.
2749 if (instance_type == FIXED_ARRAY_TYPE) {
2750 return FixedArray::BodyDescriptor::SizeOf(map, this);
2752 if (instance_type == ASCII_STRING_TYPE) {
2753 return SeqAsciiString::SizeFor(
2754 reinterpret_cast<SeqAsciiString*>(this)->length());
2756 if (instance_type == BYTE_ARRAY_TYPE) {
2757 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2759 if (instance_type == FREE_SPACE_TYPE) {
2760 return reinterpret_cast<FreeSpace*>(this)->size();
2762 if (instance_type == STRING_TYPE) {
2763 return SeqTwoByteString::SizeFor(
2764 reinterpret_cast<SeqTwoByteString*>(this)->length());
2766 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2767 return FixedDoubleArray::SizeFor(
2768 reinterpret_cast<FixedDoubleArray*>(this)->length());
2770 ASSERT(instance_type == CODE_TYPE);
2771 return reinterpret_cast<Code*>(this)->CodeSize();
2775 void Map::set_instance_size(int value) {
2776 ASSERT_EQ(0, value & (kPointerSize - 1));
2777 value >>= kPointerSizeLog2;
2778 ASSERT(0 <= value && value < 256);
2779 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2783 void Map::set_inobject_properties(int value) {
2784 ASSERT(0 <= value && value < 256);
2785 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2789 void Map::set_pre_allocated_property_fields(int value) {
2790 ASSERT(0 <= value && value < 256);
2791 WRITE_BYTE_FIELD(this,
2792 kPreAllocatedPropertyFieldsOffset,
2793 static_cast<byte>(value));
2797 InstanceType Map::instance_type() {
2798 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2802 void Map::set_instance_type(InstanceType value) {
2803 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2807 int Map::unused_property_fields() {
2808 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2812 void Map::set_unused_property_fields(int value) {
2813 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2817 byte Map::bit_field() {
2818 return READ_BYTE_FIELD(this, kBitFieldOffset);
2822 void Map::set_bit_field(byte value) {
2823 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2827 byte Map::bit_field2() {
2828 return READ_BYTE_FIELD(this, kBitField2Offset);
2832 void Map::set_bit_field2(byte value) {
2833 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}
2846 bool Map::has_non_instance_prototype() {
2847 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
void Map::set_function_with_prototype(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
  }
}
2860 bool Map::function_with_prototype() {
2861 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2865 void Map::set_is_access_check_needed(bool access_check_needed) {
2866 if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}
2874 bool Map::is_access_check_needed() {
2875 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}
2887 bool Map::is_extensible() {
2888 return ((1 << kIsExtensible) & bit_field2()) != 0;
void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}
2900 bool Map::attached_to_shared_function_info() {
2901 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field3(bit_field3() | (1 << kIsShared));
  } else {
    set_bit_field3(bit_field3() & ~(1 << kIsShared));
  }
}
2913 bool Map::is_shared() {
2914 return ((1 << kIsShared) & bit_field3()) != 0;
void Map::set_named_interceptor_is_fallback(bool value) {
  if (value) {
    set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
  } else {
    set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
  }
}
2925 bool Map::named_interceptor_is_fallback() {
2926 return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
2930 JSFunction* Map::unchecked_constructor() {
2931 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2935 Code::Flags Code::flags() {
2936 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2940 void Code::set_flags(Code::Flags flags) {
2941 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2942 // Make sure that all call stubs have an arguments count.
2943 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2944 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2945 ExtractArgumentsCountFromFlags(flags) >= 0);
2946 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2950 Code::Kind Code::kind() {
2951 return ExtractKindFromFlags(flags());
2955 InlineCacheState Code::ic_state() {
2956 InlineCacheState result = ExtractICStateFromFlags(flags());
2957 // Only allow uninitialized or debugger states for non-IC code
2958 // objects. This is used in the debugger to determine whether or not
2959 // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}
2968 Code::ExtraICState Code::extra_ic_state() {
2969 ASSERT(is_inline_cache_stub());
2970 return ExtractExtraICStateFromFlags(flags());
2974 PropertyType Code::type() {
2975 return ExtractTypeFromFlags(flags());
2979 int Code::arguments_count() {
2980 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2981 return ExtractArgumentsCountFromFlags(flags());
2985 int Code::major_key() {
2986 ASSERT(kind() == STUB ||
2987 kind() == UNARY_OP_IC ||
2988 kind() == BINARY_OP_IC ||
2989 kind() == COMPARE_IC ||
2990 kind() == TO_BOOLEAN_IC);
2991 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2995 void Code::set_major_key(int major) {
2996 ASSERT(kind() == STUB ||
2997 kind() == UNARY_OP_IC ||
2998 kind() == BINARY_OP_IC ||
2999 kind() == COMPARE_IC ||
3000 kind() == TO_BOOLEAN_IC);
3001 ASSERT(0 <= major && major < 256);
3002 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
3006 bool Code::is_pregenerated() {
3007 return kind() == STUB && IsPregeneratedField::decode(flags());
void Code::set_is_pregenerated(bool value) {
  ASSERT(kind() == STUB);
  Flags f = flags();
  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
  set_flags(f);
}
3019 bool Code::optimizable() {
3020 ASSERT_EQ(FUNCTION, kind());
3021 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3025 void Code::set_optimizable(bool value) {
3026 ASSERT_EQ(FUNCTION, kind());
3027 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3031 bool Code::has_deoptimization_support() {
3032 ASSERT_EQ(FUNCTION, kind());
3033 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3034 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3038 void Code::set_has_deoptimization_support(bool value) {
3039 ASSERT_EQ(FUNCTION, kind());
3040 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3041 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3042 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3046 bool Code::has_debug_break_slots() {
3047 ASSERT_EQ(FUNCTION, kind());
3048 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3049 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3053 void Code::set_has_debug_break_slots(bool value) {
3054 ASSERT_EQ(FUNCTION, kind());
3055 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3056 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3057 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3061 bool Code::is_compiled_optimizable() {
3062 ASSERT_EQ(FUNCTION, kind());
3063 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3064 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3068 void Code::set_compiled_optimizable(bool value) {
3069 ASSERT_EQ(FUNCTION, kind());
3070 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3071 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3072 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3076 int Code::allow_osr_at_loop_nesting_level() {
3077 ASSERT_EQ(FUNCTION, kind());
3078 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3082 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3083 ASSERT_EQ(FUNCTION, kind());
3084 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3085 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3089 int Code::profiler_ticks() {
3090 ASSERT_EQ(FUNCTION, kind());
3091 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3095 void Code::set_profiler_ticks(int ticks) {
3096 ASSERT_EQ(FUNCTION, kind());
3097 ASSERT(ticks < 256);
3098 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3102 unsigned Code::stack_slots() {
3103 ASSERT(kind() == OPTIMIZED_FUNCTION);
3104 return READ_UINT32_FIELD(this, kStackSlotsOffset);
3108 void Code::set_stack_slots(unsigned slots) {
3109 ASSERT(kind() == OPTIMIZED_FUNCTION);
3110 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3114 unsigned Code::safepoint_table_offset() {
3115 ASSERT(kind() == OPTIMIZED_FUNCTION);
3116 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3120 void Code::set_safepoint_table_offset(unsigned offset) {
3121 ASSERT(kind() == OPTIMIZED_FUNCTION);
3122 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3123 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3127 unsigned Code::stack_check_table_offset() {
3128 ASSERT_EQ(FUNCTION, kind());
3129 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3133 void Code::set_stack_check_table_offset(unsigned offset) {
3134 ASSERT_EQ(FUNCTION, kind());
3135 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3136 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3140 CheckType Code::check_type() {
3141 ASSERT(is_call_stub() || is_keyed_call_stub());
3142 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3143 return static_cast<CheckType>(type);
3147 void Code::set_check_type(CheckType value) {
3148 ASSERT(is_call_stub() || is_keyed_call_stub());
3149 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3153 byte Code::unary_op_type() {
3154 ASSERT(is_unary_op_stub());
3155 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3159 void Code::set_unary_op_type(byte value) {
3160 ASSERT(is_unary_op_stub());
3161 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3165 byte Code::binary_op_type() {
3166 ASSERT(is_binary_op_stub());
3167 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3171 void Code::set_binary_op_type(byte value) {
3172 ASSERT(is_binary_op_stub());
3173 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3177 byte Code::binary_op_result_type() {
3178 ASSERT(is_binary_op_stub());
3179 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3183 void Code::set_binary_op_result_type(byte value) {
3184 ASSERT(is_binary_op_stub());
3185 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3189 byte Code::compare_state() {
3190 ASSERT(is_compare_ic_stub());
3191 return READ_BYTE_FIELD(this, kCompareStateOffset);
3195 void Code::set_compare_state(byte value) {
3196 ASSERT(is_compare_ic_stub());
3197 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3201 byte Code::to_boolean_state() {
3202 ASSERT(is_to_boolean_ic_stub());
3203 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3207 void Code::set_to_boolean_state(byte value) {
3208 ASSERT(is_to_boolean_ic_stub());
3209 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3213 bool Code::has_function_cache() {
3214 ASSERT(kind() == STUB);
3215 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3219 void Code::set_has_function_cache(bool flag) {
3220 ASSERT(kind() == STUB);
3221 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3225 bool Code::is_inline_cache_stub() {
3226 Kind kind = this->kind();
  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
}
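// Code::Flags packs kind, IC state, property type, extra IC state, argument
// count and cache-holder flag into a single integer; the Extract*FromFlags()
// helpers below decode the individual bit fields again.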
3231 Code::Flags Code::ComputeFlags(Kind kind,
3232 InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for call IC stubs or for store IC stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         kind == CALL_IC ||
         kind == STORE_IC ||
         kind == KEYED_STORE_IC);
3243 // Compute the bit mask.
3244 int bits = KindField::encode(kind)
3245 | ICStateField::encode(ic_state)
3246 | TypeField::encode(type)
3247 | ExtraICStateField::encode(extra_ic_state)
3248 | (argc << kArgumentsCountShift)
3249 | CacheHolderField::encode(holder);
3250 return static_cast<Flags>(bits);
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
3263 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3264 return KindField::decode(flags);
3268 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3269 return ICStateField::decode(flags);
3273 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3274 return ExtraICStateField::decode(flags);
3278 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3279 return TypeField::decode(flags);
3283 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3284 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3288 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3289 return CacheHolderField::decode(flags);
3293 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3294 int bits = flags & ~TypeField::kMask;
3295 return static_cast<Flags>(bits);
3299 Code* Code::GetCodeFromTargetAddress(Address address) {
3300 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3301 // GetCodeFromTargetAddress might be called when marking objects during mark
3302 // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
3316 Object* Map::prototype() {
3317 return READ_FIELD(this, kPrototypeOffset);
3321 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3322 ASSERT(value->IsNull() || value->IsJSReceiver());
3323 WRITE_FIELD(this, kPrototypeOffset, value);
3324 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3328 DescriptorArray* Map::instance_descriptors() {
3329 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    return GetHeap()->empty_descriptor_array();
  } else {
    return DescriptorArray::cast(object);
  }
}
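// The kInstanceDescriptorsOrBitField3Offset slot is overloaded: maps without
// descriptors store bit_field3 directly as a Smi, while maps with descriptors
// store the DescriptorArray and keep bit_field3 in its bit_field3_storage().
// The helpers below keep the two representations in sync.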
3338 void Map::init_instance_descriptors() {
3339 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3343 void Map::clear_instance_descriptors() {
3344 Object* object = READ_FIELD(this,
3345 kInstanceDescriptorsOrBitField3Offset);
  if (!object->IsSmi()) {
    WRITE_FIELD(
        this,
        kInstanceDescriptorsOrBitField3Offset,
        Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
  }
}
3355 void Map::set_instance_descriptors(DescriptorArray* value,
3356 WriteBarrierMode mode) {
3357 Object* object = READ_FIELD(this,
3358 kInstanceDescriptorsOrBitField3Offset);
3359 Heap* heap = GetHeap();
3360 if (value == heap->empty_descriptor_array()) {
    clear_instance_descriptors();
    return;
  }
  if (object->IsSmi()) {
    value->set_bit_field3_storage(Smi::cast(object)->value());
  } else {
    value->set_bit_field3_storage(
        DescriptorArray::cast(object)->bit_field3_storage());
  }
  ASSERT(!is_shared());
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
  CONDITIONAL_WRITE_BARRIER(
      heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
}
3378 int Map::bit_field3() {
3379 Object* object = READ_FIELD(this,
3380 kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    return Smi::cast(object)->value();
  } else {
    return DescriptorArray::cast(object)->bit_field3_storage();
  }
}
3389 void Map::set_bit_field3(int value) {
3390 ASSERT(Smi::IsValid(value));
3391 Object* object = READ_FIELD(this,
3392 kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    WRITE_FIELD(this,
                kInstanceDescriptorsOrBitField3Offset,
                Smi::FromInt(value));
  } else {
    DescriptorArray::cast(object)->set_bit_field3_storage(value);
  }
}
3403 FixedArray* Map::unchecked_prototype_transitions() {
3404 return reinterpret_cast<FixedArray*>(
3405 READ_FIELD(this, kPrototypeTransitionsOffset));
3409 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3410 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3411 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3413 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3414 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction,
          next_function_link,
          Object,
          kNextFunctionLinkOffset)
3420 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3421 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3422 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3424 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3426 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3427 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3428 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3429 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3430 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3432 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3433 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3435 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3436 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3437 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3439 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3440 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3441 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3442 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3443 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3444 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3445 ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
3447 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3448 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3450 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3451 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3453 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3454 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3455 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3456 kPropertyAccessorsOffset)
3457 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3458 kPrototypeTemplateOffset)
3459 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3460 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3461 kNamedPropertyHandlerOffset)
3462 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3463 kIndexedPropertyHandlerOffset)
3464 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3465 kInstanceTemplateOffset)
3466 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3467 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3468 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3469 kInstanceCallHandlerOffset)
3470 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3471 kAccessCheckInfoOffset)
3472 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3474 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3475 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3476 kInternalFieldCountOffset)
3478 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3479 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3481 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3483 ACCESSORS(Script, source, Object, kSourceOffset)
3484 ACCESSORS(Script, name, Object, kNameOffset)
3485 ACCESSORS(Script, id, Object, kIdOffset)
3486 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3487 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3488 ACCESSORS(Script, data, Object, kDataOffset)
3489 ACCESSORS(Script, context_data, Object, kContextOffset)
3490 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3491 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3492 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3493 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3494 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3495 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3496 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
3497 kEvalFrominstructionsOffsetOffset)
3499 #ifdef ENABLE_DEBUGGER_SUPPORT
3500 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3501 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3502 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3503 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3505 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3506 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3507 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
3511 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3512 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3513 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3514 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3515 kInstanceClassNameOffset)
3516 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3517 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3518 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3519 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3520 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3521 kThisPropertyAssignmentsOffset)
3522 SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
3525 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3526 kHiddenPrototypeBit)
3527 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3528 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3529 kNeedsAccessCheckBit)
3530 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3531 kReadOnlyPrototypeBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
3554 #if V8_HOST_ARCH_32_BIT
3555 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3556 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3557 kFormalParameterCountOffset)
3558 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3559 kExpectedNofPropertiesOffset)
3560 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3561 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3562 kStartPositionAndTypeOffset)
3563 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3564 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3565 kFunctionTokenPositionOffset)
3566 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3567 kCompilerHintsOffset)
3568 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3569 kThisPropertyAssignmentsCountOffset)
3570 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3571 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3572 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#else

#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)              \
3576 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3577 int holder::name() { \
3578 int value = READ_INT_FIELD(this, offset); \
3579 ASSERT(kHeapObjectTag == 1); \
3580 ASSERT((value & kHeapObjectTag) == 0); \
    return value >> 1;                                              \
  }                                                                 \
  void holder::set_##name(int value) {                              \
    ASSERT(kHeapObjectTag == 1);                                     \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                     \
           (value & 0xC0000000) == 0x000000000);                     \
    WRITE_INT_FIELD(this,                                            \
                    offset,                                          \
                    (value << 1) & ~kHeapObjectTag);                 \
  }
3592 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3593 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3594 INT_ACCESSORS(holder, name, offset)
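// On 64-bit hosts two of these int fields share a single pointer-sized slot.
// The LO half keeps its lowest bit clear (values are stored shifted left by
// one) so the slot never looks like a tagged heap pointer; the HI half is a
// plain int living at a kIntSize offset inside the same slot.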
3597 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3598 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3599 formal_parameter_count,
3600 kFormalParameterCountOffset)
3602 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3603 expected_nof_properties,
3604 kExpectedNofPropertiesOffset)
3605 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3607 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3608 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3609 start_position_and_type,
3610 kStartPositionAndTypeOffset)
3612 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3613 function_token_position,
3614 kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)
3619 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3620 this_property_assignments_count,
3621 kThisPropertyAssignmentsCountOffset)
3622 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3624 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)

#endif
3629 int SharedFunctionInfo::construction_count() {
3630 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3634 void SharedFunctionInfo::set_construction_count(int value) {
3635 ASSERT(0 <= value && value < 256);
3636 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)
3646 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3647 return initial_map() != GetHeap()->undefined_value();
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
3657 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
3669 LanguageMode SharedFunctionInfo::language_mode() {
3670 int hints = compiler_hints();
3671 if (BooleanBit::get(hints, kExtendedModeFunction)) {
3672 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
    return EXTENDED_MODE;
  }
  return BooleanBit::get(hints, kStrictModeFunction)
      ? STRICT_MODE : CLASSIC_MODE;
}
3680 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
3681 // We only allow language mode transitions that go set the same language mode
3682 // again or go up in the chain:
3683 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3684 ASSERT(this->language_mode() == CLASSIC_MODE ||
3685 this->language_mode() == language_mode ||
3686 language_mode == EXTENDED_MODE);
3687 int hints = compiler_hints();
3688 hints = BooleanBit::set(
3689 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3690 hints = BooleanBit::set(
3691 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3692 set_compiler_hints(hints);
3696 bool SharedFunctionInfo::is_classic_mode() {
3697 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
3700 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3701 kExtendedModeFunction)
3702 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3703 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3704 name_should_print_as_anonymous,
3705 kNameShouldPrintAsAnonymous)
3706 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3707 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3708 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
3711 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3713 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3714 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3716 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3718 bool Script::HasValidSource() {
3719 Object* src = this->source();
3720 if (!src->IsString()) return true;
3721 String* src_str = String::cast(src);
3722 if (!StringShape(src_str).IsExternal()) return true;
3723 if (src_str->IsAsciiRepresentation()) {
3724 return ExternalAsciiString::cast(src)->resource() != NULL;
3725 } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}
3732 void SharedFunctionInfo::DontAdaptArguments() {
3733 ASSERT(code()->kind() == Code::BUILTIN);
3734 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3738 int SharedFunctionInfo::start_position() {
3739 return start_position_and_type() >> kStartPositionShift;
3743 void SharedFunctionInfo::set_start_position(int start_position) {
3744 set_start_position_and_type((start_position << kStartPositionShift)
3745 | (start_position_and_type() & ~kStartPositionMask));
3749 Code* SharedFunctionInfo::code() {
3750 return Code::cast(READ_FIELD(this, kCodeOffset));
3754 Code* SharedFunctionInfo::unchecked_code() {
3755 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3759 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3760 WRITE_FIELD(this, kCodeOffset, value);
3761 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3765 ScopeInfo* SharedFunctionInfo::scope_info() {
3766 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3770 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3771 WriteBarrierMode mode) {
3772 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}
3787 bool SharedFunctionInfo::IsApiFunction() {
3788 return function_data()->IsFunctionTemplateInfo();
3792 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3793 ASSERT(IsApiFunction());
3794 return FunctionTemplateInfo::cast(function_data());
3798 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3799 return function_data()->IsSmi();
3803 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3804 ASSERT(HasBuiltinFunctionId());
3805 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3809 int SharedFunctionInfo::code_age() {
3810 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3814 void SharedFunctionInfo::set_code_age(int code_age) {
3815 int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3816 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3820 bool SharedFunctionInfo::has_deoptimization_support() {
3821 Code* code = this->code();
3822 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3826 bool JSFunction::IsBuiltin() {
3827 return context()->global()->IsJSBuiltinsObject();
3831 bool JSFunction::NeedsArgumentsAdaption() {
3832 return shared()->formal_parameter_count() !=
3833 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3837 bool JSFunction::IsOptimized() {
3838 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3842 bool JSFunction::IsOptimizable() {
3843 return code()->kind() == Code::FUNCTION && code()->optimizable();
3847 bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
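// A JSFunction stores the entry address of its code at kCodeEntryOffset
// rather than a tagged Code pointer; code() recovers the Code object from
// that address via Code::GetObjectFromEntryAddress().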
3852 Code* JSFunction::code() {
3853 return Code::cast(unchecked_code());
3857 Code* JSFunction::unchecked_code() {
3858 return reinterpret_cast<Code*>(
3859 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3863 void JSFunction::set_code(Code* value) {
3864 ASSERT(!HEAP->InNewSpace(value));
3865 Address entry = value->entry();
3866 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}
3874 void JSFunction::ReplaceCode(Code* code) {
3875 bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->global_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    context()->global_context()->RemoveOptimizedFunction(this);
  }
}
3891 Context* JSFunction::context() {
3892 return Context::cast(READ_FIELD(this, kContextOffset));
3896 Object* JSFunction::unchecked_context() {
3897 return READ_FIELD(this, kContextOffset);
3901 SharedFunctionInfo* JSFunction::unchecked_shared() {
3902 return reinterpret_cast<SharedFunctionInfo*>(
3903 READ_FIELD(this, kSharedFunctionInfoOffset));
3907 void JSFunction::set_context(Object* value) {
3908 ASSERT(value->IsUndefined() || value->IsContext());
3909 WRITE_FIELD(this, kContextOffset, value);
3910 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3913 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3914 kPrototypeOrInitialMapOffset)
3917 Map* JSFunction::initial_map() {
3918 return Map::cast(prototype_or_initial_map());
3922 void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}
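// For the global Array function this also pre-creates the elements-kind
// transition chain (FAST_SMI_ONLY -> FAST_DOUBLE -> FAST_ELEMENTS) and caches
// the three maps on the global context.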
MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
    Map* initial_map) {
  Context* global_context = context()->global_context();
3930 Object* array_function =
3931 global_context->get(Context::ARRAY_FUNCTION_INDEX);
3932 if (array_function->IsJSFunction() &&
3933 this == JSFunction::cast(array_function)) {
3934 ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
3936 MaybeObject* maybe_map = initial_map->CopyDropTransitions();
3937 Map* new_double_map = NULL;
3938 if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
3939 new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
    maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
                                                   new_double_map);
    if (maybe_map->IsFailure()) return maybe_map;
3944 maybe_map = new_double_map->CopyDropTransitions();
3945 Map* new_object_map = NULL;
3946 if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
3947 new_object_map->set_elements_kind(FAST_ELEMENTS);
    maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
                                                      new_object_map);
    if (maybe_map->IsFailure()) return maybe_map;
3952 global_context->set_smi_js_array_map(initial_map);
3953 global_context->set_double_js_array_map(new_double_map);
    global_context->set_object_js_array_map(new_object_map);
  }
  set_initial_map(initial_map);
  return this;
}
3961 bool JSFunction::has_initial_map() {
3962 return prototype_or_initial_map()->IsMap();
3966 bool JSFunction::has_instance_prototype() {
3967 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3971 bool JSFunction::has_prototype() {
3972 return map()->has_non_instance_prototype() || has_instance_prototype();
3976 Object* JSFunction::instance_prototype() {
3977 ASSERT(has_instance_prototype());
3978 if (has_initial_map()) return initial_map()->prototype();
3979 // When there is no initial map and the prototype is a JSObject, the
3980 // initial map field is used for the prototype field.
3981 return prototype_or_initial_map();
3985 Object* JSFunction::prototype() {
3986 ASSERT(has_prototype());
3987 // If the function's prototype property has been set to a non-JSObject
3988 // value, that value is stored in the constructor field of the map.
3989 if (map()->has_non_instance_prototype()) return map()->constructor();
3990 return instance_prototype();
3993 bool JSFunction::should_have_prototype() {
3994 return map()->function_with_prototype();
3998 bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
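// The literals_or_bindings slot is overloaded: ordinary functions keep their
// literals array there, while bound functions keep the bindings array.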
4003 FixedArray* JSFunction::literals() {
4004 ASSERT(!shared()->bound());
4005 return literals_or_bindings();
4009 void JSFunction::set_literals(FixedArray* literals) {
4010 ASSERT(!shared()->bound());
4011 set_literals_or_bindings(literals);
4015 FixedArray* JSFunction::function_bindings() {
4016 ASSERT(shared()->bound());
4017 return literals_or_bindings();
4021 void JSFunction::set_function_bindings(FixedArray* bindings) {
4022 ASSERT(shared()->bound());
4023 // Bound function literal may be initialized to the empty fixed array
4024 // before the bindings are set.
4025 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
4026 bindings->map() == GetHeap()->fixed_cow_array_map());
4027 set_literals_or_bindings(bindings);
4031 int JSFunction::NumberOfLiterals() {
4032 ASSERT(!shared()->bound());
4033 return literals()->length();
4037 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
4038 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4039 return READ_FIELD(this, OffsetOfFunctionWithId(id));
void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}
4051 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
4052 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4053 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}
4065 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
4066 ACCESSORS(JSProxy, hash, Object, kHashOffset)
4067 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
4068 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
4079 ACCESSORS(JSSet, table, Object, kTableOffset)
4080 ACCESSORS(JSMap, table, Object, kTableOffset)
4081 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
4082 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4085 Address Foreign::foreign_address() {
4086 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
4090 void Foreign::set_foreign_address(Address value) {
4091 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
4095 ACCESSORS(JSValue, value, Object, kValueOffset)
4098 JSValue* JSValue::cast(Object* obj) {
4099 ASSERT(obj->IsJSValue());
4100 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
4101 return reinterpret_cast<JSValue*>(obj);
4105 ACCESSORS(JSDate, value, Object, kValueOffset)
4106 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4107 ACCESSORS(JSDate, year, Object, kYearOffset)
4108 ACCESSORS(JSDate, month, Object, kMonthOffset)
4109 ACCESSORS(JSDate, day, Object, kDayOffset)
4110 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4111 ACCESSORS(JSDate, hour, Object, kHourOffset)
4112 ACCESSORS(JSDate, min, Object, kMinOffset)
4113 ACCESSORS(JSDate, sec, Object, kSecOffset)
4116 JSDate* JSDate::cast(Object* obj) {
4117 ASSERT(obj->IsJSDate());
4118 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
4119 return reinterpret_cast<JSDate*>(obj);
4123 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4124 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
4125 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4126 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4127 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4128 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4129 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4132 JSMessageObject* JSMessageObject::cast(Object* obj) {
4133 ASSERT(obj->IsJSMessageObject());
4134 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
4135 return reinterpret_cast<JSMessageObject*>(obj);
4139 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4140 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
4141 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4142 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4143 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4144 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4145 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
4147 byte* Code::instruction_start() {
4148 return FIELD_ADDR(this, kHeaderSize);
4152 byte* Code::instruction_end() {
4153 return instruction_start() + instruction_size();
4157 int Code::body_size() {
4158 return RoundUp(instruction_size(), kObjectAlignment);
4162 FixedArray* Code::unchecked_deoptimization_data() {
4163 return reinterpret_cast<FixedArray*>(
4164 READ_FIELD(this, kDeoptimizationDataOffset));
4168 ByteArray* Code::unchecked_relocation_info() {
4169 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
4173 byte* Code::relocation_start() {
4174 return unchecked_relocation_info()->GetDataStartAddress();
4178 int Code::relocation_size() {
4179 return unchecked_relocation_info()->length();
4183 byte* Code::entry() {
4184 return instruction_start();
4188 bool Code::contains(byte* inner_pointer) {
4189 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4193 ACCESSORS(JSArray, length, Object, kLengthOffset)
4196 ACCESSORS(JSRegExp, data, Object, kDataOffset)
4199 JSRegExp::Type JSRegExp::TypeTag() {
4200 Object* data = this->data();
4201 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
4202 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4203 return static_cast<JSRegExp::Type>(smi->value());
4207 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
4208 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4209 return static_cast<JSRegExp::Type>(smi->value());
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}
4226 JSRegExp::Flags JSRegExp::GetFlags() {
4227 ASSERT(this->data()->IsFixedArray());
4228 Object* data = this->data();
4229 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4230 return Flags(smi->value());
String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}
4242 Object* JSRegExp::DataAt(int index) {
4243 ASSERT(TypeTag() != NOT_COMPILED);
4244 return FixedArray::cast(data())->get(index);
4248 Object* JSRegExp::DataAtUnchecked(int index) {
4249 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4250 int offset = FixedArray::kHeaderSize + index * kPointerSize;
4251 return READ_FIELD(fa, offset);
4255 void JSRegExp::SetDataAt(int index, Object* value) {
4256 ASSERT(TypeTag() != NOT_COMPILED);
4257 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4258 FixedArray::cast(data())->set(index, value);
void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    fa->set_unchecked(index, Smi::cast(value));
  } else {
    // We only do this during GC, so we don't need to notify the write barrier.
    fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
  }
}
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
  Map* map = fixed_array->map();
  ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
          (map == GetHeap()->fixed_array_map() ||
           map == GetHeap()->fixed_cow_array_map())) ||
         (kind == FAST_DOUBLE_ELEMENTS &&
          (fixed_array->IsFixedDoubleArray() ||
           fixed_array == GetHeap()->empty_fixed_array())) ||
         (kind == DICTIONARY_ELEMENTS &&
          fixed_array->IsFixedArray() &&
          fixed_array->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
         (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
  return kind;
}
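// Added commentary (not in the original source): the DEBUG-only check above
// spells out which backing store is expected for each kind, roughly:
//
//   FAST_SMI_ONLY_ELEMENTS / FAST_ELEMENTS -> FixedArray (possibly COW)
//   FAST_DOUBLE_ELEMENTS                   -> FixedDoubleArray (or empty array)
//   DICTIONARY_ELEMENTS                    -> SeededNumberDictionary
//   kinds above DICTIONARY_ELEMENTS        -> external arrays and arguments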
4297 ElementsAccessor* JSObject::GetElementsAccessor() {
4298 return ElementsAccessor::ForKind(GetElementsKind());
4302 bool JSObject::HasFastElements() {
4303 return GetElementsKind() == FAST_ELEMENTS;
4307 bool JSObject::HasFastSmiOnlyElements() {
4308 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4312 bool JSObject::HasFastTypeElements() {
4313 ElementsKind elements_kind = GetElementsKind();
4314 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4315 elements_kind == FAST_ELEMENTS;
4319 bool JSObject::HasFastDoubleElements() {
4320 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4324 bool JSObject::HasDictionaryElements() {
4325 return GetElementsKind() == DICTIONARY_ELEMENTS;
4329 bool JSObject::HasNonStrictArgumentsElements() {
4330 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4334 bool JSObject::HasExternalArrayElements() {
4335 HeapObject* array = elements();
4336 ASSERT(array != NULL);
4337 return array->IsExternalArray();
#define EXTERNAL_ELEMENTS_CHECK(name, type)         \
  bool JSObject::HasExternal##name##Elements() {    \
    HeapObject* array = elements();                 \
    ASSERT(array != NULL);                          \
    if (!array->IsHeapObject())                     \
      return false;                                 \
    return array->map()->instance_type() == type;   \
  }
4351 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
4352 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
4353 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
4354 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4355 EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
4356 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
4357 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4358 EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
4359 EXTERNAL_ELEMENTS_CHECK(Float,
4360 EXTERNAL_FLOAT_ARRAY_TYPE)
4361 EXTERNAL_ELEMENTS_CHECK(Double,
4362 EXTERNAL_DOUBLE_ARRAY_TYPE)
4363 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4366 bool JSObject::HasNamedInterceptor() {
4367 return map()->has_named_interceptor();
4371 bool JSObject::HasIndexedInterceptor() {
4372 return map()->has_indexed_interceptor();
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastTypeElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
        elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
4394 StringDictionary* JSObject::property_dictionary() {
4395 ASSERT(!HasFastProperties());
4396 return StringDictionary::cast(properties());
4400 SeededNumberDictionary* JSObject::element_dictionary() {
4401 ASSERT(HasDictionaryElements());
4402 return SeededNumberDictionary::cast(elements());
4406 bool String::IsHashFieldComputed(uint32_t field) {
4407 return (field & kHashNotComputedMask) == 0;
4411 bool String::HasHashCode() {
4412 return IsHashFieldComputed(hash_field());
uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
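// Illustrative usage (added, not part of the original file): Hash() is cheap
// to call repeatedly because the result is cached in hash_field():
//
//   uint32_t h1 = name->Hash();  // may take the ComputeAndSetHash() path once
//   uint32_t h2 = name->Hash();  // afterwards just hash_field() >> kHashShift
//   ASSERT(h1 == h2);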
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}
4436 bool StringHasher::has_trivial_hash() {
4437 return length_ > String::kMaxHashCalcLength;
void StringHasher::AddCharacter(uint32_t c) {
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePair(c);  // Not inlined.
    return;
  }
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}
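// Added commentary (not in the original source): 429496729 is
// floor((2^32 - 1) / 10) and ((d + 2) >> 3) is 1 exactly for the digits 6..9,
// so the guard above rejects precisely those prefixes whose value would no
// longer fit in 32 bits after appending the digit d (array_index_ * 10 + d).
// Example: an accumulated 429496729 followed by '5' still fits (4294967295),
// while a following '6' would overflow and clears is_array_index_.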
void StringHasher::AddCharacterNoIndex(uint32_t c) {
  ASSERT(!is_array_index());
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePairNoIndex(c);  // Not inlined.
    return;
  }
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if ((result & String::kHashBitMask) == 0) {
    // The hash must never be zero; substitute an arbitrary non-zero value.
    result = 27;
  }
  return result;
}
template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) {
    int i;
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    }
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
    }
  }
  return hasher.GetHashField();
}
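// Illustrative usage (added; 'seed' here stands for whatever hash seed the
// embedding heap provides, e.g. the randomized seed behind
// FLAG_randomize_hashes):
//
//   const char* chars = "length";
//   uint32_t field = HashSequentialString(chars, 6, seed);
//   // 'field' is a ready-to-store hash_field() value (hash bits plus the
//   // array-index flags), as produced by StringHasher::GetHashField().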
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
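// Added commentary (not in the original source): combined with the hasher's
// leading-zero and 32-bit overflow checks, strings like "0" and "42" can
// report true here, while "012" or a string whose numeric value does not fit
// in 32 bits never does.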
4525 Object* JSReceiver::GetPrototype() {
4526 return HeapObject::cast(this)->map()->prototype();
bool JSReceiver::HasProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}
4546 PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
4547 return GetPropertyAttributeWithReceiver(this, key);
// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}
MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
}
4578 bool AccessorInfo::all_can_read() {
4579 return BooleanBit::get(flag(), kAllCanReadBit);
4583 void AccessorInfo::set_all_can_read(bool value) {
4584 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4588 bool AccessorInfo::all_can_write() {
4589 return BooleanBit::get(flag(), kAllCanWriteBit);
4593 void AccessorInfo::set_all_can_write(bool value) {
4594 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4598 bool AccessorInfo::prohibits_overwriting() {
4599 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4603 void AccessorInfo::set_prohibits_overwriting(bool value) {
4604 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4608 PropertyAttributes AccessorInfo::property_attributes() {
4609 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4613 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4614 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::set(index+2, details.AsSmi());
}
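// Added commentary (not in the original source): as the three writes above
// show, every dictionary entry occupies three consecutive FixedArray slots
// starting at EntryToIndex(entry):
//
//   [index + 0]  key
//   [index + 1]  value
//   [index + 2]  PropertyDetails, stored as a Smi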
4641 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4642 ASSERT(other->IsNumber());
4643 return key == static_cast<uint32_t>(other->Number());
4647 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
4648 return ComputeIntegerHash(key, 0);
uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}
4658 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
4659 return ComputeIntegerHash(key, seed);
uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}
4669 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4670 return Isolate::Current()->heap()->NumberFromUint32(key);
4674 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4675 // We know that all entries in a hash table had their hash keys created.
4676 // Use that knowledge to have fast failure.
4677 if (key->Hash() != String::cast(other)->Hash()) return false;
4678 return key->Equals(String::cast(other));
uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}
4687 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4688 return String::cast(other)->Hash();
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
4697 template <int entrysize>
4698 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
4699 return key->SameValue(other);
4703 template <int entrysize>
4704 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
4705 MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
4706 return Smi::cast(maybe_hash->ToObjectChecked())->value();
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}
4718 template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
  return key;
}
4724 void Map::ClearCodeCache(Heap* heap) {
4725 // No write barrier is needed since empty_fixed_array is not in new space.
4726 // Please note this function is used during marking:
4727 // - MarkCompactCollector::MarkUnmarkedObject
4728 // - IncrementalMarking::Step
4729 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4730 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastTypeElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
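// Illustrative example (added, not in the original source): growing to
// required_size == 100 from a smaller backing store calls
// Expand(100 + (100 >> 3)) == Expand(112) -- some slack, but far from
// doubling. An array that is already large enough but lives outside new
// space is only reallocated back into new space while required_size stays
// below the 128-element "fits comfortably in new space" threshold.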
4752 void JSArray::set_length(Smi* length) {
4753 // Don't need a write barrier for a Smi.
4754 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          ((GetElementsKind() == FAST_ELEMENTS) ||
           (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}
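// Added commentary (not in the original source): EnsureCanContainElements()
// above may transition the receiver's elements kind to accommodate the new
// storage; the ASSERT then checks that kind and backing-store type agree
// (FixedDoubleArray only with FAST_DOUBLE_ELEMENTS, FixedArray with
// FAST_ELEMENTS or with FAST_SMI_ONLY_ELEMENTS when it holds only Smis or
// holes) before the storage and length are installed.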
4781 MaybeObject* FixedArray::Copy() {
4782 if (length() == 0) return this;
4783 return GetHeap()->CopyFixedArray(this);
4787 MaybeObject* FixedDoubleArray::Copy() {
4788 if (length() == 0) return this;
4789 return GetHeap()->CopyFixedDoubleArray(this);
4793 void TypeFeedbackCells::SetAstId(int index, Smi* id) {
4794 set(1 + index * 2, id);
4798 Smi* TypeFeedbackCells::AstId(int index) {
4799 return Smi::cast(get(1 + index * 2));
4803 void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
4804 set(index * 2, cell);
4808 JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
4809 return JSGlobalPropertyCell::cast(get(index * 2));
4813 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
4814 return isolate->factory()->the_hole_value();
4818 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
4819 return isolate->factory()->undefined_value();
Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->raw_unchecked_the_hole_value();
}
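// Added commentary (not in the original source): as the accessors above show,
// TypeFeedbackCells is a FixedArray of (cell, AST id) pairs laid out as
//
//   [2 * i]      JSGlobalPropertyCell for entry i
//   [2 * i + 1]  AST id of entry i, stored as a Smi
//
// with the hole and undefined doubling as the uninitialized and megamorphic
// sentinels, respectively.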
4828 SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
4829 SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
4830 kIcWithTypeinfoCountOffset)
4831 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
4832 kTypeFeedbackCellsOffset)
4835 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
4853 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
4854 return map->instance_size();
4858 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4859 v->VisitExternalReference(
4860 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4864 template<typename StaticVisitor>
4865 void Foreign::ForeignIterateBody() {
4866 StaticVisitor::VisitExternalReference(
4867 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4871 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4872 typedef v8::String::ExternalAsciiStringResource Resource;
4873 v->VisitExternalAsciiString(
4874 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4878 template<typename StaticVisitor>
4879 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4880 typedef v8::String::ExternalAsciiStringResource Resource;
4881 StaticVisitor::VisitExternalAsciiString(
4882 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4886 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4887 typedef v8::String::ExternalStringResource Resource;
4888 v->VisitExternalTwoByteString(
4889 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4893 template<typename StaticVisitor>
4894 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4895 typedef v8::String::ExternalStringResource Resource;
4896 StaticVisitor::VisitExternalTwoByteString(
4897 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4900 #define SLOT_ADDR(obj, offset) \
4901 reinterpret_cast<Object**>((obj)->address() + offset)
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
4947 } } // namespace v8::internal
4949 #endif // V8_OBJECTS_INL_H_