1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
// PropertyDetails packs a property's attributes into an integer bit-field
// that is stored as a Smi; these helpers convert between the two forms.
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();

// Re-tags the raw bit-field value as a Smi.
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);

// Returns a copy of these details with the DeletedField bit set, used to
// mark dictionary entries as deleted.
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
// The macros below generate the repetitive inline member functions used
// throughout this file:
//   TYPE_CHECKER     - Object::Is<type>() via an instance-type compare.
//   CAST_ACCESSOR    - type::cast(), an ASSERT-checked downcast.
//   INT_ACCESSORS    - raw int field getter/setter.
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// ACCESSORS: tagged-pointer field getter/setter; the setter emits a
// conditional write barrier so the GC observes the store.
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
97 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
98 #define ACCESSORS_TO_SMI(holder, name, offset) \
99 Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
100 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
101 WRITE_FIELD(this, offset, value); \
105 // Getter that returns a Smi as an int and writes an int as a Smi.
106 #define SMI_ACCESSORS(holder, name, offset) \
107 int holder::name() { \
108 Object* value = READ_FIELD(this, offset); \
109 return Smi::cast(value)->value(); \
111 void holder::set_##name(int value) { \
112 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
116 #define BOOL_GETTER(holder, field, name, offset) \
117 bool holder::name() { \
118 return BooleanBit::get(field(), offset); \
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123 bool holder::name() { \
124 return BooleanBit::get(field(), offset); \
126 void holder::set_##name(bool value) { \
127 set_##field(BooleanBit::set(field(), offset, value)); \
// Returns true if transitioning from |from_kind| to |to_kind| makes the
// elements kind strictly more general: SMI-only may widen to double or
// object elements, and double may widen to object elements.
131 bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
132 ElementsKind to_kind) {
133 if (to_kind == FAST_ELEMENTS) {
134 return from_kind == FAST_SMI_ONLY_ELEMENTS ||
135 from_kind == FAST_DOUBLE_ELEMENTS;
137 return to_kind == FAST_DOUBLE_ELEMENTS &&
138 from_kind == FAST_SMI_ONLY_ELEMENTS;
// FixedArrayBase covers both FixedArray and FixedDoubleArray.
143 bool Object::IsFixedArrayBase() {
144 return IsFixedArray() || IsFixedDoubleArray();

// Checks whether this object was instantiated from the given function
// template, walking the template's parent chain.
148 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
149 // There is a constraint on the object; check.
150 if (!this->IsJSObject()) return false;
151 // Fetch the constructor function of the object.
152 Object* cons_obj = JSObject::cast(this)->map()->constructor();
153 if (!cons_obj->IsJSFunction()) return false;
154 JSFunction* fun = JSFunction::cast(cons_obj);
155 // Iterate through the chain of inheriting function templates to
156 // see if the required one occurs.
157 for (Object* type = fun->shared()->function_data();
158 type->IsFunctionTemplateInfo();
159 type = FunctionTemplateInfo::cast(type)->parent_template()) {
160 if (type == expected) return true;
162 // Didn't find the required type in the inheritance chain.
// A Smi is identified purely by its pointer tag bits; no dereference needed.
167 bool Object::IsSmi() {
168 return HAS_SMI_TAG(this);

172 bool Object::IsHeapObject() {
173 return Internals::HasHeapObjectTag(this);

// Like IsHeapObject(), but assumes the value cannot be a Failure, so
// inspecting the Smi tag bit alone is sufficient.
177 bool Object::NonFailureIsHeapObject() {
178 ASSERT(!this->IsFailure());
179 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;

183 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)

// All string instance types are below FIRST_NONSTRING_TYPE, so a single
// upper-bound compare suffices.
186 bool Object::IsString() {
187 return Object::IsHeapObject()
188 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;

// Instance types are ordered so spec objects occupy the top of the range.
192 bool Object::IsSpecObject() {
193 return Object::IsHeapObject()
194 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;

// Callable spec objects: plain functions and function proxies.
198 bool Object::IsSpecFunction() {
199 if (!Object::IsHeapObject()) return false;
200 InstanceType type = HeapObject::cast(this)->map()->instance_type();
201 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;

205 bool Object::IsSymbol() {
206 if (!this->IsHeapObject()) return false;
207 uint32_t type = HeapObject::cast(this)->map()->instance_type();
208 // Because the symbol tag is non-zero and no non-string types have the
209 // symbol bit set we can test for symbols with a very simple test
211 STATIC_ASSERT(kSymbolTag != 0);
212 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
213 return (type & kIsSymbolMask) != 0;
// The Is*String() predicates below first check that this is a string, then
// dispatch on the string's shape (representation and encoding bits).
217 bool Object::IsConsString() {
218 if (!IsString()) return false;
219 return StringShape(String::cast(this)).IsCons();

223 bool Object::IsSlicedString() {
224 if (!IsString()) return false;
225 return StringShape(String::cast(this)).IsSliced();

229 bool Object::IsSeqString() {
230 if (!IsString()) return false;
231 return StringShape(String::cast(this)).IsSequential();

235 bool Object::IsSeqAsciiString() {
236 if (!IsString()) return false;
237 return StringShape(String::cast(this)).IsSequential() &&
238 String::cast(this)->IsAsciiRepresentation();

242 bool Object::IsSeqTwoByteString() {
243 if (!IsString()) return false;
244 return StringShape(String::cast(this)).IsSequential() &&
245 String::cast(this)->IsTwoByteRepresentation();

249 bool Object::IsExternalString() {
250 if (!IsString()) return false;
251 return StringShape(String::cast(this)).IsExternal();

255 bool Object::IsExternalAsciiString() {
256 if (!IsString()) return false;
257 return StringShape(String::cast(this)).IsExternal() &&
258 String::cast(this)->IsAsciiRepresentation();

262 bool Object::IsExternalTwoByteString() {
263 if (!IsString()) return false;
264 return StringShape(String::cast(this)).IsExternal() &&
265 String::cast(this)->IsTwoByteRepresentation();

// An elements backing store must be one of these three kinds.
268 bool Object::HasValidElements() {
269 // Dictionary is covered under FixedArray.
270 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
// StringShape caches a string's instance-type bits so repeated shape tests
// avoid reloading the map. NOTE(review): presumably a StringShape must not
// be kept across allocations (GC can change the underlying map) -- confirm;
// that constraint is not visible in this chunk.
273 StringShape::StringShape(String* str)
274 : type_(str->map()->instance_type()) {
276 ASSERT((type_ & kIsNotStringMask) == kStringTag);

280 StringShape::StringShape(Map* map)
281 : type_(map->instance_type()) {
283 ASSERT((type_ & kIsNotStringMask) == kStringTag);

287 StringShape::StringShape(InstanceType t)
288 : type_(static_cast<uint32_t>(t)) {
290 ASSERT((type_ & kIsNotStringMask) == kStringTag);

// Symbol test on the cached bits; the symbol tag is non-zero (asserted).
294 bool StringShape::IsSymbol() {
296 STATIC_ASSERT(kSymbolTag != 0);
297 return (type_ & kIsSymbolMask) != 0;
// Encoding test against this string's own map (does not look through cons
// or sliced indirections).
301 bool String::IsAsciiRepresentation() {
302 uint32_t type = map()->instance_type();
303 return (type & kStringEncodingMask) == kAsciiStringTag;

307 bool String::IsTwoByteRepresentation() {
308 uint32_t type = map()->instance_type();
309 return (type & kStringEncodingMask) == kTwoByteStringTag;

// Encoding test that recurses through cons/sliced indirections to the
// underlying string.
313 bool String::IsAsciiRepresentationUnderneath() {
314 uint32_t type = map()->instance_type();
315 STATIC_ASSERT(kIsIndirectStringTag != 0);
316 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
318 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
319 case kAsciiStringTag:
321 case kTwoByteStringTag:
323 default: // Cons or sliced string. Need to go deeper.
324 return GetUnderlying()->IsAsciiRepresentation();

329 bool String::IsTwoByteRepresentationUnderneath() {
330 uint32_t type = map()->instance_type();
331 STATIC_ASSERT(kIsIndirectStringTag != 0);
332 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
334 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
335 case kAsciiStringTag:
337 case kTwoByteStringTag:
339 default: // Cons or sliced string. Need to go deeper.
340 return GetUnderlying()->IsTwoByteRepresentation();

// True if the string is ASCII-encoded, or carries the ASCII-data hint bit.
345 bool String::HasOnlyAsciiChars() {
346 uint32_t type = map()->instance_type();
347 return (type & kStringEncodingMask) == kAsciiStringTag ||
348 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
// StringShape representation/encoding predicates: each masks the cached
// instance-type bits against the relevant tag.
352 bool StringShape::IsCons() {
353 return (type_ & kStringRepresentationMask) == kConsStringTag;

357 bool StringShape::IsSliced() {
358 return (type_ & kStringRepresentationMask) == kSlicedStringTag;

// Indirect = cons or sliced (i.e. has an underlying string).
362 bool StringShape::IsIndirect() {
363 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;

367 bool StringShape::IsExternal() {
368 return (type_ & kStringRepresentationMask) == kExternalStringTag;

372 bool StringShape::IsSequential() {
373 return (type_ & kStringRepresentationMask) == kSeqStringTag;

377 StringRepresentationTag StringShape::representation_tag() {
378 uint32_t tag = (type_ & kStringRepresentationMask);
379 return static_cast<StringRepresentationTag>(tag);

383 uint32_t StringShape::encoding_tag() {
384 return type_ & kStringEncodingMask;

// Representation and encoding bits combined; the STATIC_CHECK pins this
// mask to the one the public API (v8.h Internals) relies on.
388 uint32_t StringShape::full_representation_tag() {
389 return (type_ & (kStringRepresentationMask | kStringEncodingMask));

393 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
394 Internals::kFullStringRepresentationMask);

397 bool StringShape::IsSequentialAscii() {
398 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);

402 bool StringShape::IsSequentialTwoByte() {
403 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);

407 bool StringShape::IsExternalAscii() {
408 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);

412 bool StringShape::IsExternalTwoByte() {
413 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);

417 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
418 Internals::kExternalTwoByteRepresentationTag);
// Reads one character from the flattened string data, dispatching on the
// reader's encoding.
// NOTE(review): the assert permits index == length_, i.e. one past the last
// character -- looks like an off-by-one unless reading that position is
// deliberate; confirm against callers before tightening to '<'.
421 uc32 FlatStringReader::Get(int index) {
422 ASSERT(0 <= index && index <= length_);
424 return static_cast<const byte*>(start_)[index];
426 return static_cast<const uc16*>(start_)[index];
// Numbers are either immediate Smis or boxed HeapNumbers.
431 bool Object::IsNumber() {
432 return IsSmi() || IsHeapNumber();

436 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
437 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)

// Fillers are the dead-space object kinds the GC leaves behind.
440 bool Object::IsFiller() {
441 if (!Object::IsHeapObject()) return false;
442 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
443 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;

447 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)

// All external-array instance types occupy one contiguous range.
450 bool Object::IsExternalArray() {
451 if (!Object::IsHeapObject())
453 InstanceType instance_type =
454 HeapObject::cast(this)->map()->instance_type();
455 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
456 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);

460 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
461 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
462 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
463 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
464 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
465 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
466 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
467 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
// Failure values are identified by their pointer tag, like Smis; they are
// never dereferenced as heap objects.
470 bool MaybeObject::IsFailure() {
471 return HAS_FAILURE_TAG(this);

475 bool MaybeObject::IsRetryAfterGC() {
476 return HAS_FAILURE_TAG(this)
477 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;

481 bool MaybeObject::IsOutOfMemory() {
482 return HAS_FAILURE_TAG(this)
483 && Failure::cast(this)->IsOutOfMemoryException();

// The exception failure is a singleton, so an identity compare suffices.
487 bool MaybeObject::IsException() {
488 return this == Failure::Exception();

492 bool MaybeObject::IsTheHole() {
493 return !IsFailure() && ToObjectUnchecked()->IsTheHole();

// Tag-checked reinterpret (ASSERT in debug builds only).
497 Failure* Failure::cast(MaybeObject* obj) {
498 ASSERT(HAS_FAILURE_TAG(obj));
499 return reinterpret_cast<Failure*>(obj);
// JSReceiver/JSObject instance types sit at the top of the type range, so a
// single lower-bound check works; the STATIC_ASSERTs pin that layout.
503 bool Object::IsJSReceiver() {
504 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
505 return IsHeapObject() &&
506 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;

510 bool Object::IsJSObject() {
511 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
512 return IsHeapObject() &&
513 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;

// Proxies occupy their own sub-range of instance types.
517 bool Object::IsJSProxy() {
518 if (!Object::IsHeapObject()) return false;
519 InstanceType type = HeapObject::cast(this)->map()->instance_type();
520 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;

524 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
525 TYPE_CHECKER(JSSet, JS_SET_TYPE)
526 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
527 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
528 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
529 TYPE_CHECKER(Map, MAP_TYPE)
530 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
531 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
// Descriptor arrays have no distinct instance type; any FixedArray passes.
534 bool Object::IsDescriptorArray() {
535 return IsFixedArray();

// Heuristic check only (used in asserts): deopt input data is a FixedArray
// whose length is the fixed header size plus a whole number of entries.
539 bool Object::IsDeoptimizationInputData() {
540 // Must be a fixed array.
541 if (!IsFixedArray()) return false;
543 // There's no sure way to detect the difference between a fixed array and
544 // a deoptimization data array. Since this is used for asserts we can
545 // check that the length is zero or else the fixed size plus a multiple of
547 int length = FixedArray::cast(this)->length();
548 if (length == 0) return true;
550 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
551 return length >= 0 &&
552 length % DeoptimizationInputData::kDeoptEntrySize == 0;

// Heuristic, as above: output data comes in pairs, so the length is even.
556 bool Object::IsDeoptimizationOutputData() {
557 if (!IsFixedArray()) return false;
558 // There's actually no way to see the difference between a fixed array and
559 // a deoptimization data array. Since this is used for asserts we can check
560 // that the length is plausible though.
561 if (FixedArray::cast(this)->length() % 2 != 0) return false;

// Heuristic: type-feedback cells also come in pairs.
566 bool Object::IsTypeFeedbackCells() {
567 if (!IsFixedArray()) return false;
568 // There's actually no way to see the difference between a fixed array and
569 // a cache cells array. Since this is used for asserts we can check that
570 // the length is plausible though.
571 if (FixedArray::cast(this)->length() % 2 != 0) return false;
// Contexts are recognized by map identity: the map must be one of the five
// context maps owned by the heap.
576 bool Object::IsContext() {
577 if (Object::IsHeapObject()) {
578 Map* map = HeapObject::cast(this)->map();
579 Heap* heap = map->GetHeap();
580 return (map == heap->function_context_map() ||
581 map == heap->catch_context_map() ||
582 map == heap->with_context_map() ||
583 map == heap->global_context_map() ||
584 map == heap->block_context_map());

590 bool Object::IsGlobalContext() {
591 return Object::IsHeapObject() &&
592 HeapObject::cast(this)->map() ==
593 HeapObject::cast(this)->GetHeap()->global_context_map();

// ScopeInfo is likewise identified by its dedicated map.
597 bool Object::IsScopeInfo() {
598 return Object::IsHeapObject() &&
599 HeapObject::cast(this)->map() ==
600 HeapObject::cast(this)->GetHeap()->scope_info_map();
604 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)

// Specialization of the generic Is<T> template dispatcher.
607 template <> inline bool Is<JSFunction>(Object* obj) {
608 return obj->IsJSFunction();

612 TYPE_CHECKER(Code, CODE_TYPE)
613 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
614 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
615 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
616 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
617 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
618 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)

// A string wrapper is a JSValue whose boxed value is a string.
621 bool Object::IsStringWrapper() {
622 return IsJSValue() && JSValue::cast(this)->value()->IsString();

626 TYPE_CHECKER(Foreign, FOREIGN_TYPE)

// Booleans are the oddballs whose kind has no "not boolean" bits set.
629 bool Object::IsBoolean() {
630 return IsOddball() &&
631 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);

635 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
636 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)

// Specialization of the generic Is<T> template dispatcher.
639 template <> inline bool Is<JSArray>(Object* obj) {
640 return obj->IsJSArray();
// Hash tables are recognized by map identity; the more specific table kinds
// below share that same map, so their checks are necessarily approximate.
644 bool Object::IsHashTable() {
645 return Object::IsHeapObject() &&
646 HeapObject::cast(this)->map() ==
647 HeapObject::cast(this)->GetHeap()->hash_table_map();

// Any hash table except the one distinguished symbol table.
651 bool Object::IsDictionary() {
652 return IsHashTable() &&
653 this != HeapObject::cast(this)->GetHeap()->symbol_table();

657 bool Object::IsSymbolTable() {
658 return IsHashTable() && this ==
659 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();

// Heuristic shape check (entries header plus whole entries); under
// --verify-heap a full verification pass runs as well.
663 bool Object::IsJSFunctionResultCache() {
664 if (!IsFixedArray()) return false;
665 FixedArray* self = FixedArray::cast(this);
666 int length = self->length();
667 if (length < JSFunctionResultCache::kEntriesIndex) return false;
668 if ((length - JSFunctionResultCache::kEntriesIndex)
669 % JSFunctionResultCache::kEntrySize != 0) {
673 if (FLAG_verify_heap) {
674 reinterpret_cast<JSFunctionResultCache*>(this)->
675 JSFunctionResultCacheVerify();

// Heuristic: a normalized-map cache is a FixedArray of exactly kEntries.
682 bool Object::IsNormalizedMapCache() {
683 if (!IsFixedArray()) return false;
684 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
688 if (FLAG_verify_heap) {
689 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();

// The table kinds below have no distinguishing feature beyond being hash
// tables, so only the generic check is possible.
696 bool Object::IsCompilationCacheTable() {
697 return IsHashTable();

701 bool Object::IsCodeCacheHashTable() {
702 return IsHashTable();

706 bool Object::IsPolymorphicCodeCacheHashTable() {
707 return IsHashTable();

711 bool Object::IsMapCache() {
712 return IsHashTable();
// Primitives in the ES sense: oddballs (undefined/null/booleans), numbers
// and strings.
716 bool Object::IsPrimitive() {
717 return IsOddball() || IsNumber() || IsString();

// Global proxies must always have access checks enabled; the ASSERT
// enforces that invariant.
721 bool Object::IsJSGlobalProxy() {
722 bool result = IsHeapObject() &&
723 (HeapObject::cast(this)->map()->instance_type() ==
724 JS_GLOBAL_PROXY_TYPE);
725 ASSERT(!result || IsAccessCheckNeeded());

// Global objects: the JS global object and the builtins object.
730 bool Object::IsGlobalObject() {
731 if (!IsHeapObject()) return false;
733 InstanceType type = HeapObject::cast(this)->map()->instance_type();
734 return type == JS_GLOBAL_OBJECT_TYPE ||
735 type == JS_BUILTINS_OBJECT_TYPE;

739 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
740 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)

// Undetectability and access checks are per-map bits, not instance types.
743 bool Object::IsUndetectableObject() {
744 return IsHeapObject()
745 && HeapObject::cast(this)->map()->is_undetectable();

749 bool Object::IsAccessCheckNeeded() {
750 return IsHeapObject()
751 && HeapObject::cast(this)->map()->is_access_check_needed();

// True for any of the struct instance types enumerated in STRUCT_LIST.
755 bool Object::IsStruct() {
756 if (!IsHeapObject()) return false;
757 switch (HeapObject::cast(this)->map()->instance_type()) {
758 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
759 STRUCT_LIST(MAKE_STRUCT_CASE)
760 #undef MAKE_STRUCT_CASE
761 default: return false;

// Generates Object::Is<Name>() for every struct type in STRUCT_LIST.
766 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
767 bool Object::Is##Name() { \
768 return Object::IsHeapObject() \
769 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
771 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
772 #undef MAKE_STRUCT_PREDICATE
// Each oddball singleton (undefined, null, the hole, true, false, the
// arguments marker) is identified by its kind field rather than by map.
775 bool Object::IsUndefined() {
776 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;

780 bool Object::IsNull() {
781 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;

785 bool Object::IsTheHole() {
786 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;

790 bool Object::IsTrue() {
791 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;

795 bool Object::IsFalse() {
796 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;

800 bool Object::IsArgumentsMarker() {
801 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
// Numeric value of a Smi or HeapNumber; the caller must ensure IsNumber().
805 double Object::Number() {
808 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
809 : reinterpret_cast<HeapNumber*>(this)->value();

813 bool Object::IsNaN() {
814 return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());

// Converts to a Smi if the value is exactly representable as one;
// otherwise returns the exception failure sentinel.
818 MaybeObject* Object::ToSmi() {
819 if (IsSmi()) return this;
820 if (IsHeapNumber()) {
821 double value = HeapNumber::cast(this)->value();
822 int int_value = FastD2I(value);
823 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
824 return Smi::FromInt(int_value);
827 return Failure::Exception();

// True if this is a JSObject whose [[Class]] name equals |name|.
831 bool Object::HasSpecificClassOf(String* name) {
832 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);

836 MaybeObject* Object::GetElement(uint32_t index) {
837 // GetElement can trigger a getter which can cause allocation.
838 // This was not always the case. This ASSERT is here to catch
839 // leftover incorrect uses.
840 ASSERT(HEAP->IsAllocationAllowed());
841 return GetElementWithReceiver(this, index);
// Like GetElement, but the caller guarantees no failure can occur; this is
// only asserted in debug builds.
845 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
846 MaybeObject* maybe = GetElementWithReceiver(this, index);
847 ASSERT(!maybe->IsFailure());
848 Object* result = NULL; // Initialization to please compiler.
849 maybe->ToObject(&result);

// Property lookup with this object as the receiver; the one-argument
// overload discards the reported property attributes.
854 MaybeObject* Object::GetProperty(String* key) {
855 PropertyAttributes attributes;
856 return GetPropertyWithReceiver(this, key, &attributes);

860 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
861 return GetPropertyWithReceiver(this, key, attributes);
// Raw field access. FIELD_ADDR strips the heap-object tag to compute the
// real address of the field at |offset| inside object |p|.
865 #define FIELD_ADDR(p, offset) \
866 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

868 #define READ_FIELD(p, offset) \
869 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

// WRITE_FIELD performs no write barrier; pointer stores that the GC must
// see go through WRITE_BARRIER / CONDITIONAL_WRITE_BARRIER below.
871 #define WRITE_FIELD(p, offset, value) \
872 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Write barriers: notify incremental marking of the store and record
// old-to-new-space pointers for the scavenger.
874 #define WRITE_BARRIER(heap, object, offset, value) \
875 heap->incremental_marking()->RecordWrite( \
876 object, HeapObject::RawField(object, offset), value); \
877 if (heap->InNewSpace(value)) { \
878 heap->RecordWrite(object->address(), offset); \
881 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
882 if (mode == UPDATE_WRITE_BARRIER) { \
883 heap->incremental_marking()->RecordWrite( \
884 object, HeapObject::RawField(object, offset), value); \
885 if (heap->InNewSpace(value)) { \
886 heap->RecordWrite(object->address(), offset); \
890 #ifndef V8_TARGET_ARCH_MIPS
891 #define READ_DOUBLE_FIELD(p, offset) \
892 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
893 #else // V8_TARGET_ARCH_MIPS
894 // Prevent gcc from using load-double (mips ldc1) on (possibly)
895 // non-64-bit aligned HeapNumber::value.
896 static inline double read_double_field(void* p, int offset) {
901 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
902 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
905 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
906 #endif // V8_TARGET_ARCH_MIPS

908 #ifndef V8_TARGET_ARCH_MIPS
909 #define WRITE_DOUBLE_FIELD(p, offset, value) \
910 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
911 #else // V8_TARGET_ARCH_MIPS
912 // Prevent gcc from using store-double (mips sdc1) on (possibly)
913 // non-64-bit aligned HeapNumber::value.
914 static inline void write_double_field(void* p, int offset,
921 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
922 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
924 #define WRITE_DOUBLE_FIELD(p, offset, value) \
925 write_double_field(p, offset, value)
926 #endif // V8_TARGET_ARCH_MIPS

// Typed raw-field accessors. None of these perform a write barrier, so
// they must only be used for non-pointer (untagged) payloads.
929 #define READ_INT_FIELD(p, offset) \
930 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

932 #define WRITE_INT_FIELD(p, offset, value) \
933 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

935 #define READ_INTPTR_FIELD(p, offset) \
936 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

938 #define WRITE_INTPTR_FIELD(p, offset, value) \
939 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

941 #define READ_UINT32_FIELD(p, offset) \
942 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

944 #define WRITE_UINT32_FIELD(p, offset, value) \
945 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

947 #define READ_INT64_FIELD(p, offset) \
948 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

950 #define WRITE_INT64_FIELD(p, offset, value) \
951 (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

953 #define READ_SHORT_FIELD(p, offset) \
954 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

956 #define WRITE_SHORT_FIELD(p, offset, value) \
957 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

959 #define READ_BYTE_FIELD(p, offset) \
960 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

962 #define WRITE_BYTE_FIELD(p, offset, value) \
963 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
// Address of the field at |byte_offset| expressed as an Object** slot,
// suitable for passing to the write barrier.
966 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
967 return &READ_FIELD(obj, byte_offset);

// (Body of Smi::value) Untags via the platform-specific Internals helper.
972 return Internals::SmiValue(this);

// Tags an int as a Smi by shifting it into the payload bits; the value
// must already satisfy Smi::IsValid (asserted).
976 Smi* Smi::FromInt(int value) {
977 ASSERT(Smi::IsValid(value));
978 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
979 intptr_t tagged_value =
980 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
981 return reinterpret_cast<Smi*>(tagged_value);

985 Smi* Smi::FromIntptr(intptr_t value) {
986 ASSERT(Smi::IsValid(value));
987 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
988 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
// Failure objects encode their payload directly in the tagged pointer:
// [ payload | type tag | failure tag ]; they are never real heap objects.
992 Failure::Type Failure::type() const {
993 return static_cast<Type>(value() & kFailureTypeTagMask);

997 bool Failure::IsInternalError() const {
998 return type() == INTERNAL_ERROR;

1002 bool Failure::IsOutOfMemoryException() const {
1003 return type() == OUT_OF_MEMORY_EXCEPTION;

// For RETRY_AFTER_GC failures the payload encodes the allocation space to
// collect before retrying.
1007 AllocationSpace Failure::allocation_space() const {
1008 ASSERT_EQ(RETRY_AFTER_GC, type());
1009 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)

1014 Failure* Failure::InternalError() {
1015 return Construct(INTERNAL_ERROR);

1019 Failure* Failure::Exception() {
1020 return Construct(EXCEPTION);

1024 Failure* Failure::OutOfMemoryException() {
1025 return Construct(OUT_OF_MEMORY_EXCEPTION);

// Strips the failure tag to recover the payload bits.
1029 intptr_t Failure::value() const {
1030 return static_cast<intptr_t>(
1031 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);

1035 Failure* Failure::RetryAfterGC() {
1036 return RetryAfterGC(NEW_SPACE);

1040 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1041 ASSERT((space & ~kSpaceTagMask) == 0);
1042 return Construct(RETRY_AFTER_GC, space);

// Builds the tagged failure pointer; the ASSERT verifies the payload
// survives the tag shift without losing bits.
1046 Failure* Failure::Construct(Type type, intptr_t value) {
1048 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1049 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1050 return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
// Range check for Smi encoding. On x64 the Smi payload is a full 32-bit
// integer; on 32-bit targets fewer bits are available, checked via the
// unsigned-arithmetic trick explained in the CAUTION comment below. The
// debug-only |in_range| recomputation cross-checks the fast path.
1054 bool Smi::IsValid(intptr_t value) {
1056 bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1059 #ifdef V8_TARGET_ARCH_X64
1060 // To be representable as a long smi, the value must be a 32-bit integer.
1061 bool result = (value == static_cast<int32_t>(value));
1063 // To be representable as an tagged small integer, the two
1064 // most-significant bits of 'value' must be either 00 or 11 due to
1065 // sign-extension. To check this we add 01 to the two
1066 // most-significant bits, and check if the most-significant bit is 0
1068 // CAUTION: The original code below:
1069 // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1070 // may lead to incorrect results according to the C language spec, and
1071 // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1072 // compiler may produce undefined results in case of signed integer
1073 // overflow. The computation must be done w/ unsigned ints.
1074 bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1076 ASSERT(result == in_range);
// MapWord is the first word of every heap object: normally the map
// pointer, but during GC it can hold a forwarding address instead. The two
// are distinguishable because a forwarding address (the untagged object
// address) looks Smi-tagged, while a map pointer carries the heap tag.
1081 MapWord MapWord::FromMap(Map* map) {
1082 return MapWord(reinterpret_cast<uintptr_t>(map));

1086 Map* MapWord::ToMap() {
1087 return reinterpret_cast<Map*>(value_);

1091 bool MapWord::IsForwardingAddress() {
1092 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));

// Stripping the heap-object tag yields a Smi-tagged word, which is what
// marks this map word as a forwarding address.
1096 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1097 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1098 return MapWord(reinterpret_cast<uintptr_t>(raw));

1102 HeapObject* MapWord::ToForwardingAddress() {
1103 ASSERT(IsForwardingAddress());
1104 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
// Debug-mode field verification helpers.
1109 void HeapObject::VerifyObjectField(int offset) {
1110 VerifyPointer(READ_FIELD(this, offset));

1113 void HeapObject::VerifySmiField(int offset) {
1114 ASSERT(READ_FIELD(this, offset)->IsSmi());

// The owning heap is recovered from the memory chunk this object lives in.
1119 Heap* HeapObject::GetHeap() {
1121 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1122 ASSERT(heap != NULL);
1123 ASSERT(heap->isolate() == Isolate::Current());

1128 Isolate* HeapObject::GetIsolate() {
1129 return GetHeap()->isolate();

1133 Map* HeapObject::map() {
1134 return map_word().ToMap();

// Map stores go through the incremental-marking write barrier; a NULL slot
// is passed because maps are never on evacuation candidates (see TODO).
1138 void HeapObject::set_map(Map* value) {
1139 set_map_word(MapWord::FromMap(value));
1140 if (value != NULL) {
1141 // TODO(1600) We are passing NULL as a slot because maps can never be on
1142 // evacuation candidate.
1143 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);

1148 // Unsafe accessor omitting write barrier.
1149 void HeapObject::set_map_no_write_barrier(Map* value) {
1150 set_map_word(MapWord::FromMap(value));

1154 MapWord HeapObject::map_word() {
1155 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));

1159 void HeapObject::set_map_word(MapWord map_word) {
1160 // WRITE_FIELD does not invoke write barrier, but there is no need
1162 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));

// Conversions between tagged HeapObject pointers and raw addresses.
1166 HeapObject* HeapObject::FromAddress(Address address) {
1167 ASSERT_TAG_ALIGNED(address);
1168 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);

1172 Address HeapObject::address() {
1173 return reinterpret_cast<Address>(this) - kHeapObjectTag;

1177 int HeapObject::Size() {
1178 return SizeFromMap(map());

// Visits the pointer fields in [start, end) of this object.
1182 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1183 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1184 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));

1188 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1189 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));

1193 double HeapNumber::value() {
1194 return READ_DOUBLE_FIELD(this, kValueOffset);

1198 void HeapNumber::set_value(double value) {
1199 WRITE_DOUBLE_FIELD(this, kValueOffset, value);

// Extracts the unbiased IEEE-754 exponent from the word at kExponentOffset.
1203 int HeapNumber::get_exponent() {
1204 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1205 kExponentShift) - kExponentBias;
1209 int HeapNumber::get_sign() {
1210 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
// Generated getter/setter for JSObject's out-of-object property backing store.
1214 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)

// Address of element 0; used for raw pointer scans over the elements.
1217 Object** FixedArray::GetFirstElementAddress() {
1218 return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));

// True iff every element is a Smi or the hole sentinel (i.e. the array is
// still eligible for FAST_SMI_ONLY_ELEMENTS representation).
1222 bool FixedArray::ContainsOnlySmisOrHoles() {
1223 Object* the_hole = GetHeap()->the_hole_value();
1224 Object** current = GetFirstElementAddress();
1225 for (int i = 0; i < length(); ++i) {
1226 Object* candidate = *current++;
1227 if (!candidate->IsSmi() && candidate != the_hole) return false;
// Raw accessor for the elements backing store (no type check beyond the cast).
1233 FixedArrayBase* JSObject::elements() {
1234 Object* array = READ_FIELD(this, kElementsOffset);
1235 return static_cast<FixedArrayBase*>(array);

// Debug check: in FAST_SMI_ONLY_ELEMENTS mode, all elements must be Smis or
// holes. Reads the elements field raw to tolerate filler maps mid-GC.
1238 void JSObject::ValidateSmiOnlyElements() {
1240 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1241 Heap* heap = GetHeap();
1242 // Don't use elements, since integrity checks will fail if there
1243 // are filler pointers in the array.
1244 FixedArray* fixed_array =
1245 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1246 Map* map = fixed_array->map();
1247 // Arrays that have been shifted in place can't be verified.
1248 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1249 map != heap->raw_unchecked_two_pointer_filler_map() &&
1250 map != heap->free_space_map()) {
1251 for (int i = 0; i < fixed_array->length(); i++) {
1252 Object* current = fixed_array->get(i);
1253 ASSERT(current->IsSmi() || current->IsTheHole());
// ---- Elements-kind transition guards ----
// These ensure the receiver's elements kind is general enough to store the
// values about to be written, transitioning the map when it is not.

// Widen to FAST_ELEMENTS so arbitrary heap objects can be stored.
1261 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
1263 ValidateSmiOnlyElements();
1265 if ((map()->elements_kind() != FAST_ELEMENTS)) {
1266 return TransitionElementsKind(FAST_ELEMENTS);

// Scan `count` candidate values and compute the minimal elements kind that
// can hold all of them; transition if it differs from the current kind.
// A HeapNumber only forces FAST_DOUBLE_ELEMENTS when the mode allows
// converted doubles; any other non-Smi forces FAST_ELEMENTS.
1272 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1274 EnsureElementsMode mode) {
1275 ElementsKind current_kind = map()->elements_kind();
1276 ElementsKind target_kind = current_kind;
1277 ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1278 if (current_kind == FAST_ELEMENTS) return this;
1280 Heap* heap = GetHeap();
1281 Object* the_hole = heap->the_hole_value();
1282 Object* heap_number_map = heap->heap_number_map();
1283 for (uint32_t i = 0; i < count; ++i) {
1284 Object* current = *objects++;
1285 if (!current->IsSmi() && current != the_hole) {
1286 if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
1287 HeapObject::cast(current)->map() == heap_number_map) {
1288 target_kind = FAST_DOUBLE_ELEMENTS;
1290 target_kind = FAST_ELEMENTS;
1296 if (target_kind != current_kind) {
1297 return TransitionElementsKind(target_kind);

// FixedArrayBase overload: dispatch on the backing store's map. A plain
// FixedArray is scanned element-by-element via the pointer overload; a
// FixedDoubleArray (only legal with ALLOW_COPIED_DOUBLE_ELEMENTS) forces a
// transition to FAST_DOUBLE_ELEMENTS when still in smi-only mode.
1303 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1304 EnsureElementsMode mode) {
1305 if (elements->map() != GetHeap()->fixed_double_array_map()) {
1306 ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1307 elements->map() == GetHeap()->fixed_cow_array_map());
1308 if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1309 mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1311 Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1312 return EnsureCanContainElements(objects, elements->length(), mode);
1315 ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1316 if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
1317 return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
// Returns the map this object should use after transitioning to `to_kind`.
// Fast path: JSArrays using the canonical per-context smi-array map can be
// switched to the matching canonical object/double array map without
// allocation; everything else falls through to the slow path.
1324 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1325 ElementsKind to_kind) {
1326 Map* current_map = map();
1327 ElementsKind from_kind = current_map->elements_kind();
1329 if (from_kind == to_kind) return current_map;
1331 Context* global_context = isolate->context()->global_context();
1332 if (current_map == global_context->smi_js_array_map()) {
1333 if (to_kind == FAST_ELEMENTS) {
1334 return global_context->object_js_array_map();
1336 if (to_kind == FAST_DOUBLE_ELEMENTS) {
1337 return global_context->double_js_array_map();
1339 ASSERT(to_kind == DICTIONARY_ELEMENTS);
1343 return GetElementsTransitionMapSlow(to_kind);
// Atomically (from the mutator's perspective) installs a new map and a new
// elements backing store. new_map may be NULL, in which case only elements
// change. The ASSERTs check map/backing-store representation agreement.
1347 void JSObject::set_map_and_elements(Map* new_map,
1348 FixedArrayBase* value,
1349 WriteBarrierMode mode) {
1350 ASSERT(value->HasValidElements());
1352 ValidateSmiOnlyElements();
1354 if (new_map != NULL) {
1355 if (mode == UPDATE_WRITE_BARRIER) {
1358 ASSERT(mode == SKIP_WRITE_BARRIER);
1359 set_map_no_write_barrier(new_map);
1362 ASSERT((map()->has_fast_elements() ||
1363 map()->has_fast_smi_only_elements() ||
1364 (value == GetHeap()->empty_fixed_array())) ==
1365 (value->map() == GetHeap()->fixed_array_map() ||
1366 value->map() == GetHeap()->fixed_cow_array_map()));
1367 ASSERT((value == GetHeap()->empty_fixed_array()) ||
1368 (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1369 WRITE_FIELD(this, kElementsOffset, value);
1370 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);

// Convenience wrapper: replace elements only, keep the current map.
1374 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1375 set_map_and_elements(NULL, value, mode);
// Point the properties field at the shared empty fixed array. No write
// barrier needed: the empty array is asserted to live outside new space.
1379 void JSObject::initialize_properties() {
1380 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1381 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());

// Same for the elements field; only valid for fast(-smi) elements maps.
1385 void JSObject::initialize_elements() {
1386 ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
1387 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1388 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());

// Reset to an empty elements store, transitioning the map to the most
// restrictive kind allowed by the --smi-only-arrays flag. May fail on
// allocation, hence MaybeObject.
1392 MaybeObject* JSObject::ResetElements() {
1394 ElementsKind elements_kind = FLAG_smi_only_arrays
1395 ? FAST_SMI_ONLY_ELEMENTS
1397 MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
1399 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1400 set_map(Map::cast(obj));
1401 initialize_elements();
// ---- Oddball (undefined/null/true/false/the-hole) and global cells ----

1406 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1407 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)

// Oddball kind is stored as a Smi in the kKindOffset field.
1410 byte Oddball::kind() {
1411 return Smi::cast(READ_FIELD(this, kKindOffset))->value();

1415 void Oddball::set_kind(byte value) {
1416 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));

1420 Object* JSGlobalPropertyCell::value() {
1421 return READ_FIELD(this, kValueOffset);

1425 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1426 // The write barrier is not used for global property cells.
1427 ASSERT(!val->IsJSGlobalPropertyCell());
1428 WRITE_FIELD(this, kValueOffset, val);
// Header size (in bytes) of this object, by instance type. Internal fields
// and in-object properties start immediately after the header.
// NOTE(review): the `switch (type) {` line and a few `case` labels are elided
// in this excerpt; the cases below belong to that switch.
1432 int JSObject::GetHeaderSize() {
1433 InstanceType type = map()->instance_type();
1434 // Check for the most common kind of JavaScript object before
1435 // falling into the generic switch. This speeds up the internal
1436 // field operations considerably on average.
1437 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1439 case JS_GLOBAL_PROXY_TYPE:
1440 return JSGlobalProxy::kSize;
1441 case JS_GLOBAL_OBJECT_TYPE:
1442 return JSGlobalObject::kSize;
1443 case JS_BUILTINS_OBJECT_TYPE:
1444 return JSBuiltinsObject::kSize;
1445 case JS_FUNCTION_TYPE:
1446 return JSFunction::kSize;
1448 return JSValue::kSize;
1450 return JSDate::kSize;
1452 return JSArray::kSize;
1453 case JS_WEAK_MAP_TYPE:
1454 return JSWeakMap::kSize;
1455 case JS_REGEXP_TYPE:
1456 return JSRegExp::kSize;
1457 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1458 return JSObject::kHeaderSize;
1459 case JS_MESSAGE_OBJECT_TYPE:
1460 return JSMessageObject::kSize;
// ---- Internal fields and the optional external-resource slot ----
// Internal fields follow the header; in-object properties sit at the end of
// the object, so internal-field indices need no adjustment.

// Number of internal fields = (object size - header) in pointers, minus
// in-object properties and, if present, the external-resource slot.
1468 int JSObject::GetInternalFieldCount() {
1469 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1470 // Make sure to adjust for the number of in-object properties. These
1471 // properties do contribute to the size, but are not internal fields.
1472 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1473 map()->inobject_properties() - (map()->has_external_resource()?1:0);

1477 int JSObject::GetInternalFieldOffset(int index) {
1478 ASSERT(index < GetInternalFieldCount() && index >= 0);
1479 return GetHeaderSize() + (kPointerSize * index);

1483 Object* JSObject::GetInternalField(int index) {
1484 ASSERT(index < GetInternalFieldCount() && index >= 0);
1485 // Internal objects do follow immediately after the header, whereas in-object
1486 // properties are at the end of the object. Therefore there is no need
1487 // to adjust the index here.
1488 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));

1492 void JSObject::SetInternalField(int index, Object* value) {
1493 ASSERT(index < GetInternalFieldCount() && index >= 0);
1494 // Internal objects do follow immediately after the header, whereas in-object
1495 // properties are at the end of the object. Therefore there is no need
1496 // to adjust the index here.
1497 int offset = GetHeaderSize() + (kPointerSize * index);
1498 WRITE_FIELD(this, offset, value);
1499 WRITE_BARRIER(GetHeap(), this, offset, value);

// Smi overload: Smis are immediates, so no write barrier is emitted.
1503 void JSObject::SetInternalField(int index, Smi* value) {
1504 ASSERT(index < GetInternalFieldCount() && index >= 0);
1505 // Internal objects do follow immediately after the header, whereas in-object
1506 // properties are at the end of the object. Therefore there is no need
1507 // to adjust the index here.
1508 int offset = GetHeaderSize() + (kPointerSize * index);
1509 WRITE_FIELD(this, offset, value);

// The external-resource slot sits directly after the internal fields.
1513 void JSObject::SetExternalResourceObject(Object *value) {
1514 ASSERT(map()->has_external_resource());
1515 int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
1516 WRITE_FIELD(this, offset, value);
1517 WRITE_BARRIER(GetHeap(), this, offset, value);

// Returns undefined when the map declares no external resource slot.
1521 Object *JSObject::GetExternalResourceObject() {
1522 if (map()->has_external_resource()) {
1523 return READ_FIELD(this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
1525 return GetHeap()->undefined_value();
1530 // Access fast-case object properties at index. The use of these routines
1531 // is needed to correctly distinguish between properties stored in-object and
1532 // properties stored in the properties array.
// After subtracting inobject_properties(), a negative index means the
// property lives in-object (addressed back from instance_size()); a
// non-negative index selects a slot in the out-of-object properties array.
// NOTE(review): the `if (index < 0)`/`else` lines are elided in this excerpt.
1533 Object* JSObject::FastPropertyAt(int index) {
1534 // Adjust for the number of properties stored in the object.
1535 index -= map()->inobject_properties();
1537 int offset = map()->instance_size() + (index * kPointerSize);
1538 return READ_FIELD(this, offset);
1540 ASSERT(index < properties()->length());
1541 return properties()->get(index);

// Write counterpart of FastPropertyAt; same in-object vs. array split.
1546 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1547 // Adjust for the number of properties stored in the object.
1548 index -= map()->inobject_properties();
1550 int offset = map()->instance_size() + (index * kPointerSize);
1551 WRITE_FIELD(this, offset, value);
1552 WRITE_BARRIER(GetHeap(), this, offset, value);
1554 ASSERT(index < properties()->length());
1555 properties()->set(index, value);

// Byte offset of an in-object property (index is negative after adjustment).
1561 int JSObject::GetInObjectPropertyOffset(int index) {
1562 // Adjust for the number of properties stored in the object.
1563 index -= map()->inobject_properties();
1565 return map()->instance_size() + (index * kPointerSize);

1569 Object* JSObject::InObjectPropertyAt(int index) {
1570 // Adjust for the number of properties stored in the object.
1571 index -= map()->inobject_properties();
1573 int offset = map()->instance_size() + (index * kPointerSize);
1574 return READ_FIELD(this, offset);

// In-object write with caller-selected write-barrier mode.
1578 Object* JSObject::InObjectPropertyAtPut(int index,
1580 WriteBarrierMode mode) {
1581 // Adjust for the number of properties stored in the object.
1582 index -= map()->inobject_properties();
1584 int offset = map()->instance_size() + (index * kPointerSize);
1585 WRITE_FIELD(this, offset, value);
1586 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Fills a freshly allocated object's body: the map's pre-allocated property
// slots get pre_allocated_value, the rest get filler_value. Both values must
// be old-space/immediate so the raw WRITE_FIELDs need no write barrier.
1592 void JSObject::InitializeBody(Map* map,
1593 Object* pre_allocated_value,
1594 Object* filler_value) {
1595 ASSERT(!filler_value->IsHeapObject() ||
1596 !GetHeap()->InNewSpace(filler_value));
1597 ASSERT(!pre_allocated_value->IsHeapObject() ||
1598 !GetHeap()->InNewSpace(pre_allocated_value));
1599 int size = map->instance_size();
1600 int offset = kHeaderSize;
1601 if (filler_value != pre_allocated_value) {
1602 int pre_allocated = map->pre_allocated_property_fields();
1603 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1604 for (int i = 0; i < pre_allocated; i++) {
1605 WRITE_FIELD(this, offset, pre_allocated_value);
1606 offset += kPointerSize;
1609 while (offset < size) {
1610 WRITE_FIELD(this, offset, filler_value);
1611 offset += kPointerSize;
// Fast-mode objects use a plain FixedArray for properties; dictionary-mode
// objects use a StringDictionary.
1616 bool JSObject::HasFastProperties() {
1617 return !properties()->IsDictionary();

// Per-object cap on fast properties before going to dictionary mode.
1621 int JSObject::MaxFastProperties() {
1622 // Allow extra fast properties if the object has more than
1623 // kMaxFastProperties in-object properties. When this is the case,
1624 // it is very unlikely that the object is being used as a dictionary
1625 // and there is a good chance that allowing more map transitions
1626 // will be worth it.
1627 return Max(map()->inobject_properties(), kMaxFastProperties);

// Fill a Struct's fields with undefined; no write barriers needed for the
// immortal undefined value.
1631 void Struct::InitializeBody(int object_size) {
1632 Object* value = GetHeap()->undefined_value();
1633 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1634 WRITE_FIELD(this, offset, value);
// Tries to interpret this object as a uint32 array index. Non-negative Smis
// and HeapNumbers that round-trip through uint32 succeed.
// NOTE(review): the `if (IsSmi())` guard and success returns are elided in
// this excerpt.
1639 bool Object::ToArrayIndex(uint32_t* index) {
1641 int value = Smi::cast(this)->value();
1642 if (value < 0) return false;
1646 if (IsHeapNumber()) {
1647 double value = HeapNumber::cast(this)->value();
1648 uint32_t uint_value = static_cast<uint32_t>(value);
1649 if (value == static_cast<double>(uint_value)) {
1650 *index = uint_value;

// True iff this is a String wrapper (JSValue) whose string has a character
// at position `index`.
1658 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1659 if (!this->IsJSValue()) return false;
1661 JSValue* js_value = JSValue::cast(this);
1662 if (!js_value->value()->IsString()) return false;
1664 String* str = String::cast(js_value->value());
1665 if (index >= (uint32_t)str->length()) return false;
// Checked downcast shared by FixedArray and FixedDoubleArray.
1671 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1672 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1673 return reinterpret_cast<FixedArrayBase*>(object);

// Bounds-checked element read.
1677 Object* FixedArray::get(int index) {
1678 ASSERT(index >= 0 && index < this->length());
1679 return READ_FIELD(this, kHeaderSize + index * kPointerSize);

// Smi store: no write barrier needed for immediates. COW arrays must not be
// mutated in place, hence the map assert.
1683 void FixedArray::set(int index, Smi* value) {
1684 ASSERT(map() != HEAP->fixed_cow_array_map());
1685 ASSERT(index >= 0 && index < this->length());
1686 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1687 int offset = kHeaderSize + index * kPointerSize;
1688 WRITE_FIELD(this, offset, value);

// General store with full write barrier.
1692 void FixedArray::set(int index, Object* value) {
1693 ASSERT(map() != HEAP->fixed_cow_array_map());
1694 ASSERT(index >= 0 && index < this->length());
1695 int offset = kHeaderSize + index * kPointerSize;
1696 WRITE_FIELD(this, offset, value);
1697 WRITE_BARRIER(GetHeap(), this, offset, value);
// ---- FixedDoubleArray: unboxed doubles, hole encoded as a reserved NaN ----

// The hole is a specific NaN bit pattern; compare bitwise, not numerically.
1701 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1702 return BitCast<uint64_t, double>(value) == kHoleNanInt64;

1706 inline double FixedDoubleArray::hole_nan_as_double() {
1707 return BitCast<double, uint64_t>(kHoleNanInt64);

// Canonical NaN used for real NaN values, asserted distinct from the hole.
1711 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1712 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1713 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1714 return OS::nan_value();

// Read a raw double; caller must have ruled out the hole.
1718 double FixedDoubleArray::get_scalar(int index) {
1719 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1720 map() != HEAP->fixed_array_map());
1721 ASSERT(index >= 0 && index < this->length());
1722 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1723 ASSERT(!is_the_hole_nan(result));

// Raw 64-bit representation (works for hole and NaN alike).
1727 int64_t FixedDoubleArray::get_representation(int index) {
1728 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1729 map() != HEAP->fixed_array_map());
1730 ASSERT(index >= 0 && index < this->length());
1731 return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);

// Boxing read: the hole maps to the hole value, otherwise a Number is
// allocated (may fail, hence MaybeObject).
1734 MaybeObject* FixedDoubleArray::get(int index) {
1735 if (is_the_hole(index)) {
1736 return GetHeap()->the_hole_value();
1738 return GetHeap()->NumberFromDouble(get_scalar(index));

// Store, canonicalizing real NaNs so they can't collide with the hole.
1743 void FixedDoubleArray::set(int index, double value) {
1744 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1745 map() != HEAP->fixed_array_map());
1746 int offset = kHeaderSize + index * kDoubleSize;
1747 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1748 WRITE_DOUBLE_FIELD(this, offset, value);

1752 void FixedDoubleArray::set_the_hole(int index) {
1753 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1754 map() != HEAP->fixed_array_map());
1755 int offset = kHeaderSize + index * kDoubleSize;
1756 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());

1760 bool FixedDoubleArray::is_the_hole(int index) {
1761 int offset = kHeaderSize + index * kDoubleSize;
1762 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
// The barrier may only be skipped for new-space objects while incremental
// marking is off; the unused AssertNoAllocation& proves no GC can intervene
// between this query and the store.
1766 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1767 Heap* heap = GetHeap();
1768 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1769 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1770 return UPDATE_WRITE_BARRIER;
// ---- FixedArray: specialized setters with varying barrier guarantees ----

// Store with caller-selected write-barrier mode.
1774 void FixedArray::set(int index,
1776 WriteBarrierMode mode) {
1777 ASSERT(map() != HEAP->fixed_cow_array_map());
1778 ASSERT(index >= 0 && index < this->length());
1779 int offset = kHeaderSize + index * kPointerSize;
1780 WRITE_FIELD(this, offset, value);
1781 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);

// Skips the incremental-marking barrier but still records old-to-new
// pointers in the store buffer. Only safe when the caller guarantees the
// marker cannot observe the array (see WhitenessWitness).
1785 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
1788 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1789 ASSERT(index >= 0 && index < array->length());
1790 int offset = kHeaderSize + index * kPointerSize;
1791 WRITE_FIELD(array, offset, value);
1792 Heap* heap = array->GetHeap();
1793 if (heap->InNewSpace(value)) {
1794 heap->RecordWrite(array->address(), offset);

// No barrier at all; asserted safe because value is not in new space.
1799 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1802 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1803 ASSERT(index >= 0 && index < array->length());
1804 ASSERT(!HEAP->InNewSpace(value));
1805 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);

// Sentinel stores (undefined/null/the-hole): barrier-free because these
// immortal immovable values never live in new space.
1809 void FixedArray::set_undefined(int index) {
1810 ASSERT(map() != HEAP->fixed_cow_array_map());
1811 set_undefined(GetHeap(), index);

1815 void FixedArray::set_undefined(Heap* heap, int index) {
1816 ASSERT(index >= 0 && index < this->length());
1817 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1818 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1819 heap->undefined_value());

1823 void FixedArray::set_null(int index) {
1824 set_null(GetHeap(), index);

1828 void FixedArray::set_null(Heap* heap, int index) {
1829 ASSERT(index >= 0 && index < this->length());
1830 ASSERT(!heap->InNewSpace(heap->null_value()));
1831 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());

1835 void FixedArray::set_the_hole(int index) {
1836 ASSERT(map() != HEAP->fixed_cow_array_map());
1837 ASSERT(index >= 0 && index < this->length());
1838 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1840 kHeaderSize + index * kPointerSize,
1841 GetHeap()->the_hole_value());

// Unchecked variants: no bounds/COW asserts; callers take responsibility.
1845 void FixedArray::set_unchecked(int index, Smi* value) {
1846 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1847 int offset = kHeaderSize + index * kPointerSize;
1848 WRITE_FIELD(this, offset, value);

1852 void FixedArray::set_unchecked(Heap* heap,
1855 WriteBarrierMode mode) {
1856 int offset = kHeaderSize + index * kPointerSize;
1857 WRITE_FIELD(this, offset, value);
1858 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);

1862 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1863 ASSERT(index >= 0 && index < this->length());
1864 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1865 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());

// Pointer to element 0, for bulk memcpy-style operations.
1869 Object** FixedArray::data_start() {
1870 return HeapObject::RawField(this, kHeaderSize);
// ---- DescriptorArray: property descriptors attached to a Map ----

// Empty arrays are either the canonical empty descriptor array or too short
// to hold even one descriptor (length <= kFirstIndex).
1874 bool DescriptorArray::IsEmpty() {
1875 ASSERT(this->IsSmi() ||
1876 this->length() > kFirstIndex ||
1877 this == HEAP->empty_descriptor_array());
1878 return this->IsSmi() || length() <= kFirstIndex;

// Backing storage for the owning map's bit_field3, kept as a Smi.
1882 int DescriptorArray::bit_field3_storage() {
1883 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1884 return Smi::cast(storage)->value();

1887 void DescriptorArray::set_bit_field3_storage(int value) {
1889 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));

// Swap two slots without the incremental-marking barrier (sorting helper).
1893 void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
1896 Object* tmp = array->get(first);
1897 NoIncrementalWriteBarrierSet(array, first, array->get(second));
1898 NoIncrementalWriteBarrierSet(array, second, tmp);
// Finds the descriptor index for `name`, or kNotFound. Linear scan for small
// symbol-keyed arrays, binary search otherwise (array is kept sorted).
1902 int DescriptorArray::Search(String* name) {
1903 SLOW_ASSERT(IsSortedNoDuplicates());
1905 // Check for empty descriptor array.
1906 int nof = number_of_descriptors();
1907 if (nof == 0) return kNotFound;
1909 // Fast case: do linear search for small arrays.
1910 const int kMaxElementsForLinearSearch = 8;
1911 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1912 return LinearSearch(name, nof);
1915 // Slow case: perform binary search.
1916 return BinarySearch(name, 0, nof - 1);

// Search memoized through the isolate's descriptor lookup cache.
1920 int DescriptorArray::SearchWithCache(String* name) {
1921 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1922 if (number == DescriptorLookupCache::kAbsent) {
1923 number = Search(name);
1924 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
// ---- Per-descriptor accessors ----
// Keys live in this array; values and details live in the content array.

1930 String* DescriptorArray::GetKey(int descriptor_number) {
1931 ASSERT(descriptor_number < number_of_descriptors());
1932 return String::cast(get(ToKeyIndex(descriptor_number)));

1936 Object* DescriptorArray::GetValue(int descriptor_number) {
1937 ASSERT(descriptor_number < number_of_descriptors());
1938 return GetContentArray()->get(ToValueIndex(descriptor_number));

// PropertyDetails encoded as a Smi.
1942 Smi* DescriptorArray::GetDetails(int descriptor_number) {
1943 ASSERT(descriptor_number < number_of_descriptors());
1944 return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));

1948 PropertyType DescriptorArray::GetType(int descriptor_number) {
1949 ASSERT(descriptor_number < number_of_descriptors());
1950 return PropertyDetails(GetDetails(descriptor_number)).type();

1954 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1955 return Descriptor::IndexFromValue(GetValue(descriptor_number));

1959 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1960 return JSFunction::cast(GetValue(descriptor_number));

1964 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1965 ASSERT(GetType(descriptor_number) == CALLBACKS);
1966 return GetValue(descriptor_number);

// CALLBACKS descriptors wrap a native AccessorDescriptor in a Foreign.
1970 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1971 ASSERT(GetType(descriptor_number) == CALLBACKS);
1972 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1973 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());

1977 bool DescriptorArray::IsProperty(int descriptor_number) {
1978 Entry entry(this, descriptor_number);
1979 return IsPropertyDescriptor(&entry);

// Transition-only descriptors carry no property of their own. An accessor
// pair whose getter and setter are both Maps is a pure accessor transition.
// NOTE(review): several case labels/returns of this switch are elided in
// this excerpt.
1983 bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
1984 switch (GetType(descriptor_number)) {
1985 case MAP_TRANSITION:
1986 case CONSTANT_TRANSITION:
1987 case ELEMENTS_TRANSITION:
1990 Object* value = GetValue(descriptor_number);
1991 if (!value->IsAccessorPair()) return false;
1992 AccessorPair* accessors = AccessorPair::cast(value);
1993 return accessors->getter()->IsMap() && accessors->setter()->IsMap();
1997 case CONSTANT_FUNCTION:
2000 case NULL_DESCRIPTOR:
2003 UNREACHABLE(); // Keep the compiler happy.

2008 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
2009 return GetType(descriptor_number) == NULL_DESCRIPTOR;

2013 bool DescriptorArray::IsDontEnum(int descriptor_number) {
2014 return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();

// Copies descriptor descriptor_number out into `desc`.
2018 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2019 desc->Init(GetKey(descriptor_number),
2020 GetValue(descriptor_number),
2021 PropertyDetails(GetDetails(descriptor_number)));

// Writes a descriptor. The WhitenessWitness argument proves both arrays are
// unmarked, so the cheaper no-incremental-barrier stores are safe.
2025 void DescriptorArray::Set(int descriptor_number,
2027 const WhitenessWitness&) {
2029 ASSERT(descriptor_number < number_of_descriptors());
2031 NoIncrementalWriteBarrierSet(this,
2032 ToKeyIndex(descriptor_number),
2034 FixedArray* content_array = GetContentArray();
2035 NoIncrementalWriteBarrierSet(content_array,
2036 ToValueIndex(descriptor_number),
2038 NoIncrementalWriteBarrierSet(content_array,
2039 ToDetailsIndex(descriptor_number),
2040 desc->GetDetails().AsSmi());

// Swaps two whole descriptors (key, value, details) — sorting helper.
2044 void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
2045 int first, int second) {
2046 NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
2047 FixedArray* content_array = GetContentArray();
2048 NoIncrementalWriteBarrierSwap(content_array,
2049 ToValueIndex(first),
2050 ToValueIndex(second));
2051 NoIncrementalWriteBarrierSwap(content_array,
2052 ToDetailsIndex(first),
2053 ToDetailsIndex(second));
// RAII token: while alive, incremental marking is in a no-marking scope and
// the array (and its content array) are asserted white, which justifies the
// barrier-free stores in DescriptorArray::Set.
2057 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
2058 : marking_(array->GetHeap()->incremental_marking()) {
2059 marking_->EnterNoMarkingScope();
2060 if (array->number_of_descriptors() > 0) {
2061 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2062 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);

2067 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2068 marking_->LeaveNoMarkingScope();
// Capacity is the next power of two >= 2 * requested size (load factor
// headroom), clamped to a minimum of 32.
2072 template<typename Shape, typename Key>
2073 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2074 const int kMinCapacity = 32;
2075 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2076 if (capacity < kMinCapacity) {
2077 capacity = kMinCapacity; // Guarantee min capacity.

2083 template<typename Shape, typename Key>
2084 int HashTable<Shape, Key>::FindEntry(Key key) {
2085 return FindEntry(GetIsolate(), key);

2089 // Find entry for key otherwise return kNotFound.
// Open-addressed probe loop: undefined terminates the search (never stored
// in a live slot), the-hole marks deleted slots and is skipped.
// NOTE(review): the loop header and `return kNotFound` are elided here.
2090 template<typename Shape, typename Key>
2091 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2092 uint32_t capacity = Capacity();
2093 uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2095 // EnsureCapacity will guarantee the hash table is never full.
2097 Object* element = KeyAt(entry);
2099 if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2100 if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2101 Shape::IsMatch(key, element)) return entry;
2102 entry = NextProbe(entry, count++, capacity);
// The kMaxNumberKeyIndex slot packs a "requires slow elements" flag bit with
// the largest numeric key seen (shifted by kRequiresSlowElementsTagSize).

2108 bool SeededNumberDictionary::requires_slow_elements() {
2109 Object* max_index_object = get(kMaxNumberKeyIndex);
2110 if (!max_index_object->IsSmi()) return false;
2112 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);

// Largest numeric key; only meaningful while fast elements are allowed.
2115 uint32_t SeededNumberDictionary::max_number_key() {
2116 ASSERT(!requires_slow_elements());
2117 Object* max_index_object = get(kMaxNumberKeyIndex);
2118 if (!max_index_object->IsSmi()) return 0;
2119 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2120 return value >> kRequiresSlowElementsTagSize;

// Latches the flag; overwrites the stored max key.
2123 void SeededNumberDictionary::set_requires_slow_elements() {
2124 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2128 // ------------------------------------
// Generated `Type* Type::cast(Object*)` checked-downcast definitions, one
// per heap object type (macro expansion; no hand-written code here).
2132 CAST_ACCESSOR(FixedArray)
2133 CAST_ACCESSOR(FixedDoubleArray)
2134 CAST_ACCESSOR(DescriptorArray)
2135 CAST_ACCESSOR(DeoptimizationInputData)
2136 CAST_ACCESSOR(DeoptimizationOutputData)
2137 CAST_ACCESSOR(TypeFeedbackCells)
2138 CAST_ACCESSOR(SymbolTable)
2139 CAST_ACCESSOR(JSFunctionResultCache)
2140 CAST_ACCESSOR(NormalizedMapCache)
2141 CAST_ACCESSOR(ScopeInfo)
2142 CAST_ACCESSOR(CompilationCacheTable)
2143 CAST_ACCESSOR(CodeCacheHashTable)
2144 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2145 CAST_ACCESSOR(MapCache)
2146 CAST_ACCESSOR(String)
2147 CAST_ACCESSOR(SeqString)
2148 CAST_ACCESSOR(SeqAsciiString)
2149 CAST_ACCESSOR(SeqTwoByteString)
2150 CAST_ACCESSOR(SlicedString)
2151 CAST_ACCESSOR(ConsString)
2152 CAST_ACCESSOR(ExternalString)
2153 CAST_ACCESSOR(ExternalAsciiString)
2154 CAST_ACCESSOR(ExternalTwoByteString)
2155 CAST_ACCESSOR(JSReceiver)
2156 CAST_ACCESSOR(JSObject)
2158 CAST_ACCESSOR(HeapObject)
2159 CAST_ACCESSOR(HeapNumber)
2160 CAST_ACCESSOR(Oddball)
2161 CAST_ACCESSOR(JSGlobalPropertyCell)
2162 CAST_ACCESSOR(SharedFunctionInfo)
2164 CAST_ACCESSOR(JSFunction)
2165 CAST_ACCESSOR(GlobalObject)
2166 CAST_ACCESSOR(JSGlobalProxy)
2167 CAST_ACCESSOR(JSGlobalObject)
2168 CAST_ACCESSOR(JSBuiltinsObject)
2170 CAST_ACCESSOR(JSArray)
2171 CAST_ACCESSOR(JSRegExp)
2172 CAST_ACCESSOR(JSProxy)
2173 CAST_ACCESSOR(JSFunctionProxy)
2174 CAST_ACCESSOR(JSSet)
2175 CAST_ACCESSOR(JSMap)
2176 CAST_ACCESSOR(JSWeakMap)
2177 CAST_ACCESSOR(Foreign)
2178 CAST_ACCESSOR(ByteArray)
2179 CAST_ACCESSOR(FreeSpace)
2180 CAST_ACCESSOR(ExternalArray)
2181 CAST_ACCESSOR(ExternalByteArray)
2182 CAST_ACCESSOR(ExternalUnsignedByteArray)
2183 CAST_ACCESSOR(ExternalShortArray)
2184 CAST_ACCESSOR(ExternalUnsignedShortArray)
2185 CAST_ACCESSOR(ExternalIntArray)
2186 CAST_ACCESSOR(ExternalUnsignedIntArray)
2187 CAST_ACCESSOR(ExternalFloatArray)
2188 CAST_ACCESSOR(ExternalDoubleArray)
2189 CAST_ACCESSOR(ExternalPixelArray)
2190 CAST_ACCESSOR(Struct)

// And one cast accessor per Struct subtype from STRUCT_LIST.
2193 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2194 STRUCT_LIST(MAKE_STRUCT_CAST)
2195 #undef MAKE_STRUCT_CAST
// Templated checked downcast for hash tables.
2198 template <typename Shape, typename Key>
2199 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2200 ASSERT(obj->IsHashTable());
2201 return reinterpret_cast<HashTable*>(obj);

// Generated Smi-field accessors (length, size, symbol id).
2205 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2206 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2208 SMI_ACCESSORS(String, length, kLengthOffset)
2209 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
// ---- String: hash field, equality, flattening ----

2212 uint32_t String::hash_field() {
2213 return READ_UINT32_FIELD(this, kHashFieldOffset);

// On 64-bit hosts the hash field's upper padding word is zeroed too.
2217 void String::set_hash_field(uint32_t value) {
2218 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2219 #if V8_HOST_ARCH_64_BIT
2220 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);

// Identity, then symbol identity (symbols are unique), then slow compare.
// NOTE(review): the symbol-branch return and #endif are elided here.
2225 bool String::Equals(String* other) {
2226 if (other == this) return true;
2227 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2230 return SlowEquals(other);

// Flattens cons strings; non-cons strings are already flat. A flat cons
// returns its first component directly.
2234 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2235 if (!StringShape(this).IsCons()) return this;
2236 ConsString* cons = ConsString::cast(this);
2237 if (cons->IsFlat()) return cons->first();
2238 return SlowTryFlatten(pretenure);

// Like TryFlatten, but on allocation failure returns the original string
// instead of a failure object.
2242 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2243 MaybeObject* flat = TryFlatten(pretenure);
2244 Object* successfully_flattened;
2245 if (!flat->ToObject(&successfully_flattened)) return this;
2246 return String::cast(successfully_flattened);
// Character read dispatched on the full representation tag
// (sequential/cons/external/sliced x ascii/two-byte).
2250 uint16_t String::Get(int index) {
2251 ASSERT(index >= 0 && index < length());
2252 switch (StringShape(this).full_representation_tag()) {
2253 case kSeqStringTag | kAsciiStringTag:
2254 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2255 case kSeqStringTag | kTwoByteStringTag:
2256 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2257 case kConsStringTag | kAsciiStringTag:
2258 case kConsStringTag | kTwoByteStringTag:
2259 return ConsString::cast(this)->ConsStringGet(index);
2260 case kExternalStringTag | kAsciiStringTag:
2261 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2262 case kExternalStringTag | kTwoByteStringTag:
2263 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2264 case kSlicedStringTag | kAsciiStringTag:
2265 case kSlicedStringTag | kTwoByteStringTag:
2266 return SlicedString::cast(this)->SlicedStringGet(index);

// Character write; only legal on sequential strings.
2276 void String::Set(int index, uint16_t value) {
2277 ASSERT(index >= 0 && index < length());
2278 ASSERT(StringShape(this).IsSequential());
2280 return this->IsAsciiRepresentation()
2281 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2282 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
// A string is flat unless it is a cons with a non-empty second part.
2286 bool String::IsFlat() {
2287 if (!StringShape(this).IsCons()) return true;
2288 return ConsString::cast(this)->second()->length() == 0;

2292 String* String::GetUnderlying() {
2293 // Giving direct access to underlying string only makes sense if the
2294 // wrapping string is already flattened.
2295 ASSERT(this->IsFlat());
2296 ASSERT(StringShape(this).IsIndirect());
2297 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2298 const int kUnderlyingOffset = SlicedString::kParentOffset;
2299 return String::cast(READ_FIELD(this, kUnderlyingOffset));
// Element access for sequential ASCII strings: one byte per character,
// stored immediately after the header.
2303 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2304 ASSERT(index >= 0 && index < length());
2305 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2309 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
// Value must fit in the ASCII range; it is narrowed to a byte below.
2310 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2311 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2312 static_cast<byte>(value));
// Address of the character payload (directly past the object header).
2316 Address SeqAsciiString::GetCharsAddress() {
2317 return FIELD_ADDR(this, kHeaderSize);
2321 char* SeqAsciiString::GetChars() {
2322 return reinterpret_cast<char*>(GetCharsAddress());
2326 Address SeqTwoByteString::GetCharsAddress() {
2327 return FIELD_ADDR(this, kHeaderSize);
2331 uc16* SeqTwoByteString::GetChars() {
2332 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
// Element access for sequential two-byte strings: one uint16 per character.
2336 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2337 ASSERT(index >= 0 && index < length());
2338 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2342 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2343 ASSERT(index >= 0 && index < length());
2344 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
// Object sizes derived from the string length; the instance_type
// parameter is unused here (present for a uniform sizing interface).
2348 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2349 return SizeFor(length());
2353 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2354 return SizeFor(length());
// SlicedString: a view (offset + parent) into another string.
2358 String* SlicedString::parent() {
2359 return String::cast(READ_FIELD(this, kParentOffset));
2363 void SlicedString::set_parent(String* parent) {
// Slices may only point at flat backing stores (sequential or external),
// never at another indirect string.
2364 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2365 WRITE_FIELD(this, kParentOffset, parent);
2369 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
// ConsString: lazy concatenation of two child strings (first + second).
// The unchecked_* variants skip the String cast, for use during GC when
// maps may be overwritten and casts would fail.
2372 String* ConsString::first() {
2373 return String::cast(READ_FIELD(this, kFirstOffset));
2377 Object* ConsString::unchecked_first() {
2378 return READ_FIELD(this, kFirstOffset);
2382 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2383 WRITE_FIELD(this, kFirstOffset, value);
2384 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2388 String* ConsString::second() {
2389 return String::cast(READ_FIELD(this, kSecondOffset));
2393 Object* ConsString::unchecked_second() {
2394 return READ_FIELD(this, kSecondOffset);
2398 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2399 WRITE_FIELD(this, kSecondOffset, value);
2400 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
// Short external strings lack the cached-data field; tested via the
// instance-type bits of the map.
2404 bool ExternalString::is_short() {
2405 InstanceType type = map()->instance_type();
2406 return (type & kShortExternalStringMask) == kShortExternalStringTag;
// Resource pointer for an externally-backed ASCII string.
2410 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2411 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
// Refreshes the cached data pointer from the resource; short external
// strings have no cache field, so this is a no-op for them.
2415 void ExternalAsciiString::update_data_cache() {
2416 if (is_short()) return;
2417 const char** data_field =
2418 reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2419 *data_field = resource()->data();
2423 void ExternalAsciiString::set_resource(
2424 const ExternalAsciiString::Resource* resource) {
2425 *reinterpret_cast<const Resource**>(
2426 FIELD_ADDR(this, kResourceOffset)) = resource;
// Keep the cached data pointer consistent with the new resource.
2427 if (resource != NULL) update_data_cache();
2431 const char* ExternalAsciiString::GetChars() {
2432 return resource()->data();
2436 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
2437 ASSERT(index >= 0 && index < length());
2438 return GetChars()[index];
// Two-byte external strings mirror the ASCII variants above, with
// uint16_t character data.
2442 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2443 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2447 void ExternalTwoByteString::update_data_cache() {
2448 if (is_short()) return;
2449 const uint16_t** data_field =
2450 reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2451 *data_field = resource()->data();
2455 void ExternalTwoByteString::set_resource(
2456 const ExternalTwoByteString::Resource* resource) {
2457 *reinterpret_cast<const Resource**>(
2458 FIELD_ADDR(this, kResourceOffset)) = resource;
2459 if (resource != NULL) update_data_cache();
2463 const uint16_t* ExternalTwoByteString::GetChars() {
2464 return resource()->data();
2468 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
2469 ASSERT(index >= 0 && index < length());
2470 return GetChars()[index];
// Returns a pointer into the character data starting at |start|
// (parameter declaration continues on an elided line).
2474 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
2476 return GetChars() + start;
// Resets the cache to empty: both finger and size point at the first
// entry slot (kEntriesIndex).
2480 void JSFunctionResultCache::MakeZeroSize() {
2481 set_finger_index(kEntriesIndex);
2482 set_size(kEntriesIndex);
// Overwrites all entry slots with the hole value.
2486 void JSFunctionResultCache::Clear() {
2487 int cache_size = size();
2488 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2489 MemsetPointer(entries_start,
2490 GetHeap()->the_hole_value(),
2491 cache_size - kEntriesIndex);
// Size and finger are stored as Smis in fixed slots of the cache array.
2496 int JSFunctionResultCache::size() {
2497 return Smi::cast(get(kCacheSizeIndex))->value();
2501 void JSFunctionResultCache::set_size(int size) {
2502 set(kCacheSizeIndex, Smi::FromInt(size));
2506 int JSFunctionResultCache::finger_index() {
2507 return Smi::cast(get(kFingerIndex))->value();
2511 void JSFunctionResultCache::set_finger_index(int finger_index) {
2512 set(kFingerIndex, Smi::FromInt(finger_index));
// Raw byte element access; payload begins right after the header.
2516 byte ByteArray::get(int index) {
2517 ASSERT(index >= 0 && index < this->length());
2518 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2522 void ByteArray::set(int index, byte value) {
2523 ASSERT(index >= 0 && index < this->length());
2524 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
// Reads an aligned int; note the bound check is against byte length.
2528 int ByteArray::get_int(int index) {
2529 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2530 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
// Converts between a tagged ByteArray pointer and the address of its
// first data byte (inverse operations of each other).
2534 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2535 ASSERT_TAG_ALIGNED(address);
2536 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2540 Address ByteArray::GetDataStartAddress() {
2541 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
// Pixel arrays store their uint8 data out-of-line; the pointer lives in
// the external-pointer field shared by all ExternalArray kinds.
2545 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2546 return reinterpret_cast<uint8_t*>(external_pointer());
2550 uint8_t ExternalPixelArray::get_scalar(int index) {
2551 ASSERT((index >= 0) && (index < this->length()));
// NOTE(review): the indexed load/return following this line is elided in
// this listing.
2552 uint8_t* ptr = external_pixel_pointer();
// Boxes the scalar as a Smi for use as a JS value.
2557 MaybeObject* ExternalPixelArray::get(int index) {
2558 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2562 void ExternalPixelArray::set(int index, uint8_t value) {
2563 ASSERT((index >= 0) && (index < this->length()));
2564 uint8_t* ptr = external_pixel_pointer();
// The backing-store pointer is stored as an intptr-sized raw field.
2569 void* ExternalArray::external_pointer() {
2570 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2571 return reinterpret_cast<void*>(ptr);
2575 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2576 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2577 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
// Element accessors for the typed external arrays. Each kind follows the
// same pattern: get_scalar reads the raw value from the external backing
// store, get() boxes it as a JS number (Smi for 8/16-bit types, heap
// number via the Heap for 32-bit and floating types), and set() writes a
// raw value back. The indexed load/store statements themselves are
// elided from this listing.
2581 int8_t ExternalByteArray::get_scalar(int index) {
2582 ASSERT((index >= 0) && (index < this->length()));
2583 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2588 MaybeObject* ExternalByteArray::get(int index) {
2589 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2593 void ExternalByteArray::set(int index, int8_t value) {
2594 ASSERT((index >= 0) && (index < this->length()));
2595 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2600 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2601 ASSERT((index >= 0) && (index < this->length()));
2602 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2607 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2608 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2612 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2613 ASSERT((index >= 0) && (index < this->length()));
2614 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2619 int16_t ExternalShortArray::get_scalar(int index) {
2620 ASSERT((index >= 0) && (index < this->length()));
2621 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2626 MaybeObject* ExternalShortArray::get(int index) {
2627 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2631 void ExternalShortArray::set(int index, int16_t value) {
2632 ASSERT((index >= 0) && (index < this->length()));
2633 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2638 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2639 ASSERT((index >= 0) && (index < this->length()));
2640 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2645 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2646 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2650 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2651 ASSERT((index >= 0) && (index < this->length()));
2652 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2657 int32_t ExternalIntArray::get_scalar(int index) {
2658 ASSERT((index >= 0) && (index < this->length()));
2659 int32_t* ptr = static_cast<int32_t*>(external_pointer());
// 32-bit values may not fit in a Smi, so boxing goes through the Heap
// (may allocate, hence MaybeObject).
2664 MaybeObject* ExternalIntArray::get(int index) {
2665 return GetHeap()->NumberFromInt32(get_scalar(index));
2669 void ExternalIntArray::set(int index, int32_t value) {
2670 ASSERT((index >= 0) && (index < this->length()));
2671 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2676 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2677 ASSERT((index >= 0) && (index < this->length()));
2678 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2683 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2684 return GetHeap()->NumberFromUint32(get_scalar(index));
2688 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2689 ASSERT((index >= 0) && (index < this->length()));
2690 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2695 float ExternalFloatArray::get_scalar(int index) {
2696 ASSERT((index >= 0) && (index < this->length()));
2697 float* ptr = static_cast<float*>(external_pointer());
2702 MaybeObject* ExternalFloatArray::get(int index) {
2703 return GetHeap()->NumberFromDouble(get_scalar(index));
2707 void ExternalFloatArray::set(int index, float value) {
2708 ASSERT((index >= 0) && (index < this->length()));
2709 float* ptr = static_cast<float*>(external_pointer());
2714 double ExternalDoubleArray::get_scalar(int index) {
2715 ASSERT((index >= 0) && (index < this->length()));
2716 double* ptr = static_cast<double*>(external_pointer());
2721 MaybeObject* ExternalDoubleArray::get(int index) {
2722 return GetHeap()->NumberFromDouble(get_scalar(index));
2726 void ExternalDoubleArray::set(int index, double value) {
2727 ASSERT((index >= 0) && (index < this->length()));
2728 double* ptr = static_cast<double*>(external_pointer());
// Single-byte Map fields packed into the map header.
2733 int Map::visitor_id() {
2734 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2738 void Map::set_visitor_id(int id) {
// Must fit in one byte.
2739 ASSERT(0 <= id && id < 256);
2740 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
// Instance size is stored in pointer-size units; scale back to bytes.
2744 int Map::instance_size() {
2745 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2749 int Map::inobject_properties() {
2750 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2754 int Map::pre_allocated_property_fields() {
2755 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
// Computes this object's size. Fixed-size maps answer directly; for
// variable-sized instances (sentinel), only the most frequent types are
// handled inline, falling through to Code as the final case.
2759 int HeapObject::SizeFromMap(Map* map) {
2760 int instance_size = map->instance_size();
2761 if (instance_size != kVariableSizeSentinel) return instance_size;
2762 // We can ignore the "symbol" bit becase it is only set for symbols
2763 // and implies a string type.
2764 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2765 // Only inline the most frequent cases.
2766 if (instance_type == FIXED_ARRAY_TYPE) {
2767 return FixedArray::BodyDescriptor::SizeOf(map, this);
// reinterpret_cast (not checked casts) is used below because this runs
// during GC when object maps may be in flux.
2769 if (instance_type == ASCII_STRING_TYPE) {
2770 return SeqAsciiString::SizeFor(
2771 reinterpret_cast<SeqAsciiString*>(this)->length());
2773 if (instance_type == BYTE_ARRAY_TYPE) {
2774 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2776 if (instance_type == FREE_SPACE_TYPE) {
2777 return reinterpret_cast<FreeSpace*>(this)->size();
2779 if (instance_type == STRING_TYPE) {
2780 return SeqTwoByteString::SizeFor(
2781 reinterpret_cast<SeqTwoByteString*>(this)->length());
2783 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2784 return FixedDoubleArray::SizeFor(
2785 reinterpret_cast<FixedDoubleArray*>(this)->length());
// Any remaining variable-sized type must be Code.
2787 ASSERT(instance_type == CODE_TYPE);
2788 return reinterpret_cast<Code*>(this)->CodeSize();
// Stores the instance size in pointer-size units so it fits in a byte;
// the value must be pointer-aligned.
2792 void Map::set_instance_size(int value) {
2793 ASSERT_EQ(0, value & (kPointerSize - 1));
2794 value >>= kPointerSizeLog2;
2795 ASSERT(0 <= value && value < 256);
2796 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2800 void Map::set_inobject_properties(int value) {
2801 ASSERT(0 <= value && value < 256);
2802 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2806 void Map::set_pre_allocated_property_fields(int value) {
2807 ASSERT(0 <= value && value < 256);
2808 WRITE_BYTE_FIELD(this,
2809 kPreAllocatedPropertyFieldsOffset,
2810 static_cast<byte>(value));
// InstanceType is stored as a single byte in the map.
2814 InstanceType Map::instance_type() {
2815 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2819 void Map::set_instance_type(InstanceType value) {
2820 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2824 int Map::unused_property_fields() {
2825 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
// Saturates at 255 rather than asserting, since counts can exceed a byte.
2829 void Map::set_unused_property_fields(int value) {
2830 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
// Raw access to the two one-byte flag fields used by the bit accessors
// further below.
2834 byte Map::bit_field() {
2835 return READ_BYTE_FIELD(this, kBitFieldOffset);
2839 void Map::set_bit_field(byte value) {
2840 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2844 byte Map::bit_field2() {
2845 return READ_BYTE_FIELD(this, kBitField2Offset);
2849 void Map::set_bit_field2(byte value) {
2850 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
// Boolean flags packed into bit_field / bit_field2 / bit_field3. Each
// setter ORs the bit in or masks it out (the if/else scaffolding around
// the two branches is elided in this listing); each getter tests the bit.
2854 void Map::set_non_instance_prototype(bool value) {
2856 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2858 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2863 bool Map::has_non_instance_prototype() {
2864 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2868 void Map::set_function_with_prototype(bool value) {
2870 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2872 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2877 bool Map::function_with_prototype() {
2878 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2882 void Map::set_is_access_check_needed(bool access_check_needed) {
2883 if (access_check_needed) {
2884 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2886 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2891 bool Map::is_access_check_needed() {
2892 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2896 void Map::set_is_extensible(bool value) {
2898 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2900 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2904 bool Map::is_extensible() {
2905 return ((1 << kIsExtensible) & bit_field2()) != 0;
2909 void Map::set_attached_to_shared_function_info(bool value) {
2911 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo))
2913 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2917 bool Map::attached_to_shared_function_info() {
2918 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2922 void Map::set_is_shared(bool value) {
2924 set_bit_field3(bit_field3() | (1 << kIsShared));
2926 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2930 bool Map::is_shared() {
2931 return ((1 << kIsShared) & bit_field3()) != 0;
2934 void Map::set_has_external_resource(bool value) {
2936 set_bit_field(bit_field() | (1 << kHasExternalResource));
2938 set_bit_field(bit_field() & ~(1 << kHasExternalResource));
2942 bool Map::has_external_resource()
2944 return ((1 << kHasExternalResource) & bit_field()) != 0;
2948 void Map::set_named_interceptor_is_fallback(bool value) {
2950 set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
2952 set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
2956 bool Map::named_interceptor_is_fallback() {
2957 return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
// Unchecked cast for GC-time use (see the reinterpret_cast note in
// HeapObject::SizeFromMap).
2961 JSFunction* Map::unchecked_constructor() {
2962 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
// Code flags: an int field packing kind, IC state, type, extra IC state,
// argument count and cache-holder (see the *Field bit-field classes).
2966 Code::Flags Code::flags() {
2967 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2971 void Code::set_flags(Code::Flags flags) {
2972 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2973 // Make sure that all call stubs have an arguments count.
2974 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2975 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2976 ExtractArgumentsCountFromFlags(flags) >= 0);
2977 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2981 Code::Kind Code::kind() {
2982 return ExtractKindFromFlags(flags());
2986 InlineCacheState Code::ic_state() {
2987 InlineCacheState result = ExtractICStateFromFlags(flags());
2988 // Only allow uninitialized or debugger states for non-IC code
2989 // objects. This is used in the debugger to determine whether or not
2990 // a call to code object has been replaced with a debug break call.
2991 ASSERT(is_inline_cache_stub() ||
2992 result == UNINITIALIZED ||
2993 result == DEBUG_BREAK ||
2994 result == DEBUG_PREPARE_STEP_IN);
2999 Code::ExtraICState Code::extra_ic_state() {
3000 ASSERT(is_inline_cache_stub());
3001 return ExtractExtraICStateFromFlags(flags());
3005 PropertyType Code::type() {
3006 return ExtractTypeFromFlags(flags());
3010 int Code::arguments_count() {
3011 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3012 return ExtractArgumentsCountFromFlags(flags());
// The stub major key identifies which code stub this is; only valid for
// the stub-like kinds asserted below, and stored as a single byte.
3016 int Code::major_key() {
3017 ASSERT(kind() == STUB ||
3018 kind() == UNARY_OP_IC ||
3019 kind() == BINARY_OP_IC ||
3020 kind() == COMPARE_IC ||
3021 kind() == TO_BOOLEAN_IC);
3022 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
3026 void Code::set_major_key(int major) {
3027 ASSERT(kind() == STUB ||
3028 kind() == UNARY_OP_IC ||
3029 kind() == BINARY_OP_IC ||
3030 kind() == COMPARE_IC ||
3031 kind() == TO_BOOLEAN_IC);
3032 ASSERT(0 <= major && major < 256);
3033 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
// Pregenerated stubs are built at snapshot time; the bit lives in flags.
3037 bool Code::is_pregenerated() {
3038 return kind() == STUB && IsPregeneratedField::decode(flags());
3042 void Code::set_is_pregenerated(bool value) {
3043 ASSERT(kind() == STUB);
3045 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
// Accessors below are only valid on full-codegen code (kind == FUNCTION),
// as asserted in each one.
3050 bool Code::optimizable() {
3051 ASSERT_EQ(FUNCTION, kind());
3052 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3056 void Code::set_optimizable(bool value) {
3057 ASSERT_EQ(FUNCTION, kind());
3058 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
// The kFullCodeFlags byte packs several booleans, decoded/updated via
// the FullCodeFlags* bit-field helpers.
3062 bool Code::has_deoptimization_support() {
3063 ASSERT_EQ(FUNCTION, kind());
3064 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3065 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3069 void Code::set_has_deoptimization_support(bool value) {
3070 ASSERT_EQ(FUNCTION, kind());
3071 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3072 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3073 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3077 bool Code::has_debug_break_slots() {
3078 ASSERT_EQ(FUNCTION, kind());
3079 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3080 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3084 void Code::set_has_debug_break_slots(bool value) {
3085 ASSERT_EQ(FUNCTION, kind());
3086 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3087 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3088 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3092 bool Code::is_compiled_optimizable() {
3093 ASSERT_EQ(FUNCTION, kind());
3094 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3095 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3099 void Code::set_compiled_optimizable(bool value) {
3100 ASSERT_EQ(FUNCTION, kind());
3101 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3102 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3103 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
// On-stack-replacement loop-nesting threshold and profiler tick count,
// each stored as one byte.
3107 int Code::allow_osr_at_loop_nesting_level() {
3108 ASSERT_EQ(FUNCTION, kind());
3109 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3113 void Code::set_allow_osr_at_loop_nesting_level(int level) {
3114 ASSERT_EQ(FUNCTION, kind());
3115 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3116 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3120 int Code::profiler_ticks() {
3121 ASSERT_EQ(FUNCTION, kind());
3122 return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3126 void Code::set_profiler_ticks(int ticks) {
3127 ASSERT_EQ(FUNCTION, kind());
3128 ASSERT(ticks < 256);
3129 WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
// Optimized-code-only fields: stack slot count and safepoint table offset.
3133 unsigned Code::stack_slots() {
3134 ASSERT(kind() == OPTIMIZED_FUNCTION);
3135 return READ_UINT32_FIELD(this, kStackSlotsOffset);
3139 void Code::set_stack_slots(unsigned slots) {
3140 ASSERT(kind() == OPTIMIZED_FUNCTION);
3141 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3145 unsigned Code::safepoint_table_offset() {
3146 ASSERT(kind() == OPTIMIZED_FUNCTION);
3147 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3151 void Code::set_safepoint_table_offset(unsigned offset) {
3152 ASSERT(kind() == OPTIMIZED_FUNCTION);
// Table offsets must be int-aligned within the code object.
3153 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3154 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
// Full-codegen-only: offset of the stack check table.
3158 unsigned Code::stack_check_table_offset() {
3159 ASSERT_EQ(FUNCTION, kind());
3160 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3164 void Code::set_stack_check_table_offset(unsigned offset) {
3165 ASSERT_EQ(FUNCTION, kind());
3166 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3167 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
// Receiver check kind for (keyed) call stubs, stored as one byte.
3171 CheckType Code::check_type() {
3172 ASSERT(is_call_stub() || is_keyed_call_stub());
3173 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3174 return static_cast<CheckType>(type);
3178 void Code::set_check_type(CheckType value) {
3179 ASSERT(is_call_stub() || is_keyed_call_stub());
3180 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
// Per-stub-kind state bytes; each accessor asserts the matching stub
// kind before touching its dedicated byte field.
3184 byte Code::unary_op_type() {
3185 ASSERT(is_unary_op_stub());
3186 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3190 void Code::set_unary_op_type(byte value) {
3191 ASSERT(is_unary_op_stub());
3192 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3196 byte Code::binary_op_type() {
3197 ASSERT(is_binary_op_stub());
3198 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3202 void Code::set_binary_op_type(byte value) {
3203 ASSERT(is_binary_op_stub());
3204 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3208 byte Code::binary_op_result_type() {
3209 ASSERT(is_binary_op_stub());
3210 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3214 void Code::set_binary_op_result_type(byte value) {
3215 ASSERT(is_binary_op_stub());
3216 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3220 byte Code::compare_state() {
3221 ASSERT(is_compare_ic_stub());
3222 return READ_BYTE_FIELD(this, kCompareStateOffset);
3226 void Code::set_compare_state(byte value) {
3227 ASSERT(is_compare_ic_stub());
3228 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3232 byte Code::to_boolean_state() {
3233 ASSERT(is_to_boolean_ic_stub());
3234 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3238 void Code::set_to_boolean_state(byte value) {
3239 ASSERT(is_to_boolean_ic_stub());
3240 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3244 bool Code::has_function_cache() {
3245 ASSERT(kind() == STUB);
3246 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3250 void Code::set_has_function_cache(bool flag) {
3251 ASSERT(kind() == STUB);
3252 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
// IC kinds occupy a contiguous range of the Kind enum.
3256 bool Code::is_inline_cache_stub() {
3257 Kind kind = this->kind();
3258 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
// Packs the given properties into a Flags value using the bit-field
// encoders; the inverse Extract* helpers below decode them again.
3262 Code::Flags Code::ComputeFlags(Kind kind,
3263 InlineCacheState ic_state,
3264 ExtraICState extra_ic_state,
3267 InlineCacheHolderFlag holder) {
3268 // Extra IC state is only allowed for call IC stubs or for store IC
3270 ASSERT(extra_ic_state == kNoExtraICState ||
3273 kind == KEYED_STORE_IC);
3274 // Compute the bit mask.
3275 int bits = KindField::encode(kind)
3276 | ICStateField::encode(ic_state)
3277 | TypeField::encode(type)
3278 | ExtraICStateField::encode(extra_ic_state)
3279 | (argc << kArgumentsCountShift)
3280 | CacheHolderField::encode(holder);
3281 return static_cast<Flags>(bits);
// Convenience wrapper that fixes the IC state to MONOMORPHIC.
3285 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3287 ExtraICState extra_ic_state,
3288 InlineCacheHolderFlag holder,
3290 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3294 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3295 return KindField::decode(flags);
3299 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3300 return ICStateField::decode(flags);
3304 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3305 return ExtraICStateField::decode(flags);
3309 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3310 return TypeField::decode(flags);
3314 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3315 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3319 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3320 return CacheHolderField::decode(flags);
// Clears the type bits, e.g. for type-insensitive cache lookups.
3324 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3325 int bits = flags & ~TypeField::kMask;
3326 return static_cast<Flags>(bits);
// Maps an instruction address back to its enclosing Code object by
// subtracting the header size.
3330 Code* Code::GetCodeFromTargetAddress(Address address) {
3331 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3332 // GetCodeFromTargetAddress might be called when marking objects during mark
3333 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3334 // Code::cast. Code::cast does not work when the object's map is
3336 Code* result = reinterpret_cast<Code*>(code);
// Loads the code-entry address stored at |location_of_address| and maps
// it back to the owning heap object.
3341 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3343 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3347 Object* Map::prototype() {
3348 return READ_FIELD(this, kPrototypeOffset);
3352 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
// Prototypes are always JSReceivers or null.
3353 ASSERT(value->IsNull() || value->IsJSReceiver());
3354 WRITE_FIELD(this, kPrototypeOffset, value);
3355 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
// The kInstanceDescriptorsOrBitField3Offset slot is overloaded: a Smi
// means "no descriptors" and the Smi payload is bit_field3; otherwise it
// holds the DescriptorArray, which stores bit_field3 on the map's behalf.
3359 DescriptorArray* Map::instance_descriptors() {
3360 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3361 if (object->IsSmi()) {
3362 return GetHeap()->empty_descriptor_array();
3364 return DescriptorArray::cast(object);
// Initializes the overloaded slot to Smi(0): empty descriptors, zeroed
// bit_field3.
3369 void Map::init_instance_descriptors() {
3370 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
// Drops the descriptor array, preserving bit_field3 by copying it out of
// the array's storage back into the Smi form.
3374 void Map::clear_instance_descriptors() {
3375 Object* object = READ_FIELD(this,
3376 kInstanceDescriptorsOrBitField3Offset);
3377 if (!object->IsSmi()) {
3380 kInstanceDescriptorsOrBitField3Offset,
3381 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
// Installs a new descriptor array, migrating bit_field3 from whichever
// representation currently holds it into the new array's storage.
3386 void Map::set_instance_descriptors(DescriptorArray* value,
3387 WriteBarrierMode mode) {
3388 Object* object = READ_FIELD(this,
3389 kInstanceDescriptorsOrBitField3Offset);
3390 Heap* heap = GetHeap();
// Setting the empty array is expressed as clearing.
3391 if (value == heap->empty_descriptor_array()) {
3392 clear_instance_descriptors();
3395 if (object->IsSmi()) {
3396 value->set_bit_field3_storage(Smi::cast(object)->value());
3398 value->set_bit_field3_storage(
3399 DescriptorArray::cast(object)->bit_field3_storage());
3402 ASSERT(!is_shared());
3403 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3404 CONDITIONAL_WRITE_BARRIER(
3405 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
// bit_field3 reads/writes go to the Smi or to the descriptor array's
// storage slot, depending on the current representation.
3409 int Map::bit_field3() {
3410 Object* object = READ_FIELD(this,
3411 kInstanceDescriptorsOrBitField3Offset);
3412 if (object->IsSmi()) {
3413 return Smi::cast(object)->value();
3415 return DescriptorArray::cast(object)->bit_field3_storage();
3420 void Map::set_bit_field3(int value) {
3421 ASSERT(Smi::IsValid(value));
3422 Object* object = READ_FIELD(this,
3423 kInstanceDescriptorsOrBitField3Offset);
3424 if (object->IsSmi()) {
3426 kInstanceDescriptorsOrBitField3Offset,
3427 Smi::FromInt(value));
3429 DescriptorArray::cast(object)->set_bit_field3_storage(value);
// Unchecked cast for GC-time use.
3434 FixedArray* Map::unchecked_prototype_transitions() {
3435 return reinterpret_cast<FixedArray*>(
3436 READ_FIELD(this, kPrototypeTransitionsOffset));
3440 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3441 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3442 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3444 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3445 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3446 ACCESSORS(JSFunction,
3449 kNextFunctionLinkOffset)
3451 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3452 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3453 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3455 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3457 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3458 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3459 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3460 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
// Macro-generated tagged-field accessors (getter + setter with write barrier)
// for the callback/interceptor info structs. ACCESSORS_TO_SMI stores the value
// as a Smi. NOTE(review): this listing has gaps in the original numbering;
// lines between the visible ones are not shown here.
3461 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3463 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3464 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3466 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3467 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3468 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3470 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3471 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3472 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3473 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3474 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3475 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3476 ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
3478 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3479 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
// Accessors for the template-info hierarchy used by the embedder API
// (TemplateInfo, FunctionTemplateInfo, ObjectTemplateInfo).
3481 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3482 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3484 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3485 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3486 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3487 kPropertyAccessorsOffset)
3488 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3489 kPrototypeTemplateOffset)
3490 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3491 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3492 kNamedPropertyHandlerOffset)
3493 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3494 kIndexedPropertyHandlerOffset)
3495 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3496 kInstanceTemplateOffset)
3497 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3498 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3499 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3500 kInstanceCallHandlerOffset)
3501 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3502 kAccessCheckInfoOffset)
3503 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3505 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3506 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3507 kInternalFieldCountOffset)
3508 ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
3509 kHasExternalResourceOffset)
3511 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3512 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3514 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
// Script object fields: source text, name, positions, compilation metadata.
3516 ACCESSORS(Script, source, Object, kSourceOffset)
3517 ACCESSORS(Script, name, Object, kNameOffset)
3518 ACCESSORS(Script, id, Object, kIdOffset)
3519 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3520 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3521 ACCESSORS(Script, data, Object, kDataOffset)
3522 ACCESSORS(Script, context_data, Object, kContextOffset)
3523 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3524 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3525 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3526 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3527 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3528 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3529 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
3530 kEvalFrominstructionsOffsetOffset)
// Debugger-only accessors, compiled in only when debugger support is enabled.
3532 #ifdef ENABLE_DEBUGGER_SUPPORT
3533 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3534 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3535 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3536 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
// BreakPointInfo positions are stored as Smis (code/source/statement offsets).
3538 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3539 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3540 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3541 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
// SharedFunctionInfo pointer-field accessors.
3544 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3545 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3546 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3547 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3548 kInstanceClassNameOffset)
3549 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3550 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3551 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3552 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3553 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3554 kThisPropertyAssignmentsOffset)
3555 SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
// Single-bit flag accessors packed into the template-info "flag" field and
// into SharedFunctionInfo bitfields. NOTE(review): several BOOL_ACCESSORS
// invocations below are missing their bit-constant argument lines in this
// listing (original numbering has gaps) — confirm against the full file.
3558 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3559 kHiddenPrototypeBit)
3560 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3561 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3562 kNeedsAccessCheckBit)
3563 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3564 kReadOnlyPrototypeBit)
3565 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3567 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3569 BOOL_GETTER(SharedFunctionInfo,
3571 has_only_simple_this_property_assignments,
3572 kHasOnlySimpleThisPropertyAssignments)
3573 BOOL_ACCESSORS(SharedFunctionInfo,
3575 allows_lazy_compilation,
3576 kAllowLazyCompilation)
3577 BOOL_ACCESSORS(SharedFunctionInfo,
3581 BOOL_ACCESSORS(SharedFunctionInfo,
3583 has_duplicate_parameters,
3584 kHasDuplicateParameters)
// On 32-bit hosts these integer fields fit in a Smi, so plain SMI_ACCESSORS
// are used. The 64-bit alternative (pseudo-SMI accessors) follows below.
3587 #if V8_HOST_ARCH_32_BIT
3588 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3589 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3590 kFormalParameterCountOffset)
3591 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3592 kExpectedNofPropertiesOffset)
3593 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3594 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3595 kStartPositionAndTypeOffset)
3596 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3597 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3598 kFunctionTokenPositionOffset)
3599 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3600 kCompilerHintsOffset)
3601 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3602 kThisPropertyAssignmentsCountOffset)
3603 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3604 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3605 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
// Pseudo-SMI accessors for 64-bit hosts: two 32-bit ints share one
// pointer-sized slot. The LO half must look like a Smi to the GC, so the
// value is stored shifted left by one with the heap-object tag bit clear
// (the getter asserts kHeapObjectTag == 1 and shifts back). The setter's
// range assert checks the value survives the 31-bit shift. NOTE(review):
// the macro bodies below are missing their closing lines in this listing.
3608 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3609 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3610 int holder::name() { \
3611 int value = READ_INT_FIELD(this, offset); \
3612 ASSERT(kHeapObjectTag == 1); \
3613 ASSERT((value & kHeapObjectTag) == 0); \
3614 return value >> 1; \
3616 void holder::set_##name(int value) { \
3617 ASSERT(kHeapObjectTag == 1); \
3618 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3619 (value & 0xC0000000) == 0x000000000); \
3620 WRITE_INT_FIELD(this, \
3622 (value << 1) & ~kHeapObjectTag); \
// The HI half sits at an offset that is not pointer-aligned, so the GC never
// mistakes it for a pointer and a plain untagged int accessor suffices.
3625 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3626 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3627 INT_ACCESSORS(holder, name, offset)
// 64-bit-host instantiations: fields are paired LO/HI so each pair packs
// into one pointer-sized slot.
3630 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3631 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3632 formal_parameter_count,
3633 kFormalParameterCountOffset)
3635 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3636 expected_nof_properties,
3637 kExpectedNofPropertiesOffset)
3638 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3640 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3641 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3642 start_position_and_type,
3643 kStartPositionAndTypeOffset)
3645 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3646 function_token_position,
3647 kFunctionTokenPositionOffset)
3648 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3650 kCompilerHintsOffset)
3652 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3653 this_property_assignments_count,
3654 kThisPropertyAssignmentsCountOffset)
3655 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3657 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3658 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
// Construction counter used for in-object slack tracking; stored in a single
// byte, hence the [0, 256) range check in the setter.
3662 int SharedFunctionInfo::construction_count() {
3663 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3667 void SharedFunctionInfo::set_construction_count(int value) {
3668 ASSERT(0 <= value && value < 256);
3669 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3673 BOOL_ACCESSORS(SharedFunctionInfo,
3675 live_objects_may_exist,
3676 kLiveObjectsMayExist)
// Slack tracking is in progress while initial_map is a real map (it is reset
// to undefined when tracking completes).
3679 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3680 return initial_map() != GetHeap()->undefined_value();
3684 BOOL_GETTER(SharedFunctionInfo,
3686 optimization_disabled,
3687 kOptimizationDisabled)
// Setter also clears the optimizable flag on unoptimized FUNCTION code so the
// function is not counted as optimizable.
3690 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3691 set_compiler_hints(BooleanBit::set(compiler_hints(),
3692 kOptimizationDisabled,
3694 // If disabling optimizations we reflect that in the code object so
3695 // it will not be counted as optimizable code.
3696 if ((code()->kind() == Code::FUNCTION) && disable) {
3697 code()->set_optimizable(false);
// Language mode is encoded in two compiler-hint bits; extended mode implies
// strict mode (asserted below).
3702 LanguageMode SharedFunctionInfo::language_mode() {
3703 int hints = compiler_hints();
3704 if (BooleanBit::get(hints, kExtendedModeFunction)) {
3705 ASSERT(BooleanBit::get(hints, kStrictModeFunction));
3706 return EXTENDED_MODE;
3708 return BooleanBit::get(hints, kStrictModeFunction)
3709 ? STRICT_MODE : CLASSIC_MODE;
3713 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
3714 // We only allow language mode transitions that go set the same language mode
3715 // again or go up in the chain:
3716 // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3717 ASSERT(this->language_mode() == CLASSIC_MODE ||
3718 this->language_mode() == language_mode ||
3719 language_mode == EXTENDED_MODE);
3720 int hints = compiler_hints();
3721 hints = BooleanBit::set(
3722 hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3723 hints = BooleanBit::set(
3724 hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3725 set_compiler_hints(hints);
3729 bool SharedFunctionInfo::is_classic_mode() {
3730 return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
// Remaining compiler-hint bit accessors.
3733 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3734 kExtendedModeFunction)
3735 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, qml_mode,
3737 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3738 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3739 name_should_print_as_anonymous,
3740 kNameShouldPrintAsAnonymous)
3741 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3742 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3743 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3744 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
3746 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3748 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3749 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3751 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
// True unless the source is an external string whose backing resource has
// been released (resource() == NULL). Non-string and non-external sources
// are always considered valid.
3753 bool Script::HasValidSource() {
3754 Object* src = this->source();
3755 if (!src->IsString()) return true;
3756 String* src_str = String::cast(src);
3757 if (!StringShape(src_str).IsExternal()) return true;
3758 if (src_str->IsAsciiRepresentation()) {
3759 return ExternalAsciiString::cast(src)->resource() != NULL;
3760 } else if (src_str->IsTwoByteRepresentation()) {
3761 return ExternalTwoByteString::cast(src)->resource() != NULL;
// Marks a builtin as accepting any number of arguments (no adaptor frame).
3767 void SharedFunctionInfo::DontAdaptArguments() {
3768 ASSERT(code()->kind() == Code::BUILTIN);
3769 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
// Start position is packed into the upper bits of start_position_and_type.
3773 int SharedFunctionInfo::start_position() {
3774 return start_position_and_type() >> kStartPositionShift;
3778 void SharedFunctionInfo::set_start_position(int start_position) {
3779 set_start_position_and_type((start_position << kStartPositionShift)
3780 | (start_position_and_type() & ~kStartPositionMask));
// code() is the checked accessor; unchecked_code() skips the type check and
// is usable during GC when the map may be invalid.
3784 Code* SharedFunctionInfo::code() {
3785 return Code::cast(READ_FIELD(this, kCodeOffset));
3789 Code* SharedFunctionInfo::unchecked_code() {
3790 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3794 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3795 WRITE_FIELD(this, kCodeOffset, value);
3796 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3800 ScopeInfo* SharedFunctionInfo::scope_info() {
3801 return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3805 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3806 WriteBarrierMode mode) {
3807 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3808 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3811 reinterpret_cast<Object*>(value),
// Compiled means the code is not the LazyCompile builtin stub.
3816 bool SharedFunctionInfo::is_compiled() {
3818 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
// function_data discriminates API functions (FunctionTemplateInfo) from
// builtins (Smi function id).
3822 bool SharedFunctionInfo::IsApiFunction() {
3823 return function_data()->IsFunctionTemplateInfo();
3827 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3828 ASSERT(IsApiFunction());
3829 return FunctionTemplateInfo::cast(function_data());
3833 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3834 return function_data()->IsSmi();
3838 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3839 ASSERT(HasBuiltinFunctionId());
3840 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
// Code age is a small bitfield packed into compiler_hints.
3844 int SharedFunctionInfo::code_age() {
3845 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3849 void SharedFunctionInfo::set_code_age(int code_age) {
3850 int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3851 set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3855 bool SharedFunctionInfo::has_deoptimization_support() {
3856 Code* code = this->code();
3857 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
// JSFunction predicates and code/context accessors.
3861 bool JSFunction::IsBuiltin() {
3862 return context()->global()->IsJSBuiltinsObject();
3866 bool JSFunction::NeedsArgumentsAdaption() {
3867 return shared()->formal_parameter_count() !=
3868 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3872 bool JSFunction::IsOptimized() {
3873 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3877 bool JSFunction::IsOptimizable() {
3878 return code()->kind() == Code::FUNCTION && code()->optimizable();
3882 bool JSFunction::IsMarkedForLazyRecompilation() {
3883 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
// The function stores a code *entry address*, not a Code pointer; the Code
// object is recovered from the entry address.
3887 Code* JSFunction::code() {
3888 return Code::cast(unchecked_code());
3892 Code* JSFunction::unchecked_code() {
3893 return reinterpret_cast<Code*>(
3894 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
// Writes the entry address and notifies incremental marking of the new
// code-entry slot (code objects never live in new space, hence the assert).
3898 void JSFunction::set_code(Code* value) {
3899 ASSERT(!HEAP->InNewSpace(value));
3900 Address entry = value->entry();
3901 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3902 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3904 HeapObject::RawField(this, kCodeEntryOffset),
// Swaps in new code and keeps the global context's optimized-function list
// in sync with the optimized/unoptimized state transition.
3909 void JSFunction::ReplaceCode(Code* code) {
3910 bool was_optimized = IsOptimized();
3911 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3915 // Add/remove the function from the list of optimized functions for this
3916 // context based on the state change.
3917 if (!was_optimized && is_optimized) {
3918 context()->global_context()->AddOptimizedFunction(this);
3920 if (was_optimized && !is_optimized) {
3921 context()->global_context()->RemoveOptimizedFunction(this);
3926 Context* JSFunction::context() {
3927 return Context::cast(READ_FIELD(this, kContextOffset));
3931 Object* JSFunction::unchecked_context() {
3932 return READ_FIELD(this, kContextOffset);
3936 SharedFunctionInfo* JSFunction::unchecked_shared() {
3937 return reinterpret_cast<SharedFunctionInfo*>(
3938 READ_FIELD(this, kSharedFunctionInfoOffset));
3942 void JSFunction::set_context(Object* value) {
3943 ASSERT(value->IsUndefined() || value->IsContext());
3944 WRITE_FIELD(this, kContextOffset, value);
3945 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
// prototype_or_initial_map doubles as either the initial Map or, when there
// is no initial map, the prototype object itself.
3948 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3949 kPrototypeOrInitialMapOffset)
3952 Map* JSFunction::initial_map() {
3953 return Map::cast(prototype_or_initial_map());
3957 void JSFunction::set_initial_map(Map* value) {
3958 set_prototype_or_initial_map(value);
// Installs the initial map; for the Array function it additionally builds
// and caches the FAST_SMI_ONLY -> FAST_DOUBLE -> FAST elements-kind
// transition maps in the global context. Returns a MaybeObject because the
// map copies can fail on allocation.
3962 MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
3964 Context* global_context = context()->global_context();
3965 Object* array_function =
3966 global_context->get(Context::ARRAY_FUNCTION_INDEX);
3967 if (array_function->IsJSFunction() &&
3968 this == JSFunction::cast(array_function)) {
3969 ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
3971 MaybeObject* maybe_map = initial_map->CopyDropTransitions();
3972 Map* new_double_map = NULL;
3973 if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
3974 new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
3975 maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
3977 if (maybe_map->IsFailure()) return maybe_map;
3979 maybe_map = new_double_map->CopyDropTransitions();
3980 Map* new_object_map = NULL;
3981 if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
3982 new_object_map->set_elements_kind(FAST_ELEMENTS);
3983 maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
3985 if (maybe_map->IsFailure()) return maybe_map;
3987 global_context->set_smi_js_array_map(initial_map);
3988 global_context->set_double_js_array_map(new_double_map);
3989 global_context->set_object_js_array_map(new_object_map);
3991 set_initial_map(initial_map);
// prototype_or_initial_map holds a Map when the initial map exists, otherwise
// the prototype (or the hole when neither has been set).
3996 bool JSFunction::has_initial_map() {
3997 return prototype_or_initial_map()->IsMap();
4001 bool JSFunction::has_instance_prototype() {
4002 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
4006 bool JSFunction::has_prototype() {
4007 return map()->has_non_instance_prototype() || has_instance_prototype();
4011 Object* JSFunction::instance_prototype() {
4012 ASSERT(has_instance_prototype());
4013 if (has_initial_map()) return initial_map()->prototype();
4014 // When there is no initial map and the prototype is a JSObject, the
4015 // initial map field is used for the prototype field.
4016 return prototype_or_initial_map();
4020 Object* JSFunction::prototype() {
4021 ASSERT(has_prototype());
4022 // If the function's prototype property has been set to a non-JSObject
4023 // value, that value is stored in the constructor field of the map.
4024 if (map()->has_non_instance_prototype()) return map()->constructor();
4025 return instance_prototype();
4028 bool JSFunction::should_have_prototype() {
4029 return map()->function_with_prototype();
4033 bool JSFunction::is_compiled() {
4034 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
// literals_or_bindings holds the literals array for ordinary functions and
// the bindings array for bound functions; the asserts enforce the split.
4038 FixedArray* JSFunction::literals() {
4039 ASSERT(!shared()->bound());
4040 return literals_or_bindings();
4044 void JSFunction::set_literals(FixedArray* literals) {
4045 ASSERT(!shared()->bound());
4046 set_literals_or_bindings(literals);
4050 FixedArray* JSFunction::function_bindings() {
4051 ASSERT(shared()->bound());
4052 return literals_or_bindings();
4056 void JSFunction::set_function_bindings(FixedArray* bindings) {
4057 ASSERT(shared()->bound());
4058 // Bound function literal may be initialized to the empty fixed array
4059 // before the bindings are set.
4060 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
4061 bindings->map() == GetHeap()->fixed_cow_array_map());
4062 set_literals_or_bindings(bindings);
4066 int JSFunction::NumberOfLiterals() {
4067 ASSERT(!shared()->bound());
4068 return literals()->length();
// JavaScript builtins are stored at fixed per-id offsets in the builtins
// object; function and code slots are accessed separately.
4072 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
4073 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4074 return READ_FIELD(this, OffsetOfFunctionWithId(id))
4078 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
4080 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4081 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
4082 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
4086 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
4087 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4088 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
4092 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
4094 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4095 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
4096 ASSERT(!HEAP->InNewSpace(value));
4100 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
4101 ACCESSORS(JSProxy, hash, Object, kHashOffset)
4102 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
4103 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
// Fills every field of a freshly allocated proxy with the given value; the
// value must not require a write barrier (not in new space).
4106 void JSProxy::InitializeBody(int object_size, Object* value) {
4107 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
4108 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
4109 WRITE_FIELD(this, offset, value);
4114 ACCESSORS(JSSet, table, Object, kTableOffset)
4115 ACCESSORS(JSMap, table, Object, kTableOffset)
4116 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
4117 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
// Foreign stores a raw external address as an intptr field.
4120 Address Foreign::foreign_address() {
4121 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
4125 void Foreign::set_foreign_address(Address value) {
4126 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
4130 ACCESSORS(JSValue, value, Object, kValueOffset)
// Checked cast: asserts type and instance size before reinterpreting.
4133 JSValue* JSValue::cast(Object* obj) {
4134 ASSERT(obj->IsJSValue());
4135 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
4136 return reinterpret_cast<JSValue*>(obj);
// JSDate caches broken-down time components alongside the raw value.
4140 ACCESSORS(JSDate, value, Object, kValueOffset)
4141 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4142 ACCESSORS(JSDate, year, Object, kYearOffset)
4143 ACCESSORS(JSDate, month, Object, kMonthOffset)
4144 ACCESSORS(JSDate, day, Object, kDayOffset)
4145 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4146 ACCESSORS(JSDate, hour, Object, kHourOffset)
4147 ACCESSORS(JSDate, min, Object, kMinOffset)
4148 ACCESSORS(JSDate, sec, Object, kSecOffset)
4151 JSDate* JSDate::cast(Object* obj) {
4152 ASSERT(obj->IsJSDate());
4153 ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
4154 return reinterpret_cast<JSDate*>(obj);
4158 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4159 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
4160 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4161 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4162 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4163 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4164 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4167 JSMessageObject* JSMessageObject::cast(Object* obj) {
4168 ASSERT(obj->IsJSMessageObject());
4169 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
4170 return reinterpret_cast<JSMessageObject*>(obj);
// Code object layout accessors: instructions follow the header directly.
4174 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4175 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
4176 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4177 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4178 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4179 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4180 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
4182 byte* Code::instruction_start() {
4183 return FIELD_ADDR(this, kHeaderSize);
4187 byte* Code::instruction_end() {
4188 return instruction_start() + instruction_size();
// Body size is the instruction size rounded up to object alignment.
4192 int Code::body_size() {
4193 return RoundUp(instruction_size(), kObjectAlignment);
// unchecked_* variants skip the map-based type check (safe during GC).
4197 FixedArray* Code::unchecked_deoptimization_data() {
4198 return reinterpret_cast<FixedArray*>(
4199 READ_FIELD(this, kDeoptimizationDataOffset));
4203 ByteArray* Code::unchecked_relocation_info() {
4204 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
4208 byte* Code::relocation_start() {
4209 return unchecked_relocation_info()->GetDataStartAddress();
4213 int Code::relocation_size() {
4214 return unchecked_relocation_info()->length();
4218 byte* Code::entry() {
4219 return instruction_start();
// Note: upper bound is inclusive of address() + Size() (one past the object).
4223 bool Code::contains(byte* inner_pointer) {
4224 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4228 ACCESSORS(JSArray, length, Object, kLengthOffset)
4231 ACCESSORS(JSRegExp, data, Object, kDataOffset)
// The regexp "data" field is a FixedArray whose indexed slots hold the tag,
// flags, source pattern and implementation data.
4234 JSRegExp::Type JSRegExp::TypeTag() {
4235 Object* data = this->data();
4236 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
4237 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4238 return static_cast<JSRegExp::Type>(smi->value());
4242 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
4243 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4244 return static_cast<JSRegExp::Type>(smi->value());
4248 int JSRegExp::CaptureCount() {
4249 switch (TypeTag()) {
4253 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4261 JSRegExp::Flags JSRegExp::GetFlags() {
4262 ASSERT(this->data()->IsFixedArray());
4263 Object* data = this->data();
4264 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4265 return Flags(smi->value());
4269 String* JSRegExp::Pattern() {
4270 ASSERT(this->data()->IsFixedArray());
4271 Object* data = this->data();
4272 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
4277 Object* JSRegExp::DataAt(int index) {
4278 ASSERT(TypeTag() != NOT_COMPILED);
4279 return FixedArray::cast(data())->get(index);
// Unchecked variants read the field raw; used when type checks are unsafe.
4283 Object* JSRegExp::DataAtUnchecked(int index) {
4284 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4285 int offset = FixedArray::kHeaderSize + index * kPointerSize;
4286 return READ_FIELD(fa, offset);
4290 void JSRegExp::SetDataAt(int index, Object* value) {
4291 ASSERT(TypeTag() != NOT_COMPILED);
4292 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4293 FixedArray::cast(data())->set(index, value);
4297 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4298 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4299 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4300 if (value->IsSmi()) {
4301 fa->set_unchecked(index, Smi::cast(value));
4303 // We only do this during GC, so we don't need to notify the write barrier.
4304 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
// Returns the map's elements kind; the (debug-only) asserts cross-check the
// kind against the actual backing store's map/type.
4309 ElementsKind JSObject::GetElementsKind() {
4310 ElementsKind kind = map()->elements_kind();
4312 FixedArrayBase* fixed_array =
4313 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4314 Map* map = fixed_array->map();
4315 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4316 (map == GetHeap()->fixed_array_map() ||
4317 map == GetHeap()->fixed_cow_array_map())) ||
4318 (kind == FAST_DOUBLE_ELEMENTS &&
4319 (fixed_array->IsFixedDoubleArray() ||
4320 fixed_array == GetHeap()->empty_fixed_array())) ||
4321 (kind == DICTIONARY_ELEMENTS &&
4322 fixed_array->IsFixedArray() &&
4323 fixed_array->IsDictionary()) ||
4324 (kind > DICTIONARY_ELEMENTS));
4325 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4326 (elements()->IsFixedArray() && elements()->length() >= 2));
4332 ElementsAccessor* JSObject::GetElementsAccessor() {
4333 return ElementsAccessor::ForKind(GetElementsKind());
// Per-kind convenience predicates, all delegating to GetElementsKind().
4337 bool JSObject::HasFastElements() {
4338 return GetElementsKind() == FAST_ELEMENTS;
4342 bool JSObject::HasFastSmiOnlyElements() {
4343 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4347 bool JSObject::HasFastTypeElements() {
4348 ElementsKind elements_kind = GetElementsKind();
4349 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4350 elements_kind == FAST_ELEMENTS;
4354 bool JSObject::HasFastDoubleElements() {
4355 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4359 bool JSObject::HasDictionaryElements() {
4360 return GetElementsKind() == DICTIONARY_ELEMENTS;
4364 bool JSObject::HasNonStrictArgumentsElements() {
4365 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4369 bool JSObject::HasExternalArrayElements() {
4370 HeapObject* array = elements();
4371 ASSERT(array != NULL);
4372 return array->IsExternalArray();
// Generates one HasExternal<Type>Elements() predicate per external array
// type by comparing the backing store's instance type.
4376 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4377 bool JSObject::HasExternal##name##Elements() { \
4378 HeapObject* array = elements(); \
4379 ASSERT(array != NULL); \
4380 if (!array->IsHeapObject()) \
4382 return array->map()->instance_type() == type; \
4386 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
4387 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
4388 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
4389 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4390 EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
4391 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
4392 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4393 EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
4394 EXTERNAL_ELEMENTS_CHECK(Float,
4395 EXTERNAL_FLOAT_ARRAY_TYPE)
4396 EXTERNAL_ELEMENTS_CHECK(Double,
4397 EXTERNAL_DOUBLE_ARRAY_TYPE)
4398 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4401 bool JSObject::HasNamedInterceptor() {
4402 return map()->has_named_interceptor();
4406 bool JSObject::HasIndexedInterceptor() {
4407 return map()->has_indexed_interceptor();
// Copy-on-write element arrays must be copied to a plain FixedArray before
// mutation; returns the (possibly new) writable backing store or a failure.
4411 MaybeObject* JSObject::EnsureWritableFastElements() {
4412 ASSERT(HasFastTypeElements());
4413 FixedArray* elems = FixedArray::cast(elements());
4414 Isolate* isolate = GetIsolate();
4415 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4416 Object* writable_elems;
4417 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4418 elems, isolate->heap()->fixed_array_map());
4419 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4420 return maybe_writable_elems;
4423 set_elements(FixedArray::cast(writable_elems));
4424 isolate->counters()->cow_arrays_converted()->Increment();
4425 return writable_elems;
// Dictionary-backed property/element stores (slow mode only).
4429 StringDictionary* JSObject::property_dictionary() {
4430 ASSERT(!HasFastProperties());
4431 return StringDictionary::cast(properties());
4435 SeededNumberDictionary* JSObject::element_dictionary() {
4436 ASSERT(HasDictionaryElements());
4437 return SeededNumberDictionary::cast(elements());
// String hash-field helpers: the hash is cached in hash_field() with a
// "not yet computed" flag bit.
4441 bool String::IsHashFieldComputed(uint32_t field) {
4442 return (field & kHashNotComputedMask) == 0;
4446 bool String::HasHashCode() {
4447 return IsHashFieldComputed(hash_field());
4451 uint32_t String::Hash() {
4452 // Fast case: has hash code already been computed?
4453 uint32_t field = hash_field();
4454 if (IsHashFieldComputed(field)) return field >> kHashShift;
4455 // Slow case: compute hash code and set it.
4456 return ComputeAndSetHash();
// Incremental Jenkins one-at-a-time hasher; also tracks whether the string
// is a valid array index as characters are added.
4460 StringHasher::StringHasher(int length, uint32_t seed)
4462 raw_running_hash_(seed),
4464 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4465 is_first_char_(true),
4467 ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
// Strings longer than kMaxHashCalcLength get a trivial (length-based) hash.
4471 bool StringHasher::has_trivial_hash() {
4472 return length_ > String::kMaxHashCalcLength;
4476 void StringHasher::AddCharacter(uint32_t c) {
4477 if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4478 AddSurrogatePair(c); // Not inlined.
4481 // Use the Jenkins one-at-a-time hash function to update the hash
4482 // for the given character.
4483 raw_running_hash_ += c;
4484 raw_running_hash_ += (raw_running_hash_ << 10);
4485 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4486 // Incremental array index computation.
4487 if (is_array_index_) {
4488 if (c < '0' || c > '9') {
4489 is_array_index_ = false;
4492 if (is_first_char_) {
4493 is_first_char_ = false;
4494 if (c == '0' && length_ > 1) {
4495 is_array_index_ = false;
// Overflow guard: 429496729 == 2^32 / 10, so appending one more digit
// would exceed the uint32 array-index range.
4499 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4500 is_array_index_ = false;
4502 array_index_ = array_index_ * 10 + d;
// Variant that skips array-index tracking (caller asserts it is off).
4509 void StringHasher::AddCharacterNoIndex(uint32_t c) {
4510 ASSERT(!is_array_index());
4511 if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4512 AddSurrogatePairNoIndex(c); // Not inlined.
4515 raw_running_hash_ += c;
4516 raw_running_hash_ += (raw_running_hash_ << 10);
4517 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4521 uint32_t StringHasher::GetHash() {
4522 // Get the calculated raw hash value and do some more bit ops to distribute
4523 // the hash further. Ensure that we never return zero as the hash value.
4524 uint32_t result = raw_running_hash_;
4525 result += (result << 3);
4526 result ^= (result >> 11);
4527 result += (result << 15);
4528 if ((result & String::kHashBitMask) == 0) {
// Hashes a sequential character buffer: digits go through the array-index
// tracking path, the remainder through the no-index path.
4535 template <typename schar>
4536 uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
4537 StringHasher hasher(length, seed);
4538 if (!hasher.has_trivial_hash()) {
4540 for (i = 0; hasher.is_array_index() && (i < length); i++) {
4541 hasher.AddCharacter(chars[i]);
4543 for (; i < length; i++) {
4544 hasher.AddCharacterNoIndex(chars[i]);
4547 return hasher.GetHashField();
// Fast path reads the cached array-index bit; falls back to a slow parse.
4551 bool String::AsArrayIndex(uint32_t* index) {
4552 uint32_t field = hash_field();
4553 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
4556 return SlowAsArrayIndex(index);
4560 Object* JSReceiver::GetPrototype() {
4561 return HeapObject::cast(this)->map()->prototype();
4565 bool JSReceiver::HasProperty(String* name) {
4567 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4569 return GetPropertyAttribute(name) != ABSENT;
4573 bool JSReceiver::HasLocalProperty(String* name) {
4575 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4577 return GetLocalPropertyAttribute(name) != ABSENT;
4581 PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
4582 return GetPropertyAttributeWithReceiver(this, key);
4585 // TODO(504): this may be useful in other places too where JSGlobalProxy
4587 Object* JSObject::BypassGlobalProxy() {
4588 if (IsJSGlobalProxy()) {
4589 Object* proto = GetPrototype();
4590 if (proto->IsNull()) return GetHeap()->undefined_value();
4591 ASSERT(proto->IsJSGlobalObject());
4598 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
4600 ? JSProxy::cast(this)->GetIdentityHash(flag)
4601 : JSObject::cast(this)->GetIdentityHash(flag);
4605 bool JSReceiver::HasElement(uint32_t index) {
4607 return JSProxy::cast(this)->HasElementWithHandler(index);
4609 return JSObject::cast(this)->HasElementWithReceiver(this, index);
4613 bool AccessorInfo::all_can_read() {
4614 return BooleanBit::get(flag(), kAllCanReadBit);
4618 void AccessorInfo::set_all_can_read(bool value) {
4619 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4623 bool AccessorInfo::all_can_write() {
4624 return BooleanBit::get(flag(), kAllCanWriteBit);
4628 void AccessorInfo::set_all_can_write(bool value) {
4629 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4633 bool AccessorInfo::prohibits_overwriting() {
4634 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4638 void AccessorInfo::set_prohibits_overwriting(bool value) {
4639 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4643 PropertyAttributes AccessorInfo::property_attributes() {
4644 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4648 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4649 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
4653 template<typename Shape, typename Key>
4654 void Dictionary<Shape, Key>::SetEntry(int entry,
4657 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4661 template<typename Shape, typename Key>
4662 void Dictionary<Shape, Key>::SetEntry(int entry,
4665 PropertyDetails details) {
4666 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
4667 int index = HashTable<Shape, Key>::EntryToIndex(entry);
4668 AssertNoAllocation no_gc;
4669 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
4670 FixedArray::set(index, key, mode);
4671 FixedArray::set(index+1, value, mode);
4672 FixedArray::set(index+2, details.AsSmi());
4676 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4677 ASSERT(other->IsNumber());
4678 return key == static_cast<uint32_t>(other->Number());
4682 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
4683 return ComputeIntegerHash(key, 0);
4687 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
4689 ASSERT(other->IsNumber());
4690 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
4693 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
4694 return ComputeIntegerHash(key, seed);
4697 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
4700 ASSERT(other->IsNumber());
4701 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
4704 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4705 return Isolate::Current()->heap()->NumberFromUint32(key);
4709 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4710 // We know that all entries in a hash table had their hash keys created.
4711 // Use that knowledge to have fast failure.
4712 if (key->Hash() != String::cast(other)->Hash()) return false;
4713 return key->Equals(String::cast(other));
4717 uint32_t StringDictionaryShape::Hash(String* key) {
4722 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4723 return String::cast(other)->Hash();
4727 MaybeObject* StringDictionaryShape::AsObject(String* key) {
4732 template <int entrysize>
4733 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
4734 return key->SameValue(other);
4738 template <int entrysize>
4739 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
4740 MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
4741 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4745 template <int entrysize>
4746 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
4748 MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
4749 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4753 template <int entrysize>
4754 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
4759 void Map::ClearCodeCache(Heap* heap) {
4760 // No write barrier is needed since empty_fixed_array is not in new space.
4761 // Please note this function is used during marking:
4762 // - MarkCompactCollector::MarkUnmarkedObject
4763 // - IncrementalMarking::Step
4764 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4765 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4769 void JSArray::EnsureSize(int required_size) {
4770 ASSERT(HasFastTypeElements());
4771 FixedArray* elts = FixedArray::cast(elements());
4772 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4773 if (elts->length() < required_size) {
4774 // Doubling in size would be overkill, but leave some slack to avoid
4775 // constantly growing.
4776 Expand(required_size + (required_size >> 3));
4777 // It's a performance benefit to keep a frequently used array in new-space.
4778 } else if (!GetHeap()->new_space()->Contains(elts) &&
4779 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4780 // Expand will allocate a new backing store in new space even if the size
4781 // we asked for isn't larger than what we had before.
4782 Expand(required_size);
4787 void JSArray::set_length(Smi* length) {
4788 // Don't need a write barrier for a Smi.
4789 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4793 bool JSArray::AllowsSetElementsLength() {
4794 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
4795 ASSERT(result == !HasExternalArrayElements());
4800 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
4801 MaybeObject* maybe_result = EnsureCanContainElements(
4802 storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
4803 if (maybe_result->IsFailure()) return maybe_result;
4804 ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
4805 GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
4806 ((storage->map() != GetHeap()->fixed_double_array_map()) &&
4807 ((GetElementsKind() == FAST_ELEMENTS) ||
4808 (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
4809 FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
4810 set_elements(storage);
4811 set_length(Smi::FromInt(storage->length()));
4816 MaybeObject* FixedArray::Copy() {
4817 if (length() == 0) return this;
4818 return GetHeap()->CopyFixedArray(this);
4822 MaybeObject* FixedDoubleArray::Copy() {
4823 if (length() == 0) return this;
4824 return GetHeap()->CopyFixedDoubleArray(this);
4828 void TypeFeedbackCells::SetAstId(int index, Smi* id) {
4829 set(1 + index * 2, id);
4833 Smi* TypeFeedbackCells::AstId(int index) {
4834 return Smi::cast(get(1 + index * 2));
4838 void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
4839 set(index * 2, cell);
4843 JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
4844 return JSGlobalPropertyCell::cast(get(index * 2));
4848 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
4849 return isolate->factory()->the_hole_value();
4853 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
4854 return isolate->factory()->undefined_value();
4858 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
4859 return heap->raw_unchecked_the_hole_value();
4863 SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
4864 SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
4865 kIcWithTypeinfoCountOffset)
4866 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
4867 kTypeFeedbackCellsOffset)
4870 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
4873 Relocatable::Relocatable(Isolate* isolate) {
4874 ASSERT(isolate == Isolate::Current());
4876 prev_ = isolate->relocatable_top();
4877 isolate->set_relocatable_top(this);
4881 Relocatable::~Relocatable() {
4882 ASSERT(isolate_ == Isolate::Current());
4883 ASSERT_EQ(isolate_->relocatable_top(), this);
4884 isolate_->set_relocatable_top(prev_);
4888 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
4889 return map->instance_size();
4893 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4894 v->VisitExternalReference(
4895 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4899 template<typename StaticVisitor>
4900 void Foreign::ForeignIterateBody() {
4901 StaticVisitor::VisitExternalReference(
4902 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4906 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4907 typedef v8::String::ExternalAsciiStringResource Resource;
4908 v->VisitExternalAsciiString(
4909 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4913 template<typename StaticVisitor>
4914 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4915 typedef v8::String::ExternalAsciiStringResource Resource;
4916 StaticVisitor::VisitExternalAsciiString(
4917 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4921 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4922 typedef v8::String::ExternalStringResource Resource;
4923 v->VisitExternalTwoByteString(
4924 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4928 template<typename StaticVisitor>
4929 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4930 typedef v8::String::ExternalStringResource Resource;
4931 StaticVisitor::VisitExternalTwoByteString(
4932 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
// Computes the address of an object slot at the given byte offset; used by
// the body descriptors below (and #undef'd after use).
#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)
4938 template<int start_offset, int end_offset, int size>
4939 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
4942 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
4946 template<int start_offset>
4947 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
4950 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
// Undefine the field-access helper macros so they do not leak into every
// file that includes objects-inl.h.
// NOTE(review): the excerpt this was recovered from appears to have dropped
// a few lines here; the original header also #undefs the remaining helpers
// (e.g. ACCESSORS, FIELD_ADDR, READ_FIELD, WRITE_FIELD, SLOT_ADDR) --
// confirm against upstream before relying on this list being complete.
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_ACCESSORS
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
4982 } } // namespace v8::internal
4984 #endif // V8_OBJECTS_INL_H_