1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
// PropertyDetails packs its bit fields into a Smi so it can be stored
// directly in descriptor arrays / dictionaries; these helpers convert
// between the packed-Smi form and the in-register form.
// NOTE(review): closing braces of these bodies are missing from this
// excerpt of the listing.
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
// Re-encode the raw bit pattern as a Smi for storage.
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);
// Return a copy of these details with the "deleted" bit set.
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
// Accessor-generating macros. Macros (rather than templates) are used
// deliberately here — see the note at the top of the file about gcc
// inlining. NOTE(review): the closing "}" lines of several of these
// macro bodies are missing from this excerpt.
// TYPE_CHECKER: defines Object::Is<type>() as a map instance-type test.
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
// CAST_ACCESSOR: checked downcast; the ASSERT is debug-build only.
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
// INT_ACCESSORS: raw int field read/write; no GC write barrier needed
// because the stored value is not a tagged pointer.
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// ACCESSORS: tagged-pointer field accessors; the setter emits a
// conditional write barrier so the GC sees the new reference.
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
// SMI_ACCESSORS: an int stored in a tagged slot as a Smi; Smis are not
// heap pointers, so no barrier is required on the store.
97 #define SMI_ACCESSORS(holder, name, offset) \
98 int holder::name() { \
99 Object* value = READ_FIELD(this, offset); \
100 return Smi::cast(value)->value(); \
102 void holder::set_##name(int value) { \
103 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
// BOOL_GETTER / BOOL_ACCESSORS: single-bit boolean flags packed into
// an existing field, addressed by bit offset.
107 #define BOOL_GETTER(holder, field, name, offset) \
108 bool holder::name() { \
109 return BooleanBit::get(field(), offset); \
113 #define BOOL_ACCESSORS(holder, field, name, offset) \
114 bool holder::name() { \
115 return BooleanBit::get(field(), offset); \
117 void holder::set_##name(bool value) { \
118 set_##field(BooleanBit::set(field(), offset, value)); \
// Basic Object type predicates. The general pattern: verify the value
// is a heap object (tag check), then inspect the instance type stored
// in its map. NOTE(review): closing braces / trailing returns of
// several bodies are missing from this excerpt.
122 bool Object::IsFixedArrayBase() {
123 return IsFixedArray() || IsFixedDoubleArray();
// Walks the chain of inheriting function templates on the object's
// constructor, looking for 'expected'.
127 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
128 // There is a constraint on the object; check.
129 if (!this->IsJSObject()) return false;
130 // Fetch the constructor function of the object.
131 Object* cons_obj = JSObject::cast(this)->map()->constructor();
132 if (!cons_obj->IsJSFunction()) return false;
133 JSFunction* fun = JSFunction::cast(cons_obj);
134 // Iterate through the chain of inheriting function templates to
135 // see if the required one occurs.
136 for (Object* type = fun->shared()->function_data();
137 type->IsFunctionTemplateInfo();
138 type = FunctionTemplateInfo::cast(type)->parent_template()) {
139 if (type == expected) return true;
141 // Didn't find the required type in the inheritance chain.
// Smi test is a pure pointer-tag check; no dereference happens.
146 bool Object::IsSmi() {
147 return HAS_SMI_TAG(this);
151 bool Object::IsHeapObject() {
152 return Internals::HasHeapObjectTag(this);
// Cheaper heap-object test valid only when 'this' cannot be a Failure
// (the debug ASSERT enforces that precondition).
156 bool Object::NonFailureIsHeapObject() {
157 ASSERT(!this->IsFailure());
158 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
162 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
// String instance types occupy the low end of the type range, so a
// single comparison against FIRST_NONSTRING_TYPE suffices.
165 bool Object::IsString() {
166 return Object::IsHeapObject()
167 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
171 bool Object::IsSpecObject() {
172 return Object::IsHeapObject()
173 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
// "Spec function" = callable per spec: plain function or function proxy.
177 bool Object::IsSpecFunction() {
178 if (!Object::IsHeapObject()) return false;
179 InstanceType type = HeapObject::cast(this)->map()->instance_type();
180 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
// Symbols (interned strings in this V8 vintage) are detected via a
// dedicated bit in the instance type.
184 bool Object::IsSymbol() {
185 if (!this->IsHeapObject()) return false;
186 uint32_t type = HeapObject::cast(this)->map()->instance_type();
187 // Because the symbol tag is non-zero and no non-string types have the
188 // symbol bit set we can test for symbols with a very simple test
190 STATIC_ASSERT(kSymbolTag != 0);
191 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
192 return (type & kIsSymbolMask) != 0;
// String-kind predicates: each first checks IsString(), then queries
// the representation/encoding bits via StringShape.
196 bool Object::IsConsString() {
197 if (!IsString()) return false;
198 return StringShape(String::cast(this)).IsCons();
202 bool Object::IsSlicedString() {
203 if (!IsString()) return false;
204 return StringShape(String::cast(this)).IsSliced();
208 bool Object::IsSeqString() {
209 if (!IsString()) return false;
210 return StringShape(String::cast(this)).IsSequential();
// Sequential + ASCII encoding.
214 bool Object::IsSeqAsciiString() {
215 if (!IsString()) return false;
216 return StringShape(String::cast(this)).IsSequential() &&
217 String::cast(this)->IsAsciiRepresentation();
// Sequential + two-byte (UC16) encoding.
221 bool Object::IsSeqTwoByteString() {
222 if (!IsString()) return false;
223 return StringShape(String::cast(this)).IsSequential() &&
224 String::cast(this)->IsTwoByteRepresentation();
228 bool Object::IsExternalString() {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsExternal();
234 bool Object::IsExternalAsciiString() {
235 if (!IsString()) return false;
236 return StringShape(String::cast(this)).IsExternal() &&
237 String::cast(this)->IsAsciiRepresentation();
241 bool Object::IsExternalTwoByteString() {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsExternal() &&
244 String::cast(this)->IsTwoByteRepresentation();
// Valid elements backing stores are FixedArray (which also covers
// dictionaries), FixedDoubleArray, or an external array.
247 bool Object::HasValidElements() {
248 // Dictionary is covered under FixedArray.
249 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
// StringShape caches a string's instance-type word so repeated bit
// tests don't re-load the map. Each constructor asserts the value
// really is a string type.
252 StringShape::StringShape(String* str)
253 : type_(str->map()->instance_type()) {
255 ASSERT((type_ & kIsNotStringMask) == kStringTag);
259 StringShape::StringShape(Map* map)
260 : type_(map->instance_type()) {
262 ASSERT((type_ & kIsNotStringMask) == kStringTag);
266 StringShape::StringShape(InstanceType t)
267 : type_(static_cast<uint32_t>(t)) {
269 ASSERT((type_ & kIsNotStringMask) == kStringTag);
// Symbol bit test; kSymbolTag != 0 makes the mask test sufficient.
273 bool StringShape::IsSymbol() {
275 STATIC_ASSERT(kSymbolTag != 0);
276 return (type_ & kIsSymbolMask) != 0;
// Encoding tests on the string's own instance type (not underlying).
280 bool String::IsAsciiRepresentation() {
281 uint32_t type = map()->instance_type();
282 return (type & kStringEncodingMask) == kAsciiStringTag;
286 bool String::IsTwoByteRepresentation() {
287 uint32_t type = map()->instance_type();
288 return (type & kStringEncodingMask) == kTwoByteStringTag;
// "Underneath" variants see through cons/sliced wrappers: direct
// strings answer from their own encoding bits, indirect strings
// recurse into GetUnderlying(). NOTE(review): the "return true/false"
// lines for the direct cases are missing from this excerpt.
292 bool String::IsAsciiRepresentationUnderneath() {
293 uint32_t type = map()->instance_type();
294 STATIC_ASSERT(kIsIndirectStringTag != 0);
295 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
297 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
298 case kAsciiStringTag:
300 case kTwoByteStringTag:
302 default: // Cons or sliced string. Need to go deeper.
303 return GetUnderlying()->IsAsciiRepresentation();
308 bool String::IsTwoByteRepresentationUnderneath() {
309 uint32_t type = map()->instance_type();
310 STATIC_ASSERT(kIsIndirectStringTag != 0);
311 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
313 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
314 case kAsciiStringTag:
316 case kTwoByteStringTag:
318 default: // Cons or sliced string. Need to go deeper.
319 return GetUnderlying()->IsTwoByteRepresentation();
// True when the encoding is ASCII or the data-hint bit says the
// (possibly two-byte) contents are all ASCII.
324 bool String::HasOnlyAsciiChars() {
325 uint32_t type = map()->instance_type();
326 return (type & kStringEncodingMask) == kAsciiStringTag ||
327 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
// Representation-tag tests on the cached type word.
331 bool StringShape::IsCons() {
332 return (type_ & kStringRepresentationMask) == kConsStringTag;
336 bool StringShape::IsSliced() {
337 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
341 bool StringShape::IsIndirect() {
342 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
346 bool StringShape::IsExternal() {
347 return (type_ & kStringRepresentationMask) == kExternalStringTag;
351 bool StringShape::IsSequential() {
352 return (type_ & kStringRepresentationMask) == kSeqStringTag;
356 StringRepresentationTag StringShape::representation_tag() {
357 uint32_t tag = (type_ & kStringRepresentationMask);
358 return static_cast<StringRepresentationTag>(tag);
362 uint32_t StringShape::encoding_tag() {
363 return type_ & kStringEncodingMask;
// Representation and encoding bits combined in one word.
367 uint32_t StringShape::full_representation_tag() {
368 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
// Keep the internal masks in sync with the public v8.h Internals copy.
372 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
373 Internals::kFullStringRepresentationMask);
376 bool StringShape::IsSequentialAscii() {
377 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
381 bool StringShape::IsSequentialTwoByte() {
382 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
386 bool StringShape::IsExternalAscii() {
387 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
391 bool StringShape::IsExternalTwoByte() {
392 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
396 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
397 Internals::kExternalTwoByteRepresentationTag);
// Reads one character from a flattened string; picks byte vs uc16
// access by the reader's cached width. NOTE(review): the branch
// condition between the two returns is missing from this excerpt.
400 uc32 FlatStringReader::Get(int index) {
401 ASSERT(0 <= index && index <= length_);
403 return static_cast<const byte*>(start_)[index];
405 return static_cast<const uc16*>(start_)[index];
// A number is either an immediate Smi or a boxed HeapNumber.
410 bool Object::IsNumber() {
411 return IsSmi() || IsHeapNumber();
415 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
416 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
// Filler objects are the GC's hole-plugging objects: free space or
// one/two-pointer fillers.
419 bool Object::IsFiller() {
420 if (!Object::IsHeapObject()) return false;
421 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
422 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
426 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
// External arrays occupy a contiguous instance-type range, so a pair
// of comparisons covers all of them.
429 bool Object::IsExternalArray() {
430 if (!Object::IsHeapObject())
432 InstanceType instance_type =
433 HeapObject::cast(this)->map()->instance_type();
434 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
435 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
439 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
440 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
441 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
442 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
443 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
444 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
445 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
446 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
// Failure values carry their own pointer tag, so these predicates are
// tag tests plus (for the specific kinds) a type-field check.
449 bool MaybeObject::IsFailure() {
450 return HAS_FAILURE_TAG(this);
454 bool MaybeObject::IsRetryAfterGC() {
455 return HAS_FAILURE_TAG(this)
456 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
460 bool MaybeObject::IsOutOfMemory() {
461 return HAS_FAILURE_TAG(this)
462 && Failure::cast(this)->IsOutOfMemoryException();
466 bool MaybeObject::IsException() {
467 return this == Failure::Exception();
471 bool MaybeObject::IsTheHole() {
472 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
// Checked downcast from MaybeObject to Failure (debug-only check).
476 Failure* Failure::cast(MaybeObject* obj) {
477 ASSERT(HAS_FAILURE_TAG(obj));
478 return reinterpret_cast<Failure*>(obj);
// JS receiver/object types occupy the top of the instance-type range,
// so a single lower-bound comparison suffices; the STATIC_ASSERTs pin
// that layout assumption.
482 bool Object::IsJSReceiver() {
483 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
484 return IsHeapObject() &&
485 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
489 bool Object::IsJSObject() {
490 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
491 return IsHeapObject() &&
492 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
// Proxies span a small contiguous type range of their own.
496 bool Object::IsJSProxy() {
497 if (!Object::IsHeapObject()) return false;
498 InstanceType type = HeapObject::cast(this)->map()->instance_type();
499 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
503 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
504 TYPE_CHECKER(JSSet, JS_SET_TYPE)
505 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
506 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
507 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
508 TYPE_CHECKER(Map, MAP_TYPE)
509 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
510 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
// Descriptor arrays have no distinct instance type; any FixedArray
// passes this (debug-oriented) predicate.
513 bool Object::IsDescriptorArray() {
514 return IsFixedArray();
// Heuristic check used by asserts: a deopt input data array is a
// FixedArray whose length is 0 or fixed-header + k*entry-size.
518 bool Object::IsDeoptimizationInputData() {
519 // Must be a fixed array.
520 if (!IsFixedArray()) return false;
522 // There's no sure way to detect the difference between a fixed array and
523 // a deoptimization data array. Since this is used for asserts we can
524 // check that the length is zero or else the fixed size plus a multiple of
526 int length = FixedArray::cast(this)->length();
527 if (length == 0) return true;
529 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
530 return length >= 0 &&
531 length % DeoptimizationInputData::kDeoptEntrySize == 0;
// Same idea: only a plausibility check (even length).
535 bool Object::IsDeoptimizationOutputData() {
536 if (!IsFixedArray()) return false;
537 // There's actually no way to see the difference between a fixed array and
538 // a deoptimization data array. Since this is used for asserts we can check
539 // that the length is plausible though.
540 if (FixedArray::cast(this)->length() % 2 != 0) return false;
// Contexts are identified by their map being one of the known
// context maps owned by the heap.
545 bool Object::IsContext() {
546 if (Object::IsHeapObject()) {
547 Map* map = HeapObject::cast(this)->map();
548 Heap* heap = map->GetHeap();
549 return (map == heap->function_context_map() ||
550 map == heap->catch_context_map() ||
551 map == heap->with_context_map() ||
552 map == heap->global_context_map() ||
553 map == heap->block_context_map());
559 bool Object::IsGlobalContext() {
560 return Object::IsHeapObject() &&
561 HeapObject::cast(this)->map() ==
562 HeapObject::cast(this)->GetHeap()->global_context_map();
566 bool Object::IsSerializedScopeInfo() {
567 return Object::IsHeapObject() &&
568 HeapObject::cast(this)->map() ==
569 HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
573 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
// Template specialization forwarding the generic Is<T>() helper.
576 template <> inline bool Is<JSFunction>(Object* obj) {
577 return obj->IsJSFunction();
581 TYPE_CHECKER(Code, CODE_TYPE)
582 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
583 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
584 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
585 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
586 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
// A String wrapper object (e.g. 'new String("x")') is a JSValue whose
// boxed value is a string.
589 bool Object::IsStringWrapper() {
590 return IsJSValue() && JSValue::cast(this)->value()->IsString();
594 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
// Booleans are the two oddballs whose kind has no non-boolean bits set.
597 bool Object::IsBoolean() {
598 return IsOddball() &&
599 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
603 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
604 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
607 template <> inline bool Is<JSArray>(Object* obj) {
608 return obj->IsJSArray();
// Hash tables are recognized by map identity against the heap's
// canonical hash_table_map.
612 bool Object::IsHashTable() {
613 return Object::IsHeapObject() &&
614 HeapObject::cast(this)->map() ==
615 HeapObject::cast(this)->GetHeap()->hash_table_map();
// A dictionary is any hash table other than the symbol table itself.
619 bool Object::IsDictionary() {
620 return IsHashTable() &&
621 this != HeapObject::cast(this)->GetHeap()->symbol_table();
625 bool Object::IsSymbolTable() {
626 return IsHashTable() && this ==
627 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
// Structural plausibility check for a function result cache: a
// FixedArray with header entries plus a multiple of the entry size.
// NOTE(review): the early "return false" body and final return are
// missing from this excerpt.
631 bool Object::IsJSFunctionResultCache() {
632 if (!IsFixedArray()) return false;
633 FixedArray* self = FixedArray::cast(this);
634 int length = self->length();
635 if (length < JSFunctionResultCache::kEntriesIndex) return false;
636 if ((length - JSFunctionResultCache::kEntriesIndex)
637 % JSFunctionResultCache::kEntrySize != 0) {
// Optional deep verification under --verify-heap (debug feature).
641 if (FLAG_verify_heap) {
642 reinterpret_cast<JSFunctionResultCache*>(this)->
643 JSFunctionResultCacheVerify();
// NormalizedMapCache: FixedArray of exactly kEntries slots.
650 bool Object::IsNormalizedMapCache() {
651 if (!IsFixedArray()) return false;
652 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
656 if (FLAG_verify_heap) {
657 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
// The cache-table predicates below cannot distinguish the specific
// table kind; they only confirm hash-table-ness (assert use only).
664 bool Object::IsCompilationCacheTable() {
665 return IsHashTable();
669 bool Object::IsCodeCacheHashTable() {
670 return IsHashTable();
674 bool Object::IsPolymorphicCodeCacheHashTable() {
675 return IsHashTable();
679 bool Object::IsMapCache() {
680 return IsHashTable();
689 bool Object::IsJSGlobalProxy() {
690 bool result = IsHeapObject() &&
691 (HeapObject::cast(this)->map()->instance_type() ==
692 JS_GLOBAL_PROXY_TYPE);
693 ASSERT(!result || IsAccessCheckNeeded());
698 bool Object::IsGlobalObject() {
699 if (!IsHeapObject()) return false;
701 InstanceType type = HeapObject::cast(this)->map()->instance_type();
702 return type == JS_GLOBAL_OBJECT_TYPE ||
703 type == JS_BUILTINS_OBJECT_TYPE;
707 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
708 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
711 bool Object::IsUndetectableObject() {
712 return IsHeapObject()
713 && HeapObject::cast(this)->map()->is_undetectable();
717 bool Object::IsAccessCheckNeeded() {
718 return IsHeapObject()
719 && HeapObject::cast(this)->map()->is_access_check_needed();
723 bool Object::IsStruct() {
724 if (!IsHeapObject()) return false;
725 switch (HeapObject::cast(this)->map()->instance_type()) {
726 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
727 STRUCT_LIST(MAKE_STRUCT_CASE)
728 #undef MAKE_STRUCT_CASE
729 default: return false;
734 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
735 bool Object::Is##Name() { \
736 return Object::IsHeapObject() \
737 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
739 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
740 #undef MAKE_STRUCT_PREDICATE
743 bool Object::IsUndefined() {
744 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
748 bool Object::IsNull() {
749 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
753 bool Object::IsTheHole() {
754 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
758 bool Object::IsTrue() {
759 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
763 bool Object::IsFalse() {
764 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
768 bool Object::IsArgumentsMarker() {
769 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
// Numeric value of a Smi or HeapNumber. NOTE(review): the IsSmi()
// condition line of this ternary is missing from this excerpt.
773 double Object::Number() {
776 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
777 : reinterpret_cast<HeapNumber*>(this)->value();
// Convert to a Smi if the value is exactly representable as one;
// otherwise signal failure via Failure::Exception().
781 MaybeObject* Object::ToSmi() {
782 if (IsSmi()) return this;
783 if (IsHeapNumber()) {
784 double value = HeapNumber::cast(this)->value();
785 int int_value = FastD2I(value);
786 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
787 return Smi::FromInt(int_value);
790 return Failure::Exception();
794 bool Object::HasSpecificClassOf(String* name) {
795 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
// Element access may run a JS getter, hence may allocate; the ASSERT
// guards against calling from allocation-forbidden regions.
799 MaybeObject* Object::GetElement(uint32_t index) {
800 // GetElement can trigger a getter which can cause allocation.
801 // This was not always the case. This ASSERT is here to catch
802 // leftover incorrect uses.
803 ASSERT(HEAP->IsAllocationAllowed());
804 return GetElementWithReceiver(this, index);
// Variant for callers that know no exception can occur; failures are
// asserted away in debug builds. NOTE(review): the final "return
// result;" line is missing from this excerpt.
808 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
809 MaybeObject* maybe = GetElementWithReceiver(this, index);
810 ASSERT(!maybe->IsFailure());
811 Object* result = NULL; // Initialization to please compiler.
812 maybe->ToObject(&result);
// Property lookup entry points; the one-arg form discards attributes.
817 MaybeObject* Object::GetProperty(String* key) {
818 PropertyAttributes attributes;
819 return GetPropertyWithReceiver(this, key, &attributes);
823 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
824 return GetPropertyWithReceiver(this, key, attributes);
// Raw tagged-object field access macros. FIELD_ADDR subtracts the
// heap-object tag to get the real address of a field at 'offset'.
// NOTE(review): closing lines of the write-barrier macros are missing
// from this excerpt.
828 #define FIELD_ADDR(p, offset) \
829 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
831 #define READ_FIELD(p, offset) \
832 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
834 #define WRITE_FIELD(p, offset, value) \
835 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
// WRITE_BARRIER: records the store for incremental marking, and for
// new-space targets also in the store buffer via Heap::RecordWrite.
837 #define WRITE_BARRIER(heap, object, offset, value) \
838 heap->incremental_marking()->RecordWrite( \
839 object, HeapObject::RawField(object, offset), value); \
840 if (heap->InNewSpace(value)) { \
841 heap->RecordWrite(object->address(), offset); \
844 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
845 if (mode == UPDATE_WRITE_BARRIER) { \
846 heap->incremental_marking()->RecordWrite( \
847 object, HeapObject::RawField(object, offset), value); \
848 if (heap->InNewSpace(value)) { \
849 heap->RecordWrite(object->address(), offset); \
// On MIPS, doubles in heap objects may be only 4-byte aligned, so
// reads/writes go through two 32-bit halves instead of ldc1/sdc1.
853 #ifndef V8_TARGET_ARCH_MIPS
854 #define READ_DOUBLE_FIELD(p, offset) \
855 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
856 #else // V8_TARGET_ARCH_MIPS
857 // Prevent gcc from using load-double (mips ldc1) on (possibly)
858 // non-64-bit aligned HeapNumber::value.
859 static inline double read_double_field(void* p, int offset) {
864 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
865 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
868 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
869 #endif // V8_TARGET_ARCH_MIPS
871 #ifndef V8_TARGET_ARCH_MIPS
872 #define WRITE_DOUBLE_FIELD(p, offset, value) \
873 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
874 #else // V8_TARGET_ARCH_MIPS
875 // Prevent gcc from using store-double (mips sdc1) on (possibly)
876 // non-64-bit aligned HeapNumber::value.
877 static inline void write_double_field(void* p, int offset,
884 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
885 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
887 #define WRITE_DOUBLE_FIELD(p, offset, value) \
888 write_double_field(p, offset, value)
889 #endif // V8_TARGET_ARCH_MIPS
// Typed scalar field accessors; none of these store tagged pointers,
// so no write barriers are involved.
892 #define READ_INT_FIELD(p, offset) \
893 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
895 #define WRITE_INT_FIELD(p, offset, value) \
896 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
898 #define READ_INTPTR_FIELD(p, offset) \
899 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
901 #define WRITE_INTPTR_FIELD(p, offset, value) \
902 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
904 #define READ_UINT32_FIELD(p, offset) \
905 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
907 #define WRITE_UINT32_FIELD(p, offset, value) \
908 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
910 #define READ_SHORT_FIELD(p, offset) \
911 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
913 #define WRITE_SHORT_FIELD(p, offset, value) \
914 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
916 #define READ_BYTE_FIELD(p, offset) \
917 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
919 #define WRITE_BYTE_FIELD(p, offset, value) \
920 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
// Address of a tagged slot inside an object, as an Object**.
923 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
924 return &READ_FIELD(obj, byte_offset);
// Smi::value() — decodes the integer from the tagged pointer.
// NOTE(review): the "int Smi::value()" signature line is missing from
// this excerpt.
929 return Internals::SmiValue(this);
// Encode an int as a Smi: shift left past the tag bits and OR in the
// Smi tag. The result is a fake pointer, never dereferenced.
933 Smi* Smi::FromInt(int value) {
934 ASSERT(Smi::IsValid(value));
935 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
936 intptr_t tagged_value =
937 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
938 return reinterpret_cast<Smi*>(tagged_value);
942 Smi* Smi::FromIntptr(intptr_t value) {
943 ASSERT(Smi::IsValid(value));
944 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
945 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
// Failure objects encode a type (and optionally extra info) in the
// pointer bits above the failure tag.
949 Failure::Type Failure::type() const {
950 return static_cast<Type>(value() & kFailureTypeTagMask);
954 bool Failure::IsInternalError() const {
955 return type() == INTERNAL_ERROR;
959 bool Failure::IsOutOfMemoryException() const {
960 return type() == OUT_OF_MEMORY_EXCEPTION;
// For RETRY_AFTER_GC failures the requested allocation space is
// packed above the type tag.
964 AllocationSpace Failure::allocation_space() const {
965 ASSERT_EQ(RETRY_AFTER_GC, type());
966 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
// Canonical Failure singletons, built via Construct().
971 Failure* Failure::InternalError() {
972 return Construct(INTERNAL_ERROR);
976 Failure* Failure::Exception() {
977 return Construct(EXCEPTION);
981 Failure* Failure::OutOfMemoryException() {
982 return Construct(OUT_OF_MEMORY_EXCEPTION);
// Raw payload bits of the failure (everything above the tag).
986 intptr_t Failure::value() const {
987 return static_cast<intptr_t>(
988 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
992 Failure* Failure::RetryAfterGC() {
993 return RetryAfterGC(NEW_SPACE);
997 Failure* Failure::RetryAfterGC(AllocationSpace space) {
998 ASSERT((space & ~kSpaceTagMask) == 0);
999 return Construct(RETRY_AFTER_GC, space);
// Build a tagged Failure pointer from type + extra value; the ASSERT
// verifies no information is lost to the tag shift.
1003 Failure* Failure::Construct(Type type, intptr_t value) {
1005 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1006 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1007 return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
// Whether 'value' fits in a Smi. On x64 (31-bit payload in a 64-bit
// word) this is an int32 range check; on 32-bit targets it is the
// classic "two top bits equal" test, done in unsigned arithmetic to
// avoid signed-overflow UB. NOTE(review): the #else/#endif and final
// return lines are missing from this excerpt.
1011 bool Smi::IsValid(intptr_t value) {
1013 bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1016 #ifdef V8_TARGET_ARCH_X64
1017 // To be representable as a long smi, the value must be a 32-bit integer.
1018 bool result = (value == static_cast<int32_t>(value));
1020 // To be representable as an tagged small integer, the two
1021 // most-significant bits of 'value' must be either 00 or 11 due to
1022 // sign-extension. To check this we add 01 to the two
1023 // most-significant bits, and check if the most-significant bit is 0
1025 // CAUTION: The original code below:
1026 // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1027 // may lead to incorrect results according to the C language spec, and
1028 // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1029 // compiler may produce undefined results in case of signed integer
1030 // overflow. The computation must be done w/ unsigned ints.
1031 bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1033 ASSERT(result == in_range);
// MapWord: the first word of every heap object. Normally it holds the
// map pointer; during scavenge it can instead hold a forwarding
// address (distinguished by its Smi-like tag after clearing the
// heap-object tag).
1038 MapWord MapWord::FromMap(Map* map) {
1039 return MapWord(reinterpret_cast<uintptr_t>(map));
1043 Map* MapWord::ToMap() {
1044 return reinterpret_cast<Map*>(value_);
// A forwarding address is stored untagged, so it looks like a Smi.
1048 bool MapWord::IsForwardingAddress() {
1049 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1053 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1054 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1055 return MapWord(reinterpret_cast<uintptr_t>(raw));
1059 HeapObject* MapWord::ToForwardingAddress() {
1060 ASSERT(IsForwardingAddress());
1061 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
// Debug-mode field verification helpers.
1066 void HeapObject::VerifyObjectField(int offset) {
1067 VerifyPointer(READ_FIELD(this, offset));
1070 void HeapObject::VerifySmiField(int offset) {
1071 ASSERT(READ_FIELD(this, offset)->IsSmi());
// The owning Heap is recovered from the memory chunk the object lives
// in. NOTE(review): the "Heap* heap =" and "return heap;" lines are
// missing from this excerpt.
1076 Heap* HeapObject::GetHeap() {
1078 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1079 ASSERT(heap != NULL);
1080 ASSERT(heap->isolate() == Isolate::Current());
1085 Isolate* HeapObject::GetIsolate() {
1086 return GetHeap()->isolate();
// The map lives in the object's first (map) word.
1090 Map* HeapObject::map() {
1091 return map_word().ToMap();
// set_map also notifies incremental marking of the map store; maps
// are never evacuation candidates, hence the NULL slot.
1095 void HeapObject::set_map(Map* value) {
1096 set_map_word(MapWord::FromMap(value));
1097 if (value != NULL) {
1098 // TODO(1600) We are passing NULL as a slot because maps can never be on
1099 // evacuation candidate.
1100 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1105 // Unsafe accessor omitting write barrier.
1106 void HeapObject::set_map_unsafe(Map* value) {
1107 set_map_word(MapWord::FromMap(value));
1111 MapWord HeapObject::map_word() {
1112 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1116 void HeapObject::set_map_word(MapWord map_word) {
1117 // WRITE_FIELD does not invoke write barrier, but there is no need
1119 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
// Tagged pointer <-> raw address conversions.
1123 HeapObject* HeapObject::FromAddress(Address address) {
1124 ASSERT_TAG_ALIGNED(address);
1125 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1129 Address HeapObject::address() {
1130 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1134 int HeapObject::Size() {
1135 return SizeFromMap(map());
// Visit all tagged slots in [start, end) of this object.
1139 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1140 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1141 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1145 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1146 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
// HeapNumber: boxed IEEE-754 double plus exponent/sign bit helpers.
1150 double HeapNumber::value() {
1151 return READ_DOUBLE_FIELD(this, kValueOffset);
1155 void HeapNumber::set_value(double value) {
1156 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1160 int HeapNumber::get_exponent() {
1161 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1162 kExponentShift) - kExponentBias;
1166 int HeapNumber::get_sign() {
1167 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1171 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1174 FixedArrayBase* JSObject::elements() {
1175 Object* array = READ_FIELD(this, kElementsOffset);
1176 return static_cast<FixedArrayBase*>(array);
1179 void JSObject::ValidateSmiOnlyElements() {
1181 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1182 Heap* heap = GetHeap();
1183 // Don't use elements, since integrity checks will fail if there
1184 // are filler pointers in the array.
1185 FixedArray* fixed_array =
1186 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1187 Map* map = fixed_array->map();
1188 // Arrays that have been shifted in place can't be verified.
1189 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1190 map != heap->raw_unchecked_two_pointer_filler_map() &&
1191 map != heap->free_space_map()) {
1192 for (int i = 0; i < fixed_array->length(); i++) {
1193 Object* current = fixed_array->get(i);
1194 ASSERT(current->IsSmi() || current == heap->the_hole_value());
1202 MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
1204 ValidateSmiOnlyElements();
1206 if ((map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
1208 MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
1209 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1210 set_map(Map::cast(obj));
// Scans `count` candidate element values; if any is neither a Smi nor
// the hole while this object is still in FAST_SMI_ONLY_ELEMENTS mode,
// triggers the transition to a non-Smi elements kind. `objects` is a
// raw pointer into a caller-owned array and is advanced in place.
1216 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1218 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1219 for (uint32_t i = 0; i < count; ++i) {
1220 Object* current = *objects++;
1221 if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
// First non-Smi, non-hole value found: one transition covers the rest.
1222 return EnsureCanContainNonSmiElements();
// Convenience overload: checks every element of `elements` by
// delegating to the pointer/count overload, starting at element 0.
1230 MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
1231 Object** objects = reinterpret_cast<Object**>(
1232 FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
1233 return EnsureCanContainElements(objects, elements->length());
// Installs a new elements backing store. The asserts enforce that the
// store's map agrees with the elements kind recorded in this object's
// map (fast/smi-only <=> FixedArray or COW array; fast-double <=>
// FixedDoubleArray). The write barrier runs per `mode`.
1237 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1238 ASSERT((map()->has_fast_elements() ||
1239 map()->has_fast_smi_only_elements()) ==
1240 (value->map() == GetHeap()->fixed_array_map() ||
1241 value->map() == GetHeap()->fixed_cow_array_map()));
1242 ASSERT(map()->has_fast_double_elements() ==
1243 value->IsFixedDoubleArray());
1244 ASSERT(value->HasValidElements());
1246 ValidateSmiOnlyElements();
1248 WRITE_FIELD(this, kElementsOffset, value);
1249 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1253 void JSObject::initialize_properties() {
1254 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1255 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1259 void JSObject::initialize_elements() {
1260 ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
1261 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1262 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
// Resets this object to an empty fast backing store. The target
// elements kind depends on FLAG_smi_only_arrays (smi-only when the
// flag is on). Returns the allocation failure if the transition map
// cannot be obtained.
1266 MaybeObject* JSObject::ResetElements() {
1268 ElementsKind elements_kind = FLAG_smi_only_arrays
1269 ? FAST_SMI_ONLY_ELEMENTS
1271 MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
1272 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1273 set_map(Map::cast(obj));
1274 initialize_elements();
1279 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1280 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1283 byte Oddball::kind() {
1284 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1288 void Oddball::set_kind(byte value) {
1289 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1293 Object* JSGlobalPropertyCell::value() {
1294 return READ_FIELD(this, kValueOffset);
// Stores `val` into the cell. The WriteBarrierMode parameter is
// deliberately ignored (no generational barrier for cells), but the
// incremental marker is still notified via RecordWrite so marking
// invariants hold during an in-progress incremental GC.
1298 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1299 // The write barrier is not used for global property cells.
1300 ASSERT(!val->IsJSGlobalPropertyCell());
1301 WRITE_FIELD(this, kValueOffset, val);
1302 GetHeap()->incremental_marking()->RecordWrite(
1303 this, HeapObject::RawField(this, kValueOffset), val);
// Returns the header size (offset where internal fields begin) for
// this object's instance type. JS_OBJECT_TYPE is special-cased before
// the switch because it is by far the most common case.
1307 int JSObject::GetHeaderSize() {
1308 InstanceType type = map()->instance_type();
1309 // Check for the most common kind of JavaScript object before
1310 // falling into the generic switch. This speeds up the internal
1311 // field operations considerably on average.
1312 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1314 case JS_GLOBAL_PROXY_TYPE:
1315 return JSGlobalProxy::kSize;
1316 case JS_GLOBAL_OBJECT_TYPE:
1317 return JSGlobalObject::kSize;
1318 case JS_BUILTINS_OBJECT_TYPE:
1319 return JSBuiltinsObject::kSize;
1320 case JS_FUNCTION_TYPE:
1321 return JSFunction::kSize;
1323 return JSValue::kSize;
1325 return JSValue::kSize;
1326 case JS_WEAK_MAP_TYPE:
1327 return JSWeakMap::kSize;
// NOTE(review): JS_REGEXP_TYPE returns JSValue::kSize rather than a
// JSRegExp-specific size — looks intentional but worth confirming.
1328 case JS_REGEXP_TYPE:
1329 return JSValue::kSize;
1330 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1331 return JSObject::kHeaderSize;
1332 case JS_MESSAGE_OBJECT_TYPE:
1333 return JSMessageObject::kSize;
// Number of embedder internal fields: total object size minus header,
// in pointer units, minus in-object properties and minus one slot when
// the map reserves a trailing external-resource slot.
1341 int JSObject::GetInternalFieldCount() {
1342 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1343 // Make sure to adjust for the number of in-object properties. These
1344 // properties do contribute to the size, but are not internal fields.
1345 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1346 map()->inobject_properties() - (map()->has_external_resource()?1:0);
1350 int JSObject::GetInternalFieldOffset(int index) {
1351 ASSERT(index < GetInternalFieldCount() && index >= 0);
1352 return GetHeaderSize() + (kPointerSize * index);
1356 Object* JSObject::GetInternalField(int index) {
1357 ASSERT(index < GetInternalFieldCount() && index >= 0);
1358 // Internal objects do follow immediately after the header, whereas in-object
1359 // properties are at the end of the object. Therefore there is no need
1360 // to adjust the index here.
1361 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1365 void JSObject::SetInternalField(int index, Object* value) {
1366 ASSERT(index < GetInternalFieldCount() && index >= 0);
1367 // Internal objects do follow immediately after the header, whereas in-object
1368 // properties are at the end of the object. Therefore there is no need
1369 // to adjust the index here.
1370 int offset = GetHeaderSize() + (kPointerSize * index);
1371 WRITE_FIELD(this, offset, value);
1372 WRITE_BARRIER(GetHeap(), this, offset, value);
// Writes the external-resource slot, which sits directly after the
// last internal field (see GetInternalFieldCount, which excludes this
// slot from the count). Only valid when the map reserves the slot.
1376 void JSObject::SetExternalResourceObject(Object *value) {
1377 ASSERT(map()->has_external_resource());
1378 int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
1379 WRITE_FIELD(this, offset, value);
1380 WRITE_BARRIER(GetHeap(), this, offset, value);
// Reads the external-resource slot, or undefined when the map has no
// such slot. NOTE(review): the READ_FIELD line exceeds 80 columns and
// the `Object *` pointer placement differs from file style — cosmetic.
1384 Object *JSObject::GetExternalResourceObject() {
1385 if (map()->has_external_resource()) {
1386 return READ_FIELD(this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
1388 return GetHeap()->undefined_value();
1393 // Access fast-case object properties at index. The use of these routines
1394 // is needed to correctly distinguish between properties stored in-object and
1395 // properties stored in the properties array.
1393 // Access fast-case object properties at index. The use of these routines
1394 // is needed to correctly distinguish between properties stored in-object and
1395 // properties stored in the properties array.
// After subtracting inobject_properties(), a negative index addresses
// an in-object slot (relative to instance_size) and a non-negative
// index addresses the external properties array.
1396 Object* JSObject::FastPropertyAt(int index) {
1397 // Adjust for the number of properties stored in the object.
1398 index -= map()->inobject_properties();
1400 int offset = map()->instance_size() + (index * kPointerSize);
1401 return READ_FIELD(this, offset);
1403 ASSERT(index < properties()->length());
1404 return properties()->get(index);
// Store counterpart of FastPropertyAt: negative adjusted index writes
// the in-object slot (with write barrier); otherwise writes the
// properties array. Returns `value` (per the surrounding API style).
1409 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1410 // Adjust for the number of properties stored in the object.
1411 index -= map()->inobject_properties();
1413 int offset = map()->instance_size() + (index * kPointerSize);
1414 WRITE_FIELD(this, offset, value);
1415 WRITE_BARRIER(GetHeap(), this, offset, value);
1417 ASSERT(index < properties()->length());
1418 properties()->set(index, value);
1424 int JSObject::GetInObjectPropertyOffset(int index) {
1425 // Adjust for the number of properties stored in the object.
1426 index -= map()->inobject_properties();
1428 return map()->instance_size() + (index * kPointerSize);
1432 Object* JSObject::InObjectPropertyAt(int index) {
1433 // Adjust for the number of properties stored in the object.
1434 index -= map()->inobject_properties();
1436 int offset = map()->instance_size() + (index * kPointerSize);
1437 return READ_FIELD(this, offset);
1441 Object* JSObject::InObjectPropertyAtPut(int index,
1443 WriteBarrierMode mode) {
1444 // Adjust for the number of properties stored in the object.
1445 index -= map()->inobject_properties();
1447 int offset = map()->instance_size() + (index * kPointerSize);
1448 WRITE_FIELD(this, offset, value);
1449 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
// Initializes the object's body: the first pre_allocated_property_fields
// slots get `pre_allocated_value`, the remainder up to instance_size get
// `filler_value`. Both values must not live in new space (asserted),
// because the stores below skip the write barrier.
1455 void JSObject::InitializeBody(Map* map,
1456 Object* pre_allocated_value,
1457 Object* filler_value) {
1458 ASSERT(!filler_value->IsHeapObject() ||
1459 !GetHeap()->InNewSpace(filler_value));
1460 ASSERT(!pre_allocated_value->IsHeapObject() ||
1461 !GetHeap()->InNewSpace(pre_allocated_value));
1462 int size = map->instance_size();
1463 int offset = kHeaderSize;
// Skip the pre-allocated loop entirely when both values are identical.
1464 if (filler_value != pre_allocated_value) {
1465 int pre_allocated = map->pre_allocated_property_fields();
1466 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1467 for (int i = 0; i < pre_allocated; i++) {
1468 WRITE_FIELD(this, offset, pre_allocated_value);
1469 offset += kPointerSize;
1472 while (offset < size) {
1473 WRITE_FIELD(this, offset, filler_value);
1474 offset += kPointerSize;
1479 bool JSObject::HasFastProperties() {
1480 return !properties()->IsDictionary();
1484 int JSObject::MaxFastProperties() {
1485 // Allow extra fast properties if the object has more than
1486 // kMaxFastProperties in-object properties. When this is the case,
1487 // it is very unlikely that the object is being used as a dictionary
1488 // and there is a good chance that allowing more map transitions
1489 // will be worth it.
1490 return Max(map()->inobject_properties(), kMaxFastProperties);
1494 void Struct::InitializeBody(int object_size) {
1495 Object* value = GetHeap()->undefined_value();
1496 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1497 WRITE_FIELD(this, offset, value);
// Converts this object to a uint32 array index if possible: Smis must
// be non-negative; HeapNumbers must round-trip exactly through uint32
// (rejecting fractions, negatives, and out-of-range values).
1502 bool Object::ToArrayIndex(uint32_t* index) {
1504 int value = Smi::cast(this)->value();
1505 if (value < 0) return false;
1509 if (IsHeapNumber()) {
1510 double value = HeapNumber::cast(this)->value();
1511 uint32_t uint_value = static_cast<uint32_t>(value);
// Exact round-trip check: true only for integral values in uint32 range.
1512 if (value == static_cast<double>(uint_value)) {
1513 *index = uint_value;
// True iff this is a String-wrapping JSValue whose string has a
// character at `index` (i.e. index < length).
// NOTE(review): the `(uint32_t)` C-style cast below should be
// static_cast<uint32_t>(...) per the project's C++ conventions.
1521 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1522 if (!this->IsJSValue()) return false;
1524 JSValue* js_value = JSValue::cast(this);
1525 if (!js_value->value()->IsString()) return false;
1527 String* str = String::cast(js_value->value());
1528 if (index >= (uint32_t)str->length()) return false;
1534 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1535 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1536 return reinterpret_cast<FixedArrayBase*>(object);
1540 Object* FixedArray::get(int index) {
1541 ASSERT(index >= 0 && index < this->length());
1542 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1546 void FixedArray::set(int index, Smi* value) {
1547 ASSERT(map() != HEAP->fixed_cow_array_map());
1548 ASSERT(index >= 0 && index < this->length());
1549 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1550 int offset = kHeaderSize + index * kPointerSize;
1551 WRITE_FIELD(this, offset, value);
1555 void FixedArray::set(int index, Object* value) {
1556 ASSERT(map() != HEAP->fixed_cow_array_map());
1557 ASSERT(index >= 0 && index < this->length());
1558 int offset = kHeaderSize + index * kPointerSize;
1559 WRITE_FIELD(this, offset, value);
1560 WRITE_BARRIER(GetHeap(), this, offset, value);
1564 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1565 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1569 inline double FixedDoubleArray::hole_nan_as_double() {
1570 return BitCast<double, uint64_t>(kHoleNanInt64);
1574 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1575 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1576 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1577 return OS::nan_value();
1581 double FixedDoubleArray::get_scalar(int index) {
1582 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1583 map() != HEAP->fixed_array_map());
1584 ASSERT(index >= 0 && index < this->length());
1585 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1586 ASSERT(!is_the_hole_nan(result));
// Boxed read: returns the hole sentinel object for hole slots,
// otherwise allocates/returns a Number for the raw double (allocation
// may fail, hence MaybeObject).
1591 MaybeObject* FixedDoubleArray::get(int index) {
1592 if (is_the_hole(index)) {
1593 return GetHeap()->the_hole_value();
1595 return GetHeap()->NumberFromDouble(get_scalar(index));
// Writes a raw double. Incoming NaNs are canonicalized so no stored
// bit pattern can collide with the hole NaN encoding.
1600 void FixedDoubleArray::set(int index, double value) {
1601 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1602 map() != HEAP->fixed_array_map());
1603 int offset = kHeaderSize + index * kDoubleSize;
1604 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1605 WRITE_DOUBLE_FIELD(this, offset, value);
1609 void FixedDoubleArray::set_the_hole(int index) {
1610 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1611 map() != HEAP->fixed_array_map());
1612 int offset = kHeaderSize + index * kDoubleSize;
1613 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1617 bool FixedDoubleArray::is_the_hole(int index) {
1618 int offset = kHeaderSize + index * kDoubleSize;
1619 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
// Copies `from` into this (larger) array, using a bulk MemCopy when the
// payload is big enough to amortize it, then fills the tail with holes.
// NOTE(review): this overload asserts old_length < length() (strict),
// while the FixedArray overload below uses <= — confirm the asymmetry
// is intentional.
1623 void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
1624 int old_length = from->length();
1625 ASSERT(old_length < length());
1626 if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
1627 OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
1628 FIELD_ADDR(from, kHeaderSize),
1629 old_length * kDoubleSize);
1631 for (int i = 0; i < old_length; ++i) {
1632 if (from->is_the_hole(i)) {
1635 set(i, from->get_scalar(i));
// Fill the remaining slots beyond the source length with hole NaNs.
1639 int offset = kHeaderSize + old_length * kDoubleSize;
1640 for (int current = from->length(); current < length(); ++current) {
1641 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1642 offset += kDoubleSize;
// Converts a boxed FixedArray into this unboxed double array: holes map
// to the hole NaN, numbers are unboxed via Number(); trailing slots are
// hole-filled.
1647 void FixedDoubleArray::Initialize(FixedArray* from) {
1648 int old_length = from->length();
1649 ASSERT(old_length <= length());
1650 for (int i = 0; i < old_length; i++) {
1651 Object* hole_or_object = from->get(i);
1652 if (hole_or_object->IsTheHole()) {
1655 set(i, hole_or_object->Number());
1658 int offset = kHeaderSize + old_length * kDoubleSize;
1659 for (int current = from->length(); current < length(); ++current) {
1660 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1661 offset += kDoubleSize;
// Converts a NumberDictionary into this dense double array: first
// hole-fill everything, then write each numeric-keyed entry at its
// index. Assumes all numeric keys fit within length() — no bounds
// check is visible here (TODO confirm callers guarantee this).
1666 void FixedDoubleArray::Initialize(NumberDictionary* from) {
1667 int offset = kHeaderSize;
1668 for (int current = 0; current < length(); ++current) {
1669 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1670 offset += kDoubleSize;
1672 for (int i = 0; i < from->Capacity(); i++) {
1673 Object* key = from->KeyAt(i);
1674 if (key->IsNumber()) {
1675 uint32_t entry = static_cast<uint32_t>(key->Number());
1676 set(entry, from->ValueAt(i)->Number());
// Decides whether stores into this object may skip the write barrier.
// The incremental-marking check deliberately precedes the new-space
// check: while marking is active the barrier must run even for
// new-space objects. The AssertNoAllocation token pins the answer —
// it is only valid while no allocation can move this object.
1682 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1683 Heap* heap = GetHeap();
1684 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1685 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1686 return UPDATE_WRITE_BARRIER;
1690 void FixedArray::set(int index,
1692 WriteBarrierMode mode) {
1693 ASSERT(map() != HEAP->fixed_cow_array_map());
1694 ASSERT(index >= 0 && index < this->length());
1695 int offset = kHeaderSize + index * kPointerSize;
1696 WRITE_FIELD(this, offset, value);
1697 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1701 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1704 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1705 ASSERT(index >= 0 && index < array->length());
1706 ASSERT(!HEAP->InNewSpace(value));
1707 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1711 void FixedArray::set_undefined(int index) {
1712 ASSERT(map() != HEAP->fixed_cow_array_map());
1713 set_undefined(GetHeap(), index);
1717 void FixedArray::set_undefined(Heap* heap, int index) {
1718 ASSERT(index >= 0 && index < this->length());
1719 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1720 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1721 heap->undefined_value());
1725 void FixedArray::set_null(int index) {
1726 set_null(GetHeap(), index);
1730 void FixedArray::set_null(Heap* heap, int index) {
1731 ASSERT(index >= 0 && index < this->length());
1732 ASSERT(!heap->InNewSpace(heap->null_value()));
1733 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1737 void FixedArray::set_the_hole(int index) {
1738 ASSERT(map() != HEAP->fixed_cow_array_map());
1739 ASSERT(index >= 0 && index < this->length());
1740 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1742 kHeaderSize + index * kPointerSize,
1743 GetHeap()->the_hole_value());
1747 void FixedArray::set_unchecked(int index, Smi* value) {
1748 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1749 int offset = kHeaderSize + index * kPointerSize;
1750 WRITE_FIELD(this, offset, value);
1754 void FixedArray::set_unchecked(Heap* heap,
1757 WriteBarrierMode mode) {
1758 int offset = kHeaderSize + index * kPointerSize;
1759 WRITE_FIELD(this, offset, value);
1760 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1764 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1765 ASSERT(index >= 0 && index < this->length());
1766 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1767 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1771 Object** FixedArray::data_start() {
1772 return HeapObject::RawField(this, kHeaderSize);
1776 bool DescriptorArray::IsEmpty() {
1777 ASSERT(this->IsSmi() ||
1778 this->length() > kFirstIndex ||
1779 this == HEAP->empty_descriptor_array());
1780 return this->IsSmi() || length() <= kFirstIndex;
1784 int DescriptorArray::bit_field3_storage() {
1785 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1786 return Smi::cast(storage)->value();
1789 void DescriptorArray::set_bit_field3_storage(int value) {
1791 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
1795 void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
1798 Object* tmp = array->get(first);
1799 NoWriteBarrierSet(array, first, array->get(second));
1800 NoWriteBarrierSet(array, second, tmp);
// Looks up `name` in this (sorted, duplicate-free) descriptor array.
// Small arrays with symbol names use pointer-comparison linear search;
// everything else falls back to binary search. Returns kNotFound when
// absent.
1804 int DescriptorArray::Search(String* name) {
1805 SLOW_ASSERT(IsSortedNoDuplicates());
1807 // Check for empty descriptor array.
1808 int nof = number_of_descriptors();
1809 if (nof == 0) return kNotFound;
1811 // Fast case: do linear search for small arrays.
1812 const int kMaxElementsForLinearSearch = 8;
1813 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1814 return LinearSearch(name, nof);
1817 // Slow case: perform binary search.
1818 return BinarySearch(name, 0, nof - 1);
1822 int DescriptorArray::SearchWithCache(String* name) {
1823 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1824 if (number == DescriptorLookupCache::kAbsent) {
1825 number = Search(name);
1826 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1832 String* DescriptorArray::GetKey(int descriptor_number) {
1833 ASSERT(descriptor_number < number_of_descriptors());
1834 return String::cast(get(ToKeyIndex(descriptor_number)));
1838 Object* DescriptorArray::GetValue(int descriptor_number) {
1839 ASSERT(descriptor_number < number_of_descriptors());
1840 return GetContentArray()->get(ToValueIndex(descriptor_number));
1844 Smi* DescriptorArray::GetDetails(int descriptor_number) {
1845 ASSERT(descriptor_number < number_of_descriptors());
1846 return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
1850 PropertyType DescriptorArray::GetType(int descriptor_number) {
1851 ASSERT(descriptor_number < number_of_descriptors());
1852 return PropertyDetails(GetDetails(descriptor_number)).type();
1856 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1857 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1861 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1862 return JSFunction::cast(GetValue(descriptor_number));
1866 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1867 ASSERT(GetType(descriptor_number) == CALLBACKS);
1868 return GetValue(descriptor_number);
1872 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1873 ASSERT(GetType(descriptor_number) == CALLBACKS);
1874 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1875 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
1879 bool DescriptorArray::IsProperty(int descriptor_number) {
1880 return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
1884 bool DescriptorArray::IsTransition(int descriptor_number) {
1885 PropertyType t = GetType(descriptor_number);
1886 return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
1887 t == ELEMENTS_TRANSITION;
1891 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
1892 return GetType(descriptor_number) == NULL_DESCRIPTOR;
1896 bool DescriptorArray::IsDontEnum(int descriptor_number) {
1897 return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
1901 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
1902 desc->Init(GetKey(descriptor_number),
1903 GetValue(descriptor_number),
1904 PropertyDetails(GetDetails(descriptor_number)));
1908 void DescriptorArray::Set(int descriptor_number,
1910 const WhitenessWitness&) {
1912 ASSERT(descriptor_number < number_of_descriptors());
1914 // Make sure none of the elements in desc are in new space.
1915 ASSERT(!HEAP->InNewSpace(desc->GetKey()));
1916 ASSERT(!HEAP->InNewSpace(desc->GetValue()));
1918 NoWriteBarrierSet(this,
1919 ToKeyIndex(descriptor_number),
1921 FixedArray* content_array = GetContentArray();
1922 NoWriteBarrierSet(content_array,
1923 ToValueIndex(descriptor_number),
1925 NoWriteBarrierSet(content_array,
1926 ToDetailsIndex(descriptor_number),
1927 desc->GetDetails().AsSmi());
1931 void DescriptorArray::CopyFrom(int index,
1932 DescriptorArray* src,
1934 const WhitenessWitness& witness) {
1936 src->Get(src_index, &desc);
1937 Set(index, &desc, witness);
1941 void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
1942 NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
1943 FixedArray* content_array = GetContentArray();
1944 NoWriteBarrierSwap(content_array,
1945 ToValueIndex(first),
1946 ToValueIndex(second));
1947 NoWriteBarrierSwap(content_array,
1948 ToDetailsIndex(first),
1949 ToDetailsIndex(second));
1953 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
1954 : marking_(array->GetHeap()->incremental_marking()) {
1955 marking_->EnterNoMarkingScope();
1956 if (array->number_of_descriptors() > 0) {
1957 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
1958 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
1963 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
1964 marking_->LeaveNoMarkingScope();
1968 template<typename Shape, typename Key>
1969 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
1970 const int kMinCapacity = 32;
1971 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
1972 if (capacity < kMinCapacity) {
1973 capacity = kMinCapacity; // Guarantee min capacity.
1979 template<typename Shape, typename Key>
1980 int HashTable<Shape, Key>::FindEntry(Key key) {
1981 return FindEntry(GetIsolate(), key);
1985 // Find entry for key otherwise return kNotFound.
// Open-addressed probe loop: undefined marks an empty slot (search
// ends), null marks a deleted slot (skipped), otherwise Shape::IsMatch
// decides. Termination relies on EnsureCapacity keeping at least one
// empty slot in the table.
1986 template<typename Shape, typename Key>
1987 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
1988 uint32_t capacity = Capacity();
1989 uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
1991 // EnsureCapacity will guarantee the hash table is never full.
1993 Object* element = KeyAt(entry);
1994 if (element == isolate->heap()->undefined_value()) break; // Empty entry.
1995 if (element != isolate->heap()->null_value() &&
1996 Shape::IsMatch(key, element)) return entry;
1997 entry = NextProbe(entry, count++, capacity);
2003 bool NumberDictionary::requires_slow_elements() {
2004 Object* max_index_object = get(kMaxNumberKeyIndex);
2005 if (!max_index_object->IsSmi()) return false;
2007 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2010 uint32_t NumberDictionary::max_number_key() {
2011 ASSERT(!requires_slow_elements());
2012 Object* max_index_object = get(kMaxNumberKeyIndex);
2013 if (!max_index_object->IsSmi()) return 0;
2014 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2015 return value >> kRequiresSlowElementsTagSize;
2018 void NumberDictionary::set_requires_slow_elements() {
2019 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2023 // ------------------------------------
2027 CAST_ACCESSOR(FixedArray)
2028 CAST_ACCESSOR(FixedDoubleArray)
2029 CAST_ACCESSOR(DescriptorArray)
2030 CAST_ACCESSOR(DeoptimizationInputData)
2031 CAST_ACCESSOR(DeoptimizationOutputData)
2032 CAST_ACCESSOR(SymbolTable)
2033 CAST_ACCESSOR(JSFunctionResultCache)
2034 CAST_ACCESSOR(NormalizedMapCache)
2035 CAST_ACCESSOR(CompilationCacheTable)
2036 CAST_ACCESSOR(CodeCacheHashTable)
2037 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2038 CAST_ACCESSOR(MapCache)
2039 CAST_ACCESSOR(String)
2040 CAST_ACCESSOR(SeqString)
2041 CAST_ACCESSOR(SeqAsciiString)
2042 CAST_ACCESSOR(SeqTwoByteString)
2043 CAST_ACCESSOR(SlicedString)
2044 CAST_ACCESSOR(ConsString)
2045 CAST_ACCESSOR(ExternalString)
2046 CAST_ACCESSOR(ExternalAsciiString)
2047 CAST_ACCESSOR(ExternalTwoByteString)
2048 CAST_ACCESSOR(JSReceiver)
2049 CAST_ACCESSOR(JSObject)
2051 CAST_ACCESSOR(HeapObject)
2052 CAST_ACCESSOR(HeapNumber)
2053 CAST_ACCESSOR(Oddball)
2054 CAST_ACCESSOR(JSGlobalPropertyCell)
2055 CAST_ACCESSOR(SharedFunctionInfo)
2057 CAST_ACCESSOR(JSFunction)
2058 CAST_ACCESSOR(GlobalObject)
2059 CAST_ACCESSOR(JSGlobalProxy)
2060 CAST_ACCESSOR(JSGlobalObject)
2061 CAST_ACCESSOR(JSBuiltinsObject)
2063 CAST_ACCESSOR(JSArray)
2064 CAST_ACCESSOR(JSRegExp)
2065 CAST_ACCESSOR(JSProxy)
2066 CAST_ACCESSOR(JSFunctionProxy)
2067 CAST_ACCESSOR(JSSet)
2068 CAST_ACCESSOR(JSMap)
2069 CAST_ACCESSOR(JSWeakMap)
2070 CAST_ACCESSOR(Foreign)
2071 CAST_ACCESSOR(ByteArray)
2072 CAST_ACCESSOR(FreeSpace)
2073 CAST_ACCESSOR(ExternalArray)
2074 CAST_ACCESSOR(ExternalByteArray)
2075 CAST_ACCESSOR(ExternalUnsignedByteArray)
2076 CAST_ACCESSOR(ExternalShortArray)
2077 CAST_ACCESSOR(ExternalUnsignedShortArray)
2078 CAST_ACCESSOR(ExternalIntArray)
2079 CAST_ACCESSOR(ExternalUnsignedIntArray)
2080 CAST_ACCESSOR(ExternalFloatArray)
2081 CAST_ACCESSOR(ExternalDoubleArray)
2082 CAST_ACCESSOR(ExternalPixelArray)
2083 CAST_ACCESSOR(Struct)
2086 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2087 STRUCT_LIST(MAKE_STRUCT_CAST)
2088 #undef MAKE_STRUCT_CAST
2091 template <typename Shape, typename Key>
2092 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2093 ASSERT(obj->IsHashTable());
2094 return reinterpret_cast<HashTable*>(obj);
2098 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2099 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2101 SMI_ACCESSORS(String, length, kLengthOffset)
2102 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2105 uint32_t String::hash_field() {
2106 return READ_UINT32_FIELD(this, kHashFieldOffset);
2110 void String::set_hash_field(uint32_t value) {
2111 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2112 #if V8_HOST_ARCH_64_BIT
2113 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2118 bool String::Equals(String* other) {
2119 if (other == this) return true;
2120 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2123 return SlowEquals(other);
2127 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2128 if (!StringShape(this).IsCons()) return this;
2129 ConsString* cons = ConsString::cast(this);
2130 if (cons->IsFlat()) return cons->first();
2131 return SlowTryFlatten(pretenure);
2135 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2136 MaybeObject* flat = TryFlatten(pretenure);
2137 Object* successfully_flattened;
2138 if (!flat->ToObject(&successfully_flattened)) return this;
2139 return String::cast(successfully_flattened);
2143 uint16_t String::Get(int index) {
2144 ASSERT(index >= 0 && index < length());
2145 switch (StringShape(this).full_representation_tag()) {
2146 case kSeqStringTag | kAsciiStringTag:
2147 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2148 case kSeqStringTag | kTwoByteStringTag:
2149 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2150 case kConsStringTag | kAsciiStringTag:
2151 case kConsStringTag | kTwoByteStringTag:
2152 return ConsString::cast(this)->ConsStringGet(index);
2153 case kExternalStringTag | kAsciiStringTag:
2154 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2155 case kExternalStringTag | kTwoByteStringTag:
2156 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2157 case kSlicedStringTag | kAsciiStringTag:
2158 case kSlicedStringTag | kTwoByteStringTag:
2159 return SlicedString::cast(this)->SlicedStringGet(index);
2169 void String::Set(int index, uint16_t value) {
2170 ASSERT(index >= 0 && index < length());
2171 ASSERT(StringShape(this).IsSequential());
2173 return this->IsAsciiRepresentation()
2174 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2175 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2179 bool String::IsFlat() {
2180 if (!StringShape(this).IsCons()) return true;
2181 return ConsString::cast(this)->second()->length() == 0;
2185 String* String::GetUnderlying() {
2186 // Giving direct access to underlying string only makes sense if the
2187 // wrapping string is already flattened.
2188 ASSERT(this->IsFlat());
2189 ASSERT(StringShape(this).IsIndirect());
2190 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2191 const int kUnderlyingOffset = SlicedString::kParentOffset;
2192 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2196 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2197 ASSERT(index >= 0 && index < length());
2198 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2202 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2203 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2204 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2205 static_cast<byte>(value));
2209 Address SeqAsciiString::GetCharsAddress() {
2210 return FIELD_ADDR(this, kHeaderSize);
2214 char* SeqAsciiString::GetChars() {
2215 return reinterpret_cast<char*>(GetCharsAddress());
2219 Address SeqTwoByteString::GetCharsAddress() {
2220 return FIELD_ADDR(this, kHeaderSize);
2224 uc16* SeqTwoByteString::GetChars() {
2225 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2229 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2230 ASSERT(index >= 0 && index < length());
2231 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2235 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2236 ASSERT(index >= 0 && index < length());
2237 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2241 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2242 return SizeFor(length());
2246 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2247 return SizeFor(length());
2251 String* SlicedString::parent() {
2252 return String::cast(READ_FIELD(this, kParentOffset));
2256 void SlicedString::set_parent(String* parent) {
2257 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2258 WRITE_FIELD(this, kParentOffset, parent);
2262 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2265 String* ConsString::first() {
2266 return String::cast(READ_FIELD(this, kFirstOffset));
2270 Object* ConsString::unchecked_first() {
2271 return READ_FIELD(this, kFirstOffset);
2275 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2276 WRITE_FIELD(this, kFirstOffset, value);
2277 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2281 String* ConsString::second() {
2282 return String::cast(READ_FIELD(this, kSecondOffset));
2286 Object* ConsString::unchecked_second() {
2287 return READ_FIELD(this, kSecondOffset);
2291 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2292 WRITE_FIELD(this, kSecondOffset, value);
2293 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2297 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2298 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2302 void ExternalAsciiString::set_resource(
2303 const ExternalAsciiString::Resource* resource) {
2304 *reinterpret_cast<const Resource**>(
2305 FIELD_ADDR(this, kResourceOffset)) = resource;
2309 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2310 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2314 void ExternalTwoByteString::set_resource(
2315 const ExternalTwoByteString::Resource* resource) {
2316 *reinterpret_cast<const Resource**>(
2317 FIELD_ADDR(this, kResourceOffset)) = resource;
2321 void JSFunctionResultCache::MakeZeroSize() {
2322 set_finger_index(kEntriesIndex);
2323 set_size(kEntriesIndex);
2327 void JSFunctionResultCache::Clear() {
2328 int cache_size = size();
2329 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2330 MemsetPointer(entries_start,
2331 GetHeap()->the_hole_value(),
2332 cache_size - kEntriesIndex);
2337 int JSFunctionResultCache::size() {
2338 return Smi::cast(get(kCacheSizeIndex))->value();
2342 void JSFunctionResultCache::set_size(int size) {
2343 set(kCacheSizeIndex, Smi::FromInt(size));
2347 int JSFunctionResultCache::finger_index() {
2348 return Smi::cast(get(kFingerIndex))->value();
2352 void JSFunctionResultCache::set_finger_index(int finger_index) {
2353 set(kFingerIndex, Smi::FromInt(finger_index));
2357 byte ByteArray::get(int index) {
2358 ASSERT(index >= 0 && index < this->length());
2359 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2363 void ByteArray::set(int index, byte value) {
2364 ASSERT(index >= 0 && index < this->length());
2365 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2369 int ByteArray::get_int(int index) {
2370 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2371 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2375 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2376 ASSERT_TAG_ALIGNED(address);
2377 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2381 Address ByteArray::GetDataStartAddress() {
2382 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2386 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2387 return reinterpret_cast<uint8_t*>(external_pointer());
2391 uint8_t ExternalPixelArray::get_scalar(int index) {
2392 ASSERT((index >= 0) && (index < this->length()));
2393 uint8_t* ptr = external_pixel_pointer();
2398 MaybeObject* ExternalPixelArray::get(int index) {
2399 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2403 void ExternalPixelArray::set(int index, uint8_t value) {
2404 ASSERT((index >= 0) && (index < this->length()));
2405 uint8_t* ptr = external_pixel_pointer();
2410 void* ExternalArray::external_pointer() {
2411 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2412 return reinterpret_cast<void*>(ptr);
2416 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2417 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2418 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2422 int8_t ExternalByteArray::get_scalar(int index) {
2423 ASSERT((index >= 0) && (index < this->length()));
2424 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2429 MaybeObject* ExternalByteArray::get(int index) {
2430 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2434 void ExternalByteArray::set(int index, int8_t value) {
2435 ASSERT((index >= 0) && (index < this->length()));
2436 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2441 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2442 ASSERT((index >= 0) && (index < this->length()));
2443 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2448 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2449 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2453 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2454 ASSERT((index >= 0) && (index < this->length()));
2455 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2460 int16_t ExternalShortArray::get_scalar(int index) {
2461 ASSERT((index >= 0) && (index < this->length()));
2462 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2467 MaybeObject* ExternalShortArray::get(int index) {
2468 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2472 void ExternalShortArray::set(int index, int16_t value) {
2473 ASSERT((index >= 0) && (index < this->length()));
2474 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2479 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2480 ASSERT((index >= 0) && (index < this->length()));
2481 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2486 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2487 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2491 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2492 ASSERT((index >= 0) && (index < this->length()));
2493 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2498 int32_t ExternalIntArray::get_scalar(int index) {
2499 ASSERT((index >= 0) && (index < this->length()));
2500 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2505 MaybeObject* ExternalIntArray::get(int index) {
2506 return GetHeap()->NumberFromInt32(get_scalar(index));
2510 void ExternalIntArray::set(int index, int32_t value) {
2511 ASSERT((index >= 0) && (index < this->length()));
2512 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2517 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2518 ASSERT((index >= 0) && (index < this->length()));
2519 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2524 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2525 return GetHeap()->NumberFromUint32(get_scalar(index));
2529 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2530 ASSERT((index >= 0) && (index < this->length()));
2531 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2536 float ExternalFloatArray::get_scalar(int index) {
2537 ASSERT((index >= 0) && (index < this->length()));
2538 float* ptr = static_cast<float*>(external_pointer());
2543 MaybeObject* ExternalFloatArray::get(int index) {
2544 return GetHeap()->NumberFromDouble(get_scalar(index));
2548 void ExternalFloatArray::set(int index, float value) {
2549 ASSERT((index >= 0) && (index < this->length()));
2550 float* ptr = static_cast<float*>(external_pointer());
2555 double ExternalDoubleArray::get_scalar(int index) {
2556 ASSERT((index >= 0) && (index < this->length()));
2557 double* ptr = static_cast<double*>(external_pointer());
2562 MaybeObject* ExternalDoubleArray::get(int index) {
2563 return GetHeap()->NumberFromDouble(get_scalar(index));
2567 void ExternalDoubleArray::set(int index, double value) {
2568 ASSERT((index >= 0) && (index < this->length()));
2569 double* ptr = static_cast<double*>(external_pointer());
2574 int Map::visitor_id() {
2575 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2579 void Map::set_visitor_id(int id) {
2580 ASSERT(0 <= id && id < 256);
2581 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2585 int Map::instance_size() {
2586 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2590 int Map::inobject_properties() {
2591 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2595 int Map::pre_allocated_property_fields() {
2596 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2600 int HeapObject::SizeFromMap(Map* map) {
2601 int instance_size = map->instance_size();
2602 if (instance_size != kVariableSizeSentinel) return instance_size;
2603 // We can ignore the "symbol" bit becase it is only set for symbols
2604 // and implies a string type.
2605 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2606 // Only inline the most frequent cases.
2607 if (instance_type == FIXED_ARRAY_TYPE) {
2608 return FixedArray::BodyDescriptor::SizeOf(map, this);
2610 if (instance_type == ASCII_STRING_TYPE) {
2611 return SeqAsciiString::SizeFor(
2612 reinterpret_cast<SeqAsciiString*>(this)->length());
2614 if (instance_type == BYTE_ARRAY_TYPE) {
2615 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2617 if (instance_type == FREE_SPACE_TYPE) {
2618 return reinterpret_cast<FreeSpace*>(this)->size();
2620 if (instance_type == STRING_TYPE) {
2621 return SeqTwoByteString::SizeFor(
2622 reinterpret_cast<SeqTwoByteString*>(this)->length());
2624 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2625 return FixedDoubleArray::SizeFor(
2626 reinterpret_cast<FixedDoubleArray*>(this)->length());
2628 ASSERT(instance_type == CODE_TYPE);
2629 return reinterpret_cast<Code*>(this)->CodeSize();
2633 void Map::set_instance_size(int value) {
2634 ASSERT_EQ(0, value & (kPointerSize - 1));
2635 value >>= kPointerSizeLog2;
2636 ASSERT(0 <= value && value < 256);
2637 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2641 void Map::set_inobject_properties(int value) {
2642 ASSERT(0 <= value && value < 256);
2643 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2647 void Map::set_pre_allocated_property_fields(int value) {
2648 ASSERT(0 <= value && value < 256);
2649 WRITE_BYTE_FIELD(this,
2650 kPreAllocatedPropertyFieldsOffset,
2651 static_cast<byte>(value));
2655 InstanceType Map::instance_type() {
2656 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2660 void Map::set_instance_type(InstanceType value) {
2661 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2665 int Map::unused_property_fields() {
2666 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2670 void Map::set_unused_property_fields(int value) {
2671 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2675 byte Map::bit_field() {
2676 return READ_BYTE_FIELD(this, kBitFieldOffset);
2680 void Map::set_bit_field(byte value) {
2681 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2685 byte Map::bit_field2() {
2686 return READ_BYTE_FIELD(this, kBitField2Offset);
2690 void Map::set_bit_field2(byte value) {
2691 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2695 void Map::set_non_instance_prototype(bool value) {
2697 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2699 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2704 bool Map::has_non_instance_prototype() {
2705 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2709 void Map::set_function_with_prototype(bool value) {
2711 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2713 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2718 bool Map::function_with_prototype() {
2719 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2723 void Map::set_is_access_check_needed(bool access_check_needed) {
2724 if (access_check_needed) {
2725 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2727 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2732 bool Map::is_access_check_needed() {
2733 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2737 void Map::set_is_extensible(bool value) {
2739 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2741 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2745 bool Map::is_extensible() {
2746 return ((1 << kIsExtensible) & bit_field2()) != 0;
2750 void Map::set_attached_to_shared_function_info(bool value) {
2752 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2754 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2758 bool Map::attached_to_shared_function_info() {
2759 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2763 void Map::set_is_shared(bool value) {
2765 set_bit_field3(bit_field3() | (1 << kIsShared));
2767 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2771 bool Map::is_shared() {
2772 return ((1 << kIsShared) & bit_field3()) != 0;
2775 void Map::set_has_external_resource(bool value) {
2777 set_bit_field(bit_field() | (1 << kHasExternalResource));
2779 set_bit_field(bit_field() & ~(1 << kHasExternalResource));
2783 bool Map::has_external_resource()
2785 return ((1 << kHasExternalResource) & bit_field()) != 0;
2789 void Map::set_named_interceptor_is_fallback(bool value)
2792 set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
2794 set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
2798 bool Map::named_interceptor_is_fallback()
2800 return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
2804 JSFunction* Map::unchecked_constructor() {
2805 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2809 Code::Flags Code::flags() {
2810 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2814 void Code::set_flags(Code::Flags flags) {
2815 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2816 // Make sure that all call stubs have an arguments count.
2817 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2818 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2819 ExtractArgumentsCountFromFlags(flags) >= 0);
2820 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2824 Code::Kind Code::kind() {
2825 return ExtractKindFromFlags(flags());
2829 InlineCacheState Code::ic_state() {
2830 InlineCacheState result = ExtractICStateFromFlags(flags());
2831 // Only allow uninitialized or debugger states for non-IC code
2832 // objects. This is used in the debugger to determine whether or not
2833 // a call to code object has been replaced with a debug break call.
2834 ASSERT(is_inline_cache_stub() ||
2835 result == UNINITIALIZED ||
2836 result == DEBUG_BREAK ||
2837 result == DEBUG_PREPARE_STEP_IN);
2842 Code::ExtraICState Code::extra_ic_state() {
2843 ASSERT(is_inline_cache_stub());
2844 return ExtractExtraICStateFromFlags(flags());
2848 PropertyType Code::type() {
2849 return ExtractTypeFromFlags(flags());
2853 int Code::arguments_count() {
2854 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2855 return ExtractArgumentsCountFromFlags(flags());
2859 int Code::major_key() {
2860 ASSERT(kind() == STUB ||
2861 kind() == UNARY_OP_IC ||
2862 kind() == BINARY_OP_IC ||
2863 kind() == COMPARE_IC ||
2864 kind() == TO_BOOLEAN_IC);
2865 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2869 void Code::set_major_key(int major) {
2870 ASSERT(kind() == STUB ||
2871 kind() == UNARY_OP_IC ||
2872 kind() == BINARY_OP_IC ||
2873 kind() == COMPARE_IC ||
2874 kind() == TO_BOOLEAN_IC);
2875 ASSERT(0 <= major && major < 256);
2876 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
2880 bool Code::is_pregenerated() {
2881 return kind() == STUB && IsPregeneratedField::decode(flags());
2885 void Code::set_is_pregenerated(bool value) {
2886 ASSERT(kind() == STUB);
2888 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
2893 bool Code::optimizable() {
2894 ASSERT(kind() == FUNCTION);
2895 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
2899 void Code::set_optimizable(bool value) {
2900 ASSERT(kind() == FUNCTION);
2901 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
2905 bool Code::has_deoptimization_support() {
2906 ASSERT(kind() == FUNCTION);
2907 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2908 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
2912 void Code::set_has_deoptimization_support(bool value) {
2913 ASSERT(kind() == FUNCTION);
2914 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2915 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
2916 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2920 bool Code::has_debug_break_slots() {
2921 ASSERT(kind() == FUNCTION);
2922 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2923 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
2927 void Code::set_has_debug_break_slots(bool value) {
2928 ASSERT(kind() == FUNCTION);
2929 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2930 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
2931 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2935 bool Code::is_compiled_optimizable() {
2936 ASSERT(kind() == FUNCTION);
2937 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2938 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
2942 void Code::set_compiled_optimizable(bool value) {
2943 ASSERT(kind() == FUNCTION);
2944 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2945 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
2946 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2950 int Code::allow_osr_at_loop_nesting_level() {
2951 ASSERT(kind() == FUNCTION);
2952 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
2956 void Code::set_allow_osr_at_loop_nesting_level(int level) {
2957 ASSERT(kind() == FUNCTION);
2958 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
2959 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
2963 unsigned Code::stack_slots() {
2964 ASSERT(kind() == OPTIMIZED_FUNCTION);
2965 return READ_UINT32_FIELD(this, kStackSlotsOffset);
2969 void Code::set_stack_slots(unsigned slots) {
2970 ASSERT(kind() == OPTIMIZED_FUNCTION);
2971 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
2975 unsigned Code::safepoint_table_offset() {
2976 ASSERT(kind() == OPTIMIZED_FUNCTION);
2977 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
2981 void Code::set_safepoint_table_offset(unsigned offset) {
2982 ASSERT(kind() == OPTIMIZED_FUNCTION);
2983 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2984 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
2988 unsigned Code::stack_check_table_offset() {
2989 ASSERT(kind() == FUNCTION);
2990 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
2994 void Code::set_stack_check_table_offset(unsigned offset) {
2995 ASSERT(kind() == FUNCTION);
2996 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2997 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3001 CheckType Code::check_type() {
3002 ASSERT(is_call_stub() || is_keyed_call_stub());
3003 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3004 return static_cast<CheckType>(type);
3008 void Code::set_check_type(CheckType value) {
3009 ASSERT(is_call_stub() || is_keyed_call_stub());
3010 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3014 byte Code::unary_op_type() {
3015 ASSERT(is_unary_op_stub());
3016 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3020 void Code::set_unary_op_type(byte value) {
3021 ASSERT(is_unary_op_stub());
3022 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3026 byte Code::binary_op_type() {
3027 ASSERT(is_binary_op_stub());
3028 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3032 void Code::set_binary_op_type(byte value) {
3033 ASSERT(is_binary_op_stub());
3034 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3038 byte Code::binary_op_result_type() {
3039 ASSERT(is_binary_op_stub());
3040 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3044 void Code::set_binary_op_result_type(byte value) {
3045 ASSERT(is_binary_op_stub());
3046 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3050 byte Code::compare_state() {
3051 ASSERT(is_compare_ic_stub());
3052 return READ_BYTE_FIELD(this, kCompareStateOffset);
3056 void Code::set_compare_state(byte value) {
3057 ASSERT(is_compare_ic_stub());
3058 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3062 byte Code::to_boolean_state() {
3063 ASSERT(is_to_boolean_ic_stub());
3064 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3068 void Code::set_to_boolean_state(byte value) {
3069 ASSERT(is_to_boolean_ic_stub());
3070 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3074 bool Code::has_function_cache() {
3075 ASSERT(kind() == STUB);
3076 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3080 void Code::set_has_function_cache(bool flag) {
3081 ASSERT(kind() == STUB);
3082 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3086 bool Code::is_inline_cache_stub() {
3087 Kind kind = this->kind();
3088 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3092 Code::Flags Code::ComputeFlags(Kind kind,
3093 InlineCacheState ic_state,
3094 ExtraICState extra_ic_state,
3097 InlineCacheHolderFlag holder) {
3098 // Extra IC state is only allowed for call IC stubs or for store IC
3100 ASSERT(extra_ic_state == kNoExtraICState ||
3103 kind == KEYED_STORE_IC);
3104 // Compute the bit mask.
3105 int bits = KindField::encode(kind)
3106 | ICStateField::encode(ic_state)
3107 | TypeField::encode(type)
3108 | ExtraICStateField::encode(extra_ic_state)
3109 | (argc << kArgumentsCountShift)
3110 | CacheHolderField::encode(holder);
3111 return static_cast<Flags>(bits);
3115 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3117 ExtraICState extra_ic_state,
3118 InlineCacheHolderFlag holder,
3120 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3124 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3125 return KindField::decode(flags);
3129 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3130 return ICStateField::decode(flags);
3134 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3135 return ExtraICStateField::decode(flags);
3139 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3140 return TypeField::decode(flags);
3144 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3145 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3149 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3150 return CacheHolderField::decode(flags);
3154 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3155 int bits = flags & ~TypeField::kMask;
3156 return static_cast<Flags>(bits);
3160 Code* Code::GetCodeFromTargetAddress(Address address) {
3161 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3162 // GetCodeFromTargetAddress might be called when marking objects during mark
3163 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3164 // Code::cast. Code::cast does not work when the object's map is
3166 Code* result = reinterpret_cast<Code*>(code);
3171 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3173 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3177 Object* Map::prototype() {
3178 return READ_FIELD(this, kPrototypeOffset);
3182 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3183 ASSERT(value->IsNull() || value->IsJSReceiver());
3184 WRITE_FIELD(this, kPrototypeOffset, value);
3185 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3189 DescriptorArray* Map::instance_descriptors() {
3190 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3191 if (object->IsSmi()) {
3192 return HEAP->empty_descriptor_array();
3194 return DescriptorArray::cast(object);
3199 void Map::init_instance_descriptors() {
3200 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3204 void Map::clear_instance_descriptors() {
3205 Object* object = READ_FIELD(this,
3206 kInstanceDescriptorsOrBitField3Offset);
3207 if (!object->IsSmi()) {
3210 kInstanceDescriptorsOrBitField3Offset,
3211 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3216 void Map::set_instance_descriptors(DescriptorArray* value,
3217 WriteBarrierMode mode) {
3218 Object* object = READ_FIELD(this,
3219 kInstanceDescriptorsOrBitField3Offset);
3220 Heap* heap = GetHeap();
3221 if (value == heap->empty_descriptor_array()) {
3222 clear_instance_descriptors();
3225 if (object->IsSmi()) {
3226 value->set_bit_field3_storage(Smi::cast(object)->value());
3228 value->set_bit_field3_storage(
3229 DescriptorArray::cast(object)->bit_field3_storage());
3232 ASSERT(!is_shared());
3233 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3234 CONDITIONAL_WRITE_BARRIER(
3235 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3239 int Map::bit_field3() {
3240 Object* object = READ_FIELD(this,
3241 kInstanceDescriptorsOrBitField3Offset);
3242 if (object->IsSmi()) {
3243 return Smi::cast(object)->value();
3245 return DescriptorArray::cast(object)->bit_field3_storage();
3250 void Map::set_bit_field3(int value) {
3251 ASSERT(Smi::IsValid(value));
3252 Object* object = READ_FIELD(this,
3253 kInstanceDescriptorsOrBitField3Offset);
3254 if (object->IsSmi()) {
3256 kInstanceDescriptorsOrBitField3Offset,
3257 Smi::FromInt(value));
3259 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3264 FixedArray* Map::unchecked_prototype_transitions() {
3265 return reinterpret_cast<FixedArray*>(
3266 READ_FIELD(this, kPrototypeTransitionsOffset));
3270 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3271 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3272 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3274 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3275 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3276 ACCESSORS(JSFunction,
3279 kNextFunctionLinkOffset)
3281 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3282 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3283 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3285 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3287 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3288 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3289 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3290 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3291 ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
3293 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3294 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3295 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3297 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3298 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3299 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3300 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3301 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3302 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3303 ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
3305 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3306 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3308 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3309 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3311 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3312 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3313 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3314 kPropertyAccessorsOffset)
3315 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3316 kPrototypeTemplateOffset)
3317 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3318 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3319 kNamedPropertyHandlerOffset)
3320 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3321 kIndexedPropertyHandlerOffset)
3322 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3323 kInstanceTemplateOffset)
3324 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3325 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3326 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3327 kInstanceCallHandlerOffset)
3328 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3329 kAccessCheckInfoOffset)
3330 ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
3332 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3333 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3334 kInternalFieldCountOffset)
3335 ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
3336 kHasExternalResourceOffset)
3338 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3339 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3341 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3343 ACCESSORS(Script, source, Object, kSourceOffset)
3344 ACCESSORS(Script, name, Object, kNameOffset)
3345 ACCESSORS(Script, id, Object, kIdOffset)
3346 ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
3347 ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
3348 ACCESSORS(Script, data, Object, kDataOffset)
3349 ACCESSORS(Script, context_data, Object, kContextOffset)
3350 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3351 ACCESSORS(Script, type, Smi, kTypeOffset)
3352 ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
3353 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3354 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3355 ACCESSORS(Script, eval_from_instructions_offset, Smi,
3356 kEvalFrominstructionsOffsetOffset)
// Debugger-only field accessors. The closing #endif was truncated from
// this listing and is restored so the conditional block is balanced.
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
3370 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3371 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3372 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3373 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3374 kInstanceClassNameOffset)
3375 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3376 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3377 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3378 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3379 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3380 kThisPropertyAssignmentsOffset)
3382 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3383 kHiddenPrototypeBit)
3384 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3385 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3386 kNeedsAccessCheckBit)
3387 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3388 kReadOnlyPrototypeBit)
3389 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3391 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3393 BOOL_GETTER(SharedFunctionInfo,
3395 has_only_simple_this_property_assignments,
3396 kHasOnlySimpleThisPropertyAssignments)
3397 BOOL_ACCESSORS(SharedFunctionInfo,
3399 allows_lazy_compilation,
3400 kAllowLazyCompilation)
3401 BOOL_ACCESSORS(SharedFunctionInfo,
3405 BOOL_ACCESSORS(SharedFunctionInfo,
3407 has_duplicate_parameters,
3408 kHasDuplicateParameters)
3411 #if V8_HOST_ARCH_32_BIT
3412 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3413 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3414 kFormalParameterCountOffset)
3415 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3416 kExpectedNofPropertiesOffset)
3417 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3418 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3419 kStartPositionAndTypeOffset)
3420 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3421 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3422 kFunctionTokenPositionOffset)
3423 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3424 kCompilerHintsOffset)
3425 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3426 kThisPropertyAssignmentsCountOffset)
3427 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
// PSEUDO_SMI_ACCESSORS_LO: accessors for an int stored in the LOW half of a
// pointer-sized word.  The value is kept shifted left by one with a zero tag
// bit, so the word as a whole still looks like a Smi to the GC.
// NOTE(review): some macro body lines (closing braces at 3437/3445, the
// offset argument at 3443) appear elided in this extraction — confirm
// against upstream before editing.
3430 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3431 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3432 int holder::name() { \
3433 int value = READ_INT_FIELD(this, offset); \
3434 ASSERT(kHeapObjectTag == 1); \
3435 ASSERT((value & kHeapObjectTag) == 0); \
3436 return value >> 1; \
3438 void holder::set_##name(int value) { \
3439 ASSERT(kHeapObjectTag == 1); \
3440 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3441 (value & 0xC0000000) == 0x000000000); \
3442 WRITE_INT_FIELD(this, \
3444 (value << 1) & ~kHeapObjectTag); \
// PSEUDO_SMI_ACCESSORS_HI: the HIGH half of the word needs no tagging games;
// a plain int read/write suffices because the low half carries the tag.
3447 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3448 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3449 INT_ACCESSORS(holder, name, offset)
// 64-bit layout: pairs of int fields share one pointer-sized word, with the
// LO accessor keeping the Smi tag invariant for the whole word.
// NOTE(review): the `compiler_hints` field-name argument (line 3471) appears
// elided in this extraction — confirm against upstream.
3452 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3453 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3454 formal_parameter_count,
3455 kFormalParameterCountOffset)
3457 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3458 expected_nof_properties,
3459 kExpectedNofPropertiesOffset)
3460 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3462 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3463 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3464 start_position_and_type,
3465 kStartPositionAndTypeOffset)
3467 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3468 function_token_position,
3469 kFunctionTokenPositionOffset)
3470 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3472 kCompilerHintsOffset)
3474 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3475 this_property_assignments_count,
3476 kThisPropertyAssignmentsCountOffset)
3477 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3481 int SharedFunctionInfo::construction_count() {
3482 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3486 void SharedFunctionInfo::set_construction_count(int value) {
3487 ASSERT(0 <= value && value < 256);
3488 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
// NOTE(review): the `compiler_hints` field argument for these two macro
// invocations (lines 3493 and 3504) appears elided in this extraction.
3492 BOOL_ACCESSORS(SharedFunctionInfo,
3494 live_objects_may_exist,
3495 kLiveObjectsMayExist)
// In-object slack tracking is in progress exactly while the initial map
// field still holds a map (it is reset to undefined when tracking finishes).
3498 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3499 return initial_map() != HEAP->undefined_value();
3503 BOOL_GETTER(SharedFunctionInfo,
3505 optimization_disabled,
3506 kOptimizationDisabled)
3509 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3510 set_compiler_hints(BooleanBit::set(compiler_hints(),
3511 kOptimizationDisabled,
3513 // If disabling optimizations we reflect that in the code object so
3514 // it will not be counted as optimizable code.
3515 if ((code()->kind() == Code::FUNCTION) && disable) {
3516 code()->set_optimizable(false);
3521 StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
3522 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
3523 ? kStrictMode : kNonStrictMode;
3527 void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
3528 ASSERT(strict_mode_flag == kStrictMode ||
3529 strict_mode_flag == kNonStrictMode);
3530 bool value = strict_mode_flag == kStrictMode;
3532 BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
// Remaining compiler_hints bits and the code-cache field accessors.
// NOTE(review): the `kQmlModeFunction`-style bit argument for qml_mode
// (line 3539) appears elided in this extraction — confirm upstream.
3536 BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
3537 kStrictModeFunction)
3538 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, qml_mode,
3540 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3541 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3542 name_should_print_as_anonymous,
3543 kNameShouldPrintAsAnonymous)
3544 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3545 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3547 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3548 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3550 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3552 bool Script::HasValidSource() {
3553 Object* src = this->source();
3554 if (!src->IsString()) return true;
3555 String* src_str = String::cast(src);
3556 if (!StringShape(src_str).IsExternal()) return true;
3557 if (src_str->IsAsciiRepresentation()) {
3558 return ExternalAsciiString::cast(src)->resource() != NULL;
3559 } else if (src_str->IsTwoByteRepresentation()) {
3560 return ExternalTwoByteString::cast(src)->resource() != NULL;
3566 void SharedFunctionInfo::DontAdaptArguments() {
3567 ASSERT(code()->kind() == Code::BUILTIN);
3568 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3572 int SharedFunctionInfo::start_position() {
3573 return start_position_and_type() >> kStartPositionShift;
3577 void SharedFunctionInfo::set_start_position(int start_position) {
3578 set_start_position_and_type((start_position << kStartPositionShift)
3579 | (start_position_and_type() & ~kStartPositionMask));
3583 Code* SharedFunctionInfo::code() {
3584 return Code::cast(READ_FIELD(this, kCodeOffset));
3588 Code* SharedFunctionInfo::unchecked_code() {
3589 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3593 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3594 WRITE_FIELD(this, kCodeOffset, value);
3595 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3599 SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3600 return reinterpret_cast<SerializedScopeInfo*>(
3601 READ_FIELD(this, kScopeInfoOffset));
3605 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3606 WriteBarrierMode mode) {
3607 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3608 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3611 reinterpret_cast<Object*>(value),
3616 Smi* SharedFunctionInfo::deopt_counter() {
3617 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3621 void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3622 WRITE_FIELD(this, kDeoptCounterOffset, value);
3626 bool SharedFunctionInfo::is_compiled() {
3628 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3632 bool SharedFunctionInfo::IsApiFunction() {
3633 return function_data()->IsFunctionTemplateInfo();
3637 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3638 ASSERT(IsApiFunction());
3639 return FunctionTemplateInfo::cast(function_data());
3643 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3644 return function_data()->IsSmi();
3648 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3649 ASSERT(HasBuiltinFunctionId());
3650 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3654 int SharedFunctionInfo::code_age() {
3655 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3659 void SharedFunctionInfo::set_code_age(int code_age) {
3660 set_compiler_hints(compiler_hints() |
3661 ((code_age & kCodeAgeMask) << kCodeAgeShift));
3665 bool SharedFunctionInfo::has_deoptimization_support() {
3666 Code* code = this->code();
3667 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3671 bool JSFunction::IsBuiltin() {
3672 return context()->global()->IsJSBuiltinsObject();
3676 bool JSFunction::NeedsArgumentsAdaption() {
3677 return shared()->formal_parameter_count() !=
3678 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3682 bool JSFunction::IsOptimized() {
3683 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3687 bool JSFunction::IsOptimizable() {
3688 return code()->kind() == Code::FUNCTION && code()->optimizable();
3692 bool JSFunction::IsMarkedForLazyRecompilation() {
3693 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3697 Code* JSFunction::code() {
3698 return Code::cast(unchecked_code());
3702 Code* JSFunction::unchecked_code() {
3703 return reinterpret_cast<Code*>(
3704 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3708 void JSFunction::set_code(Code* value) {
3709 ASSERT(!HEAP->InNewSpace(value));
3710 Address entry = value->entry();
3711 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3712 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3714 HeapObject::RawField(this, kCodeEntryOffset),
3719 void JSFunction::ReplaceCode(Code* code) {
3720 bool was_optimized = IsOptimized();
3721 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3725 // Add/remove the function from the list of optimized functions for this
3726 // context based on the state change.
3727 if (!was_optimized && is_optimized) {
3728 context()->global_context()->AddOptimizedFunction(this);
3730 if (was_optimized && !is_optimized) {
3731 context()->global_context()->RemoveOptimizedFunction(this);
3736 Context* JSFunction::context() {
3737 return Context::cast(READ_FIELD(this, kContextOffset));
3741 Object* JSFunction::unchecked_context() {
3742 return READ_FIELD(this, kContextOffset);
3746 SharedFunctionInfo* JSFunction::unchecked_shared() {
3747 return reinterpret_cast<SharedFunctionInfo*>(
3748 READ_FIELD(this, kSharedFunctionInfoOffset));
3752 void JSFunction::set_context(Object* value) {
3753 ASSERT(value->IsUndefined() || value->IsContext());
3754 WRITE_FIELD(this, kContextOffset, value);
3755 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3758 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3759 kPrototypeOrInitialMapOffset)
3762 Map* JSFunction::initial_map() {
3763 return Map::cast(prototype_or_initial_map());
3767 void JSFunction::set_initial_map(Map* value) {
3768 set_prototype_or_initial_map(value);
3772 bool JSFunction::has_initial_map() {
3773 return prototype_or_initial_map()->IsMap();
3777 bool JSFunction::has_instance_prototype() {
3778 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3782 bool JSFunction::has_prototype() {
3783 return map()->has_non_instance_prototype() || has_instance_prototype();
3787 Object* JSFunction::instance_prototype() {
3788 ASSERT(has_instance_prototype());
3789 if (has_initial_map()) return initial_map()->prototype();
3790 // When there is no initial map and the prototype is a JSObject, the
3791 // initial map field is used for the prototype field.
3792 return prototype_or_initial_map();
3796 Object* JSFunction::prototype() {
3797 ASSERT(has_prototype());
3798 // If the function's prototype property has been set to a non-JSObject
3799 // value, that value is stored in the constructor field of the map.
3800 if (map()->has_non_instance_prototype()) return map()->constructor();
3801 return instance_prototype();
3804 bool JSFunction::should_have_prototype() {
3805 return map()->function_with_prototype();
3809 bool JSFunction::is_compiled() {
3810 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
3814 FixedArray* JSFunction::literals() {
3815 ASSERT(!shared()->bound());
3816 return literals_or_bindings();
3820 void JSFunction::set_literals(FixedArray* literals) {
3821 ASSERT(!shared()->bound());
3822 set_literals_or_bindings(literals);
3826 FixedArray* JSFunction::function_bindings() {
3827 ASSERT(shared()->bound());
3828 return literals_or_bindings();
3832 void JSFunction::set_function_bindings(FixedArray* bindings) {
3833 ASSERT(shared()->bound());
3834 // Bound function literal may be initialized to the empty fixed array
3835 // before the bindings are set.
3836 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
3837 bindings->map() == GetHeap()->fixed_cow_array_map());
3838 set_literals_or_bindings(bindings);
3842 int JSFunction::NumberOfLiterals() {
3843 ASSERT(!shared()->bound());
3844 return literals()->length();
3848 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
3849 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3850 return READ_FIELD(this, OffsetOfFunctionWithId(id));
3854 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
3856 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3857 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
3858 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
3862 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
3863 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3864 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
3868 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
3870 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3871 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
3872 ASSERT(!HEAP->InNewSpace(value));
// Proxy fields: the handler object, identity hash, and (for function
// proxies) the call/construct traps.
3876 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
3877 ACCESSORS(JSProxy, hash, Object, kHashOffset)
3878 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
3879 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
3882 void JSProxy::InitializeBody(int object_size, Object* value) {
3883 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
3884 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
3885 WRITE_FIELD(this, offset, value);
// Backing hash-table fields for the harmony collection objects.
3890 ACCESSORS(JSSet, table, Object, kTableOffset)
3891 ACCESSORS(JSMap, table, Object, kTableOffset)
3892 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
3893 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
3896 ObjectHashTable* JSWeakMap::unchecked_table() {
3897 return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
3901 Address Foreign::foreign_address() {
3902 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
3906 void Foreign::set_foreign_address(Address value) {
3907 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
3911 ACCESSORS(JSValue, value, Object, kValueOffset)
3914 JSValue* JSValue::cast(Object* obj) {
3915 ASSERT(obj->IsJSValue());
3916 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
3917 return reinterpret_cast<JSValue*>(obj);
// Fields of the message objects used for error reporting.
3921 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
3922 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
3923 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
3924 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
3925 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
3926 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
3927 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
3930 JSMessageObject* JSMessageObject::cast(Object* obj) {
3931 ASSERT(obj->IsJSMessageObject());
3932 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
3933 return reinterpret_cast<JSMessageObject*>(obj);
// Code object metadata fields.
3937 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
3938 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
3939 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3940 ACCESSORS(Code, next_code_flushing_candidate,
3941 Object, kNextCodeFlushingCandidateOffset)
3944 byte* Code::instruction_start() {
3945 return FIELD_ADDR(this, kHeaderSize);
3949 byte* Code::instruction_end() {
3950 return instruction_start() + instruction_size();
3954 int Code::body_size() {
3955 return RoundUp(instruction_size(), kObjectAlignment);
3959 FixedArray* Code::unchecked_deoptimization_data() {
3960 return reinterpret_cast<FixedArray*>(
3961 READ_FIELD(this, kDeoptimizationDataOffset));
3965 ByteArray* Code::unchecked_relocation_info() {
3966 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
3970 byte* Code::relocation_start() {
3971 return unchecked_relocation_info()->GetDataStartAddress();
3975 int Code::relocation_size() {
3976 return unchecked_relocation_info()->length();
3980 byte* Code::entry() {
3981 return instruction_start();
3985 bool Code::contains(byte* inner_pointer) {
3986 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
// Array length and regexp data fields.
3990 ACCESSORS(JSArray, length, Object, kLengthOffset)
3993 ACCESSORS(JSRegExp, data, Object, kDataOffset)
3996 JSRegExp::Type JSRegExp::TypeTag() {
3997 Object* data = this->data();
3998 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
3999 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4000 return static_cast<JSRegExp::Type>(smi->value());
4004 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
4005 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4006 return static_cast<JSRegExp::Type>(smi->value());
4010 int JSRegExp::CaptureCount() {
4011 switch (TypeTag()) {
4015 return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4023 JSRegExp::Flags JSRegExp::GetFlags() {
4024 ASSERT(this->data()->IsFixedArray());
4025 Object* data = this->data();
4026 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4027 return Flags(smi->value());
4031 String* JSRegExp::Pattern() {
4032 ASSERT(this->data()->IsFixedArray());
4033 Object* data = this->data();
4034 String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
4039 Object* JSRegExp::DataAt(int index) {
4040 ASSERT(TypeTag() != NOT_COMPILED);
4041 return FixedArray::cast(data())->get(index);
4045 Object* JSRegExp::DataAtUnchecked(int index) {
4046 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4047 int offset = FixedArray::kHeaderSize + index * kPointerSize;
4048 return READ_FIELD(fa, offset);
4052 void JSRegExp::SetDataAt(int index, Object* value) {
4053 ASSERT(TypeTag() != NOT_COMPILED);
4054 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4055 FixedArray::cast(data())->set(index, value);
4059 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4060 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4061 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4062 if (value->IsSmi()) {
4063 fa->set_unchecked(index, Smi::cast(value));
4065 // We only do this during GC, so we don't need to notify the write barrier.
4066 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4071 ElementsKind JSObject::GetElementsKind() {
4072 ElementsKind kind = map()->elements_kind();
4074 FixedArrayBase* fixed_array =
4075 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4076 Map* map = fixed_array->map();
4077 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4078 (map == GetHeap()->fixed_array_map() ||
4079 map == GetHeap()->fixed_cow_array_map())) ||
4080 (kind == FAST_DOUBLE_ELEMENTS &&
4081 fixed_array->IsFixedDoubleArray()) ||
4082 (kind == DICTIONARY_ELEMENTS &&
4083 fixed_array->IsFixedArray() &&
4084 fixed_array->IsDictionary()) ||
4085 (kind > DICTIONARY_ELEMENTS));
4086 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4087 (elements()->IsFixedArray() && elements()->length() >= 2));
4093 ElementsAccessor* JSObject::GetElementsAccessor() {
4094 return ElementsAccessor::ForKind(GetElementsKind());
4098 bool JSObject::HasFastElements() {
4099 return GetElementsKind() == FAST_ELEMENTS;
4103 bool JSObject::HasFastSmiOnlyElements() {
4104 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4108 bool JSObject::HasFastTypeElements() {
4109 ElementsKind elements_kind = GetElementsKind();
4110 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4111 elements_kind == FAST_ELEMENTS;
4115 bool JSObject::HasFastDoubleElements() {
4116 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4120 bool JSObject::HasDictionaryElements() {
4121 return GetElementsKind() == DICTIONARY_ELEMENTS;
4125 bool JSObject::HasNonStrictArgumentsElements() {
4126 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4130 bool JSObject::HasExternalArrayElements() {
4131 HeapObject* array = elements();
4132 ASSERT(array != NULL);
4133 return array->IsExternalArray();
// Generates JSObject::HasExternal<name>Elements() predicates by checking the
// instance type of the elements backing store.
#define EXTERNAL_ELEMENTS_CHECK(name, type)           \
bool JSObject::HasExternal##name##Elements() {        \
  HeapObject* array = elements();                     \
  ASSERT(array != NULL);                              \
  if (!array->IsHeapObject())                         \
    return false;                                     \
  return array->map()->instance_type() == type;       \
}
// One predicate per external (typed) array element kind.
4147 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
4148 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
4149 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
4150 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4151 EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
4152 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
4153 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4154 EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
4155 EXTERNAL_ELEMENTS_CHECK(Float,
4156 EXTERNAL_FLOAT_ARRAY_TYPE)
4157 EXTERNAL_ELEMENTS_CHECK(Double,
4158 EXTERNAL_DOUBLE_ARRAY_TYPE)
4159 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4162 bool JSObject::HasNamedInterceptor() {
4163 return map()->has_named_interceptor();
4167 bool JSObject::HasIndexedInterceptor() {
4168 return map()->has_indexed_interceptor();
4172 bool JSObject::AllowsSetElementsLength() {
4173 bool result = elements()->IsFixedArray() ||
4174 elements()->IsFixedDoubleArray();
4175 ASSERT(result == !HasExternalArrayElements());
4180 MaybeObject* JSObject::EnsureWritableFastElements() {
4181 ASSERT(HasFastTypeElements());
4182 FixedArray* elems = FixedArray::cast(elements());
4183 Isolate* isolate = GetIsolate();
4184 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4185 Object* writable_elems;
4186 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4187 elems, isolate->heap()->fixed_array_map());
4188 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4189 return maybe_writable_elems;
4192 set_elements(FixedArray::cast(writable_elems));
4193 isolate->counters()->cow_arrays_converted()->Increment();
4194 return writable_elems;
4198 StringDictionary* JSObject::property_dictionary() {
4199 ASSERT(!HasFastProperties());
4200 return StringDictionary::cast(properties());
4204 NumberDictionary* JSObject::element_dictionary() {
4205 ASSERT(HasDictionaryElements());
4206 return NumberDictionary::cast(elements());
4210 bool String::IsHashFieldComputed(uint32_t field) {
4211 return (field & kHashNotComputedMask) == 0;
4215 bool String::HasHashCode() {
4216 return IsHashFieldComputed(hash_field());
4220 uint32_t String::Hash() {
4221 // Fast case: has hash code already been computed?
4222 uint32_t field = hash_field();
4223 if (IsHashFieldComputed(field)) return field >> kHashShift;
4224 // Slow case: compute hash code and set it.
4225 return ComputeAndSetHash();
// Incremental string hasher; also tracks whether the string so far could be
// a valid array index.
// NOTE(review): several member-initializer lines (e.g. length_, array_index_
// at original lines 4230/4232, and the constructor body at 4235) appear
// elided in this extraction — confirm against the upstream file.
4229 StringHasher::StringHasher(int length)
4231 raw_running_hash_(0),
4233 is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4234 is_first_char_(true),
// Strings longer than kMaxHashCalcLength get a trivial (length-based) hash.
4238 bool StringHasher::has_trivial_hash() {
4239 return length_ > String::kMaxHashCalcLength;
4243 void StringHasher::AddCharacter(uc32 c) {
4244 // Use the Jenkins one-at-a-time hash function to update the hash
4245 // for the given character.
4246 raw_running_hash_ += c;
4247 raw_running_hash_ += (raw_running_hash_ << 10);
4248 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4249 // Incremental array index computation.
4250 if (is_array_index_) {
4251 if (c < '0' || c > '9') {
4252 is_array_index_ = false;
// NOTE(review): the early-return and the digit extraction (`int d = c - '0';`,
// original lines ~4253-4261) appear elided in this extraction; `d` below is
// presumably the current decimal digit — confirm against upstream.
4255 if (is_first_char_) {
4256 is_first_char_ = false;
// A leading zero on a multi-character string disqualifies it as an index.
4257 if (c == '0' && length_ > 1) {
4258 is_array_index_ = false;
// Overflow guard: bail out before array_index_ * 10 + d could exceed 2^32-2.
4262 if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4263 is_array_index_ = false;
4265 array_index_ = array_index_ * 10 + d;
4272 void StringHasher::AddCharacterNoIndex(uc32 c) {
4273 ASSERT(!is_array_index());
4274 raw_running_hash_ += c;
4275 raw_running_hash_ += (raw_running_hash_ << 10);
4276 raw_running_hash_ ^= (raw_running_hash_ >> 6);
4280 uint32_t StringHasher::GetHash() {
4281 // Get the calculated raw hash value and do some more bit ops to distribute
4282 // the hash further. Ensure that we never return zero as the hash value.
4283 uint32_t result = raw_running_hash_;
4284 result += (result << 3);
4285 result ^= (result >> 11);
4286 result += (result << 15);
4294 template <typename schar>
4295 uint32_t HashSequentialString(const schar* chars, int length) {
4296 StringHasher hasher(length);
4297 if (!hasher.has_trivial_hash()) {
4299 for (i = 0; hasher.is_array_index() && (i < length); i++) {
4300 hasher.AddCharacter(chars[i]);
4302 for (; i < length; i++) {
4303 hasher.AddCharacterNoIndex(chars[i]);
4306 return hasher.GetHashField();
4310 bool String::AsArrayIndex(uint32_t* index) {
4311 uint32_t field = hash_field();
4312 if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
4315 return SlowAsArrayIndex(index);
4319 Object* JSReceiver::GetPrototype() {
4320 return HeapObject::cast(this)->map()->prototype();
4324 bool JSReceiver::HasProperty(String* name) {
4326 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4328 return GetPropertyAttribute(name) != ABSENT;
4332 bool JSReceiver::HasLocalProperty(String* name) {
4334 return JSProxy::cast(this)->HasPropertyWithHandler(name);
4336 return GetLocalPropertyAttribute(name) != ABSENT;
4340 PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
4341 return GetPropertyAttributeWithReceiver(this, key);
4344 // TODO(504): this may be useful in other places too where JSGlobalProxy
4346 Object* JSObject::BypassGlobalProxy() {
4347 if (IsJSGlobalProxy()) {
4348 Object* proto = GetPrototype();
4349 if (proto->IsNull()) return GetHeap()->undefined_value();
4350 ASSERT(proto->IsJSGlobalObject());
4357 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
4359 ? JSProxy::cast(this)->GetIdentityHash(flag)
4360 : JSObject::cast(this)->GetIdentityHash(flag);
4364 bool JSReceiver::HasElement(uint32_t index) {
4366 return JSProxy::cast(this)->HasElementWithHandler(index);
4368 return JSObject::cast(this)->HasElementWithReceiver(this, index);
4372 bool AccessorInfo::all_can_read() {
4373 return BooleanBit::get(flag(), kAllCanReadBit);
4377 void AccessorInfo::set_all_can_read(bool value) {
4378 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4382 bool AccessorInfo::all_can_write() {
4383 return BooleanBit::get(flag(), kAllCanWriteBit);
4387 void AccessorInfo::set_all_can_write(bool value) {
4388 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4392 bool AccessorInfo::prohibits_overwriting() {
4393 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4397 void AccessorInfo::set_prohibits_overwriting(bool value) {
4398 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4402 PropertyAttributes AccessorInfo::property_attributes() {
4403 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4407 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4408 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
4412 template<typename Shape, typename Key>
4413 void Dictionary<Shape, Key>::SetEntry(int entry,
4416 SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4420 template<typename Shape, typename Key>
4421 void Dictionary<Shape, Key>::SetEntry(int entry,
4424 PropertyDetails details) {
4425 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
4426 int index = HashTable<Shape, Key>::EntryToIndex(entry);
4427 AssertNoAllocation no_gc;
4428 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
4429 FixedArray::set(index, key, mode);
4430 FixedArray::set(index+1, value, mode);
4431 FixedArray::set(index+2, details.AsSmi());
4435 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4436 ASSERT(other->IsNumber());
4437 return key == static_cast<uint32_t>(other->Number());
4441 uint32_t NumberDictionaryShape::Hash(uint32_t key) {
4442 return ComputeIntegerHash(key);
4446 uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
4447 ASSERT(other->IsNumber());
4448 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
4452 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4453 return Isolate::Current()->heap()->NumberFromUint32(key);
4457 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4458 // We know that all entries in a hash table had their hash keys created.
4459 // Use that knowledge to have fast failure.
4460 if (key->Hash() != String::cast(other)->Hash()) return false;
4461 return key->Equals(String::cast(other));
4465 uint32_t StringDictionaryShape::Hash(String* key) {
4470 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4471 return String::cast(other)->Hash();
4475 MaybeObject* StringDictionaryShape::AsObject(String* key) {
4480 template <int entrysize>
4481 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
4482 return key->SameValue(other);
4486 template <int entrysize>
4487 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
4488 ASSERT(!key->IsUndefined() && !key->IsNull());
4489 MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
4490 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4494 template <int entrysize>
4495 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
4497 ASSERT(!other->IsUndefined() && !other->IsNull());
4498 MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
4499 return Smi::cast(maybe_hash->ToObjectChecked())->value();
4503 template <int entrysize>
4504 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
4509 void ObjectHashTable::RemoveEntry(int entry) {
4510 RemoveEntry(entry, GetHeap());
4514 void Map::ClearCodeCache(Heap* heap) {
4515 // No write barrier is needed since empty_fixed_array is not in new space.
4516 // Please note this function is used during marking:
4517 // - MarkCompactCollector::MarkUnmarkedObject
4518 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4519 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4523 void JSArray::EnsureSize(int required_size) {
4524 ASSERT(HasFastTypeElements());
4525 FixedArray* elts = FixedArray::cast(elements());
4526 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4527 if (elts->length() < required_size) {
4528 // Doubling in size would be overkill, but leave some slack to avoid
4529 // constantly growing.
4530 Expand(required_size + (required_size >> 3));
4531 // It's a performance benefit to keep a frequently used array in new-space.
4532 } else if (!GetHeap()->new_space()->Contains(elts) &&
4533 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4534 // Expand will allocate a new backing store in new space even if the size
4535 // we asked for isn't larger than what we had before.
4536 Expand(required_size);
4541 void JSArray::set_length(Smi* length) {
4542 // Don't need a write barrier for a Smi.
4543 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4547 MaybeObject* JSArray::SetContent(FixedArray* storage) {
4548 MaybeObject* maybe_object = EnsureCanContainElements(storage);
4549 if (maybe_object->IsFailure()) return maybe_object;
4550 set_length(Smi::FromInt(storage->length()));
4551 set_elements(storage);
4556 MaybeObject* FixedArray::Copy() {
4557 if (length() == 0) return this;
4558 return GetHeap()->CopyFixedArray(this);
4562 MaybeObject* FixedDoubleArray::Copy() {
4563 if (length() == 0) return this;
4564 return GetHeap()->CopyFixedDoubleArray(this);
4568 Relocatable::Relocatable(Isolate* isolate) {
4569 ASSERT(isolate == Isolate::Current());
4571 prev_ = isolate->relocatable_top();
4572 isolate->set_relocatable_top(this);
4576 Relocatable::~Relocatable() {
4577 ASSERT(isolate_ == Isolate::Current());
4578 ASSERT_EQ(isolate_->relocatable_top(), this);
4579 isolate_->set_relocatable_top(prev_);
4583 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
4584 return map->instance_size();
4588 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4589 v->VisitExternalReference(
4590 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4594 template<typename StaticVisitor>
4595 void Foreign::ForeignIterateBody() {
4596 StaticVisitor::VisitExternalReference(
4597 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
4601 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4602 typedef v8::String::ExternalAsciiStringResource Resource;
4603 v->VisitExternalAsciiString(
4604 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4608 template<typename StaticVisitor>
4609 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4610 typedef v8::String::ExternalAsciiStringResource Resource;
4611 StaticVisitor::VisitExternalAsciiString(
4612 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4616 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4617 typedef v8::String::ExternalStringResource Resource;
4618 v->VisitExternalTwoByteString(
4619 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4623 template<typename StaticVisitor>
4624 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4625 typedef v8::String::ExternalStringResource Resource;
4626 StaticVisitor::VisitExternalTwoByteString(
4627 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4630 #define SLOT_ADDR(obj, offset) \
4631 reinterpret_cast<Object**>((obj)->address() + offset)
4633 template<int start_offset, int end_offset, int size>
4634 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
4637 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
4641 template<int start_offset>
4642 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
4645 v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
4651 #undef CAST_ACCESSOR
4652 #undef INT_ACCESSORS
4653 #undef SMI_ACCESSORS
4658 #undef WRITE_BARRIER
4659 #undef CONDITIONAL_WRITE_BARRIER
4660 #undef READ_MEMADDR_FIELD
4661 #undef WRITE_MEMADDR_FIELD
4662 #undef READ_DOUBLE_FIELD
4663 #undef WRITE_DOUBLE_FIELD
4664 #undef READ_INT_FIELD
4665 #undef WRITE_INT_FIELD
4666 #undef READ_SHORT_FIELD
4667 #undef WRITE_SHORT_FIELD
4668 #undef READ_BYTE_FIELD
4669 #undef WRITE_BYTE_FIELD
4672 } } // namespace v8::internal
4674 #endif // V8_OBJECTS_INL_H_