// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "conversions-inl.h"
#include "store-buffer.h"
#include "incremental-marking.h"

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}

#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
    return Object::IsHeapObject() &&                                    \
        HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }

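// Example: TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) (used below) expands to
//
//   bool Object::IsHeapNumber() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
//   }
//
// i.e. a smi/heap-object check followed by one instance-type comparison.
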
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }

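// Example: ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
// (used below) expands to a raw tagged read plus cast for the getter and a
// raw write plus CONDITIONAL_WRITE_BARRIER for the setter:
//
//   FixedArray* JSObject::properties() {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kPropertiesOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset, value,
//                               mode);
//   }
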
#define SMI_ACCESSORS(holder, name, offset)               \
  int holder::name() {                                    \
    Object* value = READ_FIELD(this, offset);             \
    return Smi::cast(value)->value();                     \
  }                                                       \
  void holder::set_##name(int value) {                    \
    WRITE_FIELD(this, offset, Smi::FromInt(value));       \
  }


#define BOOL_GETTER(holder, field, name, offset)          \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() {                                   \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }

bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray();
}


bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}

bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)


bool Object::IsString() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsSymbol() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
  STATIC_ASSERT(kSymbolTag != 0);
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  return (type & kIsSymbolMask) != 0;
}

bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
}

StringShape::StringShape(String* str)
    : type_(str->map()->instance_type()) {
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsSymbol() {
  STATIC_ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}

bool String::IsAsciiRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsAsciiRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsAsciiRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyAsciiChars() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag ||
         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
}

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}

bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)


bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)

bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}


bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}

TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool Object::IsContext() {
  if (Object::IsHeapObject()) {
    Map* map = HeapObject::cast(this)->map();
    Heap* heap = map->GetHeap();
    return (map == heap->function_context_map() ||
            map == heap->catch_context_map() ||
            map == heap->with_context_map() ||
            map == heap->global_context_map() ||
            map == heap->block_context_map());
  }
  return false;
}


bool Object::IsGlobalContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->global_context_map();
}


bool Object::IsSerializedScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}

TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->symbol_table();
}


bool Object::IsSymbolTable() {
  return IsHashTable() && this ==
      HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}

bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
  return true;
}


bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
  return true;
}


bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}

bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                           \
  bool Object::Is##Name() {                                               \
    return Object::IsHeapObject()                                         \
        && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}


double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}

MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(HEAP->IsAllocationAllowed());
  return GetElementWithReceiver(this, index);
}


Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


MaybeObject* Object::GetProperty(String* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}

#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

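// Note on the barriers above: each write is reported twice, once to the
// incremental marker (so a black object never ends up pointing at an
// unmarked white one while marking is in progress) and once to the store
// buffer via heap->RecordWrite() when the stored value lives in new space
// (so the scavenger can find old-to-new pointers). CONDITIONAL_WRITE_BARRIER
// does neither when the caller passes SKIP_WRITE_BARRIER; that mode should
// only come from GetWriteBarrierMode() (defined further down), which hands
// it out solely for objects in new space while incremental marking is off.
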
#ifndef V8_TARGET_ARCH_MIPS
#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using load-double (mips ldc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline double read_double_field(void* p, int offset) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
  return c.d;
}
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
// Prevent gcc from using store-double (mips sdc1) on (possibly)
// non-64-bit aligned HeapNumber::value.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}

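// Worked example (assuming the usual V8 constants kSmiTag == 0 and
// kSmiTagSize == 1): on 32-bit targets kSmiShiftSize is 0, so
// Smi::FromInt(5) produces the word 0xA (5 << 1 with a clear tag bit),
// while on 64-bit targets kSmiShiftSize is 31 and the payload ends up in
// the upper half of the word. Smi::value() above undoes the shift through
// Internals::SmiValue().
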
Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}

bool Smi::IsValid(intptr_t value) {
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as a tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0.
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}

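// Worked example for the non-X64 branch (assuming kMinValue == -(1 << 30)
// and kMaxValue == (1 << 30) - 1): for kMaxValue, 0x3FFFFFFF + 0x40000000 ==
// 0x7FFFFFFF < 0x80000000, so it is accepted; for 0x40000000 the sum is
// exactly 0x80000000 and it is rejected. For kMinValue the sum is 0
// (accepted), and for kMinValue - 1 the unsigned sum wraps to 0xFFFFFFFF
// (rejected). Doing the addition on unsigned values is what keeps the check
// free of signed overflow.
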
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}


void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}


void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}


Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}

void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_unsafe(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // for one here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}

double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}

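// Worked example (assuming kExponentOffset addresses the 32-bit word that
// holds the IEEE-754 sign and exponent bits, with kExponentBias == 1023):
// for the value 2.0 the stored biased exponent is 1024, so get_exponent()
// returns 1; get_sign() is non-zero exactly for values whose sign bit is
// set, including -0.0.
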
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}

void JSObject::ValidateSmiOnlyElements() {
  if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
    Heap* heap = GetHeap();
    // Don't use elements, since integrity checks will fail if there
    // are filler pointers in the array.
    FixedArray* fixed_array =
        reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
    Map* map = fixed_array->map();
    // Arrays that have been shifted in place can't be verified.
    if (map != heap->raw_unchecked_one_pointer_filler_map() &&
        map != heap->raw_unchecked_two_pointer_filler_map() &&
        map != heap->free_space_map()) {
      for (int i = 0; i < fixed_array->length(); i++) {
        Object* current = fixed_array->get(i);
        ASSERT(current->IsSmi() || current == heap->the_hole_value());
      }
    }
  }
}

MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
  ValidateSmiOnlyElements();
  if ((map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
    Object* obj;
    MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    set_map(Map::cast(obj));
  }
  return this;
}


MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count) {
  if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
        return EnsureCanContainNonSmiElements();
      }
    }
  }
  return this;
}


MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
  Object** objects = reinterpret_cast<Object**>(
      FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
  return EnsureCanContainElements(objects, elements->length());
}

void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  ASSERT((map()->has_fast_elements() ||
          map()->has_fast_smi_only_elements()) ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT(map()->has_fast_double_elements() ==
         value->IsFixedDoubleArray());
  ASSERT(value->HasValidElements());
  ValidateSmiOnlyElements();
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}


MaybeObject* JSObject::ResetElements() {
  Object* obj;
  ElementsKind elements_kind = FLAG_smi_only_arrays
      ? FAST_SMI_ONLY_ELEMENTS
      : FAST_ELEMENTS;
  MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}

ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
  GetHeap()->incremental_marking()->RecordWrite(
      this, HeapObject::RawField(this, kValueOffset), val);
}

int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_ARRAY_TYPE:
      return JSValue::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}

int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}

// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}

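// Worked example: for a map with two in-object properties, FastPropertyAt(0)
// and FastPropertyAt(1) see adjusted indices -2 and -1 and read the slots at
// instance_size() - 2 * kPointerSize and instance_size() - kPointerSize,
// i.e. the tail of the object itself, while FastPropertyAt(2) sees adjusted
// index 0 and reads properties()->get(0).
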
Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}

void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}


bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}


void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}


bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}

FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}

double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}


MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}

void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
  int old_length = from->length();
  ASSERT(old_length < length());
  if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
    OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
                FIELD_ADDR(from, kHeaderSize),
                old_length * kDoubleSize);
  } else {
    for (int i = 0; i < old_length; ++i) {
      if (from->is_the_hole(i)) {
        set_the_hole(i);
      } else {
        set(i, from->get_scalar(i));
      }
    }
  }
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}


void FixedDoubleArray::Initialize(FixedArray* from) {
  int old_length = from->length();
  ASSERT(old_length <= length());
  for (int i = 0; i < old_length; i++) {
    Object* hole_or_object = from->get(i);
    if (hole_or_object->IsTheHole()) {
      set_the_hole(i);
    } else {
      set(i, hole_or_object->Number());
    }
  }
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}


void FixedDoubleArray::Initialize(NumberDictionary* from) {
  int offset = kHeaderSize;
  for (int current = 0; current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
  for (int i = 0; i < from->Capacity(); i++) {
    Object* key = from->KeyAt(i);
    if (key->IsNumber()) {
      uint32_t entry = static_cast<uint32_t>(key->Number());
      set(entry, from->ValueAt(i)->Number());
    }
  }
}

WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}


void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}

void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}


void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}


void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
}


void FixedArray::set_null_unchecked(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}

Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


bool DescriptorArray::IsEmpty() {
  ASSERT(this->IsSmi() ||
         this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return this->IsSmi() || length() <= kFirstIndex;
}


int DescriptorArray::bit_field3_storage() {
  Object* storage = READ_FIELD(this, kBitField3StorageOffset);
  return Smi::cast(storage)->value();
}

void DescriptorArray::set_bit_field3_storage(int value) {
  WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}


void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
                                         int first,
                                         int second) {
  Object* tmp = array->get(first);
  NoWriteBarrierSet(array, first, array->get(second));
  NoWriteBarrierSet(array, second, tmp);
}

int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}


int DescriptorArray::SearchWithCache(String* name) {
  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name);
    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
  }
  return number;
}

String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}


JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->address());
}


bool DescriptorArray::IsProperty(int descriptor_number) {
  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
}


bool DescriptorArray::IsTransition(int descriptor_number) {
  PropertyType t = GetType(descriptor_number);
  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
      t == ELEMENTS_TRANSITION;
}


bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}


void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             PropertyDetails(GetDetails(descriptor_number)));
}

void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
  ASSERT(!HEAP->InNewSpace(desc->GetValue()));

  NoWriteBarrierSet(this,
                    ToKeyIndex(descriptor_number),
                    desc->GetKey());
  FixedArray* content_array = GetContentArray();
  NoWriteBarrierSet(content_array,
                    ToValueIndex(descriptor_number),
                    desc->GetValue());
  NoWriteBarrierSet(content_array,
                    ToDetailsIndex(descriptor_number),
                    desc->GetDetails().AsSmi());
}


void DescriptorArray::CopyFrom(int index,
                               DescriptorArray* src,
                               int src_index,
                               const WhitenessWitness& witness) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc, witness);
}

void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
  NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  NoWriteBarrierSwap(content_array,
                     ToValueIndex(first),
                     ToValueIndex(second));
  NoWriteBarrierSwap(content_array,
                     ToDetailsIndex(first),
                     ToDetailsIndex(second));
}


DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  if (array->number_of_descriptors() > 0) {
    ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
    ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
  }
}


DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}

template<typename Shape, typename Key>
int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.
  }
  return capacity;
}

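// Worked example: ComputeCapacity(50) doubles to 100 and rounds up to the
// power of two 128, while ComputeCapacity(4) would round to 8 and is clamped
// to kMinCapacity (32). The factor of two keeps the table sparsely loaded,
// which bounds the probe sequences walked by FindEntry below.
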
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
    if (element != isolate->heap()->null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}

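// Note on the sentinels used by the probe loop above: a slot holding
// undefined_value has never been used, so the search can stop there, whereas
// null_value marks a deleted entry whose probe chain must still be followed.
// That is why only undefined terminates the loop while null is merely skipped
// when matching keys.
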
bool NumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}


uint32_t NumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}


void NumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}


// ------------------------------------
// Cast operations

CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqAsciiString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(JSGlobalPropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalByteArray)
CAST_ACCESSOR(ExternalUnsignedByteArray)
CAST_ACCESSOR(ExternalShortArray)
CAST_ACCESSOR(ExternalUnsignedShortArray)
CAST_ACCESSOR(ExternalIntArray)
CAST_ACCESSOR(ExternalUnsignedIntArray)
CAST_ACCESSOR(ExternalFloatArray)
CAST_ACCESSOR(ExternalDoubleArray)
CAST_ACCESSOR(ExternalPixelArray)
CAST_ACCESSOR(Struct)


#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST

2074 template <typename Shape, typename Key>
2075 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2076 ASSERT(obj->IsHashTable());
2077 return reinterpret_cast<HashTable*>(obj);
2081 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2082 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2084 SMI_ACCESSORS(String, length, kLengthOffset)
2085 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2088 uint32_t String::hash_field() {
2089 return READ_UINT32_FIELD(this, kHashFieldOffset);
2093 void String::set_hash_field(uint32_t value) {
2094 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
2101 bool String::Equals(String* other) {
2102 if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}
2110 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2111 if (!StringShape(this).IsCons()) return this;
2112 ConsString* cons = ConsString::cast(this);
2113 if (cons->IsFlat()) return cons->first();
2114 return SlowTryFlatten(pretenure);
2118 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2119 MaybeObject* flat = TryFlatten(pretenure);
2120 Object* successfully_flattened;
2121 if (!flat->ToObject(&successfully_flattened)) return this;
2122 return String::cast(successfully_flattened);
2126 uint16_t String::Get(int index) {
2127 ASSERT(index >= 0 && index < length());
2128 switch (StringShape(this).full_representation_tag()) {
2129 case kSeqStringTag | kAsciiStringTag:
2130 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2131 case kSeqStringTag | kTwoByteStringTag:
2132 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2133 case kConsStringTag | kAsciiStringTag:
2134 case kConsStringTag | kTwoByteStringTag:
2135 return ConsString::cast(this)->ConsStringGet(index);
2136 case kExternalStringTag | kAsciiStringTag:
2137 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2138 case kExternalStringTag | kTwoByteStringTag:
2139 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2140 case kSlicedStringTag | kAsciiStringTag:
2141 case kSlicedStringTag | kTwoByteStringTag:
2142 return SlicedString::cast(this)->SlicedStringGet(index);
2152 void String::Set(int index, uint16_t value) {
2153 ASSERT(index >= 0 && index < length());
2154 ASSERT(StringShape(this).IsSequential());
2156 return this->IsAsciiRepresentation()
2157 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2158 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2162 bool String::IsFlat() {
2163 if (!StringShape(this).IsCons()) return true;
2164 return ConsString::cast(this)->second()->length() == 0;
2168 String* String::GetUnderlying() {
2169 // Giving direct access to underlying string only makes sense if the
2170 // wrapping string is already flattened.
2171 ASSERT(this->IsFlat());
2172 ASSERT(StringShape(this).IsIndirect());
2173 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2174 const int kUnderlyingOffset = SlicedString::kParentOffset;
2175 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2179 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2180 ASSERT(index >= 0 && index < length());
2181 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2185 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2186 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2187 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2188 static_cast<byte>(value));
2192 Address SeqAsciiString::GetCharsAddress() {
2193 return FIELD_ADDR(this, kHeaderSize);
2197 char* SeqAsciiString::GetChars() {
2198 return reinterpret_cast<char*>(GetCharsAddress());
2202 Address SeqTwoByteString::GetCharsAddress() {
2203 return FIELD_ADDR(this, kHeaderSize);
2207 uc16* SeqTwoByteString::GetChars() {
2208 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2212 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2213 ASSERT(index >= 0 && index < length());
2214 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2218 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2219 ASSERT(index >= 0 && index < length());
2220 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
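// Worked example of the addressing used by the Seq*String accessors above: a
// character lives at offset kHeaderSize + index * element_size from the start
// of the object. Assuming, purely for illustration, a 12-byte header, index 3
// of an ASCII string (1 byte per character) sits at offset 12 + 3 = 15, while
// the same index in a two-byte string sits at offset 12 + 3 * 2 = 18.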
2224 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2225 return SizeFor(length());
2229 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2230 return SizeFor(length());
2234 String* SlicedString::parent() {
2235 return String::cast(READ_FIELD(this, kParentOffset));
2239 void SlicedString::set_parent(String* parent) {
2240 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2241 WRITE_FIELD(this, kParentOffset, parent);
2245 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2248 String* ConsString::first() {
2249 return String::cast(READ_FIELD(this, kFirstOffset));
2253 Object* ConsString::unchecked_first() {
2254 return READ_FIELD(this, kFirstOffset);
2258 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2259 WRITE_FIELD(this, kFirstOffset, value);
2260 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2264 String* ConsString::second() {
2265 return String::cast(READ_FIELD(this, kSecondOffset));
2269 Object* ConsString::unchecked_second() {
2270 return READ_FIELD(this, kSecondOffset);
2274 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2275 WRITE_FIELD(this, kSecondOffset, value);
2276 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2280 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2281 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2285 void ExternalAsciiString::set_resource(
2286 const ExternalAsciiString::Resource* resource) {
2287 *reinterpret_cast<const Resource**>(
2288 FIELD_ADDR(this, kResourceOffset)) = resource;
2292 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2293 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2297 void ExternalTwoByteString::set_resource(
2298 const ExternalTwoByteString::Resource* resource) {
2299 *reinterpret_cast<const Resource**>(
2300 FIELD_ADDR(this, kResourceOffset)) = resource;
2304 void JSFunctionResultCache::MakeZeroSize() {
2305 set_finger_index(kEntriesIndex);
2306 set_size(kEntriesIndex);
2310 void JSFunctionResultCache::Clear() {
2311 int cache_size = size();
2312 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2313 MemsetPointer(entries_start,
2314 GetHeap()->the_hole_value(),
2315 cache_size - kEntriesIndex);
2320 int JSFunctionResultCache::size() {
2321 return Smi::cast(get(kCacheSizeIndex))->value();
2325 void JSFunctionResultCache::set_size(int size) {
2326 set(kCacheSizeIndex, Smi::FromInt(size));
2330 int JSFunctionResultCache::finger_index() {
2331 return Smi::cast(get(kFingerIndex))->value();
2335 void JSFunctionResultCache::set_finger_index(int finger_index) {
2336 set(kFingerIndex, Smi::FromInt(finger_index));
2340 byte ByteArray::get(int index) {
2341 ASSERT(index >= 0 && index < this->length());
2342 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2346 void ByteArray::set(int index, byte value) {
2347 ASSERT(index >= 0 && index < this->length());
2348 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2352 int ByteArray::get_int(int index) {
2353 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2354 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2358 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2359 ASSERT_TAG_ALIGNED(address);
2360 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2364 Address ByteArray::GetDataStartAddress() {
2365 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
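// FromDataStartAddress/GetDataStartAddress convert between a tagged object
// pointer and the untagged address of its payload. A standalone sketch of the
// arithmetic, assuming the usual single tag bit (kHeapObjectTag == 1); the
// names and the header_size parameter are illustrative only.
static inline char* ExampleDataStart(char* tagged_object, int header_size) {
  return (tagged_object - 1) + header_size;  // Untag, then skip the header.
}
static inline char* ExampleObjectFromDataStart(char* data_start,
                                               int header_size) {
  return (data_start - header_size) + 1;     // Back over the header, re-tag.
}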
2369 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2370 return reinterpret_cast<uint8_t*>(external_pointer());
2374 uint8_t ExternalPixelArray::get_scalar(int index) {
2375 ASSERT((index >= 0) && (index < this->length()));
2376 uint8_t* ptr = external_pixel_pointer();
2381 MaybeObject* ExternalPixelArray::get(int index) {
2382 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2386 void ExternalPixelArray::set(int index, uint8_t value) {
2387 ASSERT((index >= 0) && (index < this->length()));
2388 uint8_t* ptr = external_pixel_pointer();
2393 void* ExternalArray::external_pointer() {
2394 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2395 return reinterpret_cast<void*>(ptr);
2399 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2400 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2401 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2405 int8_t ExternalByteArray::get_scalar(int index) {
2406 ASSERT((index >= 0) && (index < this->length()));
2407 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2412 MaybeObject* ExternalByteArray::get(int index) {
2413 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2417 void ExternalByteArray::set(int index, int8_t value) {
2418 ASSERT((index >= 0) && (index < this->length()));
2419 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2424 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2425 ASSERT((index >= 0) && (index < this->length()));
2426 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2431 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2432 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2436 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2437 ASSERT((index >= 0) && (index < this->length()));
2438 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2443 int16_t ExternalShortArray::get_scalar(int index) {
2444 ASSERT((index >= 0) && (index < this->length()));
2445 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2450 MaybeObject* ExternalShortArray::get(int index) {
2451 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2455 void ExternalShortArray::set(int index, int16_t value) {
2456 ASSERT((index >= 0) && (index < this->length()));
2457 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2462 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2463 ASSERT((index >= 0) && (index < this->length()));
2464 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2469 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2470 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2474 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2475 ASSERT((index >= 0) && (index < this->length()));
2476 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2481 int32_t ExternalIntArray::get_scalar(int index) {
2482 ASSERT((index >= 0) && (index < this->length()));
2483 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2488 MaybeObject* ExternalIntArray::get(int index) {
2489 return GetHeap()->NumberFromInt32(get_scalar(index));
2493 void ExternalIntArray::set(int index, int32_t value) {
2494 ASSERT((index >= 0) && (index < this->length()));
2495 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2500 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2501 ASSERT((index >= 0) && (index < this->length()));
2502 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2507 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2508 return GetHeap()->NumberFromUint32(get_scalar(index));
2512 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2513 ASSERT((index >= 0) && (index < this->length()));
2514 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2519 float ExternalFloatArray::get_scalar(int index) {
2520 ASSERT((index >= 0) && (index < this->length()));
2521 float* ptr = static_cast<float*>(external_pointer());
2526 MaybeObject* ExternalFloatArray::get(int index) {
2527 return GetHeap()->NumberFromDouble(get_scalar(index));
2531 void ExternalFloatArray::set(int index, float value) {
2532 ASSERT((index >= 0) && (index < this->length()));
2533 float* ptr = static_cast<float*>(external_pointer());
2538 double ExternalDoubleArray::get_scalar(int index) {
2539 ASSERT((index >= 0) && (index < this->length()));
2540 double* ptr = static_cast<double*>(external_pointer());
2545 MaybeObject* ExternalDoubleArray::get(int index) {
2546 return GetHeap()->NumberFromDouble(get_scalar(index));
2550 void ExternalDoubleArray::set(int index, double value) {
2551 ASSERT((index >= 0) && (index < this->length()));
2552 double* ptr = static_cast<double*>(external_pointer());
2557 int Map::visitor_id() {
2558 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2562 void Map::set_visitor_id(int id) {
2563 ASSERT(0 <= id && id < 256);
2564 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2568 int Map::instance_size() {
2569 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2573 int Map::inobject_properties() {
2574 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2578 int Map::pre_allocated_property_fields() {
2579 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2583 int HeapObject::SizeFromMap(Map* map) {
2584 int instance_size = map->instance_size();
2585 if (instance_size != kVariableSizeSentinel) return instance_size;
  // We can ignore the "symbol" bit because it is only set for symbols
  // and implies a string type.
2588 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2589 // Only inline the most frequent cases.
2590 if (instance_type == FIXED_ARRAY_TYPE) {
2591 return FixedArray::BodyDescriptor::SizeOf(map, this);
2593 if (instance_type == ASCII_STRING_TYPE) {
2594 return SeqAsciiString::SizeFor(
2595 reinterpret_cast<SeqAsciiString*>(this)->length());
2597 if (instance_type == BYTE_ARRAY_TYPE) {
2598 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2600 if (instance_type == FREE_SPACE_TYPE) {
2601 return reinterpret_cast<FreeSpace*>(this)->size();
2603 if (instance_type == STRING_TYPE) {
2604 return SeqTwoByteString::SizeFor(
2605 reinterpret_cast<SeqTwoByteString*>(this)->length());
2607 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2608 return FixedDoubleArray::SizeFor(
2609 reinterpret_cast<FixedDoubleArray*>(this)->length());
2611 ASSERT(instance_type == CODE_TYPE);
2612 return reinterpret_cast<Code*>(this)->CodeSize();
2616 void Map::set_instance_size(int value) {
2617 ASSERT_EQ(0, value & (kPointerSize - 1));
2618 value >>= kPointerSizeLog2;
2619 ASSERT(0 <= value && value < 256);
2620 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
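// The instance size fits in one byte because objects are pointer-size aligned:
// set_instance_size() stores value >> kPointerSizeLog2 and instance_size()
// shifts it back. For example, with 8-byte pointers (kPointerSizeLog2 == 3) a
// 128-byte instance is stored as the byte 16, and the largest describable
// instance is 255 * 8 = 2040 bytes.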
2624 void Map::set_inobject_properties(int value) {
2625 ASSERT(0 <= value && value < 256);
2626 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2630 void Map::set_pre_allocated_property_fields(int value) {
2631 ASSERT(0 <= value && value < 256);
2632 WRITE_BYTE_FIELD(this,
2633 kPreAllocatedPropertyFieldsOffset,
2634 static_cast<byte>(value));
2638 InstanceType Map::instance_type() {
2639 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2643 void Map::set_instance_type(InstanceType value) {
2644 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2648 int Map::unused_property_fields() {
2649 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2653 void Map::set_unused_property_fields(int value) {
2654 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2658 byte Map::bit_field() {
2659 return READ_BYTE_FIELD(this, kBitFieldOffset);
2663 void Map::set_bit_field(byte value) {
2664 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2668 byte Map::bit_field2() {
2669 return READ_BYTE_FIELD(this, kBitField2Offset);
2673 void Map::set_bit_field2(byte value) {
2674 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}
2687 bool Map::has_non_instance_prototype() {
2688 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
void Map::set_function_with_prototype(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
  }
}
2701 bool Map::function_with_prototype() {
2702 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}
2715 bool Map::is_access_check_needed() {
2716 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}
2728 bool Map::is_extensible() {
2729 return ((1 << kIsExtensible) & bit_field2()) != 0;
void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}
2741 bool Map::attached_to_shared_function_info() {
2742 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
void Map::set_is_shared(bool value) {
  if (value) {
    set_bit_field3(bit_field3() | (1 << kIsShared));
  } else {
    set_bit_field3(bit_field3() & ~(1 << kIsShared));
  }
}
2754 bool Map::is_shared() {
2755 return ((1 << kIsShared) & bit_field3()) != 0;
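// All of the Map setters above share the same set/clear/test-bit idiom on a
// byte-sized bit field. A generic standalone sketch of that idiom (names are
// illustrative only):
static inline unsigned char ExampleSetBit(unsigned char field, int bit,
                                          bool value) {
  return value ? static_cast<unsigned char>(field | (1 << bit))
               : static_cast<unsigned char>(field & ~(1 << bit));
}
static inline bool ExampleGetBit(unsigned char field, int bit) {
  return ((1 << bit) & field) != 0;
}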
2759 JSFunction* Map::unchecked_constructor() {
2760 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2764 Code::Flags Code::flags() {
2765 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2769 void Code::set_flags(Code::Flags flags) {
2770 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2771 // Make sure that all call stubs have an arguments count.
2772 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2773 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2774 ExtractArgumentsCountFromFlags(flags) >= 0);
2775 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2779 Code::Kind Code::kind() {
2780 return ExtractKindFromFlags(flags());
2784 InlineCacheState Code::ic_state() {
2785 InlineCacheState result = ExtractICStateFromFlags(flags());
2786 // Only allow uninitialized or debugger states for non-IC code
2787 // objects. This is used in the debugger to determine whether or not
2788 // a call to code object has been replaced with a debug break call.
2789 ASSERT(is_inline_cache_stub() ||
2790 result == UNINITIALIZED ||
2791 result == DEBUG_BREAK ||
2792 result == DEBUG_PREPARE_STEP_IN);
2797 Code::ExtraICState Code::extra_ic_state() {
2798 ASSERT(is_inline_cache_stub());
2799 return ExtractExtraICStateFromFlags(flags());
2803 PropertyType Code::type() {
2804 return ExtractTypeFromFlags(flags());
2808 int Code::arguments_count() {
2809 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2810 return ExtractArgumentsCountFromFlags(flags());
2814 int Code::major_key() {
2815 ASSERT(kind() == STUB ||
2816 kind() == UNARY_OP_IC ||
2817 kind() == BINARY_OP_IC ||
2818 kind() == COMPARE_IC ||
2819 kind() == TO_BOOLEAN_IC);
2820 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2824 void Code::set_major_key(int major) {
2825 ASSERT(kind() == STUB ||
2826 kind() == UNARY_OP_IC ||
2827 kind() == BINARY_OP_IC ||
2828 kind() == COMPARE_IC ||
2829 kind() == TO_BOOLEAN_IC);
2830 ASSERT(0 <= major && major < 256);
2831 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
2835 bool Code::is_pregenerated() {
2836 return kind() == STUB && IsPregeneratedField::decode(flags());
void Code::set_is_pregenerated(bool value) {
  ASSERT(kind() == STUB);
  Flags f = flags();
  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
  set_flags(f);
}
2848 bool Code::optimizable() {
2849 ASSERT(kind() == FUNCTION);
2850 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
2854 void Code::set_optimizable(bool value) {
2855 ASSERT(kind() == FUNCTION);
2856 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
2860 bool Code::has_deoptimization_support() {
2861 ASSERT(kind() == FUNCTION);
2862 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2863 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
2867 void Code::set_has_deoptimization_support(bool value) {
2868 ASSERT(kind() == FUNCTION);
2869 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2870 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
2871 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2875 bool Code::has_debug_break_slots() {
2876 ASSERT(kind() == FUNCTION);
2877 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2878 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
2882 void Code::set_has_debug_break_slots(bool value) {
2883 ASSERT(kind() == FUNCTION);
2884 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2885 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
2886 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2890 bool Code::is_compiled_optimizable() {
2891 ASSERT(kind() == FUNCTION);
2892 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2893 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
2897 void Code::set_compiled_optimizable(bool value) {
2898 ASSERT(kind() == FUNCTION);
2899 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2900 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
2901 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2905 int Code::allow_osr_at_loop_nesting_level() {
2906 ASSERT(kind() == FUNCTION);
2907 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
2911 void Code::set_allow_osr_at_loop_nesting_level(int level) {
2912 ASSERT(kind() == FUNCTION);
2913 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
2914 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
2918 unsigned Code::stack_slots() {
2919 ASSERT(kind() == OPTIMIZED_FUNCTION);
2920 return READ_UINT32_FIELD(this, kStackSlotsOffset);
2924 void Code::set_stack_slots(unsigned slots) {
2925 ASSERT(kind() == OPTIMIZED_FUNCTION);
2926 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
2930 unsigned Code::safepoint_table_offset() {
2931 ASSERT(kind() == OPTIMIZED_FUNCTION);
2932 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
2936 void Code::set_safepoint_table_offset(unsigned offset) {
2937 ASSERT(kind() == OPTIMIZED_FUNCTION);
2938 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2939 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
2943 unsigned Code::stack_check_table_offset() {
2944 ASSERT(kind() == FUNCTION);
2945 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
2949 void Code::set_stack_check_table_offset(unsigned offset) {
2950 ASSERT(kind() == FUNCTION);
2951 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2952 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
2956 CheckType Code::check_type() {
2957 ASSERT(is_call_stub() || is_keyed_call_stub());
2958 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
2959 return static_cast<CheckType>(type);
2963 void Code::set_check_type(CheckType value) {
2964 ASSERT(is_call_stub() || is_keyed_call_stub());
2965 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
2969 byte Code::unary_op_type() {
2970 ASSERT(is_unary_op_stub());
2971 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
2975 void Code::set_unary_op_type(byte value) {
2976 ASSERT(is_unary_op_stub());
2977 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
2981 byte Code::binary_op_type() {
2982 ASSERT(is_binary_op_stub());
2983 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
2987 void Code::set_binary_op_type(byte value) {
2988 ASSERT(is_binary_op_stub());
2989 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
2993 byte Code::binary_op_result_type() {
2994 ASSERT(is_binary_op_stub());
2995 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
2999 void Code::set_binary_op_result_type(byte value) {
3000 ASSERT(is_binary_op_stub());
3001 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3005 byte Code::compare_state() {
3006 ASSERT(is_compare_ic_stub());
3007 return READ_BYTE_FIELD(this, kCompareStateOffset);
3011 void Code::set_compare_state(byte value) {
3012 ASSERT(is_compare_ic_stub());
3013 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3017 byte Code::to_boolean_state() {
3018 ASSERT(is_to_boolean_ic_stub());
3019 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3023 void Code::set_to_boolean_state(byte value) {
3024 ASSERT(is_to_boolean_ic_stub());
3025 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3029 bool Code::has_function_cache() {
3030 ASSERT(kind() == STUB);
3031 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3035 void Code::set_has_function_cache(bool flag) {
3036 ASSERT(kind() == STUB);
3037 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3041 bool Code::is_inline_cache_stub() {
3042 Kind kind = this->kind();
3043 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for call IC stubs or for store IC stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         kind == CALL_IC ||
         kind == STORE_IC ||
         kind == KEYED_STORE_IC);
3059 // Compute the bit mask.
3060 int bits = KindField::encode(kind)
3061 | ICStateField::encode(ic_state)
3062 | TypeField::encode(type)
3063 | ExtraICStateField::encode(extra_ic_state)
3064 | (argc << kArgumentsCountShift)
3065 | CacheHolderField::encode(holder);
3066 return static_cast<Flags>(bits);
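// ComputeFlags packs several independent fields into one integer by OR-ing
// pre-shifted encodings; the Extract*FromFlags helpers below undo this with
// the matching shifts and masks. A minimal sketch with made-up field widths
// (the real widths live in KindField, ICStateField, ExtraICStateField, etc.):
static inline int ExampleComputeFlags(int kind, int ic_state, int argc) {
  // Hypothetical layout: kind in bits 0-3, ic_state in bits 4-6, argc in 7-14.
  return (kind & 0xF) | ((ic_state & 0x7) << 4) | ((argc & 0xFF) << 7);
}
static inline int ExampleExtractKind(int flags) { return flags & 0xF; }
static inline int ExampleExtractICState(int flags) { return (flags >> 4) & 0x7; }
static inline int ExampleExtractArgc(int flags) { return (flags >> 7) & 0xFF; }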
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
3079 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3080 return KindField::decode(flags);
3084 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3085 return ICStateField::decode(flags);
3089 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3090 return ExtraICStateField::decode(flags);
3094 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3095 return TypeField::decode(flags);
3099 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3100 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3104 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3105 return CacheHolderField::decode(flags);
3109 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3110 int bits = flags & ~TypeField::kMask;
3111 return static_cast<Flags>(bits);
3115 Code* Code::GetCodeFromTargetAddress(Address address) {
3116 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3117 // GetCodeFromTargetAddress might be called when marking objects during mark
3118 // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
3126 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
3132 Object* Map::prototype() {
3133 return READ_FIELD(this, kPrototypeOffset);
3137 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3138 ASSERT(value->IsNull() || value->IsJSReceiver());
3139 WRITE_FIELD(this, kPrototypeOffset, value);
3140 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3144 DescriptorArray* Map::instance_descriptors() {
3145 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3146 if (object->IsSmi()) {
3147 return HEAP->empty_descriptor_array();
3149 return DescriptorArray::cast(object);
3154 void Map::init_instance_descriptors() {
3155 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3159 void Map::clear_instance_descriptors() {
3160 Object* object = READ_FIELD(this,
3161 kInstanceDescriptorsOrBitField3Offset);
3162 if (!object->IsSmi()) {
3165 kInstanceDescriptorsOrBitField3Offset,
3166 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3171 void Map::set_instance_descriptors(DescriptorArray* value,
3172 WriteBarrierMode mode) {
3173 Object* object = READ_FIELD(this,
3174 kInstanceDescriptorsOrBitField3Offset);
3175 Heap* heap = GetHeap();
3176 if (value == heap->empty_descriptor_array()) {
3177 clear_instance_descriptors();
3180 if (object->IsSmi()) {
3181 value->set_bit_field3_storage(Smi::cast(object)->value());
3183 value->set_bit_field3_storage(
3184 DescriptorArray::cast(object)->bit_field3_storage());
3187 ASSERT(!is_shared());
3188 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3189 CONDITIONAL_WRITE_BARRIER(
3190 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3194 int Map::bit_field3() {
3195 Object* object = READ_FIELD(this,
3196 kInstanceDescriptorsOrBitField3Offset);
3197 if (object->IsSmi()) {
3198 return Smi::cast(object)->value();
3200 return DescriptorArray::cast(object)->bit_field3_storage();
3205 void Map::set_bit_field3(int value) {
3206 ASSERT(Smi::IsValid(value));
3207 Object* object = READ_FIELD(this,
3208 kInstanceDescriptorsOrBitField3Offset);
3209 if (object->IsSmi()) {
3211 kInstanceDescriptorsOrBitField3Offset,
3212 Smi::FromInt(value));
3214 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3219 FixedArray* Map::unchecked_prototype_transitions() {
3220 return reinterpret_cast<FixedArray*>(
3221 READ_FIELD(this, kPrototypeTransitionsOffset));
3225 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3226 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3227 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3229 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3230 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction,
          next_function_link,
          Object,
          kNextFunctionLinkOffset)
3236 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3237 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3238 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3240 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3242 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3243 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3244 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3245 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3246 ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
3248 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3249 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3250 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3252 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3253 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3254 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3255 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3256 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3257 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3259 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3260 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3262 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3263 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3265 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3266 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3267 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3268 kPropertyAccessorsOffset)
3269 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3270 kPrototypeTemplateOffset)
3271 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3272 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3273 kNamedPropertyHandlerOffset)
3274 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3275 kIndexedPropertyHandlerOffset)
3276 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3277 kInstanceTemplateOffset)
3278 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3279 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3280 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3281 kInstanceCallHandlerOffset)
3282 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3283 kAccessCheckInfoOffset)
3284 ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
3286 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3287 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3288 kInternalFieldCountOffset)
3290 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3291 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3293 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3295 ACCESSORS(Script, source, Object, kSourceOffset)
3296 ACCESSORS(Script, name, Object, kNameOffset)
3297 ACCESSORS(Script, id, Object, kIdOffset)
3298 ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
3299 ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
3300 ACCESSORS(Script, data, Object, kDataOffset)
3301 ACCESSORS(Script, context_data, Object, kContextOffset)
3302 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3303 ACCESSORS(Script, type, Smi, kTypeOffset)
3304 ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
3305 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3306 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3307 ACCESSORS(Script, eval_from_instructions_offset, Smi,
3308 kEvalFrominstructionsOffsetOffset)
3310 #ifdef ENABLE_DEBUGGER_SUPPORT
3311 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3312 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3313 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3314 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3316 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
3317 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
3318 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
3319 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3322 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3323 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3324 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3325 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3326 kInstanceClassNameOffset)
3327 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3328 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3329 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3330 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3331 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3332 kThisPropertyAssignmentsOffset)
3334 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3335 kHiddenPrototypeBit)
3336 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3337 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3338 kNeedsAccessCheckBit)
3339 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3340 kReadOnlyPrototypeBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
3353 BOOL_ACCESSORS(SharedFunctionInfo,
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
3363 #if V8_HOST_ARCH_32_BIT
3364 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3365 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3366 kFormalParameterCountOffset)
3367 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3368 kExpectedNofPropertiesOffset)
3369 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3370 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3371 kStartPositionAndTypeOffset)
3372 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3373 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3374 kFunctionTokenPositionOffset)
3375 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3376 kCompilerHintsOffset)
3377 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3378 kThisPropertyAssignmentsCountOffset)
3379 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3382 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3383 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3384 int holder::name() { \
3385 int value = READ_INT_FIELD(this, offset); \
3386 ASSERT(kHeapObjectTag == 1); \
3387 ASSERT((value & kHeapObjectTag) == 0); \
3388 return value >> 1; \
3390 void holder::set_##name(int value) { \
3391 ASSERT(kHeapObjectTag == 1); \
3392 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3393 (value & 0xC0000000) == 0x000000000); \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }
3399 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3400 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3401 INT_ACCESSORS(holder, name, offset)
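// Worked example of the PSEUDO_SMI encoding defined above: with
// kHeapObjectTag == 1, an int such as 37 is stored as 37 << 1 = 74, whose low
// tag bit is 0, so the GC never mistakes the slot for a heap pointer; reading
// shifts it back down (74 >> 1 == 37). The _HI half of each pair sits at a
// non-pointer-aligned offset (see the STATIC_ASSERT above) and can therefore
// be stored as a plain int.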
3404 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3405 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3406 formal_parameter_count,
3407 kFormalParameterCountOffset)
3409 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3410 expected_nof_properties,
3411 kExpectedNofPropertiesOffset)
3412 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3414 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3415 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3416 start_position_and_type,
3417 kStartPositionAndTypeOffset)
3419 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3420 function_token_position,
3421 kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)
3426 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3427 this_property_assignments_count,
3428 kThisPropertyAssignmentsCountOffset)
3429 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3433 int SharedFunctionInfo::construction_count() {
3434 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3438 void SharedFunctionInfo::set_construction_count(int value) {
3439 ASSERT(0 <= value && value < 256);
3440 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)
3450 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3451 return initial_map() != HEAP->undefined_value();
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
3461 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
3465 // If disabling optimizations we reflect that in the code object so
3466 // it will not be counted as optimizable code.
3467 if ((code()->kind() == Code::FUNCTION) && disable) {
3468 code()->set_optimizable(false);
3473 StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
3474 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
3475 ? kStrictMode : kNonStrictMode;
3479 void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
3480 ASSERT(strict_mode_flag == kStrictMode ||
3481 strict_mode_flag == kNonStrictMode);
  bool value = strict_mode_flag == kStrictMode;
  set_compiler_hints(
      BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
}
3488 BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
3489 kStrictModeFunction)
3490 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3491 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3492 name_should_print_as_anonymous,
3493 kNameShouldPrintAsAnonymous)
3494 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3495 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3497 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3498 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3500 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3502 bool Script::HasValidSource() {
3503 Object* src = this->source();
3504 if (!src->IsString()) return true;
3505 String* src_str = String::cast(src);
3506 if (!StringShape(src_str).IsExternal()) return true;
3507 if (src_str->IsAsciiRepresentation()) {
3508 return ExternalAsciiString::cast(src)->resource() != NULL;
3509 } else if (src_str->IsTwoByteRepresentation()) {
3510 return ExternalTwoByteString::cast(src)->resource() != NULL;
3516 void SharedFunctionInfo::DontAdaptArguments() {
3517 ASSERT(code()->kind() == Code::BUILTIN);
3518 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3522 int SharedFunctionInfo::start_position() {
3523 return start_position_and_type() >> kStartPositionShift;
3527 void SharedFunctionInfo::set_start_position(int start_position) {
3528 set_start_position_and_type((start_position << kStartPositionShift)
3529 | (start_position_and_type() & ~kStartPositionMask));
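// Worked example of the packing used by start_position()/set_start_position():
// the position occupies the bits above kStartPositionShift and the type flags
// (is_expression, is_toplevel) sit in the bits below it. Assuming a shift of 2
// purely for illustration, a start position of 10 with both flag bits clear is
// stored as 10 << 2 = 40, and reading shifts it back down (40 >> 2 == 10).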
3533 Code* SharedFunctionInfo::code() {
3534 return Code::cast(READ_FIELD(this, kCodeOffset));
3538 Code* SharedFunctionInfo::unchecked_code() {
3539 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3543 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3544 WRITE_FIELD(this, kCodeOffset, value);
3545 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3549 SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3550 return reinterpret_cast<SerializedScopeInfo*>(
3551 READ_FIELD(this, kScopeInfoOffset));
3555 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3556 WriteBarrierMode mode) {
3557 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
3566 Smi* SharedFunctionInfo::deopt_counter() {
3567 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3571 void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3572 WRITE_FIELD(this, kDeoptCounterOffset, value);
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}
3582 bool SharedFunctionInfo::IsApiFunction() {
3583 return function_data()->IsFunctionTemplateInfo();
3587 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3588 ASSERT(IsApiFunction());
3589 return FunctionTemplateInfo::cast(function_data());
3593 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3594 return function_data()->IsSmi();
3598 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3599 ASSERT(HasBuiltinFunctionId());
3600 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3604 int SharedFunctionInfo::code_age() {
3605 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3609 void SharedFunctionInfo::set_code_age(int code_age) {
3610 set_compiler_hints(compiler_hints() |
3611 ((code_age & kCodeAgeMask) << kCodeAgeShift));
3615 bool SharedFunctionInfo::has_deoptimization_support() {
3616 Code* code = this->code();
3617 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3621 bool JSFunction::IsBuiltin() {
3622 return context()->global()->IsJSBuiltinsObject();
3626 bool JSFunction::NeedsArgumentsAdaption() {
3627 return shared()->formal_parameter_count() !=
3628 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3632 bool JSFunction::IsOptimized() {
3633 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3637 bool JSFunction::IsOptimizable() {
3638 return code()->kind() == Code::FUNCTION && code()->optimizable();
3642 bool JSFunction::IsMarkedForLazyRecompilation() {
3643 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3647 Code* JSFunction::code() {
3648 return Code::cast(unchecked_code());
3652 Code* JSFunction::unchecked_code() {
3653 return reinterpret_cast<Code*>(
3654 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3658 void JSFunction::set_code(Code* value) {
3659 ASSERT(!HEAP->InNewSpace(value));
3660 Address entry = value->entry();
3661 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}
3669 void JSFunction::ReplaceCode(Code* code) {
3670 bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
  set_code(code);
3675 // Add/remove the function from the list of optimized functions for this
3676 // context based on the state change.
3677 if (!was_optimized && is_optimized) {
3678 context()->global_context()->AddOptimizedFunction(this);
3680 if (was_optimized && !is_optimized) {
3681 context()->global_context()->RemoveOptimizedFunction(this);
3686 Context* JSFunction::context() {
3687 return Context::cast(READ_FIELD(this, kContextOffset));
3691 Object* JSFunction::unchecked_context() {
3692 return READ_FIELD(this, kContextOffset);
3696 SharedFunctionInfo* JSFunction::unchecked_shared() {
3697 return reinterpret_cast<SharedFunctionInfo*>(
3698 READ_FIELD(this, kSharedFunctionInfoOffset));
3702 void JSFunction::set_context(Object* value) {
3703 ASSERT(value->IsUndefined() || value->IsContext());
3704 WRITE_FIELD(this, kContextOffset, value);
3705 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3708 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3709 kPrototypeOrInitialMapOffset)
3712 Map* JSFunction::initial_map() {
3713 return Map::cast(prototype_or_initial_map());
3717 void JSFunction::set_initial_map(Map* value) {
3718 set_prototype_or_initial_map(value);
3722 bool JSFunction::has_initial_map() {
3723 return prototype_or_initial_map()->IsMap();
3727 bool JSFunction::has_instance_prototype() {
3728 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3732 bool JSFunction::has_prototype() {
3733 return map()->has_non_instance_prototype() || has_instance_prototype();
3737 Object* JSFunction::instance_prototype() {
3738 ASSERT(has_instance_prototype());
3739 if (has_initial_map()) return initial_map()->prototype();
3740 // When there is no initial map and the prototype is a JSObject, the
3741 // initial map field is used for the prototype field.
3742 return prototype_or_initial_map();
3746 Object* JSFunction::prototype() {
3747 ASSERT(has_prototype());
3748 // If the function's prototype property has been set to a non-JSObject
3749 // value, that value is stored in the constructor field of the map.
3750 if (map()->has_non_instance_prototype()) return map()->constructor();
3751 return instance_prototype();
3754 bool JSFunction::should_have_prototype() {
3755 return map()->function_with_prototype();
3759 bool JSFunction::is_compiled() {
3760 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
3764 FixedArray* JSFunction::literals() {
3765 ASSERT(!shared()->bound());
3766 return literals_or_bindings();
3770 void JSFunction::set_literals(FixedArray* literals) {
3771 ASSERT(!shared()->bound());
3772 set_literals_or_bindings(literals);
3776 FixedArray* JSFunction::function_bindings() {
3777 ASSERT(shared()->bound());
3778 return literals_or_bindings();
3782 void JSFunction::set_function_bindings(FixedArray* bindings) {
3783 ASSERT(shared()->bound());
3784 // Bound function literal may be initialized to the empty fixed array
3785 // before the bindings are set.
3786 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
3787 bindings->map() == GetHeap()->fixed_cow_array_map());
3788 set_literals_or_bindings(bindings);
3792 int JSFunction::NumberOfLiterals() {
3793 ASSERT(!shared()->bound());
3794 return literals()->length();
3798 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
3799 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3800 return READ_FIELD(this, OffsetOfFunctionWithId(id));
3804 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
3806 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3807 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
3808 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
3812 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
3813 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3814 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
3818 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
3820 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3821 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
3822 ASSERT(!HEAP->InNewSpace(value));
3826 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
3827 ACCESSORS(JSProxy, hash, Object, kHashOffset)
3828 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
3829 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
3832 void JSProxy::InitializeBody(int object_size, Object* value) {
3833 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
3834 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
3835 WRITE_FIELD(this, offset, value);
3840 ACCESSORS(JSSet, table, Object, kTableOffset)
3841 ACCESSORS(JSMap, table, Object, kTableOffset)
3842 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
3843 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
3846 ObjectHashTable* JSWeakMap::unchecked_table() {
3847 return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
3851 Address Foreign::address() {
3852 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
3856 void Foreign::set_address(Address value) {
3857 WRITE_INTPTR_FIELD(this, kAddressOffset, OffsetFrom(value));
3861 ACCESSORS(JSValue, value, Object, kValueOffset)
3864 JSValue* JSValue::cast(Object* obj) {
3865 ASSERT(obj->IsJSValue());
3866 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
3867 return reinterpret_cast<JSValue*>(obj);
3871 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
3872 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
3873 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
3874 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
3875 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
3876 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
3877 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
3880 JSMessageObject* JSMessageObject::cast(Object* obj) {
3881 ASSERT(obj->IsJSMessageObject());
3882 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
3883 return reinterpret_cast<JSMessageObject*>(obj);
3887 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
3888 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
3889 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3890 ACCESSORS(Code, next_code_flushing_candidate,
3891 Object, kNextCodeFlushingCandidateOffset)
3894 byte* Code::instruction_start() {
3895 return FIELD_ADDR(this, kHeaderSize);
3899 byte* Code::instruction_end() {
3900 return instruction_start() + instruction_size();
3904 int Code::body_size() {
3905 return RoundUp(instruction_size(), kObjectAlignment);
3909 FixedArray* Code::unchecked_deoptimization_data() {
3910 return reinterpret_cast<FixedArray*>(
3911 READ_FIELD(this, kDeoptimizationDataOffset));
3915 ByteArray* Code::unchecked_relocation_info() {
3916 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
3920 byte* Code::relocation_start() {
3921 return unchecked_relocation_info()->GetDataStartAddress();
3925 int Code::relocation_size() {
3926 return unchecked_relocation_info()->length();
3930 byte* Code::entry() {
3931 return instruction_start();
3935 bool Code::contains(byte* inner_pointer) {
3936 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
3940 ACCESSORS(JSArray, length, Object, kLengthOffset)
3943 ACCESSORS(JSRegExp, data, Object, kDataOffset)
3946 JSRegExp::Type JSRegExp::TypeTag() {
3947 Object* data = this->data();
3948 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
3949 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
3950 return static_cast<JSRegExp::Type>(smi->value());
3954 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
3955 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
3956 return static_cast<JSRegExp::Type>(smi->value());
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
3973 JSRegExp::Flags JSRegExp::GetFlags() {
3974 ASSERT(this->data()->IsFixedArray());
3975 Object* data = this->data();
3976 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
3977 return Flags(smi->value());
3981 String* JSRegExp::Pattern() {
3982 ASSERT(this->data()->IsFixedArray());
3983 Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}
3989 Object* JSRegExp::DataAt(int index) {
3990 ASSERT(TypeTag() != NOT_COMPILED);
3991 return FixedArray::cast(data())->get(index);
3995 Object* JSRegExp::DataAtUnchecked(int index) {
3996 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
3997 int offset = FixedArray::kHeaderSize + index * kPointerSize;
3998 return READ_FIELD(fa, offset);
4002 void JSRegExp::SetDataAt(int index, Object* value) {
4003 ASSERT(TypeTag() != NOT_COMPILED);
4004 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4005 FixedArray::cast(data())->set(index, value);
4009 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4010 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4011 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4012 if (value->IsSmi()) {
4013 fa->set_unchecked(index, Smi::cast(value));
4015 // We only do this during GC, so we don't need to notify the write barrier.
4016 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4021 ElementsKind JSObject::GetElementsKind() {
4022 ElementsKind kind = map()->elements_kind();
4024 FixedArrayBase* fixed_array =
4025 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4026 Map* map = fixed_array->map();
4027 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4028 (map == GetHeap()->fixed_array_map() ||
4029 map == GetHeap()->fixed_cow_array_map())) ||
4030 (kind == FAST_DOUBLE_ELEMENTS &&
4031 fixed_array->IsFixedDoubleArray()) ||
4032 (kind == DICTIONARY_ELEMENTS &&
4033 fixed_array->IsFixedArray() &&
4034 fixed_array->IsDictionary()) ||
4035 (kind > DICTIONARY_ELEMENTS));
4036 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4037 (elements()->IsFixedArray() && elements()->length() >= 2));
4043 ElementsAccessor* JSObject::GetElementsAccessor() {
4044 return ElementsAccessor::ForKind(GetElementsKind());
4048 bool JSObject::HasFastElements() {
4049 return GetElementsKind() == FAST_ELEMENTS;
4053 bool JSObject::HasFastSmiOnlyElements() {
4054 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4058 bool JSObject::HasFastTypeElements() {
4059 ElementsKind elements_kind = GetElementsKind();
4060 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4061 elements_kind == FAST_ELEMENTS;
4065 bool JSObject::HasFastDoubleElements() {
4066 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4070 bool JSObject::HasDictionaryElements() {
4071 return GetElementsKind() == DICTIONARY_ELEMENTS;
4075 bool JSObject::HasNonStrictArgumentsElements() {
4076 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4080 bool JSObject::HasExternalArrayElements() {
4081 HeapObject* array = elements();
4082 ASSERT(array != NULL);
4083 return array->IsExternalArray();
4087 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4088 bool JSObject::HasExternal##name##Elements() { \
4089 HeapObject* array = elements(); \
4090 ASSERT(array != NULL); \
4091 if (!array->IsHeapObject()) \
4093 return array->map()->instance_type() == type; \
EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float, EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double, EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)


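// As an illustration, EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
// expands to a predicate of this shape:
//
//   bool JSObject::HasExternalByteElements() {
//     HeapObject* array = elements();
//     ASSERT(array != NULL);
//     if (!array->IsHeapObject()) return false;
//     return array->map()->instance_type() == EXTERNAL_BYTE_ARRAY_TYPE;
//   }

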
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() ||
      elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastTypeElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
        elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


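// EnsureWritableFastElements() is where a copy-on-write backing store is
// actually copied: while the elements still carry the fixed_cow_array_map
// they may be shared (e.g. between array literals), and the first mutation
// replaces them with a private FixedArray using the ordinary
// fixed_array_map.  The cow_arrays_converted counter tracks how often this
// happens.

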
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


NumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return NumberDictionary::cast(elements());
}


bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Fast case: has the hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute the hash code and set it.
  return ComputeAndSetHash();
}


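// The hash is cached in the string's hash field together with a few flag
// bits, so the fast path above only has to shift the flags out
// (kHashShift); the slow path runs the StringHasher below and stores the
// result back into the field.

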
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}


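// A small worked example (illustrative only): feeding '4' and then '2'
// through AddCharacter() keeps is_array_index_ true and leaves
// array_index_ == 42, so a string like "42" can have its hash field record
// the array index 42.  The overflow guard compares against 429496729, i.e.
// 2^32 / 10 rounded down; the ((d + 2) >> 3) term lowers that bound by one
// exactly for the digits 6-9, which keeps array_index_ * 10 + d from
// wrapping a 32-bit unsigned value.

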
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}


uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    result = 27;
  }
  return result;
}


template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length) {
  StringHasher hasher(length);
  if (!hasher.has_trivial_hash()) {
    int i;
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    }
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
    }
  }
  return hasher.GetHashField();
}


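// HashSequentialString() is the flat-string entry point: it runs characters
// through AddCharacter() for as long as the prefix could still be an array
// index, then falls back to AddCharacterNoIndex() for the rest.  For
// example, a call such as HashSequentialString("foo", 3) (with char or uc16
// characters) switches to the second loop after the first character, since
// 'f' is not a digit.

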
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


Object* JSReceiver::GetPrototype() {
  return HeapObject::cast(this)->map()->prototype();
}


bool JSReceiver::HasProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}


// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
}


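// The JSReceiver methods above follow a common dispatch pattern: a receiver
// is either a JSProxy, in which case the query is forwarded to the proxy's
// handler (HasPropertyWithHandler / HasElementWithHandler / the proxy's
// identity-hash path), or an ordinary JSObject, in which case the regular
// attribute and element lookups are used.

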
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::set(index+2, details.AsSmi());
}


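// A dictionary entry occupies three consecutive FixedArray slots: the key at
// EntryToIndex(entry), the value at index + 1, and the property details
// (stored as a Smi) at index + 2.  The three-argument overload simply writes
// empty details, i.e. PropertyDetails(Smi::FromInt(0)).

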
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}


uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
}


MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}


bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}


uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}


uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}


MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}


template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  ASSERT(!key->IsUndefined() && !key->IsNull());
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  ASSERT(!other->IsUndefined() && !other->IsNull());
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


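// Both hash functions above use GetHash(OMIT_CREATION), so they rely on the
// key already having an identity hash: ToObjectChecked() asserts that no
// allocation (and hence no failure) was needed to produce it.

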
template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
  return key;
}


void ObjectHashTable::RemoveEntry(int entry) {
  RemoveEntry(entry, GetHeap());
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}


void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastTypeElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}


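// The growth formula leaves roughly 12.5% of slack: for example, a call like
// EnsureSize(100) on an array whose backing store is shorter than 100
// elements expands it to 100 + (100 >> 3) == 112 elements rather than
// doubling it.

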
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


MaybeObject* JSArray::SetContent(FixedArray* storage) {
  MaybeObject* maybe_object = EnsureCanContainElements(storage);
  if (maybe_object->IsFailure()) return maybe_object;
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
  return this;
}


MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}


Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}


#undef SLOT_ADDR


#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef SMI_ACCESSORS
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_MEMADDR_FIELD
#undef WRITE_MEMADDR_FIELD
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_