1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous,
//   but it is absolutely needed to make sure gcc generates optimal
//   code; gcc is not happy when asked to inline too deeply.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
97 #define SMI_ACCESSORS(holder, name, offset) \
98 int holder::name() { \
99 Object* value = READ_FIELD(this, offset); \
100 return Smi::cast(value)->value(); \
102 void holder::set_##name(int value) { \
103 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
107 #define BOOL_GETTER(holder, field, name, offset) \
108 bool holder::name() { \
109 return BooleanBit::get(field(), offset); \
113 #define BOOL_ACCESSORS(holder, field, name, offset) \
114 bool holder::name() { \
115 return BooleanBit::get(field(), offset); \
117 void holder::set_##name(bool value) { \
118 set_##field(BooleanBit::set(field(), offset, value)); \
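// As an illustration of the accessor macros above, the instantiation
// ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset) used later
// in this file expands to (roughly):
//
//   FixedArray* JSObject::properties() {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kPropertiesOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset,
//                               value, mode);
//   }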
122 bool Object::IsFixedArrayBase() {
123 return IsFixedArray() || IsFixedDoubleArray();
127 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
128 // There is a constraint on the object; check.
129 if (!this->IsJSObject()) return false;
130 // Fetch the constructor function of the object.
131 Object* cons_obj = JSObject::cast(this)->map()->constructor();
132 if (!cons_obj->IsJSFunction()) return false;
133 JSFunction* fun = JSFunction::cast(cons_obj);
134 // Iterate through the chain of inheriting function templates to
135 // see if the required one occurs.
136 for (Object* type = fun->shared()->function_data();
137 type->IsFunctionTemplateInfo();
138 type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}
146 bool Object::IsSmi() {
147 return HAS_SMI_TAG(this);
151 bool Object::IsHeapObject() {
152 return Internals::HasHeapObjectTag(this);
156 bool Object::NonFailureIsHeapObject() {
157 ASSERT(!this->IsFailure());
158 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
162 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
165 bool Object::IsString() {
166 return Object::IsHeapObject()
167 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
171 bool Object::IsSpecObject() {
172 return Object::IsHeapObject()
173 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
177 bool Object::IsSpecFunction() {
178 if (!Object::IsHeapObject()) return false;
179 InstanceType type = HeapObject::cast(this)->map()->instance_type();
180 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
184 bool Object::IsSymbol() {
185 if (!this->IsHeapObject()) return false;
186 uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
190 STATIC_ASSERT(kSymbolTag != 0);
191 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
192 return (type & kIsSymbolMask) != 0;
196 bool Object::IsConsString() {
197 if (!IsString()) return false;
198 return StringShape(String::cast(this)).IsCons();
202 bool Object::IsSlicedString() {
203 if (!IsString()) return false;
204 return StringShape(String::cast(this)).IsSliced();
208 bool Object::IsSeqString() {
209 if (!IsString()) return false;
210 return StringShape(String::cast(this)).IsSequential();
214 bool Object::IsSeqAsciiString() {
215 if (!IsString()) return false;
216 return StringShape(String::cast(this)).IsSequential() &&
217 String::cast(this)->IsAsciiRepresentation();
221 bool Object::IsSeqTwoByteString() {
222 if (!IsString()) return false;
223 return StringShape(String::cast(this)).IsSequential() &&
224 String::cast(this)->IsTwoByteRepresentation();
228 bool Object::IsExternalString() {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsExternal();
234 bool Object::IsExternalAsciiString() {
235 if (!IsString()) return false;
236 return StringShape(String::cast(this)).IsExternal() &&
237 String::cast(this)->IsAsciiRepresentation();
241 bool Object::IsExternalTwoByteString() {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsExternal() &&
244 String::cast(this)->IsTwoByteRepresentation();
247 bool Object::HasValidElements() {
248 // Dictionary is covered under FixedArray.
249 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
252 StringShape::StringShape(String* str)
253 : type_(str->map()->instance_type()) {
255 ASSERT((type_ & kIsNotStringMask) == kStringTag);
259 StringShape::StringShape(Map* map)
260 : type_(map->instance_type()) {
262 ASSERT((type_ & kIsNotStringMask) == kStringTag);
266 StringShape::StringShape(InstanceType t)
267 : type_(static_cast<uint32_t>(t)) {
269 ASSERT((type_ & kIsNotStringMask) == kStringTag);
273 bool StringShape::IsSymbol() {
275 STATIC_ASSERT(kSymbolTag != 0);
276 return (type_ & kIsSymbolMask) != 0;
280 bool String::IsAsciiRepresentation() {
281 uint32_t type = map()->instance_type();
282 return (type & kStringEncodingMask) == kAsciiStringTag;
286 bool String::IsTwoByteRepresentation() {
287 uint32_t type = map()->instance_type();
288 return (type & kStringEncodingMask) == kTwoByteStringTag;
292 bool String::IsAsciiRepresentationUnderneath() {
293 uint32_t type = map()->instance_type();
294 STATIC_ASSERT(kIsIndirectStringTag != 0);
295 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
297 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsAsciiRepresentation();
  }
}
308 bool String::IsTwoByteRepresentationUnderneath() {
309 uint32_t type = map()->instance_type();
310 STATIC_ASSERT(kIsIndirectStringTag != 0);
311 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
313 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kAsciiStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
324 bool String::HasOnlyAsciiChars() {
325 uint32_t type = map()->instance_type();
326 return (type & kStringEncodingMask) == kAsciiStringTag ||
327 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
331 bool StringShape::IsCons() {
332 return (type_ & kStringRepresentationMask) == kConsStringTag;
336 bool StringShape::IsSliced() {
337 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
341 bool StringShape::IsIndirect() {
342 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
346 bool StringShape::IsExternal() {
347 return (type_ & kStringRepresentationMask) == kExternalStringTag;
351 bool StringShape::IsSequential() {
352 return (type_ & kStringRepresentationMask) == kSeqStringTag;
356 StringRepresentationTag StringShape::representation_tag() {
357 uint32_t tag = (type_ & kStringRepresentationMask);
358 return static_cast<StringRepresentationTag>(tag);
362 uint32_t StringShape::encoding_tag() {
363 return type_ & kStringEncodingMask;
367 uint32_t StringShape::full_representation_tag() {
368 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
372 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
373 Internals::kFullStringRepresentationMask);
376 bool StringShape::IsSequentialAscii() {
377 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
381 bool StringShape::IsSequentialTwoByte() {
382 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
386 bool StringShape::IsExternalAscii() {
387 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
391 bool StringShape::IsExternalTwoByte() {
392 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
396 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
397 Internals::kExternalTwoByteRepresentationTag);
400 uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
410 bool Object::IsNumber() {
411 return IsSmi() || IsHeapNumber();
415 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
416 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
419 bool Object::IsFiller() {
420 if (!Object::IsHeapObject()) return false;
421 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
422 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
426 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
429 bool Object::IsExternalArray() {
  if (!Object::IsHeapObject()) return false;
432 InstanceType instance_type =
433 HeapObject::cast(this)->map()->instance_type();
434 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
435 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
439 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
440 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
441 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
442 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
443 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
444 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
445 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
446 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
449 bool MaybeObject::IsFailure() {
450 return HAS_FAILURE_TAG(this);
454 bool MaybeObject::IsRetryAfterGC() {
455 return HAS_FAILURE_TAG(this)
456 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
460 bool MaybeObject::IsOutOfMemory() {
461 return HAS_FAILURE_TAG(this)
462 && Failure::cast(this)->IsOutOfMemoryException();
466 bool MaybeObject::IsException() {
467 return this == Failure::Exception();
471 bool MaybeObject::IsTheHole() {
472 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
476 Failure* Failure::cast(MaybeObject* obj) {
477 ASSERT(HAS_FAILURE_TAG(obj));
478 return reinterpret_cast<Failure*>(obj);
482 bool Object::IsJSReceiver() {
483 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
484 return IsHeapObject() &&
485 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
489 bool Object::IsJSObject() {
490 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
491 return IsHeapObject() &&
492 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
496 bool Object::IsJSProxy() {
497 if (!Object::IsHeapObject()) return false;
498 InstanceType type = HeapObject::cast(this)->map()->instance_type();
499 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
503 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
504 TYPE_CHECKER(JSSet, JS_SET_TYPE)
505 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
506 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
507 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
508 TYPE_CHECKER(Map, MAP_TYPE)
509 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
510 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
513 bool Object::IsDescriptorArray() {
514 return IsFixedArray();
518 bool Object::IsDeoptimizationInputData() {
519 // Must be a fixed array.
520 if (!IsFixedArray()) return false;
522 // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
526 int length = FixedArray::cast(this)->length();
527 if (length == 0) return true;
529 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
530 return length >= 0 &&
531 length % DeoptimizationInputData::kDeoptEntrySize == 0;
535 bool Object::IsDeoptimizationOutputData() {
536 if (!IsFixedArray()) return false;
537 // There's actually no way to see the difference between a fixed array and
538 // a deoptimization data array. Since this is used for asserts we can check
539 // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
545 bool Object::IsContext() {
546 if (Object::IsHeapObject()) {
547 Map* map = HeapObject::cast(this)->map();
548 Heap* heap = map->GetHeap();
549 return (map == heap->function_context_map() ||
550 map == heap->catch_context_map() ||
551 map == heap->with_context_map() ||
552 map == heap->global_context_map() ||
            map == heap->block_context_map());
  }
  return false;
}
559 bool Object::IsGlobalContext() {
560 return Object::IsHeapObject() &&
561 HeapObject::cast(this)->map() ==
562 HeapObject::cast(this)->GetHeap()->global_context_map();
566 bool Object::IsSerializedScopeInfo() {
567 return Object::IsHeapObject() &&
568 HeapObject::cast(this)->map() ==
569 HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
573 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
576 template <> inline bool Is<JSFunction>(Object* obj) {
577 return obj->IsJSFunction();
581 TYPE_CHECKER(Code, CODE_TYPE)
582 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
583 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
584 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
585 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
586 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
589 bool Object::IsStringWrapper() {
590 return IsJSValue() && JSValue::cast(this)->value()->IsString();
594 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
597 bool Object::IsBoolean() {
598 return IsOddball() &&
599 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
603 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
604 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
607 template <> inline bool Is<JSArray>(Object* obj) {
608 return obj->IsJSArray();
612 bool Object::IsHashTable() {
613 return Object::IsHeapObject() &&
614 HeapObject::cast(this)->map() ==
615 HeapObject::cast(this)->GetHeap()->hash_table_map();
619 bool Object::IsDictionary() {
620 return IsHashTable() &&
621 this != HeapObject::cast(this)->GetHeap()->symbol_table();
625 bool Object::IsSymbolTable() {
626 return IsHashTable() && this ==
627 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
631 bool Object::IsJSFunctionResultCache() {
632 if (!IsFixedArray()) return false;
633 FixedArray* self = FixedArray::cast(this);
634 int length = self->length();
635 if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
650 bool Object::IsNormalizedMapCache() {
651 if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
664 bool Object::IsCompilationCacheTable() {
665 return IsHashTable();
669 bool Object::IsCodeCacheHashTable() {
670 return IsHashTable();
674 bool Object::IsPolymorphicCodeCacheHashTable() {
675 return IsHashTable();
679 bool Object::IsMapCache() {
680 return IsHashTable();
684 bool Object::IsPrimitive() {
685 return IsOddball() || IsNumber() || IsString();
689 bool Object::IsJSGlobalProxy() {
690 bool result = IsHeapObject() &&
691 (HeapObject::cast(this)->map()->instance_type() ==
692 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}
698 bool Object::IsGlobalObject() {
699 if (!IsHeapObject()) return false;
701 InstanceType type = HeapObject::cast(this)->map()->instance_type();
702 return type == JS_GLOBAL_OBJECT_TYPE ||
703 type == JS_BUILTINS_OBJECT_TYPE;
707 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
708 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
711 bool Object::IsUndetectableObject() {
712 return IsHeapObject()
713 && HeapObject::cast(this)->map()->is_undetectable();
717 bool Object::IsAccessCheckNeeded() {
718 return IsHeapObject()
719 && HeapObject::cast(this)->map()->is_access_check_needed();
723 bool Object::IsStruct() {
724 if (!IsHeapObject()) return false;
725 switch (HeapObject::cast(this)->map()->instance_type()) {
726 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
727 STRUCT_LIST(MAKE_STRUCT_CASE)
728 #undef MAKE_STRUCT_CASE
729 default: return false;
734 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
735 bool Object::Is##Name() { \
736 return Object::IsHeapObject() \
737 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
739 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
740 #undef MAKE_STRUCT_PREDICATE
743 bool Object::IsUndefined() {
744 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
748 bool Object::IsNull() {
749 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
753 bool Object::IsTheHole() {
754 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
758 bool Object::IsTrue() {
759 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
763 bool Object::IsFalse() {
764 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
768 bool Object::IsArgumentsMarker() {
769 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}
781 MaybeObject* Object::ToSmi() {
782 if (IsSmi()) return this;
783 if (IsHeapNumber()) {
784 double value = HeapNumber::cast(this)->value();
785 int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}
794 bool Object::HasSpecificClassOf(String* name) {
795 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
799 MaybeObject* Object::GetElement(uint32_t index) {
800 // GetElement can trigger a getter which can cause allocation.
801 // This was not always the case. This ASSERT is here to catch
802 // leftover incorrect uses.
803 ASSERT(HEAP->IsAllocationAllowed());
804 return GetElementWithReceiver(this, index);
808 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
809 MaybeObject* maybe = GetElementWithReceiver(this, index);
810 ASSERT(!maybe->IsFailure());
811 Object* result = NULL; // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}
817 MaybeObject* Object::GetProperty(String* key) {
818 PropertyAttributes attributes;
819 return GetPropertyWithReceiver(this, key, &attributes);
823 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
824 return GetPropertyWithReceiver(this, key, attributes);
828 #define FIELD_ADDR(p, offset) \
829 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
831 #define READ_FIELD(p, offset) \
832 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
834 #define WRITE_FIELD(p, offset, value) \
835 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
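// Note that FIELD_ADDR compensates for the heap object tag: a HeapObject*
// is the object's address plus kHeapObjectTag (1), so for an object at
// address 0x1000 the expression FIELD_ADDR(p, 8) evaluates to
// 0x1001 + 8 - 1 == 0x1008, the untagged address of the field at offset 8.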
837 #define WRITE_BARRIER(heap, object, offset, value) \
838 heap->incremental_marking()->RecordWrite( \
839 object, HeapObject::RawField(object, offset), value); \
840 if (heap->InNewSpace(value)) { \
    heap->RecordWrite(object->address(), offset); \
  }
844 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
845 if (mode == UPDATE_WRITE_BARRIER) { \
846 heap->incremental_marking()->RecordWrite( \
847 object, HeapObject::RawField(object, offset), value); \
848 if (heap->InNewSpace(value)) { \
      heap->RecordWrite(object->address(), offset); \
    } \
  }
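// The usual store pattern in this file is a WRITE_FIELD followed by one of
// the barrier macros, e.g. (see JSObject::SetInternalField below):
//
//   WRITE_FIELD(this, offset, value);
//   WRITE_BARRIER(GetHeap(), this, offset, value);
//
// CONDITIONAL_WRITE_BARRIER is used by setters that take a WriteBarrierMode
// argument, so callers which know the store needs no recording can pass
// SKIP_WRITE_BARRIER instead of UPDATE_WRITE_BARRIER.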
853 #ifndef V8_TARGET_ARCH_MIPS
854 #define READ_DOUBLE_FIELD(p, offset) \
855 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
856 #else // V8_TARGET_ARCH_MIPS
857 // Prevent gcc from using load-double (mips ldc1) on (possibly)
858 // non-64-bit aligned HeapNumber::value.
static inline double read_double_field(void* p, int offset) {
  union conversion { double d; uint32_t u[2]; } c;
  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
  return c.d;
}
868 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
869 #endif // V8_TARGET_ARCH_MIPS
871 #ifndef V8_TARGET_ARCH_MIPS
872 #define WRITE_DOUBLE_FIELD(p, offset, value) \
873 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
874 #else // V8_TARGET_ARCH_MIPS
875 // Prevent gcc from using store-double (mips sdc1) on (possibly)
876 // non-64-bit aligned HeapNumber::value.
static inline void write_double_field(void* p, int offset,
                                      double value) {
  union conversion { double d; uint32_t u[2]; } c;
  c.d = value;
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
}
887 #define WRITE_DOUBLE_FIELD(p, offset, value) \
888 write_double_field(p, offset, value)
889 #endif // V8_TARGET_ARCH_MIPS
892 #define READ_INT_FIELD(p, offset) \
893 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
895 #define WRITE_INT_FIELD(p, offset, value) \
896 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
898 #define READ_INTPTR_FIELD(p, offset) \
899 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
901 #define WRITE_INTPTR_FIELD(p, offset, value) \
902 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
904 #define READ_UINT32_FIELD(p, offset) \
905 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
907 #define WRITE_UINT32_FIELD(p, offset, value) \
908 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
910 #define READ_SHORT_FIELD(p, offset) \
911 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
913 #define WRITE_SHORT_FIELD(p, offset, value) \
914 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
916 #define READ_BYTE_FIELD(p, offset) \
917 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
919 #define WRITE_BYTE_FIELD(p, offset, value) \
920 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
923 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}
933 Smi* Smi::FromInt(int value) {
934 ASSERT(Smi::IsValid(value));
935 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
936 intptr_t tagged_value =
937 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
938 return reinterpret_cast<Smi*>(tagged_value);
942 Smi* Smi::FromIntptr(intptr_t value) {
943 ASSERT(Smi::IsValid(value));
944 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
945 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
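// Worked example of the tagging scheme (assuming the 32-bit configuration,
// where kSmiTag == 0, kSmiTagSize == 1 and kSmiShiftSize == 0):
// Smi::FromInt(5) shifts the value left by one bit and ors in the tag,
// producing the word 10 (binary 1010); the zero low bit is exactly what
// HAS_SMI_TAG tests.  On x64, kSmiShiftSize is 31 and the payload lives in
// the upper 32 bits of the word.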
949 Failure::Type Failure::type() const {
950 return static_cast<Type>(value() & kFailureTypeTagMask);
954 bool Failure::IsInternalError() const {
955 return type() == INTERNAL_ERROR;
959 bool Failure::IsOutOfMemoryException() const {
960 return type() == OUT_OF_MEMORY_EXCEPTION;
964 AllocationSpace Failure::allocation_space() const {
965 ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}
971 Failure* Failure::InternalError() {
972 return Construct(INTERNAL_ERROR);
976 Failure* Failure::Exception() {
977 return Construct(EXCEPTION);
981 Failure* Failure::OutOfMemoryException() {
982 return Construct(OUT_OF_MEMORY_EXCEPTION);
986 intptr_t Failure::value() const {
987 return static_cast<intptr_t>(
988 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
992 Failure* Failure::RetryAfterGC() {
993 return RetryAfterGC(NEW_SPACE);
997 Failure* Failure::RetryAfterGC(AllocationSpace space) {
998 ASSERT((space & ~kSpaceTagMask) == 0);
999 return Construct(RETRY_AFTER_GC, space);
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1006 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1007 return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
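// A Failure is encoded entirely in the pointer bits: the low kFailureTagSize
// bits hold kFailureTag (binary 11, a pattern no Smi or HeapObject pointer
// has), the next kFailureTypeTagSize bits hold the Type, and the remaining
// bits hold a type-specific payload.  RetryAfterGC(space), for instance,
// stores the allocation space in the payload, which allocation_space()
// above decodes again.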
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as a tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0.
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
1038 MapWord MapWord::FromMap(Map* map) {
1039 return MapWord(reinterpret_cast<uintptr_t>(map));
1043 Map* MapWord::ToMap() {
1044 return reinterpret_cast<Map*>(value_);
1048 bool MapWord::IsForwardingAddress() {
1049 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1053 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1054 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1055 return MapWord(reinterpret_cast<uintptr_t>(raw));
1059 HeapObject* MapWord::ToForwardingAddress() {
1060 ASSERT(IsForwardingAddress());
1061 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
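// During scavenge and compaction the map word of an evacuated object is
// overwritten with a forwarding pointer.  The two states are told apart by
// the low tag bits: a real Map pointer keeps its heap object tag, while
// FromForwardingAddress() stores the target address with the tag stripped,
// so the word looks like a Smi and IsForwardingAddress() can simply use
// HAS_SMI_TAG.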
1066 void HeapObject::VerifyObjectField(int offset) {
1067 VerifyPointer(READ_FIELD(this, offset));
1070 void HeapObject::VerifySmiField(int offset) {
1071 ASSERT(READ_FIELD(this, offset)->IsSmi());
Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}
1085 Isolate* HeapObject::GetIsolate() {
1086 return GetHeap()->isolate();
1090 Map* HeapObject::map() {
1091 return map_word().ToMap();
1095 void HeapObject::set_map(Map* value) {
1096 set_map_word(MapWord::FromMap(value));
1097 if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // an evacuation candidate.
1100 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1105 // Unsafe accessor omitting write barrier.
1106 void HeapObject::set_map_unsafe(Map* value) {
1107 set_map_word(MapWord::FromMap(value));
1111 MapWord HeapObject::map_word() {
1112 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1116 void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
1119 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1123 HeapObject* HeapObject::FromAddress(Address address) {
1124 ASSERT_TAG_ALIGNED(address);
1125 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1129 Address HeapObject::address() {
1130 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1134 int HeapObject::Size() {
1135 return SizeFromMap(map());
1139 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1140 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1141 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1145 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1146 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1150 double HeapNumber::value() {
1151 return READ_DOUBLE_FIELD(this, kValueOffset);
1155 void HeapNumber::set_value(double value) {
1156 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1160 int HeapNumber::get_exponent() {
1161 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1162 kExponentShift) - kExponentBias;
1166 int HeapNumber::get_sign() {
1167 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1171 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1174 FixedArrayBase* JSObject::elements() {
1175 Object* array = READ_FIELD(this, kElementsOffset);
1176 return static_cast<FixedArrayBase*>(array);
void JSObject::ValidateSmiOnlyElements() {
#if DEBUG
  if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1182 Heap* heap = GetHeap();
1183 // Don't use elements, since integrity checks will fail if there
1184 // are filler pointers in the array.
1185 FixedArray* fixed_array =
1186 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1187 Map* map = fixed_array->map();
1188 // Arrays that have been shifted in place can't be verified.
1189 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1190 map != heap->raw_unchecked_two_pointer_filler_map() &&
1191 map != heap->free_space_map()) {
1192 for (int i = 0; i < fixed_array->length(); i++) {
1193 Object* current = fixed_array->get(i);
        ASSERT(current->IsSmi() || current == heap->the_hole_value());
      }
    }
  }
#endif
}
MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
#if DEBUG
  ValidateSmiOnlyElements();
#endif
  if ((map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
    Object* obj;
    MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    set_map(Map::cast(obj));
  }
  return this;
}
MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count) {
  if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
        return EnsureCanContainNonSmiElements();
      }
    }
  }
  return this;
}
1230 MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
1231 Object** objects = reinterpret_cast<Object**>(
1232 FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
1233 return EnsureCanContainElements(objects, elements->length());
1237 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1238 ASSERT((map()->has_fast_elements() ||
1239 map()->has_fast_smi_only_elements()) ==
1240 (value->map() == GetHeap()->fixed_array_map() ||
1241 value->map() == GetHeap()->fixed_cow_array_map()));
1242 ASSERT(map()->has_fast_double_elements() ==
1243 value->IsFixedDoubleArray());
1244 ASSERT(value->HasValidElements());
#if DEBUG
  ValidateSmiOnlyElements();
#endif
1248 WRITE_FIELD(this, kElementsOffset, value);
1249 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1253 void JSObject::initialize_properties() {
1254 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1255 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1259 void JSObject::initialize_elements() {
1260 ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
1261 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1262 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  ElementsKind elements_kind = FLAG_smi_only_arrays
      ? FAST_SMI_ONLY_ELEMENTS
      : FAST_ELEMENTS;
  MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
1279 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1280 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1283 byte Oddball::kind() {
1284 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1288 void Oddball::set_kind(byte value) {
1289 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1293 Object* JSGlobalPropertyCell::value() {
1294 return READ_FIELD(this, kValueOffset);
1298 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1299 // The write barrier is not used for global property cells.
1300 ASSERT(!val->IsJSGlobalPropertyCell());
1301 WRITE_FIELD(this, kValueOffset, val);
1302 GetHeap()->incremental_marking()->RecordWrite(
1303 this, HeapObject::RawField(this, kValueOffset), val);
1307 int JSObject::GetHeaderSize() {
1308 InstanceType type = map()->instance_type();
1309 // Check for the most common kind of JavaScript object before
1310 // falling into the generic switch. This speeds up the internal
1311 // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_ARRAY_TYPE:
      return JSValue::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
1341 int JSObject::GetInternalFieldCount() {
1342 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1343 // Make sure to adjust for the number of in-object properties. These
1344 // properties do contribute to the size, but are not internal fields.
1345 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1346 map()->inobject_properties();
1350 int JSObject::GetInternalFieldOffset(int index) {
1351 ASSERT(index < GetInternalFieldCount() && index >= 0);
1352 return GetHeaderSize() + (kPointerSize * index);
1356 Object* JSObject::GetInternalField(int index) {
1357 ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal fields are stored immediately after the header, whereas
  // in-object properties are stored at the end of the object.  Therefore
  // there is no need to adjust the index here.
1361 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1365 void JSObject::SetInternalField(int index, Object* value) {
1366 ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal fields are stored immediately after the header, whereas
  // in-object properties are stored at the end of the object.  Therefore
  // there is no need to adjust the index here.
1370 int offset = GetHeaderSize() + (kPointerSize * index);
1371 WRITE_FIELD(this, offset, value);
1372 WRITE_BARRIER(GetHeap(), this, offset, value);
1376 // Access fast-case object properties at index. The use of these routines
1377 // is needed to correctly distinguish between properties stored in-object and
1378 // properties stored in the properties array.
1379 Object* JSObject::FastPropertyAt(int index) {
1380 // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}
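// Example of the index adjustment above: with 3 in-object properties, fast
// property index 1 becomes 1 - 3 == -2 and is read from the slot two words
// before the end of the instance (offset instance_size() - 2 * kPointerSize),
// while index 4 becomes 1 and is read from the properties() backing store.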
1392 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1393 // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}
1407 int JSObject::GetInObjectPropertyOffset(int index) {
1408 // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}
1415 Object* JSObject::InObjectPropertyAt(int index) {
1416 // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
1438 void JSObject::InitializeBody(Map* map,
1439 Object* pre_allocated_value,
1440 Object* filler_value) {
1441 ASSERT(!filler_value->IsHeapObject() ||
1442 !GetHeap()->InNewSpace(filler_value));
1443 ASSERT(!pre_allocated_value->IsHeapObject() ||
1444 !GetHeap()->InNewSpace(pre_allocated_value));
1445 int size = map->instance_size();
1446 int offset = kHeaderSize;
1447 if (filler_value != pre_allocated_value) {
1448 int pre_allocated = map->pre_allocated_property_fields();
1449 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1450 for (int i = 0; i < pre_allocated; i++) {
1451 WRITE_FIELD(this, offset, pre_allocated_value);
1452 offset += kPointerSize;
1455 while (offset < size) {
1456 WRITE_FIELD(this, offset, filler_value);
1457 offset += kPointerSize;
1462 bool JSObject::HasFastProperties() {
1463 return !properties()->IsDictionary();
1467 int JSObject::MaxFastProperties() {
1468 // Allow extra fast properties if the object has more than
1469 // kMaxFastProperties in-object properties. When this is the case,
1470 // it is very unlikely that the object is being used as a dictionary
1471 // and there is a good chance that allowing more map transitions
1472 // will be worth it.
1473 return Max(map()->inobject_properties(), kMaxFastProperties);
1477 void Struct::InitializeBody(int object_size) {
1478 Object* value = GetHeap()->undefined_value();
1479 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1480 WRITE_FIELD(this, offset, value);
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
1504 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1505 if (!this->IsJSValue()) return false;
1507 JSValue* js_value = JSValue::cast(this);
1508 if (!js_value->value()->IsString()) return false;
1510 String* str = String::cast(js_value->value());
  if (index >= (uint32_t)str->length()) return false;
  return true;
}
1517 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1518 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1519 return reinterpret_cast<FixedArrayBase*>(object);
1523 Object* FixedArray::get(int index) {
1524 ASSERT(index >= 0 && index < this->length());
1525 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1529 void FixedArray::set(int index, Smi* value) {
1530 ASSERT(map() != HEAP->fixed_cow_array_map());
1531 ASSERT(index >= 0 && index < this->length());
1532 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1533 int offset = kHeaderSize + index * kPointerSize;
1534 WRITE_FIELD(this, offset, value);
1538 void FixedArray::set(int index, Object* value) {
1539 ASSERT(map() != HEAP->fixed_cow_array_map());
1540 ASSERT(index >= 0 && index < this->length());
1541 int offset = kHeaderSize + index * kPointerSize;
1542 WRITE_FIELD(this, offset, value);
1543 WRITE_BARRIER(GetHeap(), this, offset, value);
1547 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1548 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1552 inline double FixedDoubleArray::hole_nan_as_double() {
1553 return BitCast<double, uint64_t>(kHoleNanInt64);
1557 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1558 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1559 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1560 return OS::nan_value();
1564 double FixedDoubleArray::get_scalar(int index) {
1565 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1566 map() != HEAP->fixed_array_map());
1567 ASSERT(index >= 0 && index < this->length());
1568 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}
1574 MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}
1583 void FixedDoubleArray::set(int index, double value) {
1584 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1585 map() != HEAP->fixed_array_map());
1586 int offset = kHeaderSize + index * kDoubleSize;
1587 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1588 WRITE_DOUBLE_FIELD(this, offset, value);
1592 void FixedDoubleArray::set_the_hole(int index) {
1593 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1594 map() != HEAP->fixed_array_map());
1595 int offset = kHeaderSize + index * kDoubleSize;
1596 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1600 bool FixedDoubleArray::is_the_hole(int index) {
1601 int offset = kHeaderSize + index * kDoubleSize;
1602 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
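// The hole is represented by one specific NaN bit pattern (kHoleNanInt64).
// Since an arbitrary computed NaN could collide with that pattern, set()
// above canonicalizes incoming NaNs via
// canonical_not_the_hole_nan_as_double() before storing, which keeps
// is_the_hole_nan() unambiguous.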
1606 void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
1607 int old_length = from->length();
1608 ASSERT(old_length < length());
1609 if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
1610 OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
1611 FIELD_ADDR(from, kHeaderSize),
                old_length * kDoubleSize);
  } else {
    for (int i = 0; i < old_length; ++i) {
      if (from->is_the_hole(i)) {
        set_the_hole(i);
      } else {
        set(i, from->get_scalar(i));
      }
    }
  }
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}
1630 void FixedDoubleArray::Initialize(FixedArray* from) {
1631 int old_length = from->length();
1632 ASSERT(old_length <= length());
1633 for (int i = 0; i < old_length; i++) {
    Object* hole_or_object = from->get(i);
    if (hole_or_object->IsTheHole()) {
      set_the_hole(i);
    } else {
      set(i, hole_or_object->Number());
    }
  }
  int offset = kHeaderSize + old_length * kDoubleSize;
  for (int current = from->length(); current < length(); ++current) {
    WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
}
1649 void FixedDoubleArray::Initialize(NumberDictionary* from) {
1650 int offset = kHeaderSize;
1651 for (int current = 0; current < length(); ++current) {
1652 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
    offset += kDoubleSize;
  }
  for (int i = 0; i < from->Capacity(); i++) {
    Object* key = from->KeyAt(i);
    if (key->IsNumber()) {
      uint32_t entry = static_cast<uint32_t>(key->Number());
      set(entry, from->ValueAt(i)->Number());
    }
  }
}
1665 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1666 Heap* heap = GetHeap();
1667 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1668 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1669 return UPDATE_WRITE_BARRIER;
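// Typical use, sketched: callers that perform many stores into the same
// object take an AssertNoAllocation scope, query the mode once and pass it
// to the setters, e.g.
//
//   AssertNoAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   array->set(i, value, mode);
//
// The AssertNoAllocation argument documents that no GC can happen between
// the query and the stores, which is what makes caching the mode safe.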
1673 void FixedArray::set(int index,
1675 WriteBarrierMode mode) {
1676 ASSERT(map() != HEAP->fixed_cow_array_map());
1677 ASSERT(index >= 0 && index < this->length());
1678 int offset = kHeaderSize + index * kPointerSize;
1679 WRITE_FIELD(this, offset, value);
1680 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
1687 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1688 ASSERT(index >= 0 && index < array->length());
1689 ASSERT(!HEAP->InNewSpace(value));
1690 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1694 void FixedArray::set_undefined(int index) {
1695 ASSERT(map() != HEAP->fixed_cow_array_map());
1696 set_undefined(GetHeap(), index);
1700 void FixedArray::set_undefined(Heap* heap, int index) {
1701 ASSERT(index >= 0 && index < this->length());
1702 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1703 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1704 heap->undefined_value());
1708 void FixedArray::set_null(int index) {
1709 set_null(GetHeap(), index);
1713 void FixedArray::set_null(Heap* heap, int index) {
1714 ASSERT(index >= 0 && index < this->length());
1715 ASSERT(!heap->InNewSpace(heap->null_value()));
1716 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1720 void FixedArray::set_the_hole(int index) {
1721 ASSERT(map() != HEAP->fixed_cow_array_map());
1722 ASSERT(index >= 0 && index < this->length());
1723 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
1730 void FixedArray::set_unchecked(int index, Smi* value) {
1731 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1732 int offset = kHeaderSize + index * kPointerSize;
1733 WRITE_FIELD(this, offset, value);
void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
1741 int offset = kHeaderSize + index * kPointerSize;
1742 WRITE_FIELD(this, offset, value);
1743 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1747 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1748 ASSERT(index >= 0 && index < this->length());
1749 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1750 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1754 Object** FixedArray::data_start() {
1755 return HeapObject::RawField(this, kHeaderSize);
1759 bool DescriptorArray::IsEmpty() {
1760 ASSERT(this->IsSmi() ||
1761 this->length() > kFirstIndex ||
1762 this == HEAP->empty_descriptor_array());
1763 return this->IsSmi() || length() <= kFirstIndex;
1767 int DescriptorArray::bit_field3_storage() {
1768 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1769 return Smi::cast(storage)->value();
void DescriptorArray::set_bit_field3_storage(int value) {
  ASSERT(!IsEmpty());
  WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}
void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
                                         int first,
                                         int second) {
  Object* tmp = array->get(first);
  NoWriteBarrierSet(array, first, array->get(second));
  NoWriteBarrierSet(array, second, tmp);
}
1787 int DescriptorArray::Search(String* name) {
1788 SLOW_ASSERT(IsSortedNoDuplicates());
1790 // Check for empty descriptor array.
1791 int nof = number_of_descriptors();
1792 if (nof == 0) return kNotFound;
1794 // Fast case: do linear search for small arrays.
1795 const int kMaxElementsForLinearSearch = 8;
1796 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1797 return LinearSearch(name, nof);
1800 // Slow case: perform binary search.
1801 return BinarySearch(name, 0, nof - 1);
1805 int DescriptorArray::SearchWithCache(String* name) {
1806 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1807 if (number == DescriptorLookupCache::kAbsent) {
1808 number = Search(name);
    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
  }
  return number;
}
1815 String* DescriptorArray::GetKey(int descriptor_number) {
1816 ASSERT(descriptor_number < number_of_descriptors());
1817 return String::cast(get(ToKeyIndex(descriptor_number)));
1821 Object* DescriptorArray::GetValue(int descriptor_number) {
1822 ASSERT(descriptor_number < number_of_descriptors());
1823 return GetContentArray()->get(ToValueIndex(descriptor_number));
1827 Smi* DescriptorArray::GetDetails(int descriptor_number) {
1828 ASSERT(descriptor_number < number_of_descriptors());
1829 return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
1833 PropertyType DescriptorArray::GetType(int descriptor_number) {
1834 ASSERT(descriptor_number < number_of_descriptors());
1835 return PropertyDetails(GetDetails(descriptor_number)).type();
1839 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1840 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1844 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1845 return JSFunction::cast(GetValue(descriptor_number));
1849 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1850 ASSERT(GetType(descriptor_number) == CALLBACKS);
1851 return GetValue(descriptor_number);
1855 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1856 ASSERT(GetType(descriptor_number) == CALLBACKS);
1857 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1858 return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
1862 bool DescriptorArray::IsProperty(int descriptor_number) {
1863 return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
1867 bool DescriptorArray::IsTransition(int descriptor_number) {
1868 PropertyType t = GetType(descriptor_number);
1869 return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
1870 t == ELEMENTS_TRANSITION;
1874 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
1875 return GetType(descriptor_number) == NULL_DESCRIPTOR;
1879 bool DescriptorArray::IsDontEnum(int descriptor_number) {
1880 return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
1884 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
1885 desc->Init(GetKey(descriptor_number),
1886 GetValue(descriptor_number),
1887 PropertyDetails(GetDetails(descriptor_number)));
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
  ASSERT(!HEAP->InNewSpace(desc->GetValue()));

  NoWriteBarrierSet(this,
                    ToKeyIndex(descriptor_number),
                    desc->GetKey());
  FixedArray* content_array = GetContentArray();
  NoWriteBarrierSet(content_array,
                    ToValueIndex(descriptor_number),
                    desc->GetValue());
  NoWriteBarrierSet(content_array,
                    ToDetailsIndex(descriptor_number),
                    desc->GetDetails().AsSmi());
}
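// Layout note: descriptor keys live in this array itself (at ToKeyIndex(i)),
// while the corresponding values and PropertyDetails smis live in the
// separate content array returned by GetContentArray(), at ToValueIndex(i)
// and ToDetailsIndex(i).  Set() writes all three slots without a write
// barrier, which is only safe because the WhitenessWitness guarantees that
// both arrays are still white (unmarked) to the incremental marker.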
void DescriptorArray::CopyFrom(int index,
                               DescriptorArray* src,
                               int src_index,
                               const WhitenessWitness& witness) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc, witness);
}
1924 void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
1925 NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
1926 FixedArray* content_array = GetContentArray();
1927 NoWriteBarrierSwap(content_array,
1928 ToValueIndex(first),
1929 ToValueIndex(second));
1930 NoWriteBarrierSwap(content_array,
1931 ToDetailsIndex(first),
1932 ToDetailsIndex(second));
1936 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
1937 : marking_(array->GetHeap()->incremental_marking()) {
1938 marking_->EnterNoMarkingScope();
1939 if (array->number_of_descriptors() > 0) {
1940 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
1941 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
1946 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
1947 marking_->LeaveNoMarkingScope();
1951 template<typename Shape, typename Key>
1952 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
1953 const int kMinCapacity = 32;
1954 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
1955 if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.
  }
  return capacity;
}
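// Worked example: at_least_space_for == 20 requests 2 * 20 == 40 slots,
// which RoundUpToPowerOf2 turns into 64; a request for 4 would yield 8 and
// then be bumped to the minimum capacity of 32.  Allocating twice the
// requested space keeps the table sparsely populated, which is what lets
// FindEntry below assume the table is never full.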
1962 template<typename Shape, typename Key>
1963 int HashTable<Shape, Key>::FindEntry(Key key) {
1964 return FindEntry(GetIsolate(), key);
1968 // Find entry for key otherwise return kNotFound.
1969 template<typename Shape, typename Key>
1970 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
1971 uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
    if (element != isolate->heap()->null_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
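// FirstProbe() and NextProbe() generate the probe sequence over the
// power-of-two sized table.  An undefined key marks a never-used slot and
// terminates the search; a null key marks a deleted entry and is merely
// skipped, so probing continues past it.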
1986 bool NumberDictionary::requires_slow_elements() {
1987 Object* max_index_object = get(kMaxNumberKeyIndex);
1988 if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
1993 uint32_t NumberDictionary::max_number_key() {
1994 ASSERT(!requires_slow_elements());
1995 Object* max_index_object = get(kMaxNumberKeyIndex);
1996 if (!max_index_object->IsSmi()) return 0;
1997 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
1998 return value >> kRequiresSlowElementsTagSize;
2001 void NumberDictionary::set_requires_slow_elements() {
2002 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
// ------------------------------------
// Cast operations
2010 CAST_ACCESSOR(FixedArray)
2011 CAST_ACCESSOR(FixedDoubleArray)
2012 CAST_ACCESSOR(DescriptorArray)
2013 CAST_ACCESSOR(DeoptimizationInputData)
2014 CAST_ACCESSOR(DeoptimizationOutputData)
2015 CAST_ACCESSOR(SymbolTable)
2016 CAST_ACCESSOR(JSFunctionResultCache)
2017 CAST_ACCESSOR(NormalizedMapCache)
2018 CAST_ACCESSOR(CompilationCacheTable)
2019 CAST_ACCESSOR(CodeCacheHashTable)
2020 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2021 CAST_ACCESSOR(MapCache)
2022 CAST_ACCESSOR(String)
2023 CAST_ACCESSOR(SeqString)
2024 CAST_ACCESSOR(SeqAsciiString)
2025 CAST_ACCESSOR(SeqTwoByteString)
2026 CAST_ACCESSOR(SlicedString)
2027 CAST_ACCESSOR(ConsString)
2028 CAST_ACCESSOR(ExternalString)
2029 CAST_ACCESSOR(ExternalAsciiString)
2030 CAST_ACCESSOR(ExternalTwoByteString)
2031 CAST_ACCESSOR(JSReceiver)
2032 CAST_ACCESSOR(JSObject)
2034 CAST_ACCESSOR(HeapObject)
2035 CAST_ACCESSOR(HeapNumber)
2036 CAST_ACCESSOR(Oddball)
2037 CAST_ACCESSOR(JSGlobalPropertyCell)
2038 CAST_ACCESSOR(SharedFunctionInfo)
2040 CAST_ACCESSOR(JSFunction)
2041 CAST_ACCESSOR(GlobalObject)
2042 CAST_ACCESSOR(JSGlobalProxy)
2043 CAST_ACCESSOR(JSGlobalObject)
2044 CAST_ACCESSOR(JSBuiltinsObject)
2046 CAST_ACCESSOR(JSArray)
2047 CAST_ACCESSOR(JSRegExp)
2048 CAST_ACCESSOR(JSProxy)
2049 CAST_ACCESSOR(JSFunctionProxy)
2050 CAST_ACCESSOR(JSSet)
2051 CAST_ACCESSOR(JSMap)
2052 CAST_ACCESSOR(JSWeakMap)
2053 CAST_ACCESSOR(Foreign)
2054 CAST_ACCESSOR(ByteArray)
2055 CAST_ACCESSOR(FreeSpace)
2056 CAST_ACCESSOR(ExternalArray)
2057 CAST_ACCESSOR(ExternalByteArray)
2058 CAST_ACCESSOR(ExternalUnsignedByteArray)
2059 CAST_ACCESSOR(ExternalShortArray)
2060 CAST_ACCESSOR(ExternalUnsignedShortArray)
2061 CAST_ACCESSOR(ExternalIntArray)
2062 CAST_ACCESSOR(ExternalUnsignedIntArray)
2063 CAST_ACCESSOR(ExternalFloatArray)
2064 CAST_ACCESSOR(ExternalDoubleArray)
2065 CAST_ACCESSOR(ExternalPixelArray)
2066 CAST_ACCESSOR(Struct)
2069 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2070 STRUCT_LIST(MAKE_STRUCT_CAST)
2071 #undef MAKE_STRUCT_CAST
2074 template <typename Shape, typename Key>
2075 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2076 ASSERT(obj->IsHashTable());
2077 return reinterpret_cast<HashTable*>(obj);
2081 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2082 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2084 SMI_ACCESSORS(String, length, kLengthOffset)
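// A string's hash code is computed lazily: hash_field() caches the result,
// and String::Hash() further down falls back to ComputeAndSetHash() while the
// field still has the not-yet-computed bit set.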
2087 uint32_t String::hash_field() {
2088 return READ_UINT32_FIELD(this, kHashFieldOffset);
2092 void String::set_hash_field(uint32_t value) {
2093 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2094 #if V8_HOST_ARCH_64_BIT
2095   WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2096 #endif
2097 }
2100 bool String::Equals(String* other) {
2101 if (other == this) return true;
2102   if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2103     return false;
2104   }
2105   return SlowEquals(other);
2106 }
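// Flattening turns a ConsString into a flat string. A cons string that is
// already flat keeps its contents in first() and an empty string in second()
// (see IsFlat() below), so TryFlatten() can simply return its first child.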
2109 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2110 if (!StringShape(this).IsCons()) return this;
2111 ConsString* cons = ConsString::cast(this);
2112 if (cons->IsFlat()) return cons->first();
2113 return SlowTryFlatten(pretenure);
2117 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2118 MaybeObject* flat = TryFlatten(pretenure);
2119 Object* successfully_flattened;
2120 if (!flat->ToObject(&successfully_flattened)) return this;
2121 return String::cast(successfully_flattened);
2125 uint16_t String::Get(int index) {
2126 ASSERT(index >= 0 && index < length());
2127 switch (StringShape(this).full_representation_tag()) {
2128 case kSeqStringTag | kAsciiStringTag:
2129 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2130 case kSeqStringTag | kTwoByteStringTag:
2131 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2132 case kConsStringTag | kAsciiStringTag:
2133 case kConsStringTag | kTwoByteStringTag:
2134 return ConsString::cast(this)->ConsStringGet(index);
2135 case kExternalStringTag | kAsciiStringTag:
2136 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2137 case kExternalStringTag | kTwoByteStringTag:
2138 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2139 case kSlicedStringTag | kAsciiStringTag:
2140 case kSlicedStringTag | kTwoByteStringTag:
2141       return SlicedString::cast(this)->SlicedStringGet(index);
2142     default:
2143       break;
2144   }
2146   UNREACHABLE();
2147   return 0;
2148 }
2151 void String::Set(int index, uint16_t value) {
2152 ASSERT(index >= 0 && index < length());
2153 ASSERT(StringShape(this).IsSequential());
2155 return this->IsAsciiRepresentation()
2156 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2157 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2161 bool String::IsFlat() {
2162 if (!StringShape(this).IsCons()) return true;
2163 return ConsString::cast(this)->second()->length() == 0;
2167 String* String::GetUnderlying() {
2168 // Giving direct access to underlying string only makes sense if the
2169 // wrapping string is already flattened.
2170 ASSERT(this->IsFlat());
2171 ASSERT(StringShape(this).IsIndirect());
2172 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2173 const int kUnderlyingOffset = SlicedString::kParentOffset;
2174 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2178 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2179 ASSERT(index >= 0 && index < length());
2180 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2184 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2185 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2186 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2187 static_cast<byte>(value));
2191 Address SeqAsciiString::GetCharsAddress() {
2192 return FIELD_ADDR(this, kHeaderSize);
2196 char* SeqAsciiString::GetChars() {
2197 return reinterpret_cast<char*>(GetCharsAddress());
2201 Address SeqTwoByteString::GetCharsAddress() {
2202 return FIELD_ADDR(this, kHeaderSize);
2206 uc16* SeqTwoByteString::GetChars() {
2207 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2211 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2212 ASSERT(index >= 0 && index < length());
2213 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2217 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2218 ASSERT(index >= 0 && index < length());
2219 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2223 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2224 return SizeFor(length());
2228 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2229 return SizeFor(length());
2233 String* SlicedString::parent() {
2234 return String::cast(READ_FIELD(this, kParentOffset));
2238 void SlicedString::set_parent(String* parent) {
2239 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2240 WRITE_FIELD(this, kParentOffset, parent);
2244 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2247 String* ConsString::first() {
2248 return String::cast(READ_FIELD(this, kFirstOffset));
2252 Object* ConsString::unchecked_first() {
2253 return READ_FIELD(this, kFirstOffset);
2257 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2258 WRITE_FIELD(this, kFirstOffset, value);
2259 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2263 String* ConsString::second() {
2264 return String::cast(READ_FIELD(this, kSecondOffset));
2268 Object* ConsString::unchecked_second() {
2269 return READ_FIELD(this, kSecondOffset);
2273 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2274 WRITE_FIELD(this, kSecondOffset, value);
2275 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2279 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2280 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2284 void ExternalAsciiString::set_resource(
2285 const ExternalAsciiString::Resource* resource) {
2286 *reinterpret_cast<const Resource**>(
2287 FIELD_ADDR(this, kResourceOffset)) = resource;
2291 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2292 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2296 void ExternalTwoByteString::set_resource(
2297 const ExternalTwoByteString::Resource* resource) {
2298 *reinterpret_cast<const Resource**>(
2299 FIELD_ADDR(this, kResourceOffset)) = resource;
2303 void JSFunctionResultCache::MakeZeroSize() {
2304 set_finger_index(kEntriesIndex);
2305 set_size(kEntriesIndex);
2309 void JSFunctionResultCache::Clear() {
2310 int cache_size = size();
2311 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2312 MemsetPointer(entries_start,
2313 GetHeap()->the_hole_value(),
2314 cache_size - kEntriesIndex);
2319 int JSFunctionResultCache::size() {
2320 return Smi::cast(get(kCacheSizeIndex))->value();
2324 void JSFunctionResultCache::set_size(int size) {
2325 set(kCacheSizeIndex, Smi::FromInt(size));
2329 int JSFunctionResultCache::finger_index() {
2330 return Smi::cast(get(kFingerIndex))->value();
2334 void JSFunctionResultCache::set_finger_index(int finger_index) {
2335 set(kFingerIndex, Smi::FromInt(finger_index));
2339 byte ByteArray::get(int index) {
2340 ASSERT(index >= 0 && index < this->length());
2341 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2345 void ByteArray::set(int index, byte value) {
2346 ASSERT(index >= 0 && index < this->length());
2347 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2351 int ByteArray::get_int(int index) {
2352 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2353 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2357 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2358 ASSERT_TAG_ALIGNED(address);
2359 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2363 Address ByteArray::GetDataStartAddress() {
2364 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2368 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2369 return reinterpret_cast<uint8_t*>(external_pointer());
2373 uint8_t ExternalPixelArray::get_scalar(int index) {
2374 ASSERT((index >= 0) && (index < this->length()));
2375   uint8_t* ptr = external_pixel_pointer();
2376   return ptr[index];
2377 }
2380 MaybeObject* ExternalPixelArray::get(int index) {
2381 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2385 void ExternalPixelArray::set(int index, uint8_t value) {
2386 ASSERT((index >= 0) && (index < this->length()));
2387   uint8_t* ptr = external_pixel_pointer();
2388   ptr[index] = value;
2389 }
2392 void* ExternalArray::external_pointer() {
2393 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2394 return reinterpret_cast<void*>(ptr);
2398 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2399 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2400 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2404 int8_t ExternalByteArray::get_scalar(int index) {
2405 ASSERT((index >= 0) && (index < this->length()));
2406   int8_t* ptr = static_cast<int8_t*>(external_pointer());
2407   return ptr[index];
2408 }
2411 MaybeObject* ExternalByteArray::get(int index) {
2412 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2416 void ExternalByteArray::set(int index, int8_t value) {
2417 ASSERT((index >= 0) && (index < this->length()));
2418   int8_t* ptr = static_cast<int8_t*>(external_pointer());
2419   ptr[index] = value;
2420 }
2423 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2424 ASSERT((index >= 0) && (index < this->length()));
2425   uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2426   return ptr[index];
2427 }
2430 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2431 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2435 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2436 ASSERT((index >= 0) && (index < this->length()));
2437   uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2438   ptr[index] = value;
2439 }
2442 int16_t ExternalShortArray::get_scalar(int index) {
2443 ASSERT((index >= 0) && (index < this->length()));
2444   int16_t* ptr = static_cast<int16_t*>(external_pointer());
2445   return ptr[index];
2446 }
2449 MaybeObject* ExternalShortArray::get(int index) {
2450 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2454 void ExternalShortArray::set(int index, int16_t value) {
2455 ASSERT((index >= 0) && (index < this->length()));
2456   int16_t* ptr = static_cast<int16_t*>(external_pointer());
2457   ptr[index] = value;
2458 }
2461 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2462 ASSERT((index >= 0) && (index < this->length()));
2463   uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2464   return ptr[index];
2465 }
2468 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2469 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2473 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2474 ASSERT((index >= 0) && (index < this->length()));
2475   uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2476   ptr[index] = value;
2477 }
2480 int32_t ExternalIntArray::get_scalar(int index) {
2481 ASSERT((index >= 0) && (index < this->length()));
2482   int32_t* ptr = static_cast<int32_t*>(external_pointer());
2483   return ptr[index];
2484 }
2487 MaybeObject* ExternalIntArray::get(int index) {
2488 return GetHeap()->NumberFromInt32(get_scalar(index));
2492 void ExternalIntArray::set(int index, int32_t value) {
2493 ASSERT((index >= 0) && (index < this->length()));
2494   int32_t* ptr = static_cast<int32_t*>(external_pointer());
2495   ptr[index] = value;
2496 }
2499 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2500 ASSERT((index >= 0) && (index < this->length()));
2501   uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2502   return ptr[index];
2503 }
2506 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2507 return GetHeap()->NumberFromUint32(get_scalar(index));
2511 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2512 ASSERT((index >= 0) && (index < this->length()));
2513   uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2514   ptr[index] = value;
2515 }
2518 float ExternalFloatArray::get_scalar(int index) {
2519 ASSERT((index >= 0) && (index < this->length()));
2520   float* ptr = static_cast<float*>(external_pointer());
2521   return ptr[index];
2522 }
2525 MaybeObject* ExternalFloatArray::get(int index) {
2526 return GetHeap()->NumberFromDouble(get_scalar(index));
2530 void ExternalFloatArray::set(int index, float value) {
2531 ASSERT((index >= 0) && (index < this->length()));
2532   float* ptr = static_cast<float*>(external_pointer());
2533   ptr[index] = value;
2534 }
2537 double ExternalDoubleArray::get_scalar(int index) {
2538 ASSERT((index >= 0) && (index < this->length()));
2539   double* ptr = static_cast<double*>(external_pointer());
2540   return ptr[index];
2541 }
2544 MaybeObject* ExternalDoubleArray::get(int index) {
2545 return GetHeap()->NumberFromDouble(get_scalar(index));
2549 void ExternalDoubleArray::set(int index, double value) {
2550 ASSERT((index >= 0) && (index < this->length()));
2551   double* ptr = static_cast<double*>(external_pointer());
2552   ptr[index] = value;
2553 }
2556 int Map::visitor_id() {
2557 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2561 void Map::set_visitor_id(int id) {
2562 ASSERT(0 <= id && id < 256);
2563 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2567 int Map::instance_size() {
2568 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2572 int Map::inobject_properties() {
2573 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2577 int Map::pre_allocated_property_fields() {
2578   return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2579 }
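// SizeFromMap() computes an object's size. Fixed-size objects store the size
// directly in the map; variable-sized objects (fixed arrays, sequential
// strings, byte arrays, free space and code) report kVariableSizeSentinel and
// derive their size from their length.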
2582 int HeapObject::SizeFromMap(Map* map) {
2583 int instance_size = map->instance_size();
2584 if (instance_size != kVariableSizeSentinel) return instance_size;
2585   // We can ignore the "symbol" bit because it is only set for symbols
2586 // and implies a string type.
2587 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2588 // Only inline the most frequent cases.
2589 if (instance_type == FIXED_ARRAY_TYPE) {
2590 return FixedArray::BodyDescriptor::SizeOf(map, this);
2592 if (instance_type == ASCII_STRING_TYPE) {
2593 return SeqAsciiString::SizeFor(
2594 reinterpret_cast<SeqAsciiString*>(this)->length());
2596 if (instance_type == BYTE_ARRAY_TYPE) {
2597 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2599 if (instance_type == FREE_SPACE_TYPE) {
2600 return reinterpret_cast<FreeSpace*>(this)->size();
2602 if (instance_type == STRING_TYPE) {
2603 return SeqTwoByteString::SizeFor(
2604 reinterpret_cast<SeqTwoByteString*>(this)->length());
2606 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2607 return FixedDoubleArray::SizeFor(
2608 reinterpret_cast<FixedDoubleArray*>(this)->length());
2610 ASSERT(instance_type == CODE_TYPE);
2611   return reinterpret_cast<Code*>(this)->CodeSize();
2612 }
2615 void Map::set_instance_size(int value) {
2616 ASSERT_EQ(0, value & (kPointerSize - 1));
2617 value >>= kPointerSizeLog2;
2618 ASSERT(0 <= value && value < 256);
2619 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2623 void Map::set_inobject_properties(int value) {
2624 ASSERT(0 <= value && value < 256);
2625 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2629 void Map::set_pre_allocated_property_fields(int value) {
2630 ASSERT(0 <= value && value < 256);
2631 WRITE_BYTE_FIELD(this,
2632 kPreAllocatedPropertyFieldsOffset,
2633 static_cast<byte>(value));
2637 InstanceType Map::instance_type() {
2638 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2642 void Map::set_instance_type(InstanceType value) {
2643 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2647 int Map::unused_property_fields() {
2648 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2652 void Map::set_unused_property_fields(int value) {
2653 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2657 byte Map::bit_field() {
2658 return READ_BYTE_FIELD(this, kBitFieldOffset);
2662 void Map::set_bit_field(byte value) {
2663 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2667 byte Map::bit_field2() {
2668 return READ_BYTE_FIELD(this, kBitField2Offset);
2672 void Map::set_bit_field2(byte value) {
2673 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2677 void Map::set_non_instance_prototype(bool value) {
2678   if (value) {
2679     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2680   } else {
2681     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2682   }
2683 }
2686 bool Map::has_non_instance_prototype() {
2687 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2691 void Map::set_function_with_prototype(bool value) {
2692   if (value) {
2693     set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2694   } else {
2695     set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2696   }
2697 }
2700 bool Map::function_with_prototype() {
2701 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2705 void Map::set_is_access_check_needed(bool access_check_needed) {
2706 if (access_check_needed) {
2707     set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2708   } else {
2709     set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2710   }
2711 }
2714 bool Map::is_access_check_needed() {
2715 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2719 void Map::set_is_extensible(bool value) {
2720   if (value) {
2721     set_bit_field2(bit_field2() | (1 << kIsExtensible));
2722   } else {
2723     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2724   }
2725 }
2727 bool Map::is_extensible() {
2728 return ((1 << kIsExtensible) & bit_field2()) != 0;
2732 void Map::set_attached_to_shared_function_info(bool value) {
2733   if (value) {
2734     set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2735   } else {
2736     set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2737   }
2738 }
2740 bool Map::attached_to_shared_function_info() {
2741 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2745 void Map::set_is_shared(bool value) {
2746   if (value) {
2747     set_bit_field3(bit_field3() | (1 << kIsShared));
2748   } else {
2749     set_bit_field3(bit_field3() & ~(1 << kIsShared));
2750   }
2751 }
2753 bool Map::is_shared() {
2754 return ((1 << kIsShared) & bit_field3()) != 0;
2758 JSFunction* Map::unchecked_constructor() {
2759 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2763 Code::Flags Code::flags() {
2764 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2768 void Code::set_flags(Code::Flags flags) {
2769 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2770 // Make sure that all call stubs have an arguments count.
2771 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2772 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2773 ExtractArgumentsCountFromFlags(flags) >= 0);
2774 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2778 Code::Kind Code::kind() {
2779 return ExtractKindFromFlags(flags());
2783 InlineCacheState Code::ic_state() {
2784 InlineCacheState result = ExtractICStateFromFlags(flags());
2785 // Only allow uninitialized or debugger states for non-IC code
2786 // objects. This is used in the debugger to determine whether or not
2787 // a call to code object has been replaced with a debug break call.
2788 ASSERT(is_inline_cache_stub() ||
2789 result == UNINITIALIZED ||
2790 result == DEBUG_BREAK ||
2791          result == DEBUG_PREPARE_STEP_IN);
2792   return result;
2793 }
2796 Code::ExtraICState Code::extra_ic_state() {
2797 ASSERT(is_inline_cache_stub());
2798 return ExtractExtraICStateFromFlags(flags());
2802 PropertyType Code::type() {
2803 return ExtractTypeFromFlags(flags());
2807 int Code::arguments_count() {
2808 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2809 return ExtractArgumentsCountFromFlags(flags());
2813 int Code::major_key() {
2814 ASSERT(kind() == STUB ||
2815 kind() == UNARY_OP_IC ||
2816 kind() == BINARY_OP_IC ||
2817 kind() == COMPARE_IC ||
2818 kind() == TO_BOOLEAN_IC);
2819 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2823 void Code::set_major_key(int major) {
2824 ASSERT(kind() == STUB ||
2825 kind() == UNARY_OP_IC ||
2826 kind() == BINARY_OP_IC ||
2827 kind() == COMPARE_IC ||
2828 kind() == TO_BOOLEAN_IC);
2829 ASSERT(0 <= major && major < 256);
2830 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
2834 bool Code::is_pregenerated() {
2835 return kind() == STUB && IsPregeneratedField::decode(flags());
2839 void Code::set_is_pregenerated(bool value) {
2840   ASSERT(kind() == STUB);
2841   Flags f = flags();
2842   f = static_cast<Flags>(IsPregeneratedField::update(f, value));
2843   set_flags(f);
2844 }
2847 bool Code::optimizable() {
2848 ASSERT(kind() == FUNCTION);
2849 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
2853 void Code::set_optimizable(bool value) {
2854 ASSERT(kind() == FUNCTION);
2855 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
2859 bool Code::has_deoptimization_support() {
2860 ASSERT(kind() == FUNCTION);
2861 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2862 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
2866 void Code::set_has_deoptimization_support(bool value) {
2867 ASSERT(kind() == FUNCTION);
2868 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2869 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
2870 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2874 bool Code::has_debug_break_slots() {
2875 ASSERT(kind() == FUNCTION);
2876 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2877 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
2881 void Code::set_has_debug_break_slots(bool value) {
2882 ASSERT(kind() == FUNCTION);
2883 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2884 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
2885 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2889 bool Code::is_compiled_optimizable() {
2890 ASSERT(kind() == FUNCTION);
2891 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2892 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
2896 void Code::set_compiled_optimizable(bool value) {
2897 ASSERT(kind() == FUNCTION);
2898 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2899 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
2900 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2904 int Code::allow_osr_at_loop_nesting_level() {
2905 ASSERT(kind() == FUNCTION);
2906 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
2910 void Code::set_allow_osr_at_loop_nesting_level(int level) {
2911 ASSERT(kind() == FUNCTION);
2912 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
2913 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
2917 unsigned Code::stack_slots() {
2918 ASSERT(kind() == OPTIMIZED_FUNCTION);
2919 return READ_UINT32_FIELD(this, kStackSlotsOffset);
2923 void Code::set_stack_slots(unsigned slots) {
2924 ASSERT(kind() == OPTIMIZED_FUNCTION);
2925 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
2929 unsigned Code::safepoint_table_offset() {
2930 ASSERT(kind() == OPTIMIZED_FUNCTION);
2931 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
2935 void Code::set_safepoint_table_offset(unsigned offset) {
2936 ASSERT(kind() == OPTIMIZED_FUNCTION);
2937 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2938 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
2942 unsigned Code::stack_check_table_offset() {
2943 ASSERT(kind() == FUNCTION);
2944 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
2948 void Code::set_stack_check_table_offset(unsigned offset) {
2949 ASSERT(kind() == FUNCTION);
2950 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2951 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
2955 CheckType Code::check_type() {
2956 ASSERT(is_call_stub() || is_keyed_call_stub());
2957 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
2958 return static_cast<CheckType>(type);
2962 void Code::set_check_type(CheckType value) {
2963 ASSERT(is_call_stub() || is_keyed_call_stub());
2964 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
2968 byte Code::unary_op_type() {
2969 ASSERT(is_unary_op_stub());
2970 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
2974 void Code::set_unary_op_type(byte value) {
2975 ASSERT(is_unary_op_stub());
2976 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
2980 byte Code::binary_op_type() {
2981 ASSERT(is_binary_op_stub());
2982 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
2986 void Code::set_binary_op_type(byte value) {
2987 ASSERT(is_binary_op_stub());
2988 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
2992 byte Code::binary_op_result_type() {
2993 ASSERT(is_binary_op_stub());
2994 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
2998 void Code::set_binary_op_result_type(byte value) {
2999 ASSERT(is_binary_op_stub());
3000 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3004 byte Code::compare_state() {
3005 ASSERT(is_compare_ic_stub());
3006 return READ_BYTE_FIELD(this, kCompareStateOffset);
3010 void Code::set_compare_state(byte value) {
3011 ASSERT(is_compare_ic_stub());
3012 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3016 byte Code::to_boolean_state() {
3017 ASSERT(is_to_boolean_ic_stub());
3018 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3022 void Code::set_to_boolean_state(byte value) {
3023 ASSERT(is_to_boolean_ic_stub());
3024 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3028 bool Code::has_function_cache() {
3029 ASSERT(kind() == STUB);
3030 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3034 void Code::set_has_function_cache(bool flag) {
3035 ASSERT(kind() == STUB);
3036 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3040 bool Code::is_inline_cache_stub() {
3041 Kind kind = this->kind();
3042   return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3043 }
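// Code flags pack the kind, IC state, property type, extra IC state, argument
// count and cache-holder flag into a single word. ComputeFlags() encodes the
// fields; the Extract*FromFlags() helpers below decode them again.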
3046 Code::Flags Code::ComputeFlags(Kind kind,
3047 InlineCacheState ic_state,
3048                                ExtraICState extra_ic_state,
3049                                PropertyType type,
3050                                int argc,
3051                                InlineCacheHolderFlag holder) {
3052   // Extra IC state is only allowed for call IC stubs or for store IC
3053   // stubs.
3054   ASSERT(extra_ic_state == kNoExtraICState ||
3055          kind == CALL_IC ||
3056          kind == STORE_IC ||
3057 kind == KEYED_STORE_IC);
3058 // Compute the bit mask.
3059 int bits = KindField::encode(kind)
3060 | ICStateField::encode(ic_state)
3061 | TypeField::encode(type)
3062 | ExtraICStateField::encode(extra_ic_state)
3063 | (argc << kArgumentsCountShift)
3064 | CacheHolderField::encode(holder);
3065   return static_cast<Flags>(bits);
3066 }
3069 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3070                                           PropertyType type,
3071                                           ExtraICState extra_ic_state,
3072                                           InlineCacheHolderFlag holder,
3073                                           int argc) {
3074   return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3075 }
3078 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3079 return KindField::decode(flags);
3083 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3084 return ICStateField::decode(flags);
3088 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3089 return ExtraICStateField::decode(flags);
3093 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3094 return TypeField::decode(flags);
3098 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3099 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3103 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3104 return CacheHolderField::decode(flags);
3108 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3109 int bits = flags & ~TypeField::kMask;
3110 return static_cast<Flags>(bits);
3114 Code* Code::GetCodeFromTargetAddress(Address address) {
3115 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3116 // GetCodeFromTargetAddress might be called when marking objects during mark
3117 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3118   // Code::cast. Code::cast does not work when the object's map is
3119   // marked.
3120   Code* result = reinterpret_cast<Code*>(code);
3121   return result;
3122 }
3125 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3126   return HeapObject::
3127       FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3128 }
3131 Object* Map::prototype() {
3132 return READ_FIELD(this, kPrototypeOffset);
3136 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3137 ASSERT(value->IsNull() || value->IsJSReceiver());
3138 WRITE_FIELD(this, kPrototypeOffset, value);
3139   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3140 }
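// The descriptor-array slot of a map is overloaded: while a map has no
// descriptors the slot holds bit_field3 as a Smi, otherwise bit_field3 is
// stored inside the descriptor array itself (see bit_field3() and
// set_bit_field3() below).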
3143 DescriptorArray* Map::instance_descriptors() {
3144 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3145 if (object->IsSmi()) {
3146     return HEAP->empty_descriptor_array();
3147   }
3148   return DescriptorArray::cast(object);
3149 }
3153 void Map::init_instance_descriptors() {
3154 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3158 void Map::clear_instance_descriptors() {
3159 Object* object = READ_FIELD(this,
3160 kInstanceDescriptorsOrBitField3Offset);
3161   if (!object->IsSmi()) {
3162     WRITE_FIELD(
3163         this,
3164         kInstanceDescriptorsOrBitField3Offset,
3165         Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3166   }
3167 }
3170 void Map::set_instance_descriptors(DescriptorArray* value,
3171 WriteBarrierMode mode) {
3172 Object* object = READ_FIELD(this,
3173 kInstanceDescriptorsOrBitField3Offset);
3174 Heap* heap = GetHeap();
3175 if (value == heap->empty_descriptor_array()) {
3176     clear_instance_descriptors();
3177     return;
3178   } else {
3179     if (object->IsSmi()) {
3180       value->set_bit_field3_storage(Smi::cast(object)->value());
3181     } else {
3182       value->set_bit_field3_storage(
3183           DescriptorArray::cast(object)->bit_field3_storage());
3184     }
3185   }
3186   ASSERT(!is_shared());
3187   WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3188   CONDITIONAL_WRITE_BARRIER(
3189       heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3190 }
3193 int Map::bit_field3() {
3194 Object* object = READ_FIELD(this,
3195 kInstanceDescriptorsOrBitField3Offset);
3196 if (object->IsSmi()) {
3197     return Smi::cast(object)->value();
3198   }
3199   return DescriptorArray::cast(object)->bit_field3_storage();
3200 }
3204 void Map::set_bit_field3(int value) {
3205 ASSERT(Smi::IsValid(value));
3206 Object* object = READ_FIELD(this,
3207 kInstanceDescriptorsOrBitField3Offset);
3208   if (object->IsSmi()) {
3209     WRITE_FIELD(this,
3210                 kInstanceDescriptorsOrBitField3Offset,
3211                 Smi::FromInt(value));
3212   } else {
3213     DescriptorArray::cast(object)->set_bit_field3_storage(value);
3214   }
3215 }
3218 FixedArray* Map::unchecked_prototype_transitions() {
3219 return reinterpret_cast<FixedArray*>(
3220 READ_FIELD(this, kPrototypeTransitionsOffset));
3224 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3225 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3226 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3228 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3229 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3230 ACCESSORS(JSFunction,
3231           next_function_link,
3232           Object,
3233           kNextFunctionLinkOffset)
3235 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3236 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3237 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3239 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3241 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3242 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3243 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3244 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3245 ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
3247 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3248 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3249 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3251 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3252 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3253 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3254 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3255 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3256 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3258 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3259 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3261 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3262 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3264 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3265 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3266 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3267 kPropertyAccessorsOffset)
3268 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3269 kPrototypeTemplateOffset)
3270 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3271 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3272 kNamedPropertyHandlerOffset)
3273 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3274 kIndexedPropertyHandlerOffset)
3275 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3276 kInstanceTemplateOffset)
3277 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3278 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3279 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3280 kInstanceCallHandlerOffset)
3281 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3282 kAccessCheckInfoOffset)
3283 ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
3285 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3286 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3287 kInternalFieldCountOffset)
3289 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3290 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3292 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3294 ACCESSORS(Script, source, Object, kSourceOffset)
3295 ACCESSORS(Script, name, Object, kNameOffset)
3296 ACCESSORS(Script, id, Object, kIdOffset)
3297 ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
3298 ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
3299 ACCESSORS(Script, data, Object, kDataOffset)
3300 ACCESSORS(Script, context_data, Object, kContextOffset)
3301 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3302 ACCESSORS(Script, type, Smi, kTypeOffset)
3303 ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
3304 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3305 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3306 ACCESSORS(Script, eval_from_instructions_offset, Smi,
3307 kEvalFrominstructionsOffsetOffset)
3309 #ifdef ENABLE_DEBUGGER_SUPPORT
3310 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3311 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3312 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3313 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3315 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
3316 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
3317 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
3318 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3321 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3322 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3323 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3324 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3325 kInstanceClassNameOffset)
3326 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3327 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3328 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3329 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3330 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3331 kThisPropertyAssignmentsOffset)
3333 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3334 kHiddenPrototypeBit)
3335 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3336 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3337 kNeedsAccessCheckBit)
3338 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3339 kReadOnlyPrototypeBit)
3340 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3341                kIsExpressionBit)
3342 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3343                kIsTopLevelBit)
3344 BOOL_GETTER(SharedFunctionInfo,
3345             compiler_hints,
3346 has_only_simple_this_property_assignments,
3347 kHasOnlySimpleThisPropertyAssignments)
3348 BOOL_ACCESSORS(SharedFunctionInfo,
3349                compiler_hints,
3350 allows_lazy_compilation,
3351 kAllowLazyCompilation)
3352 BOOL_ACCESSORS(SharedFunctionInfo,
3356 BOOL_ACCESSORS(SharedFunctionInfo,
3357                compiler_hints,
3358 has_duplicate_parameters,
3359 kHasDuplicateParameters)
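// On 32-bit hosts the following integer fields of SharedFunctionInfo are
// stored as Smis. On 64-bit hosts two raw ints share each pointer-sized slot
// instead: the LO accessor keeps the low (heap-object tag) bit of its half
// clear so the slot never looks like a tagged pointer, and the HI accessor is
// a plain int stored at the odd kIntSize offset.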
3362 #if V8_HOST_ARCH_32_BIT
3363 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3364 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3365 kFormalParameterCountOffset)
3366 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3367 kExpectedNofPropertiesOffset)
3368 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3369 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3370 kStartPositionAndTypeOffset)
3371 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3372 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3373 kFunctionTokenPositionOffset)
3374 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3375 kCompilerHintsOffset)
3376 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3377 kThisPropertyAssignmentsCountOffset)
3378 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3379 #else
3381 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3382 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3383 int holder::name() { \
3384 int value = READ_INT_FIELD(this, offset); \
3385 ASSERT(kHeapObjectTag == 1); \
3386 ASSERT((value & kHeapObjectTag) == 0); \
3387     return value >> 1;                                  \
3388   }                                                     \
3389 void holder::set_##name(int value) { \
3390 ASSERT(kHeapObjectTag == 1); \
3391 ASSERT((value & 0xC0000000) == 0xC0000000 || \
3392 (value & 0xC0000000) == 0x000000000); \
3393     WRITE_INT_FIELD(this,                               \
3394                     offset,                             \
3395                     (value << 1) & ~kHeapObjectTag);    \
3396   }
3398 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3399 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3400 INT_ACCESSORS(holder, name, offset)
3403 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3404 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3405 formal_parameter_count,
3406 kFormalParameterCountOffset)
3408 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3409 expected_nof_properties,
3410 kExpectedNofPropertiesOffset)
3411 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3413 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3414 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3415 start_position_and_type,
3416 kStartPositionAndTypeOffset)
3418 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3419 function_token_position,
3420 kFunctionTokenPositionOffset)
3421 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3423 kCompilerHintsOffset)
3425 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3426 this_property_assignments_count,
3427 kThisPropertyAssignmentsCountOffset)
3428 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3430 #endif
3432 int SharedFunctionInfo::construction_count() {
3433 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3437 void SharedFunctionInfo::set_construction_count(int value) {
3438 ASSERT(0 <= value && value < 256);
3439 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3443 BOOL_ACCESSORS(SharedFunctionInfo,
3445 live_objects_may_exist,
3446 kLiveObjectsMayExist)
3449 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3450 return initial_map() != HEAP->undefined_value();
3454 BOOL_GETTER(SharedFunctionInfo,
3456 optimization_disabled,
3457 kOptimizationDisabled)
3460 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3461 set_compiler_hints(BooleanBit::set(compiler_hints(),
3462                                      kOptimizationDisabled,
3463                                      disable));
3464 // If disabling optimizations we reflect that in the code object so
3465 // it will not be counted as optimizable code.
3466 if ((code()->kind() == Code::FUNCTION) && disable) {
3467     code()->set_optimizable(false);
3468   }
3469 }
3472 StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
3473 return BooleanBit::get(compiler_hints(), kStrictModeFunction)
3474 ? kStrictMode : kNonStrictMode;
3478 void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
3479 ASSERT(strict_mode_flag == kStrictMode ||
3480 strict_mode_flag == kNonStrictMode);
3481   bool value = strict_mode_flag == kStrictMode;
3482   set_compiler_hints(
3483       BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
3484 }
3487 BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
3488 kStrictModeFunction)
3489 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3490 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3491 name_should_print_as_anonymous,
3492 kNameShouldPrintAsAnonymous)
3493 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3494 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3496 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3497 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3499 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3501 bool Script::HasValidSource() {
3502 Object* src = this->source();
3503 if (!src->IsString()) return true;
3504 String* src_str = String::cast(src);
3505 if (!StringShape(src_str).IsExternal()) return true;
3506 if (src_str->IsAsciiRepresentation()) {
3507 return ExternalAsciiString::cast(src)->resource() != NULL;
3508 } else if (src_str->IsTwoByteRepresentation()) {
3509     return ExternalTwoByteString::cast(src)->resource() != NULL;
3510   }
3511   return true;
3512 }
3515 void SharedFunctionInfo::DontAdaptArguments() {
3516 ASSERT(code()->kind() == Code::BUILTIN);
3517 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3521 int SharedFunctionInfo::start_position() {
3522 return start_position_and_type() >> kStartPositionShift;
3526 void SharedFunctionInfo::set_start_position(int start_position) {
3527 set_start_position_and_type((start_position << kStartPositionShift)
3528 | (start_position_and_type() & ~kStartPositionMask));
3532 Code* SharedFunctionInfo::code() {
3533 return Code::cast(READ_FIELD(this, kCodeOffset));
3537 Code* SharedFunctionInfo::unchecked_code() {
3538 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3542 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3543 WRITE_FIELD(this, kCodeOffset, value);
3544 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3548 SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3549 return reinterpret_cast<SerializedScopeInfo*>(
3550 READ_FIELD(this, kScopeInfoOffset));
3554 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3555 WriteBarrierMode mode) {
3556 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3557   CONDITIONAL_WRITE_BARRIER(GetHeap(),
3558                             this,
3559                             kScopeInfoOffset,
3560                             reinterpret_cast<Object*>(value),
3561                             mode);
3562 }
3565 Smi* SharedFunctionInfo::deopt_counter() {
3566 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3570 void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3571 WRITE_FIELD(this, kDeoptCounterOffset, value);
3575 bool SharedFunctionInfo::is_compiled() {
3576   return code() !=
3577       Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3578 }
3581 bool SharedFunctionInfo::IsApiFunction() {
3582 return function_data()->IsFunctionTemplateInfo();
3586 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3587 ASSERT(IsApiFunction());
3588 return FunctionTemplateInfo::cast(function_data());
3592 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3593 return function_data()->IsSmi();
3597 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3598 ASSERT(HasBuiltinFunctionId());
3599 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3603 int SharedFunctionInfo::code_age() {
3604 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3608 void SharedFunctionInfo::set_code_age(int code_age) {
3609 set_compiler_hints(compiler_hints() |
3610 ((code_age & kCodeAgeMask) << kCodeAgeShift));
3614 bool SharedFunctionInfo::has_deoptimization_support() {
3615 Code* code = this->code();
3616 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3620 bool JSFunction::IsBuiltin() {
3621 return context()->global()->IsJSBuiltinsObject();
3625 bool JSFunction::NeedsArgumentsAdaption() {
3626 return shared()->formal_parameter_count() !=
3627 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3631 bool JSFunction::IsOptimized() {
3632 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3636 bool JSFunction::IsOptimizable() {
3637 return code()->kind() == Code::FUNCTION && code()->optimizable();
3641 bool JSFunction::IsMarkedForLazyRecompilation() {
3642 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3646 Code* JSFunction::code() {
3647 return Code::cast(unchecked_code());
3651 Code* JSFunction::unchecked_code() {
3652 return reinterpret_cast<Code*>(
3653 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3657 void JSFunction::set_code(Code* value) {
3658 ASSERT(!HEAP->InNewSpace(value));
3659 Address entry = value->entry();
3660 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3661   GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3662       this,
3663       HeapObject::RawField(this, kCodeEntryOffset),
3664       value);
3665 }
3668 void JSFunction::ReplaceCode(Code* code) {
3669 bool was_optimized = IsOptimized();
3670   bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3672   set_code(code);
3674   // Add/remove the function from the list of optimized functions for this
3675   // context based on the state change.
3676   if (!was_optimized && is_optimized) {
3677     context()->global_context()->AddOptimizedFunction(this);
3678   }
3679   if (was_optimized && !is_optimized) {
3680     context()->global_context()->RemoveOptimizedFunction(this);
3681   }
3682 }
3685 Context* JSFunction::context() {
3686 return Context::cast(READ_FIELD(this, kContextOffset));
3690 Object* JSFunction::unchecked_context() {
3691 return READ_FIELD(this, kContextOffset);
3695 SharedFunctionInfo* JSFunction::unchecked_shared() {
3696 return reinterpret_cast<SharedFunctionInfo*>(
3697 READ_FIELD(this, kSharedFunctionInfoOffset));
3701 void JSFunction::set_context(Object* value) {
3702 ASSERT(value->IsUndefined() || value->IsContext());
3703 WRITE_FIELD(this, kContextOffset, value);
3704 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3707 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3708 kPrototypeOrInitialMapOffset)
3711 Map* JSFunction::initial_map() {
3712 return Map::cast(prototype_or_initial_map());
3716 void JSFunction::set_initial_map(Map* value) {
3717 set_prototype_or_initial_map(value);
3721 bool JSFunction::has_initial_map() {
3722 return prototype_or_initial_map()->IsMap();
3726 bool JSFunction::has_instance_prototype() {
3727 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3731 bool JSFunction::has_prototype() {
3732 return map()->has_non_instance_prototype() || has_instance_prototype();
3736 Object* JSFunction::instance_prototype() {
3737 ASSERT(has_instance_prototype());
3738 if (has_initial_map()) return initial_map()->prototype();
3739 // When there is no initial map and the prototype is a JSObject, the
3740 // initial map field is used for the prototype field.
3741 return prototype_or_initial_map();
3745 Object* JSFunction::prototype() {
3746 ASSERT(has_prototype());
3747 // If the function's prototype property has been set to a non-JSObject
3748 // value, that value is stored in the constructor field of the map.
3749 if (map()->has_non_instance_prototype()) return map()->constructor();
3750 return instance_prototype();
3753 bool JSFunction::should_have_prototype() {
3754 return map()->function_with_prototype();
3758 bool JSFunction::is_compiled() {
3759 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
3763 FixedArray* JSFunction::literals() {
3764 ASSERT(!shared()->bound());
3765 return literals_or_bindings();
3769 void JSFunction::set_literals(FixedArray* literals) {
3770 ASSERT(!shared()->bound());
3771 set_literals_or_bindings(literals);
3775 FixedArray* JSFunction::function_bindings() {
3776 ASSERT(shared()->bound());
3777 return literals_or_bindings();
3781 void JSFunction::set_function_bindings(FixedArray* bindings) {
3782 ASSERT(shared()->bound());
3783 // Bound function literal may be initialized to the empty fixed array
3784 // before the bindings are set.
3785 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
3786 bindings->map() == GetHeap()->fixed_cow_array_map());
3787 set_literals_or_bindings(bindings);
3791 int JSFunction::NumberOfLiterals() {
3792 ASSERT(!shared()->bound());
3793 return literals()->length();
3797 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
3798 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3799 return READ_FIELD(this, OffsetOfFunctionWithId(id));
3803 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
3805 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3806 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
3807 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
3811 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
3812 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3813 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
3817 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
3819 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3820 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
3821 ASSERT(!HEAP->InNewSpace(value));
3825 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
3826 ACCESSORS(JSProxy, hash, Object, kHashOffset)
3827 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
3828 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
3831 void JSProxy::InitializeBody(int object_size, Object* value) {
3832 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
3833 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
3834 WRITE_FIELD(this, offset, value);
3839 ACCESSORS(JSSet, table, Object, kTableOffset)
3840 ACCESSORS(JSMap, table, Object, kTableOffset)
3841 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
3842 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
3845 ObjectHashTable* JSWeakMap::unchecked_table() {
3846 return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
3850 Address Foreign::foreign_address() {
3851 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
3855 void Foreign::set_foreign_address(Address value) {
3856 WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
3860 ACCESSORS(JSValue, value, Object, kValueOffset)
3863 JSValue* JSValue::cast(Object* obj) {
3864 ASSERT(obj->IsJSValue());
3865 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
3866 return reinterpret_cast<JSValue*>(obj);
3870 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
3871 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
3872 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
3873 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
3874 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
3875 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
3876 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
3879 JSMessageObject* JSMessageObject::cast(Object* obj) {
3880 ASSERT(obj->IsJSMessageObject());
3881 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
3882 return reinterpret_cast<JSMessageObject*>(obj);
3886 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
3887 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
3888 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3889 ACCESSORS(Code, next_code_flushing_candidate,
3890 Object, kNextCodeFlushingCandidateOffset)
3893 byte* Code::instruction_start() {
3894 return FIELD_ADDR(this, kHeaderSize);
3898 byte* Code::instruction_end() {
3899 return instruction_start() + instruction_size();
3903 int Code::body_size() {
3904 return RoundUp(instruction_size(), kObjectAlignment);
3908 FixedArray* Code::unchecked_deoptimization_data() {
3909 return reinterpret_cast<FixedArray*>(
3910 READ_FIELD(this, kDeoptimizationDataOffset));
3914 ByteArray* Code::unchecked_relocation_info() {
3915 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
3919 byte* Code::relocation_start() {
3920 return unchecked_relocation_info()->GetDataStartAddress();
3924 int Code::relocation_size() {
3925 return unchecked_relocation_info()->length();
3929 byte* Code::entry() {
3930 return instruction_start();
3934 bool Code::contains(byte* inner_pointer) {
3935 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
3939 ACCESSORS(JSArray, length, Object, kLengthOffset)
3942 ACCESSORS(JSRegExp, data, Object, kDataOffset)
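// A JSRegExp's data is a FixedArray holding the tag, source pattern and
// flags, followed by implementation-specific slots that are read and written
// through DataAt()/SetDataAt().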
3945 JSRegExp::Type JSRegExp::TypeTag() {
3946 Object* data = this->data();
3947 if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
3948 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
3949 return static_cast<JSRegExp::Type>(smi->value());
3953 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
3954 Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
3955 return static_cast<JSRegExp::Type>(smi->value());
3959 int JSRegExp::CaptureCount() {
3960   switch (TypeTag()) {
3961     case ATOM:
3962       return 0;
3963     case IRREGEXP:
3964       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
3965     default:
3966       UNREACHABLE();
3967       return -1;
3968   }
3969 }
3972 JSRegExp::Flags JSRegExp::GetFlags() {
3973 ASSERT(this->data()->IsFixedArray());
3974 Object* data = this->data();
3975 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
3976 return Flags(smi->value());
3980 String* JSRegExp::Pattern() {
3981 ASSERT(this->data()->IsFixedArray());
3982 Object* data = this->data();
3983   String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
3984   return pattern;
3985 }
3988 Object* JSRegExp::DataAt(int index) {
3989 ASSERT(TypeTag() != NOT_COMPILED);
3990 return FixedArray::cast(data())->get(index);
3994 Object* JSRegExp::DataAtUnchecked(int index) {
3995 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
3996 int offset = FixedArray::kHeaderSize + index * kPointerSize;
3997 return READ_FIELD(fa, offset);
4001 void JSRegExp::SetDataAt(int index, Object* value) {
4002 ASSERT(TypeTag() != NOT_COMPILED);
4003 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4004 FixedArray::cast(data())->set(index, value);
4008 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4009 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4010 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4011 if (value->IsSmi()) {
4012 fa->set_unchecked(index, Smi::cast(value));
4014 // We only do this during GC, so we don't need to notify the write barrier.
4015     fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4016   }
4017 }
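// The elements kind is recorded in the map; the debug-only checks below
// verify that the elements backing store actually matches the recorded kind.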
4020 ElementsKind JSObject::GetElementsKind() {
4021   ElementsKind kind = map()->elements_kind();
4022 #ifdef DEBUG
4023 FixedArrayBase* fixed_array =
4024 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4025 Map* map = fixed_array->map();
4026 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4027 (map == GetHeap()->fixed_array_map() ||
4028 map == GetHeap()->fixed_cow_array_map())) ||
4029 (kind == FAST_DOUBLE_ELEMENTS &&
4030 fixed_array->IsFixedDoubleArray()) ||
4031 (kind == DICTIONARY_ELEMENTS &&
4032 fixed_array->IsFixedArray() &&
4033 fixed_array->IsDictionary()) ||
4034 (kind > DICTIONARY_ELEMENTS));
4035 ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4036          (elements()->IsFixedArray() && elements()->length() >= 2));
4037 #endif
4038   return kind;
4039 }
4042 ElementsAccessor* JSObject::GetElementsAccessor() {
4043 return ElementsAccessor::ForKind(GetElementsKind());
4047 bool JSObject::HasFastElements() {
4048 return GetElementsKind() == FAST_ELEMENTS;
4052 bool JSObject::HasFastSmiOnlyElements() {
4053 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4057 bool JSObject::HasFastTypeElements() {
4058 ElementsKind elements_kind = GetElementsKind();
4059 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4060 elements_kind == FAST_ELEMENTS;
4064 bool JSObject::HasFastDoubleElements() {
4065 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4069 bool JSObject::HasDictionaryElements() {
4070 return GetElementsKind() == DICTIONARY_ELEMENTS;
4074 bool JSObject::HasNonStrictArgumentsElements() {
4075 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4079 bool JSObject::HasExternalArrayElements() {
4080 HeapObject* array = elements();
4081 ASSERT(array != NULL);
4082 return array->IsExternalArray();
4086 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4087 bool JSObject::HasExternal##name##Elements() { \
4088 HeapObject* array = elements(); \
4089 ASSERT(array != NULL); \
4090     if (!array->IsHeapObject())                             \
4091       return false;                                         \
4092     return array->map()->instance_type() == type;           \
4093   }
EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float, EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double, EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)


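// For reference, the first instantiation above expands to (roughly):
//
//   bool JSObject::HasExternalByteElements() {
//     HeapObject* array = elements();
//     ASSERT(array != NULL);
//     if (!array->IsHeapObject()) return false;
//     return array->map()->instance_type() == EXTERNAL_BYTE_ARRAY_TYPE;
//   }

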
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() ||
                elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastTypeElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
        elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


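// Sketch of the typical calling pattern (names and error handling here are
// illustrative only): code that is about to write into the fast backing
// store in place first makes sure it is not the shared copy-on-write array:
//
//   { MaybeObject* maybe = object->EnsureWritableFastElements();
//     if (maybe->IsFailure()) return maybe;
//   }
//   FixedArray::cast(object->elements())->set(index, value);

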
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


NumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return NumberDictionary::cast(elements());
}


bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}


StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}


void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}


uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    result = 27;
  }
  return result;
}


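// AddCharacter()/AddCharacterNoIndex() together with GetHash() implement the
// Jenkins one-at-a-time hash: per-character mixing followed by a final
// avalanche. A free-standing sketch of the same computation (helper name and
// signature are illustrative only):
//
//   uint32_t OneAtATime(const char* key, int length) {
//     uint32_t hash = 0;
//     for (int i = 0; i < length; i++) {
//       hash += static_cast<unsigned char>(key[i]);
//       hash += hash << 10;
//       hash ^= hash >> 6;
//     }
//     hash += hash << 3;
//     hash ^= hash >> 11;
//     hash += hash << 15;
//     return hash;
//   }

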
template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length) {
  StringHasher hasher(length);
  if (!hasher.has_trivial_hash()) {
    int i;
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    }
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
    }
  }
  return hasher.GetHashField();
}


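// Illustrative use (a sketch): hashing a sequential ASCII buffer. The first
// loop runs only while the characters still look like an array index; once
// that fails, the cheaper AddCharacterNoIndex() path handles the rest.
//
//   const char* chars = "12a";
//   uint32_t field = HashSequentialString(chars, 3);

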
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


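// Illustrative use (a sketch; the surrounding lookup code is hypothetical):
//
//   uint32_t index;
//   if (name->AsArrayIndex(&index)) {
//     // Treat the property as an element access at 'index'.
//   } else {
//     // Fall back to a named property lookup.
//   }

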
Object* JSReceiver::GetPrototype() {
  return HeapObject::cast(this)->map()->prototype();
}


bool JSReceiver::HasProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}


// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::set(index+2, details.AsSmi());
}


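// Note on the layout used by SetEntry() above: each dictionary entry occupies
// three consecutive FixedArray slots starting at EntryToIndex(entry) -- the
// key, the value, and the PropertyDetails encoded as a Smi.

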
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}


uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
}


MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}


bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}


uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}


uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}


MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}


template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  ASSERT(!key->IsUndefined() && !key->IsNull());
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  ASSERT(!other->IsUndefined() && !other->IsNull());
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
  return key;
}


void ObjectHashTable::RemoveEntry(int entry) {
  RemoveEntry(entry, GetHeap());
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}


void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastTypeElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}


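// Growth example for the first branch above: EnsureSize(100) on a smaller
// backing store calls Expand(100 + (100 >> 3)) == Expand(112), i.e. roughly
// 12% slack, so a run of growing writes does not reallocate on every call.

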
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


MaybeObject* JSArray::SetContent(FixedArray* storage) {
  MaybeObject* maybe_object = EnsureCanContainElements(storage);
  if (maybe_object->IsFailure()) return maybe_object;
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
  return this;
}


MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}


Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR


#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef SMI_ACCESSORS
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_MEMADDR_FIELD
#undef WRITE_MEMADDR_FIELD
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_