1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
41 #include "conversions-inl.h"
46 #include "store-buffer.h"
49 #include "incremental-marking.h"
// PropertyDetails packs property metadata into the payload of a Smi so it
// can be stored directly in descriptor arrays / dictionaries.
// NOTE(review): closing braces of these methods are elided in this excerpt.
54 PropertyDetails::PropertyDetails(Smi* smi) {
55 value_ = smi->value();
// Re-encode the raw bit pattern as a Smi for storage.
59 Smi* PropertyDetails::AsSmi() {
60 return Smi::FromInt(value_);
// Return a copy of these details with the deleted bit set
// (marks a dictionary entry as deleted without removing it).
64 PropertyDetails PropertyDetails::AsDeleted() {
65 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66 return PropertyDetails(smi);
// Code-generating macros for the boilerplate accessors/predicates below.
// TYPE_CHECKER: defines Object::Is<type>() by comparing the map's instance
//   type against a single instance-type constant.
// CAST_ACCESSOR: defines type::cast() as a checked reinterpret_cast.
// (No comments are interleaved inside the macro bodies: the trailing
// backslash continuations would splice them into the macro text.)
70 #define TYPE_CHECKER(type, instancetype) \
71 bool Object::Is##type() { \
72 return Object::IsHeapObject() && \
73 HeapObject::cast(this)->map()->instance_type() == instancetype; \
77 #define CAST_ACCESSOR(type) \
78 type* type::cast(Object* object) { \
79 ASSERT(object->Is##type()); \
80 return reinterpret_cast<type*>(object); \
84 #define INT_ACCESSORS(holder, name, offset) \
85 int holder::name() { return READ_INT_FIELD(this, offset); } \
86 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
// ACCESSORS: tagged-pointer field getter/setter; the setter emits a
// conditional write barrier for the GC. SMI_ACCESSORS stores plain ints
// boxed as Smis (no barrier needed). BOOL_GETTER/BOOL_ACCESSORS read and
// write a single bit of an integer-valued field via BooleanBit.
89 #define ACCESSORS(holder, name, type, offset) \
90 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91 void holder::set_##name(type* value, WriteBarrierMode mode) { \
92 WRITE_FIELD(this, offset, value); \
93 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
97 #define SMI_ACCESSORS(holder, name, offset) \
98 int holder::name() { \
99 Object* value = READ_FIELD(this, offset); \
100 return Smi::cast(value)->value(); \
102 void holder::set_##name(int value) { \
103 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
107 #define BOOL_GETTER(holder, field, name, offset) \
108 bool holder::name() { \
109 return BooleanBit::get(field(), offset); \
113 #define BOOL_ACCESSORS(holder, field, name, offset) \
114 bool holder::name() { \
115 return BooleanBit::get(field(), offset); \
117 void holder::set_##name(bool value) { \
118 set_##field(BooleanBit::set(field(), offset, value)); \
122 bool Object::IsFixedArrayBase() {
123 return IsFixedArray() || IsFixedDoubleArray();
// Walks the receiver's constructor's chain of function templates looking
// for `expected`; used to validate API-object receivers.
127 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
128 // There is a constraint on the object; check.
129 if (!this->IsJSObject()) return false;
130 // Fetch the constructor function of the object.
131 Object* cons_obj = JSObject::cast(this)->map()->constructor();
132 if (!cons_obj->IsJSFunction()) return false;
133 JSFunction* fun = JSFunction::cast(cons_obj);
134 // Iterate through the chain of inheriting function templates to
135 // see if the required one occurs.
136 for (Object* type = fun->shared()->function_data();
137 type->IsFunctionTemplateInfo();
138 type = FunctionTemplateInfo::cast(type)->parent_template()) {
139 if (type == expected) return true;
141 // Didn't find the required type in the inheritance chain.
// Tag tests: a Smi is identified purely by its pointer tag bits; a heap
// object is anything with the heap-object tag.
146 bool Object::IsSmi() {
147 return HAS_SMI_TAG(this);
151 bool Object::IsHeapObject() {
152 return Internals::HasHeapObjectTag(this);
// Like IsHeapObject(), but the caller guarantees (asserted) this is not a
// Failure, so a plain non-Smi tag check suffices.
156 bool Object::NonFailureIsHeapObject() {
157 ASSERT(!this->IsFailure());
158 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
162 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
// All string instance types sort below FIRST_NONSTRING_TYPE, so a single
// comparison classifies strings.
165 bool Object::IsString() {
166 return Object::IsHeapObject()
167 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
171 bool Object::IsSpecObject() {
172 return Object::IsHeapObject()
173 && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
// "Spec function": a callable in the spec sense — plain function or
// function proxy.
177 bool Object::IsSpecFunction() {
178 if (!Object::IsHeapObject()) return false;
179 InstanceType type = HeapObject::cast(this)->map()->instance_type();
180 return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
184 bool Object::IsSymbol() {
185 if (!this->IsHeapObject()) return false;
186 uint32_t type = HeapObject::cast(this)->map()->instance_type();
187 // Because the symbol tag is non-zero and no non-string types have the
188 // symbol bit set we can test for symbols with a very simple test
190 STATIC_ASSERT(kSymbolTag != 0);
191 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
192 return (type & kIsSymbolMask) != 0;
// The string-representation predicates below all follow the same pattern:
// confirm the object is a string, then interrogate its StringShape.
196 bool Object::IsConsString() {
197 if (!IsString()) return false;
198 return StringShape(String::cast(this)).IsCons();
202 bool Object::IsSlicedString() {
203 if (!IsString()) return false;
204 return StringShape(String::cast(this)).IsSliced();
208 bool Object::IsSeqString() {
209 if (!IsString()) return false;
210 return StringShape(String::cast(this)).IsSequential();
214 bool Object::IsSeqAsciiString() {
215 if (!IsString()) return false;
216 return StringShape(String::cast(this)).IsSequential() &&
217 String::cast(this)->IsAsciiRepresentation();
221 bool Object::IsSeqTwoByteString() {
222 if (!IsString()) return false;
223 return StringShape(String::cast(this)).IsSequential() &&
224 String::cast(this)->IsTwoByteRepresentation();
228 bool Object::IsExternalString() {
229 if (!IsString()) return false;
230 return StringShape(String::cast(this)).IsExternal();
234 bool Object::IsExternalAsciiString() {
235 if (!IsString()) return false;
236 return StringShape(String::cast(this)).IsExternal() &&
237 String::cast(this)->IsAsciiRepresentation();
241 bool Object::IsExternalTwoByteString() {
242 if (!IsString()) return false;
243 return StringShape(String::cast(this)).IsExternal() &&
244 String::cast(this)->IsTwoByteRepresentation();
// True iff the object is a legal elements backing store for a JSObject.
247 bool Object::HasValidElements() {
248 // Dictionary is covered under FixedArray.
249 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
// StringShape caches a string's instance-type bits so repeated
// representation/encoding queries avoid re-reading the map.
252 StringShape::StringShape(String* str)
253 : type_(str->map()->instance_type()) {
255 ASSERT((type_ & kIsNotStringMask) == kStringTag);
259 StringShape::StringShape(Map* map)
260 : type_(map->instance_type()) {
262 ASSERT((type_ & kIsNotStringMask) == kStringTag);
266 StringShape::StringShape(InstanceType t)
267 : type_(static_cast<uint32_t>(t)) {
269 ASSERT((type_ & kIsNotStringMask) == kStringTag);
273 bool StringShape::IsSymbol() {
275 STATIC_ASSERT(kSymbolTag != 0);
276 return (type_ & kIsSymbolMask) != 0;
// Encoding tests on the string's own map (not the underlying storage).
280 bool String::IsAsciiRepresentation() {
281 uint32_t type = map()->instance_type();
282 return (type & kStringEncodingMask) == kAsciiStringTag;
286 bool String::IsTwoByteRepresentation() {
287 uint32_t type = map()->instance_type();
288 return (type & kStringEncodingMask) == kTwoByteStringTag;
// For cons/sliced (indirect) strings, recurse one level to the underlying
// string to find the actual storage encoding.
292 bool String::IsAsciiRepresentationUnderneath() {
293 uint32_t type = map()->instance_type();
294 STATIC_ASSERT(kIsIndirectStringTag != 0);
295 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
297 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
298 case kAsciiStringTag:
300 case kTwoByteStringTag:
302 default: // Cons or sliced string. Need to go deeper.
303 return GetUnderlying()->IsAsciiRepresentation();
308 bool String::IsTwoByteRepresentationUnderneath() {
309 uint32_t type = map()->instance_type();
310 STATIC_ASSERT(kIsIndirectStringTag != 0);
311 STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
313 switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
314 case kAsciiStringTag:
316 case kTwoByteStringTag:
318 default: // Cons or sliced string. Need to go deeper.
319 return GetUnderlying()->IsTwoByteRepresentation();
// True if the string is ASCII-encoded, or a two-byte string whose data-hint
// bit claims the contents are all ASCII.
324 bool String::HasOnlyAsciiChars() {
325 uint32_t type = map()->instance_type();
326 return (type & kStringEncodingMask) == kAsciiStringTag ||
327 (type & kAsciiDataHintMask) == kAsciiDataHintTag;
// Representation-bit queries on the cached instance type.
331 bool StringShape::IsCons() {
332 return (type_ & kStringRepresentationMask) == kConsStringTag;
336 bool StringShape::IsSliced() {
337 return (type_ & kStringRepresentationMask) == kSlicedStringTag;
341 bool StringShape::IsIndirect() {
342 return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
346 bool StringShape::IsExternal() {
347 return (type_ & kStringRepresentationMask) == kExternalStringTag;
351 bool StringShape::IsSequential() {
352 return (type_ & kStringRepresentationMask) == kSeqStringTag;
356 StringRepresentationTag StringShape::representation_tag() {
357 uint32_t tag = (type_ & kStringRepresentationMask);
358 return static_cast<StringRepresentationTag>(tag);
362 uint32_t StringShape::encoding_tag() {
363 return type_ & kStringEncodingMask;
// Combined representation+encoding bits; checked against the constants the
// public API (v8.h Internals) relies on.
367 uint32_t StringShape::full_representation_tag() {
368 return (type_ & (kStringRepresentationMask | kStringEncodingMask));
372 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
373 Internals::kFullStringRepresentationMask);
376 bool StringShape::IsSequentialAscii() {
377 return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
381 bool StringShape::IsSequentialTwoByte() {
382 return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
386 bool StringShape::IsExternalAscii() {
387 return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
391 bool StringShape::IsExternalTwoByte() {
392 return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
396 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
397 Internals::kExternalTwoByteRepresentationTag);
// Reads one character from a pre-flattened string buffer; start_ points at
// either byte or uc16 data (the branch selecting between the two returns is
// elided in this excerpt).
400 uc32 FlatStringReader::Get(int index) {
401 ASSERT(0 <= index && index <= length_);
403 return static_cast<const byte*>(start_)[index];
405 return static_cast<const uc16*>(start_)[index];
// A JavaScript number is either an immediate Smi or a boxed HeapNumber.
410 bool Object::IsNumber() {
411 return IsSmi() || IsHeapNumber();
415 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
416 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
// Filler objects (free space and one/two-word fillers) pad the heap.
419 bool Object::IsFiller() {
420 if (!Object::IsHeapObject()) return false;
421 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
422 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
426 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
// External arrays occupy a contiguous range of instance types, so a range
// check covers all element kinds.
429 bool Object::IsExternalArray() {
430 if (!Object::IsHeapObject())
432 InstanceType instance_type =
433 HeapObject::cast(this)->map()->instance_type();
434 return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
435 instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
439 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
440 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
441 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
442 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
443 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
444 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
445 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
446 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
// MaybeObject failure queries: failures are tagged pointers, identified by
// HAS_FAILURE_TAG, carrying a type payload (see Failure below).
449 bool MaybeObject::IsFailure() {
450 return HAS_FAILURE_TAG(this);
454 bool MaybeObject::IsRetryAfterGC() {
455 return HAS_FAILURE_TAG(this)
456 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
460 bool MaybeObject::IsOutOfMemory() {
461 return HAS_FAILURE_TAG(this)
462 && Failure::cast(this)->IsOutOfMemoryException();
466 bool MaybeObject::IsException() {
467 return this == Failure::Exception();
471 bool MaybeObject::IsTheHole() {
472 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
// Checked downcast; valid only for failure-tagged values.
476 Failure* Failure::cast(MaybeObject* obj) {
477 ASSERT(HAS_FAILURE_TAG(obj));
478 return reinterpret_cast<Failure*>(obj);
// JS receivers/objects sit at the top of the instance-type enumeration, so
// a single lower-bound comparison suffices (the STATIC_ASSERTs pin the
// upper end to LAST_TYPE).
482 bool Object::IsJSReceiver() {
483 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
484 return IsHeapObject() &&
485 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
489 bool Object::IsJSObject() {
490 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
491 return IsHeapObject() &&
492 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
496 bool Object::IsJSProxy() {
497 if (!Object::IsHeapObject()) return false;
498 InstanceType type = HeapObject::cast(this)->map()->instance_type();
499 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
503 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
504 TYPE_CHECKER(JSSet, JS_SET_TYPE)
505 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
506 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
507 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
508 TYPE_CHECKER(Map, MAP_TYPE)
509 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
510 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
// Descriptor arrays have no distinct instance type; any FixedArray passes.
513 bool Object::IsDescriptorArray() {
514 return IsFixedArray();
// Heuristic checks only — used from asserts; deopt data is a FixedArray
// whose length is consistent with the deopt-entry layout.
518 bool Object::IsDeoptimizationInputData() {
519 // Must be a fixed array.
520 if (!IsFixedArray()) return false;
522 // There's no sure way to detect the difference between a fixed array and
523 // a deoptimization data array. Since this is used for asserts we can
524 // check that the length is zero or else the fixed size plus a multiple of
526 int length = FixedArray::cast(this)->length();
527 if (length == 0) return true;
529 length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
530 return length >= 0 &&
531 length % DeoptimizationInputData::kDeoptEntrySize == 0;
535 bool Object::IsDeoptimizationOutputData() {
536 if (!IsFixedArray()) return false;
537 // There's actually no way to see the difference between a fixed array and
538 // a deoptimization data array. Since this is used for asserts we can check
539 // that the length is plausible though.
540 if (FixedArray::cast(this)->length() % 2 != 0) return false;
// Contexts are recognized by their map being one of the known context maps
// owned by the heap.
545 bool Object::IsContext() {
546 if (Object::IsHeapObject()) {
547 Map* map = HeapObject::cast(this)->map();
548 Heap* heap = map->GetHeap();
549 return (map == heap->function_context_map() ||
550 map == heap->catch_context_map() ||
551 map == heap->with_context_map() ||
552 map == heap->global_context_map() ||
553 map == heap->block_context_map());
559 bool Object::IsGlobalContext() {
560 return Object::IsHeapObject() &&
561 HeapObject::cast(this)->map() ==
562 HeapObject::cast(this)->GetHeap()->global_context_map();
566 bool Object::IsSerializedScopeInfo() {
567 return Object::IsHeapObject() &&
568 HeapObject::cast(this)->map() ==
569 HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
573 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
// Template specialization so generic code can ask Is<JSFunction>(obj).
576 template <> inline bool Is<JSFunction>(Object* obj) {
577 return obj->IsJSFunction();
581 TYPE_CHECKER(Code, CODE_TYPE)
582 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
583 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
584 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
585 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
586 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
// A String wrapper object (e.g. `new String("x")`).
589 bool Object::IsStringWrapper() {
590 return IsJSValue() && JSValue::cast(this)->value()->IsString();
594 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
// Booleans are the oddballs whose kind has no non-boolean bits set.
597 bool Object::IsBoolean() {
598 return IsOddball() &&
599 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
603 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
604 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
607 template <> inline bool Is<JSArray>(Object* obj) {
608 return obj->IsJSArray();
// Hash tables are identified by the heap's shared hash_table_map.
612 bool Object::IsHashTable() {
613 return Object::IsHeapObject() &&
614 HeapObject::cast(this)->map() ==
615 HeapObject::cast(this)->GetHeap()->hash_table_map();
// Every hash table except the symbol table is considered a dictionary.
619 bool Object::IsDictionary() {
620 return IsHashTable() &&
621 this != HeapObject::cast(this)->GetHeap()->symbol_table();
625 bool Object::IsSymbolTable() {
626 return IsHashTable() && this ==
627 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
// Structural (length-based) check, backed by a full verify pass when
// FLAG_verify_heap is on; used from asserts.
631 bool Object::IsJSFunctionResultCache() {
632 if (!IsFixedArray()) return false;
633 FixedArray* self = FixedArray::cast(this);
634 int length = self->length();
635 if (length < JSFunctionResultCache::kEntriesIndex) return false;
636 if ((length - JSFunctionResultCache::kEntriesIndex)
637 % JSFunctionResultCache::kEntrySize != 0) {
641 if (FLAG_verify_heap) {
642 reinterpret_cast<JSFunctionResultCache*>(this)->
643 JSFunctionResultCacheVerify();
650 bool Object::IsNormalizedMapCache() {
651 if (!IsFixedArray()) return false;
652 if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
656 if (FLAG_verify_heap) {
657 reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
// The following cache types share the generic hash table map, so only the
// weak IsHashTable() test is possible.
664 bool Object::IsCompilationCacheTable() {
665 return IsHashTable();
669 bool Object::IsCodeCacheHashTable() {
670 return IsHashTable();
674 bool Object::IsPolymorphicCodeCacheHashTable() {
675 return IsHashTable();
679 bool Object::IsMapCache() {
680 return IsHashTable();
684 bool Object::IsPrimitive() {
685 return IsOddball() || IsNumber() || IsString();
// Global proxies always require access checks (asserted below).
689 bool Object::IsJSGlobalProxy() {
690 bool result = IsHeapObject() &&
691 (HeapObject::cast(this)->map()->instance_type() ==
692 JS_GLOBAL_PROXY_TYPE);
693 ASSERT(!result || IsAccessCheckNeeded());
698 bool Object::IsGlobalObject() {
699 if (!IsHeapObject()) return false;
701 InstanceType type = HeapObject::cast(this)->map()->instance_type();
702 return type == JS_GLOBAL_OBJECT_TYPE ||
703 type == JS_BUILTINS_OBJECT_TYPE;
707 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
708 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
// These two flags live on the map rather than in the instance type.
711 bool Object::IsUndetectableObject() {
712 return IsHeapObject()
713 && HeapObject::cast(this)->map()->is_undetectable();
717 bool Object::IsAccessCheckNeeded() {
718 return IsHeapObject()
719 && HeapObject::cast(this)->map()->is_access_check_needed();
// Structs: every instance type generated from STRUCT_LIST.
723 bool Object::IsStruct() {
724 if (!IsHeapObject()) return false;
725 switch (HeapObject::cast(this)->map()->instance_type()) {
726 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
727 STRUCT_LIST(MAKE_STRUCT_CASE)
728 #undef MAKE_STRUCT_CASE
729 default: return false;
734 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
735 bool Object::Is##Name() { \
736 return Object::IsHeapObject() \
737 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
739 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
740 #undef MAKE_STRUCT_PREDICATE
// Oddball singletons are distinguished by their kind byte.
743 bool Object::IsUndefined() {
744 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
748 bool Object::IsNull() {
749 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
753 bool Object::IsTheHole() {
754 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
758 bool Object::IsTrue() {
759 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
763 bool Object::IsFalse() {
764 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
768 bool Object::IsArgumentsMarker() {
769 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
// Numeric value of a Smi or HeapNumber (the IsSmi() condition line is
// elided in this excerpt).
773 double Object::Number() {
776 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
777 : reinterpret_cast<HeapNumber*>(this)->value();
// Converts to a Smi if the value is an integral double in Smi range;
// otherwise signals failure via Failure::Exception().
781 MaybeObject* Object::ToSmi() {
782 if (IsSmi()) return this;
783 if (IsHeapNumber()) {
784 double value = HeapNumber::cast(this)->value();
785 int int_value = FastD2I(value);
786 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
787 return Smi::FromInt(int_value);
790 return Failure::Exception();
794 bool Object::HasSpecificClassOf(String* name) {
795 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
799 MaybeObject* Object::GetElement(uint32_t index) {
800 // GetElement can trigger a getter which can cause allocation.
801 // This was not always the case. This ASSERT is here to catch
802 // leftover incorrect uses.
803 ASSERT(HEAP->IsAllocationAllowed());
804 return GetElementWithReceiver(this, index);
// Caller guarantees the lookup cannot throw; asserted, then unchecked.
808 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
809 MaybeObject* maybe = GetElementWithReceiver(this, index);
810 ASSERT(!maybe->IsFailure());
811 Object* result = NULL; // Initialization to please compiler.
812 maybe->ToObject(&result);
// Property lookup with the receiver equal to this object; the two-argument
// overload also reports the found property's attributes.
817 MaybeObject* Object::GetProperty(String* key) {
818 PropertyAttributes attributes;
819 return GetPropertyWithReceiver(this, key, &attributes);
823 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
824 return GetPropertyWithReceiver(this, key, attributes);
// Raw field access. FIELD_ADDR converts a tagged HeapObject pointer plus a
// byte offset into an untagged address (subtracting kHeapObjectTag).
828 #define FIELD_ADDR(p, offset) \
829 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
// Tagged-pointer load/store at a field offset (no write barrier).
831 #define READ_FIELD(p, offset) \
832 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
834 #define WRITE_FIELD(p, offset, value) \
835 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
// Write barriers: notify incremental marking of every recorded write, and
// remember old-to-new pointers in the store buffer. The CONDITIONAL variant
// skips the barrier when mode != UPDATE_WRITE_BARRIER.
837 #define WRITE_BARRIER(heap, object, offset, value) \
838 heap->incremental_marking()->RecordWrite( \
839 object, HeapObject::RawField(object, offset), value); \
840 if (heap->InNewSpace(value)) { \
841 heap->RecordWrite(object->address(), offset); \
844 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
845 if (mode == UPDATE_WRITE_BARRIER) { \
846 heap->incremental_marking()->RecordWrite( \
847 object, HeapObject::RawField(object, offset), value); \
848 if (heap->InNewSpace(value)) { \
849 heap->RecordWrite(object->address(), offset); \
853 #ifndef V8_TARGET_ARCH_MIPS
854 #define READ_DOUBLE_FIELD(p, offset) \
855 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
856 #else // V8_TARGET_ARCH_MIPS
857 // Prevent gcc from using load-double (mips ldc1) on (possibly)
858 // non-64-bit aligned HeapNumber::value.
// MIPS path: assemble the double from two 32-bit loads via a union.
859 static inline double read_double_field(void* p, int offset) {
864 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
865 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
868 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
869 #endif // V8_TARGET_ARCH_MIPS
871 #ifndef V8_TARGET_ARCH_MIPS
872 #define WRITE_DOUBLE_FIELD(p, offset, value) \
873 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
874 #else // V8_TARGET_ARCH_MIPS
875 // Prevent gcc from using store-double (mips sdc1) on (possibly)
876 // non-64-bit aligned HeapNumber::value.
// MIPS path: mirror of read_double_field — two 32-bit stores via a union.
877 static inline void write_double_field(void* p, int offset,
884 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
885 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
887 #define WRITE_DOUBLE_FIELD(p, offset, value) \
888 write_double_field(p, offset, value)
889 #endif // V8_TARGET_ARCH_MIPS
// Typed raw accessors for untagged (non-pointer) fields: plain int,
// intptr_t, uint32_t, 16-bit, and byte-sized fields. No write barriers —
// these must never be used to store tagged pointers.
892 #define READ_INT_FIELD(p, offset) \
893 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
895 #define WRITE_INT_FIELD(p, offset, value) \
896 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
898 #define READ_INTPTR_FIELD(p, offset) \
899 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
901 #define WRITE_INTPTR_FIELD(p, offset, value) \
902 (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
904 #define READ_UINT32_FIELD(p, offset) \
905 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
907 #define WRITE_UINT32_FIELD(p, offset, value) \
908 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
910 #define READ_SHORT_FIELD(p, offset) \
911 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
913 #define WRITE_SHORT_FIELD(p, offset, value) \
914 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
916 #define READ_BYTE_FIELD(p, offset) \
917 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
919 #define WRITE_BYTE_FIELD(p, offset, value) \
920 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
// Address of a tagged field as an Object** slot (used by the GC barriers).
923 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
924 return &READ_FIELD(obj, byte_offset);
// (Smi::value() — the enclosing declaration line is elided in this excerpt;
// the payload is extracted by the shared Internals helper.)
929 return Internals::SmiValue(this);
// Encode an int as a Smi: shift the value left past the tag bits and OR in
// the Smi tag.
933 Smi* Smi::FromInt(int value) {
934 ASSERT(Smi::IsValid(value));
935 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
936 intptr_t tagged_value =
937 (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
938 return reinterpret_cast<Smi*>(tagged_value);
942 Smi* Smi::FromIntptr(intptr_t value) {
943 ASSERT(Smi::IsValid(value));
944 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
945 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
// Failure objects encode their type (and, for RETRY_AFTER_GC, the target
// allocation space) in the bits above the failure tag.
949 Failure::Type Failure::type() const {
950 return static_cast<Type>(value() & kFailureTypeTagMask);
954 bool Failure::IsInternalError() const {
955 return type() == INTERNAL_ERROR;
959 bool Failure::IsOutOfMemoryException() const {
960 return type() == OUT_OF_MEMORY_EXCEPTION;
964 AllocationSpace Failure::allocation_space() const {
965 ASSERT_EQ(RETRY_AFTER_GC, type());
966 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
// Singleton failure constructors.
971 Failure* Failure::InternalError() {
972 return Construct(INTERNAL_ERROR);
976 Failure* Failure::Exception() {
977 return Construct(EXCEPTION);
981 Failure* Failure::OutOfMemoryException() {
982 return Construct(OUT_OF_MEMORY_EXCEPTION);
// The payload is the pointer's bits with the failure tag stripped.
986 intptr_t Failure::value() const {
987 return static_cast<intptr_t>(
988 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
992 Failure* Failure::RetryAfterGC() {
993 return RetryAfterGC(NEW_SPACE);
997 Failure* Failure::RetryAfterGC(AllocationSpace space) {
998 ASSERT((space & ~kSpaceTagMask) == 0);
999 return Construct(RETRY_AFTER_GC, space);
// Pack type+value into a tagged Failure pointer; the ASSERT checks the
// payload survives the shift (no information lost in the top bits).
1003 Failure* Failure::Construct(Type type, intptr_t value) {
1005 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1006 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1007 return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
// Range check for Smi encoding. On x64 a long smi holds any 32-bit int;
// on 32-bit targets the unsigned-add trick checks the top two bits are
// equal (00 or 11) without invoking signed-overflow UB.
1011 bool Smi::IsValid(intptr_t value) {
1013 bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1016 #ifdef V8_TARGET_ARCH_X64
1017 // To be representable as a long smi, the value must be a 32-bit integer.
1018 bool result = (value == static_cast<int32_t>(value));
1020 // To be representable as an tagged small integer, the two
1021 // most-significant bits of 'value' must be either 00 or 11 due to
1022 // sign-extension. To check this we add 01 to the two
1023 // most-significant bits, and check if the most-significant bit is 0
1025 // CAUTION: The original code below:
1026 // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1027 // may lead to incorrect results according to the C language spec, and
1028 // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1029 // compiler may produce undefined results in case of signed integer
1030 // overflow. The computation must be done w/ unsigned ints.
1031 bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1033 ASSERT(result == in_range);
// MapWord is the first word of every heap object: normally the map pointer,
// but during scavenge/compaction it can hold a forwarding address instead.
1038 MapWord MapWord::FromMap(Map* map) {
1039 return MapWord(reinterpret_cast<uintptr_t>(map));
1043 Map* MapWord::ToMap() {
1044 return reinterpret_cast<Map*>(value_);
// A forwarding address is stored untagged, so it looks like a Smi.
1048 bool MapWord::IsForwardingAddress() {
1049 return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1053 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1054 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1055 return MapWord(reinterpret_cast<uintptr_t>(raw));
1059 HeapObject* MapWord::ToForwardingAddress() {
1060 ASSERT(IsForwardingAddress());
1061 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
// Debug-only field verification helpers.
1066 void HeapObject::VerifyObjectField(int offset) {
1067 VerifyPointer(READ_FIELD(this, offset));
1070 void HeapObject::VerifySmiField(int offset) {
1071 ASSERT(READ_FIELD(this, offset)->IsSmi());
// The owning Heap is recovered from the MemoryChunk containing this object.
1076 Heap* HeapObject::GetHeap() {
1078 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1079 ASSERT(heap != NULL);
1080 ASSERT(heap->isolate() == Isolate::Current());
1085 Isolate* HeapObject::GetIsolate() {
1086 return GetHeap()->isolate();
1090 Map* HeapObject::map() {
1091 return map_word().ToMap();
// Setting the map also informs incremental marking (maps are never on
// evacuation candidates, hence the NULL slot — see TODO(1600)).
1095 void HeapObject::set_map(Map* value) {
1096 set_map_word(MapWord::FromMap(value));
1097 if (value != NULL) {
1098 // TODO(1600) We are passing NULL as a slot because maps can never be on
1099 // evacuation candidate.
1100 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1105 // Unsafe accessor omitting write barrier.
1106 void HeapObject::set_map_unsafe(Map* value) {
1107 set_map_word(MapWord::FromMap(value));
1111 MapWord HeapObject::map_word() {
1112 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1116 void HeapObject::set_map_word(MapWord map_word) {
1117 // WRITE_FIELD does not invoke write barrier, but there is no need
1119 WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
// Tagged pointer <-> raw address conversions.
1123 HeapObject* HeapObject::FromAddress(Address address) {
1124 ASSERT_TAG_ALIGNED(address);
1125 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1129 Address HeapObject::address() {
1130 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1134 int HeapObject::Size() {
1135 return SizeFromMap(map());
// GC visitor plumbing: iterate tagged slots in [start, end) byte offsets.
1139 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1140 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1141 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1145 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1146 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
// HeapNumber: boxed IEEE double plus exponent/sign views of its bits.
1150 double HeapNumber::value() {
1151 return READ_DOUBLE_FIELD(this, kValueOffset);
1155 void HeapNumber::set_value(double value) {
1156 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1160 int HeapNumber::get_exponent() {
1161 return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1162 kExponentShift) - kExponentBias;
1166 int HeapNumber::get_sign() {
1167 return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1171 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
// The elements backing store, as its common FixedArrayBase supertype.
1174 FixedArrayBase* JSObject::elements() {
1175 Object* array = READ_FIELD(this, kElementsOffset);
1176 return static_cast<FixedArrayBase*>(array);
// Debug check: in FAST_SMI_ONLY_ELEMENTS mode every element must be a Smi
// or the hole. Reads the raw field to tolerate filler pointers.
1179 void JSObject::ValidateSmiOnlyElements() {
1181 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1182 Heap* heap = GetHeap();
1183 // Don't use elements, since integrity checks will fail if there
1184 // are filler pointers in the array.
1185 FixedArray* fixed_array =
1186 reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
1187 Map* map = fixed_array->map();
1188 // Arrays that have been shifted in place can't be verified.
1189 if (map != heap->raw_unchecked_one_pointer_filler_map() &&
1190 map != heap->raw_unchecked_two_pointer_filler_map() &&
1191 map != heap->free_space_map()) {
1192 for (int i = 0; i < fixed_array->length(); i++) {
1193 Object* current = fixed_array->get(i);
1194 ASSERT(current->IsSmi() || current == heap->the_hole_value());
// Transition out of smi-only elements so arbitrary objects can be stored.
// May allocate (returns a Failure on allocation failure).
1202 MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
1204 ValidateSmiOnlyElements();
1206 if ((map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
1208 MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
1209 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1210 set_map(Map::cast(obj));
// Transition only if some incoming value is neither a Smi nor the hole.
1216 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1218 if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
1219 for (uint32_t i = 0; i < count; ++i) {
1220 Object* current = *objects++;
1221 if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
1222 return EnsureCanContainNonSmiElements();
1230 MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
1231 Object** objects = reinterpret_cast<Object**>(
1232 FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
1233 return EnsureCanContainElements(objects, elements->length());
// Installs a new backing store; the ASSERTs check the store's map agrees
// with the object's elements kind before the barriered write.
1237 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1238 ASSERT((map()->has_fast_elements() ||
1239 map()->has_fast_smi_only_elements()) ==
1240 (value->map() == GetHeap()->fixed_array_map() ||
1241 value->map() == GetHeap()->fixed_cow_array_map()));
1242 ASSERT(map()->has_fast_double_elements() ==
1243 value->IsFixedDoubleArray());
1244 ASSERT(value->HasValidElements());
1246 ValidateSmiOnlyElements();
1248 WRITE_FIELD(this, kElementsOffset, value);
1249 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
// Both init helpers install the shared empty_fixed_array; it lives outside
// new space (asserted), so no write barrier is required.
1253 void JSObject::initialize_properties() {
1254 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1255 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1259 void JSObject::initialize_elements() {
1260 ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
1261 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1262 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
// Reset to empty elements, choosing smi-only vs. fast kind per flag.
1266 MaybeObject* JSObject::ResetElements() {
1268 ElementsKind elements_kind = FLAG_smi_only_arrays
1269 ? FAST_SMI_ONLY_ELEMENTS
1271 MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
1272 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1273 set_map(Map::cast(obj));
1274 initialize_elements();
1279 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1280 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
// Oddball kind is a small int stored as a Smi; no barrier needed on write.
1283 byte Oddball::kind() {
1284 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1288 void Oddball::set_kind(byte value) {
1289 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1293 Object* JSGlobalPropertyCell::value() {
1294 return READ_FIELD(this, kValueOffset);
1298 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1299 // The write barrier is not used for global property cells.
1300 ASSERT(!val->IsJSGlobalPropertyCell());
1301 WRITE_FIELD(this, kValueOffset, val);
1302 GetHeap()->incremental_marking()->RecordWrite(
1303 this, HeapObject::RawField(this, kValueOffset), val);
1307 int JSObject::GetHeaderSize() {
1308 InstanceType type = map()->instance_type();
1309 // Check for the most common kind of JavaScript object before
1310 // falling into the generic switch. This speeds up the internal
1311 // field operations considerably on average.
1312 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1314 case JS_GLOBAL_PROXY_TYPE:
1315 return JSGlobalProxy::kSize;
1316 case JS_GLOBAL_OBJECT_TYPE:
1317 return JSGlobalObject::kSize;
1318 case JS_BUILTINS_OBJECT_TYPE:
1319 return JSBuiltinsObject::kSize;
1320 case JS_FUNCTION_TYPE:
1321 return JSFunction::kSize;
1323 return JSValue::kSize;
1325 return JSValue::kSize;
1326 case JS_WEAK_MAP_TYPE:
1327 return JSWeakMap::kSize;
1328 case JS_REGEXP_TYPE:
1329 return JSValue::kSize;
1330 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1331 return JSObject::kHeaderSize;
1332 case JS_MESSAGE_OBJECT_TYPE:
1333 return JSMessageObject::kSize;
1341 int JSObject::GetInternalFieldCount() {
1342 ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1343 // Make sure to adjust for the number of in-object properties. These
1344 // properties do contribute to the size, but are not internal fields.
1345 return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1346 map()->inobject_properties() - (map()->has_external_resource()?1:0);
1350 int JSObject::GetInternalFieldOffset(int index) {
1351 ASSERT(index < GetInternalFieldCount() && index >= 0);
1352 return GetHeaderSize() + (kPointerSize * index);
1356 Object* JSObject::GetInternalField(int index) {
1357 ASSERT(index < GetInternalFieldCount() && index >= 0);
1358 // Internal objects do follow immediately after the header, whereas in-object
1359 // properties are at the end of the object. Therefore there is no need
1360 // to adjust the index here.
1361 return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1365 void JSObject::SetInternalField(int index, Object* value) {
1366 ASSERT(index < GetInternalFieldCount() && index >= 0);
1367 // Internal objects do follow immediately after the header, whereas in-object
1368 // properties are at the end of the object. Therefore there is no need
1369 // to adjust the index here.
1370 int offset = GetHeaderSize() + (kPointerSize * index);
1371 WRITE_FIELD(this, offset, value);
1372 WRITE_BARRIER(GetHeap(), this, offset, value);
1376 void JSObject::SetExternalResourceObject(Object *value) {
1377 ASSERT(map()->has_external_resource());
1378 int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
1379 WRITE_FIELD(this, offset, value);
1380 WRITE_BARRIER(GetHeap(), this, offset, value);
1384 Object *JSObject::GetExternalResourceObject() {
1385 if (map()->has_external_resource()) {
1386 return READ_FIELD(this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
1388 return GetHeap()->undefined_value();
1393 // Access fast-case object properties at index. The use of these routines
1394 // is needed to correctly distinguish between properties stored in-object and
1395 // properties stored in the properties array.
1396 Object* JSObject::FastPropertyAt(int index) {
1397 // Adjust for the number of properties stored in the object.
1398 index -= map()->inobject_properties();
1400 int offset = map()->instance_size() + (index * kPointerSize);
1401 return READ_FIELD(this, offset);
1403 ASSERT(index < properties()->length());
1404 return properties()->get(index);
1409 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1410 // Adjust for the number of properties stored in the object.
1411 index -= map()->inobject_properties();
1413 int offset = map()->instance_size() + (index * kPointerSize);
1414 WRITE_FIELD(this, offset, value);
1415 WRITE_BARRIER(GetHeap(), this, offset, value);
1417 ASSERT(index < properties()->length());
1418 properties()->set(index, value);
1424 int JSObject::GetInObjectPropertyOffset(int index) {
1425 // Adjust for the number of properties stored in the object.
1426 index -= map()->inobject_properties();
1428 return map()->instance_size() + (index * kPointerSize);
1432 Object* JSObject::InObjectPropertyAt(int index) {
1433 // Adjust for the number of properties stored in the object.
1434 index -= map()->inobject_properties();
1436 int offset = map()->instance_size() + (index * kPointerSize);
1437 return READ_FIELD(this, offset);
1441 Object* JSObject::InObjectPropertyAtPut(int index,
1443 WriteBarrierMode mode) {
1444 // Adjust for the number of properties stored in the object.
1445 index -= map()->inobject_properties();
1447 int offset = map()->instance_size() + (index * kPointerSize);
1448 WRITE_FIELD(this, offset, value);
1449 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1455 void JSObject::InitializeBody(Map* map,
1456 Object* pre_allocated_value,
1457 Object* filler_value) {
1458 ASSERT(!filler_value->IsHeapObject() ||
1459 !GetHeap()->InNewSpace(filler_value));
1460 ASSERT(!pre_allocated_value->IsHeapObject() ||
1461 !GetHeap()->InNewSpace(pre_allocated_value));
1462 int size = map->instance_size();
1463 int offset = kHeaderSize;
1464 if (filler_value != pre_allocated_value) {
1465 int pre_allocated = map->pre_allocated_property_fields();
1466 ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1467 for (int i = 0; i < pre_allocated; i++) {
1468 WRITE_FIELD(this, offset, pre_allocated_value);
1469 offset += kPointerSize;
1472 while (offset < size) {
1473 WRITE_FIELD(this, offset, filler_value);
1474 offset += kPointerSize;
1479 bool JSObject::HasFastProperties() {
1480 return !properties()->IsDictionary();
1484 int JSObject::MaxFastProperties() {
1485 // Allow extra fast properties if the object has more than
1486 // kMaxFastProperties in-object properties. When this is the case,
1487 // it is very unlikely that the object is being used as a dictionary
1488 // and there is a good chance that allowing more map transitions
1489 // will be worth it.
1490 return Max(map()->inobject_properties(), kMaxFastProperties);
1494 void Struct::InitializeBody(int object_size) {
1495 Object* value = GetHeap()->undefined_value();
1496 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1497 WRITE_FIELD(this, offset, value);
1502 bool Object::ToArrayIndex(uint32_t* index) {
1504 int value = Smi::cast(this)->value();
1505 if (value < 0) return false;
1509 if (IsHeapNumber()) {
1510 double value = HeapNumber::cast(this)->value();
1511 uint32_t uint_value = static_cast<uint32_t>(value);
1512 if (value == static_cast<double>(uint_value)) {
1513 *index = uint_value;
1521 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1522 if (!this->IsJSValue()) return false;
1524 JSValue* js_value = JSValue::cast(this);
1525 if (!js_value->value()->IsString()) return false;
1527 String* str = String::cast(js_value->value());
1528 if (index >= (uint32_t)str->length()) return false;
1534 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1535 ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1536 return reinterpret_cast<FixedArrayBase*>(object);
1540 Object* FixedArray::get(int index) {
1541 ASSERT(index >= 0 && index < this->length());
1542 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1546 void FixedArray::set(int index, Smi* value) {
1547 ASSERT(map() != HEAP->fixed_cow_array_map());
1548 ASSERT(index >= 0 && index < this->length());
1549 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1550 int offset = kHeaderSize + index * kPointerSize;
1551 WRITE_FIELD(this, offset, value);
1555 void FixedArray::set(int index, Object* value) {
1556 ASSERT(map() != HEAP->fixed_cow_array_map());
1557 ASSERT(index >= 0 && index < this->length());
1558 int offset = kHeaderSize + index * kPointerSize;
1559 WRITE_FIELD(this, offset, value);
1560 WRITE_BARRIER(GetHeap(), this, offset, value);
1564 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1565 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1569 inline double FixedDoubleArray::hole_nan_as_double() {
1570 return BitCast<double, uint64_t>(kHoleNanInt64);
1574 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1575 ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1576 ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1577 return OS::nan_value();
1581 double FixedDoubleArray::get_scalar(int index) {
1582 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1583 map() != HEAP->fixed_array_map());
1584 ASSERT(index >= 0 && index < this->length());
1585 double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1586 ASSERT(!is_the_hole_nan(result));
1591 MaybeObject* FixedDoubleArray::get(int index) {
1592 if (is_the_hole(index)) {
1593 return GetHeap()->the_hole_value();
1595 return GetHeap()->NumberFromDouble(get_scalar(index));
1600 void FixedDoubleArray::set(int index, double value) {
1601 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1602 map() != HEAP->fixed_array_map());
1603 int offset = kHeaderSize + index * kDoubleSize;
1604 if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1605 WRITE_DOUBLE_FIELD(this, offset, value);
1609 void FixedDoubleArray::set_the_hole(int index) {
1610 ASSERT(map() != HEAP->fixed_cow_array_map() &&
1611 map() != HEAP->fixed_array_map());
1612 int offset = kHeaderSize + index * kDoubleSize;
1613 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1617 bool FixedDoubleArray::is_the_hole(int index) {
1618 int offset = kHeaderSize + index * kDoubleSize;
1619 return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
1623 void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
1624 int old_length = from->length();
1625 ASSERT(old_length < length());
1626 if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
1627 OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
1628 FIELD_ADDR(from, kHeaderSize),
1629 old_length * kDoubleSize);
1631 for (int i = 0; i < old_length; ++i) {
1632 if (from->is_the_hole(i)) {
1635 set(i, from->get_scalar(i));
1639 int offset = kHeaderSize + old_length * kDoubleSize;
1640 for (int current = from->length(); current < length(); ++current) {
1641 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1642 offset += kDoubleSize;
1647 void FixedDoubleArray::Initialize(FixedArray* from) {
1648 int old_length = from->length();
1649 ASSERT(old_length <= length());
1650 for (int i = 0; i < old_length; i++) {
1651 Object* hole_or_object = from->get(i);
1652 if (hole_or_object->IsTheHole()) {
1655 set(i, hole_or_object->Number());
1658 int offset = kHeaderSize + old_length * kDoubleSize;
1659 for (int current = from->length(); current < length(); ++current) {
1660 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1661 offset += kDoubleSize;
1666 void FixedDoubleArray::Initialize(NumberDictionary* from) {
1667 int offset = kHeaderSize;
1668 for (int current = 0; current < length(); ++current) {
1669 WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1670 offset += kDoubleSize;
1672 for (int i = 0; i < from->Capacity(); i++) {
1673 Object* key = from->KeyAt(i);
1674 if (key->IsNumber()) {
1675 uint32_t entry = static_cast<uint32_t>(key->Number());
1676 set(entry, from->ValueAt(i)->Number());
1682 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1683 Heap* heap = GetHeap();
1684 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1685 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1686 return UPDATE_WRITE_BARRIER;
1690 void FixedArray::set(int index,
1692 WriteBarrierMode mode) {
1693 ASSERT(map() != HEAP->fixed_cow_array_map());
1694 ASSERT(index >= 0 && index < this->length());
1695 int offset = kHeaderSize + index * kPointerSize;
1696 WRITE_FIELD(this, offset, value);
1697 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1701 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1704 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1705 ASSERT(index >= 0 && index < array->length());
1706 ASSERT(!HEAP->InNewSpace(value));
1707 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1711 void FixedArray::set_undefined(int index) {
1712 ASSERT(map() != HEAP->fixed_cow_array_map());
1713 set_undefined(GetHeap(), index);
1717 void FixedArray::set_undefined(Heap* heap, int index) {
1718 ASSERT(index >= 0 && index < this->length());
1719 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1720 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1721 heap->undefined_value());
1725 void FixedArray::set_null(int index) {
1726 set_null(GetHeap(), index);
1730 void FixedArray::set_null(Heap* heap, int index) {
1731 ASSERT(index >= 0 && index < this->length());
1732 ASSERT(!heap->InNewSpace(heap->null_value()));
1733 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1737 void FixedArray::set_the_hole(int index) {
1738 ASSERT(map() != HEAP->fixed_cow_array_map());
1739 ASSERT(index >= 0 && index < this->length());
1740 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1742 kHeaderSize + index * kPointerSize,
1743 GetHeap()->the_hole_value());
1747 void FixedArray::set_unchecked(int index, Smi* value) {
1748 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1749 int offset = kHeaderSize + index * kPointerSize;
1750 WRITE_FIELD(this, offset, value);
1754 void FixedArray::set_unchecked(Heap* heap,
1757 WriteBarrierMode mode) {
1758 int offset = kHeaderSize + index * kPointerSize;
1759 WRITE_FIELD(this, offset, value);
1760 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1764 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1765 ASSERT(index >= 0 && index < this->length());
1766 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1767 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1771 Object** FixedArray::data_start() {
1772 return HeapObject::RawField(this, kHeaderSize);
1776 bool DescriptorArray::IsEmpty() {
1777 ASSERT(this->IsSmi() ||
1778 this->length() > kFirstIndex ||
1779 this == HEAP->empty_descriptor_array());
1780 return this->IsSmi() || length() <= kFirstIndex;
1784 int DescriptorArray::bit_field3_storage() {
1785 Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1786 return Smi::cast(storage)->value();
1789 void DescriptorArray::set_bit_field3_storage(int value) {
1791 WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
1795 void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
1798 Object* tmp = array->get(first);
1799 NoWriteBarrierSet(array, first, array->get(second));
1800 NoWriteBarrierSet(array, second, tmp);
1804 int DescriptorArray::Search(String* name) {
1805 SLOW_ASSERT(IsSortedNoDuplicates());
1807 // Check for empty descriptor array.
1808 int nof = number_of_descriptors();
1809 if (nof == 0) return kNotFound;
1811 // Fast case: do linear search for small arrays.
1812 const int kMaxElementsForLinearSearch = 8;
1813 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1814 return LinearSearch(name, nof);
1817 // Slow case: perform binary search.
1818 return BinarySearch(name, 0, nof - 1);
1822 int DescriptorArray::SearchWithCache(String* name) {
1823 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1824 if (number == DescriptorLookupCache::kAbsent) {
1825 number = Search(name);
1826 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1832 String* DescriptorArray::GetKey(int descriptor_number) {
1833 ASSERT(descriptor_number < number_of_descriptors());
1834 return String::cast(get(ToKeyIndex(descriptor_number)));
1838 Object* DescriptorArray::GetValue(int descriptor_number) {
1839 ASSERT(descriptor_number < number_of_descriptors());
1840 return GetContentArray()->get(ToValueIndex(descriptor_number));
1844 Smi* DescriptorArray::GetDetails(int descriptor_number) {
1845 ASSERT(descriptor_number < number_of_descriptors());
1846 return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
1850 PropertyType DescriptorArray::GetType(int descriptor_number) {
1851 ASSERT(descriptor_number < number_of_descriptors());
1852 return PropertyDetails(GetDetails(descriptor_number)).type();
1856 int DescriptorArray::GetFieldIndex(int descriptor_number) {
1857 return Descriptor::IndexFromValue(GetValue(descriptor_number));
1861 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
1862 return JSFunction::cast(GetValue(descriptor_number));
1866 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
1867 ASSERT(GetType(descriptor_number) == CALLBACKS);
1868 return GetValue(descriptor_number);
1872 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
1873 ASSERT(GetType(descriptor_number) == CALLBACKS);
1874 Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
1875 return reinterpret_cast<AccessorDescriptor*>(p->address());
1879 bool DescriptorArray::IsProperty(int descriptor_number) {
1880 return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
1884 bool DescriptorArray::IsTransition(int descriptor_number) {
1885 PropertyType t = GetType(descriptor_number);
1886 return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
1887 t == ELEMENTS_TRANSITION;
1891 bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
1892 return GetType(descriptor_number) == NULL_DESCRIPTOR;
1896 bool DescriptorArray::IsDontEnum(int descriptor_number) {
1897 return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
1901 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
1902 desc->Init(GetKey(descriptor_number),
1903 GetValue(descriptor_number),
1904 PropertyDetails(GetDetails(descriptor_number)));
1908 void DescriptorArray::Set(int descriptor_number,
1910 const WhitenessWitness&) {
1912 ASSERT(descriptor_number < number_of_descriptors());
1914 // Make sure none of the elements in desc are in new space.
1915 ASSERT(!HEAP->InNewSpace(desc->GetKey()));
1916 ASSERT(!HEAP->InNewSpace(desc->GetValue()));
1918 NoWriteBarrierSet(this,
1919 ToKeyIndex(descriptor_number),
1921 FixedArray* content_array = GetContentArray();
1922 NoWriteBarrierSet(content_array,
1923 ToValueIndex(descriptor_number),
1925 NoWriteBarrierSet(content_array,
1926 ToDetailsIndex(descriptor_number),
1927 desc->GetDetails().AsSmi());
1931 void DescriptorArray::CopyFrom(int index,
1932 DescriptorArray* src,
1934 const WhitenessWitness& witness) {
1936 src->Get(src_index, &desc);
1937 Set(index, &desc, witness);
1941 void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
1942 NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
1943 FixedArray* content_array = GetContentArray();
1944 NoWriteBarrierSwap(content_array,
1945 ToValueIndex(first),
1946 ToValueIndex(second));
1947 NoWriteBarrierSwap(content_array,
1948 ToDetailsIndex(first),
1949 ToDetailsIndex(second));
1953 DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
1954 : marking_(array->GetHeap()->incremental_marking()) {
1955 marking_->EnterNoMarkingScope();
1956 if (array->number_of_descriptors() > 0) {
1957 ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
1958 ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
1963 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
1964 marking_->LeaveNoMarkingScope();
1968 template<typename Shape, typename Key>
1969 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
1970 const int kMinCapacity = 32;
1971 int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
1972 if (capacity < kMinCapacity) {
1973 capacity = kMinCapacity; // Guarantee min capacity.
1979 template<typename Shape, typename Key>
1980 int HashTable<Shape, Key>::FindEntry(Key key) {
1981 return FindEntry(GetIsolate(), key);
1985 // Find entry for key otherwise return kNotFound.
1986 template<typename Shape, typename Key>
1987 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
1988 uint32_t capacity = Capacity();
1989 uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
1991 // EnsureCapacity will guarantee the hash table is never full.
1993 Object* element = KeyAt(entry);
1994 if (element == isolate->heap()->undefined_value()) break; // Empty entry.
1995 if (element != isolate->heap()->null_value() &&
1996 Shape::IsMatch(key, element)) return entry;
1997 entry = NextProbe(entry, count++, capacity);
2003 bool NumberDictionary::requires_slow_elements() {
2004 Object* max_index_object = get(kMaxNumberKeyIndex);
2005 if (!max_index_object->IsSmi()) return false;
2007 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2010 uint32_t NumberDictionary::max_number_key() {
2011 ASSERT(!requires_slow_elements());
2012 Object* max_index_object = get(kMaxNumberKeyIndex);
2013 if (!max_index_object->IsSmi()) return 0;
2014 uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2015 return value >> kRequiresSlowElementsTagSize;
2018 void NumberDictionary::set_requires_slow_elements() {
2019 set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2023 // ------------------------------------
2027 CAST_ACCESSOR(FixedArray)
2028 CAST_ACCESSOR(FixedDoubleArray)
2029 CAST_ACCESSOR(DescriptorArray)
2030 CAST_ACCESSOR(DeoptimizationInputData)
2031 CAST_ACCESSOR(DeoptimizationOutputData)
2032 CAST_ACCESSOR(SymbolTable)
2033 CAST_ACCESSOR(JSFunctionResultCache)
2034 CAST_ACCESSOR(NormalizedMapCache)
2035 CAST_ACCESSOR(CompilationCacheTable)
2036 CAST_ACCESSOR(CodeCacheHashTable)
2037 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
2038 CAST_ACCESSOR(MapCache)
2039 CAST_ACCESSOR(String)
2040 CAST_ACCESSOR(SeqString)
2041 CAST_ACCESSOR(SeqAsciiString)
2042 CAST_ACCESSOR(SeqTwoByteString)
2043 CAST_ACCESSOR(SlicedString)
2044 CAST_ACCESSOR(ConsString)
2045 CAST_ACCESSOR(ExternalString)
2046 CAST_ACCESSOR(ExternalAsciiString)
2047 CAST_ACCESSOR(ExternalTwoByteString)
2048 CAST_ACCESSOR(JSReceiver)
2049 CAST_ACCESSOR(JSObject)
2051 CAST_ACCESSOR(HeapObject)
2052 CAST_ACCESSOR(HeapNumber)
2053 CAST_ACCESSOR(Oddball)
2054 CAST_ACCESSOR(JSGlobalPropertyCell)
2055 CAST_ACCESSOR(SharedFunctionInfo)
2057 CAST_ACCESSOR(JSFunction)
2058 CAST_ACCESSOR(GlobalObject)
2059 CAST_ACCESSOR(JSGlobalProxy)
2060 CAST_ACCESSOR(JSGlobalObject)
2061 CAST_ACCESSOR(JSBuiltinsObject)
2063 CAST_ACCESSOR(JSArray)
2064 CAST_ACCESSOR(JSRegExp)
2065 CAST_ACCESSOR(JSProxy)
2066 CAST_ACCESSOR(JSFunctionProxy)
2067 CAST_ACCESSOR(JSSet)
2068 CAST_ACCESSOR(JSMap)
2069 CAST_ACCESSOR(JSWeakMap)
2070 CAST_ACCESSOR(Foreign)
2071 CAST_ACCESSOR(ByteArray)
2072 CAST_ACCESSOR(FreeSpace)
2073 CAST_ACCESSOR(ExternalArray)
2074 CAST_ACCESSOR(ExternalByteArray)
2075 CAST_ACCESSOR(ExternalUnsignedByteArray)
2076 CAST_ACCESSOR(ExternalShortArray)
2077 CAST_ACCESSOR(ExternalUnsignedShortArray)
2078 CAST_ACCESSOR(ExternalIntArray)
2079 CAST_ACCESSOR(ExternalUnsignedIntArray)
2080 CAST_ACCESSOR(ExternalFloatArray)
2081 CAST_ACCESSOR(ExternalDoubleArray)
2082 CAST_ACCESSOR(ExternalPixelArray)
2083 CAST_ACCESSOR(Struct)
2086 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2087 STRUCT_LIST(MAKE_STRUCT_CAST)
2088 #undef MAKE_STRUCT_CAST
2091 template <typename Shape, typename Key>
2092 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2093 ASSERT(obj->IsHashTable());
2094 return reinterpret_cast<HashTable*>(obj);
2098 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2099 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2101 SMI_ACCESSORS(String, length, kLengthOffset)
2102 SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
2105 uint32_t String::hash_field() {
2106 return READ_UINT32_FIELD(this, kHashFieldOffset);
2110 void String::set_hash_field(uint32_t value) {
2111 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2112 #if V8_HOST_ARCH_64_BIT
2113 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2118 bool String::Equals(String* other) {
2119 if (other == this) return true;
2120 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2123 return SlowEquals(other);
2127 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2128 if (!StringShape(this).IsCons()) return this;
2129 ConsString* cons = ConsString::cast(this);
2130 if (cons->IsFlat()) return cons->first();
2131 return SlowTryFlatten(pretenure);
2135 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2136 MaybeObject* flat = TryFlatten(pretenure);
2137 Object* successfully_flattened;
2138 if (!flat->ToObject(&successfully_flattened)) return this;
2139 return String::cast(successfully_flattened);
2143 uint16_t String::Get(int index) {
2144 ASSERT(index >= 0 && index < length());
2145 switch (StringShape(this).full_representation_tag()) {
2146 case kSeqStringTag | kAsciiStringTag:
2147 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2148 case kSeqStringTag | kTwoByteStringTag:
2149 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2150 case kConsStringTag | kAsciiStringTag:
2151 case kConsStringTag | kTwoByteStringTag:
2152 return ConsString::cast(this)->ConsStringGet(index);
2153 case kExternalStringTag | kAsciiStringTag:
2154 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
2155 case kExternalStringTag | kTwoByteStringTag:
2156 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
2157 case kSlicedStringTag | kAsciiStringTag:
2158 case kSlicedStringTag | kTwoByteStringTag:
2159 return SlicedString::cast(this)->SlicedStringGet(index);
2169 void String::Set(int index, uint16_t value) {
2170 ASSERT(index >= 0 && index < length());
2171 ASSERT(StringShape(this).IsSequential());
2173 return this->IsAsciiRepresentation()
2174 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2175 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2179 bool String::IsFlat() {
2180 if (!StringShape(this).IsCons()) return true;
2181 return ConsString::cast(this)->second()->length() == 0;
2185 String* String::GetUnderlying() {
2186 // Giving direct access to underlying string only makes sense if the
2187 // wrapping string is already flattened.
2188 ASSERT(this->IsFlat());
2189 ASSERT(StringShape(this).IsIndirect());
2190 STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2191 const int kUnderlyingOffset = SlicedString::kParentOffset;
2192 return String::cast(READ_FIELD(this, kUnderlyingOffset));
2196 uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
2197 ASSERT(index >= 0 && index < length());
2198 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2202 void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
2203 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2204 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2205 static_cast<byte>(value));
2209 Address SeqAsciiString::GetCharsAddress() {
2210 return FIELD_ADDR(this, kHeaderSize);
2214 char* SeqAsciiString::GetChars() {
2215 return reinterpret_cast<char*>(GetCharsAddress());
2219 Address SeqTwoByteString::GetCharsAddress() {
2220 return FIELD_ADDR(this, kHeaderSize);
2224 uc16* SeqTwoByteString::GetChars() {
2225 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2229 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
2230 ASSERT(index >= 0 && index < length());
2231 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2235 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
2236 ASSERT(index >= 0 && index < length());
2237 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2241 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
2242 return SizeFor(length());
2246 int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
2247 return SizeFor(length());
2251 String* SlicedString::parent() {
2252 return String::cast(READ_FIELD(this, kParentOffset));
2256 void SlicedString::set_parent(String* parent) {
2257 ASSERT(parent->IsSeqString() || parent->IsExternalString());
2258 WRITE_FIELD(this, kParentOffset, parent);
2262 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2265 String* ConsString::first() {
2266 return String::cast(READ_FIELD(this, kFirstOffset));
2270 Object* ConsString::unchecked_first() {
2271 return READ_FIELD(this, kFirstOffset);
2275 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2276 WRITE_FIELD(this, kFirstOffset, value);
2277 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2281 String* ConsString::second() {
2282 return String::cast(READ_FIELD(this, kSecondOffset));
2286 Object* ConsString::unchecked_second() {
2287 return READ_FIELD(this, kSecondOffset);
2291 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2292 WRITE_FIELD(this, kSecondOffset, value);
2293 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2297 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2298 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2302 void ExternalAsciiString::set_resource(
2303 const ExternalAsciiString::Resource* resource) {
2304 *reinterpret_cast<const Resource**>(
2305 FIELD_ADDR(this, kResourceOffset)) = resource;
2309 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
2310 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2314 void ExternalTwoByteString::set_resource(
2315 const ExternalTwoByteString::Resource* resource) {
2316 *reinterpret_cast<const Resource**>(
2317 FIELD_ADDR(this, kResourceOffset)) = resource;
2321 void JSFunctionResultCache::MakeZeroSize() {
2322 set_finger_index(kEntriesIndex);
2323 set_size(kEntriesIndex);
2327 void JSFunctionResultCache::Clear() {
2328 int cache_size = size();
2329 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2330 MemsetPointer(entries_start,
2331 GetHeap()->the_hole_value(),
2332 cache_size - kEntriesIndex);
2337 int JSFunctionResultCache::size() {
2338 return Smi::cast(get(kCacheSizeIndex))->value();
2342 void JSFunctionResultCache::set_size(int size) {
2343 set(kCacheSizeIndex, Smi::FromInt(size));
2347 int JSFunctionResultCache::finger_index() {
2348 return Smi::cast(get(kFingerIndex))->value();
2352 void JSFunctionResultCache::set_finger_index(int finger_index) {
2353 set(kFingerIndex, Smi::FromInt(finger_index));
2357 byte ByteArray::get(int index) {
2358 ASSERT(index >= 0 && index < this->length());
2359 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2363 void ByteArray::set(int index, byte value) {
2364 ASSERT(index >= 0 && index < this->length());
2365 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2369 int ByteArray::get_int(int index) {
2370 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2371 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2375 ByteArray* ByteArray::FromDataStartAddress(Address address) {
2376 ASSERT_TAG_ALIGNED(address);
2377 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2381 Address ByteArray::GetDataStartAddress() {
2382 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2386 uint8_t* ExternalPixelArray::external_pixel_pointer() {
2387 return reinterpret_cast<uint8_t*>(external_pointer());
2391 uint8_t ExternalPixelArray::get_scalar(int index) {
2392 ASSERT((index >= 0) && (index < this->length()));
2393 uint8_t* ptr = external_pixel_pointer();
2398 MaybeObject* ExternalPixelArray::get(int index) {
2399 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2403 void ExternalPixelArray::set(int index, uint8_t value) {
2404 ASSERT((index >= 0) && (index < this->length()));
2405 uint8_t* ptr = external_pixel_pointer();
2410 void* ExternalArray::external_pointer() {
2411 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2412 return reinterpret_cast<void*>(ptr);
2416 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2417 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2418 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2422 int8_t ExternalByteArray::get_scalar(int index) {
2423 ASSERT((index >= 0) && (index < this->length()));
2424 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2429 MaybeObject* ExternalByteArray::get(int index) {
2430 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2434 void ExternalByteArray::set(int index, int8_t value) {
2435 ASSERT((index >= 0) && (index < this->length()));
2436 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2441 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
2442 ASSERT((index >= 0) && (index < this->length()));
2443 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2448 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2449 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2453 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2454 ASSERT((index >= 0) && (index < this->length()));
2455 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2460 int16_t ExternalShortArray::get_scalar(int index) {
2461 ASSERT((index >= 0) && (index < this->length()));
2462 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2467 MaybeObject* ExternalShortArray::get(int index) {
2468 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2472 void ExternalShortArray::set(int index, int16_t value) {
2473 ASSERT((index >= 0) && (index < this->length()));
2474 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2479 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
2480 ASSERT((index >= 0) && (index < this->length()));
2481 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2486 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2487 return Smi::FromInt(static_cast<int>(get_scalar(index)));
2491 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2492 ASSERT((index >= 0) && (index < this->length()));
2493 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2498 int32_t ExternalIntArray::get_scalar(int index) {
2499 ASSERT((index >= 0) && (index < this->length()));
2500 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2505 MaybeObject* ExternalIntArray::get(int index) {
2506 return GetHeap()->NumberFromInt32(get_scalar(index));
2510 void ExternalIntArray::set(int index, int32_t value) {
2511 ASSERT((index >= 0) && (index < this->length()));
2512 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2517 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
2518 ASSERT((index >= 0) && (index < this->length()));
2519 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2524 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2525 return GetHeap()->NumberFromUint32(get_scalar(index));
2529 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2530 ASSERT((index >= 0) && (index < this->length()));
2531 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2536 float ExternalFloatArray::get_scalar(int index) {
2537 ASSERT((index >= 0) && (index < this->length()));
2538 float* ptr = static_cast<float*>(external_pointer());
2543 MaybeObject* ExternalFloatArray::get(int index) {
2544 return GetHeap()->NumberFromDouble(get_scalar(index));
2548 void ExternalFloatArray::set(int index, float value) {
2549 ASSERT((index >= 0) && (index < this->length()));
2550 float* ptr = static_cast<float*>(external_pointer());
2555 double ExternalDoubleArray::get_scalar(int index) {
2556 ASSERT((index >= 0) && (index < this->length()));
2557 double* ptr = static_cast<double*>(external_pointer());
2562 MaybeObject* ExternalDoubleArray::get(int index) {
2563 return GetHeap()->NumberFromDouble(get_scalar(index));
2567 void ExternalDoubleArray::set(int index, double value) {
2568 ASSERT((index >= 0) && (index < this->length()));
2569 double* ptr = static_cast<double*>(external_pointer());
2574 int Map::visitor_id() {
2575 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2579 void Map::set_visitor_id(int id) {
2580 ASSERT(0 <= id && id < 256);
2581 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2585 int Map::instance_size() {
2586 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2590 int Map::inobject_properties() {
2591 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2595 int Map::pre_allocated_property_fields() {
2596 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2600 int HeapObject::SizeFromMap(Map* map) {
2601 int instance_size = map->instance_size();
2602 if (instance_size != kVariableSizeSentinel) return instance_size;
2603 // We can ignore the "symbol" bit becase it is only set for symbols
2604 // and implies a string type.
2605 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2606 // Only inline the most frequent cases.
2607 if (instance_type == FIXED_ARRAY_TYPE) {
2608 return FixedArray::BodyDescriptor::SizeOf(map, this);
2610 if (instance_type == ASCII_STRING_TYPE) {
2611 return SeqAsciiString::SizeFor(
2612 reinterpret_cast<SeqAsciiString*>(this)->length());
2614 if (instance_type == BYTE_ARRAY_TYPE) {
2615 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2617 if (instance_type == FREE_SPACE_TYPE) {
2618 return reinterpret_cast<FreeSpace*>(this)->size();
2620 if (instance_type == STRING_TYPE) {
2621 return SeqTwoByteString::SizeFor(
2622 reinterpret_cast<SeqTwoByteString*>(this)->length());
2624 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2625 return FixedDoubleArray::SizeFor(
2626 reinterpret_cast<FixedDoubleArray*>(this)->length());
2628 ASSERT(instance_type == CODE_TYPE);
2629 return reinterpret_cast<Code*>(this)->CodeSize();
2633 void Map::set_instance_size(int value) {
2634 ASSERT_EQ(0, value & (kPointerSize - 1));
2635 value >>= kPointerSizeLog2;
2636 ASSERT(0 <= value && value < 256);
2637 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2641 void Map::set_inobject_properties(int value) {
2642 ASSERT(0 <= value && value < 256);
2643 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2647 void Map::set_pre_allocated_property_fields(int value) {
2648 ASSERT(0 <= value && value < 256);
2649 WRITE_BYTE_FIELD(this,
2650 kPreAllocatedPropertyFieldsOffset,
2651 static_cast<byte>(value));
2655 InstanceType Map::instance_type() {
2656 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2660 void Map::set_instance_type(InstanceType value) {
2661 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2665 int Map::unused_property_fields() {
2666 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2670 void Map::set_unused_property_fields(int value) {
2671 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2675 byte Map::bit_field() {
2676 return READ_BYTE_FIELD(this, kBitFieldOffset);
2680 void Map::set_bit_field(byte value) {
2681 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2685 byte Map::bit_field2() {
2686 return READ_BYTE_FIELD(this, kBitField2Offset);
2690 void Map::set_bit_field2(byte value) {
2691 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2695 void Map::set_non_instance_prototype(bool value) {
2697 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2699 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2704 bool Map::has_non_instance_prototype() {
2705 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2709 void Map::set_function_with_prototype(bool value) {
2711 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2713 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2718 bool Map::function_with_prototype() {
2719 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2723 void Map::set_is_access_check_needed(bool access_check_needed) {
2724 if (access_check_needed) {
2725 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2727 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2732 bool Map::is_access_check_needed() {
2733 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2737 void Map::set_is_extensible(bool value) {
2739 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2741 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2745 bool Map::is_extensible() {
2746 return ((1 << kIsExtensible) & bit_field2()) != 0;
2750 void Map::set_attached_to_shared_function_info(bool value) {
2752 set_bit_field3(bit_field3() | (1 << kAttachedToSharedFunctionInfo));
2754 set_bit_field3(bit_field3() & ~(1 << kAttachedToSharedFunctionInfo));
2758 bool Map::attached_to_shared_function_info() {
2759 return ((1 << kAttachedToSharedFunctionInfo) & bit_field3()) != 0;
2763 void Map::set_is_shared(bool value) {
2765 set_bit_field3(bit_field3() | (1 << kIsShared));
2767 set_bit_field3(bit_field3() & ~(1 << kIsShared));
2771 bool Map::is_shared() {
2772 return ((1 << kIsShared) & bit_field3()) != 0;
2775 void Map::set_has_external_resource(bool value) {
2777 set_bit_field(bit_field() | (1 << kHasExternalResource));
2779 set_bit_field(bit_field() & ~(1 << kHasExternalResource));
2783 bool Map::has_external_resource()
2785 return ((1 << kHasExternalResource) & bit_field()) != 0;
2789 void Map::set_use_user_object_comparison(bool value) {
2791 set_bit_field2(bit_field2() | (1 << kUseUserObjectComparison));
2793 set_bit_field2(bit_field2() & ~(1 << kUseUserObjectComparison));
2797 bool Map::use_user_object_comparison() {
2798 return ((1 << kUseUserObjectComparison) & bit_field2()) != 0;
2802 void Map::set_named_interceptor_is_fallback(bool value)
2805 set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
2807 set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
2811 bool Map::named_interceptor_is_fallback()
2813 return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
2817 JSFunction* Map::unchecked_constructor() {
2818 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2822 Code::Flags Code::flags() {
2823 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2827 void Code::set_flags(Code::Flags flags) {
2828 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2829 // Make sure that all call stubs have an arguments count.
2830 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2831 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
2832 ExtractArgumentsCountFromFlags(flags) >= 0);
2833 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2837 Code::Kind Code::kind() {
2838 return ExtractKindFromFlags(flags());
2842 InlineCacheState Code::ic_state() {
2843 InlineCacheState result = ExtractICStateFromFlags(flags());
2844 // Only allow uninitialized or debugger states for non-IC code
2845 // objects. This is used in the debugger to determine whether or not
2846 // a call to code object has been replaced with a debug break call.
2847 ASSERT(is_inline_cache_stub() ||
2848 result == UNINITIALIZED ||
2849 result == DEBUG_BREAK ||
2850 result == DEBUG_PREPARE_STEP_IN);
2855 Code::ExtraICState Code::extra_ic_state() {
2856 ASSERT(is_inline_cache_stub());
2857 return ExtractExtraICStateFromFlags(flags());
2861 PropertyType Code::type() {
2862 return ExtractTypeFromFlags(flags());
2866 int Code::arguments_count() {
2867 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
2868 return ExtractArgumentsCountFromFlags(flags());
2872 int Code::major_key() {
2873 ASSERT(kind() == STUB ||
2874 kind() == UNARY_OP_IC ||
2875 kind() == BINARY_OP_IC ||
2876 kind() == COMPARE_IC ||
2877 kind() == TO_BOOLEAN_IC);
2878 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
2882 void Code::set_major_key(int major) {
2883 ASSERT(kind() == STUB ||
2884 kind() == UNARY_OP_IC ||
2885 kind() == BINARY_OP_IC ||
2886 kind() == COMPARE_IC ||
2887 kind() == TO_BOOLEAN_IC);
2888 ASSERT(0 <= major && major < 256);
2889 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
2893 bool Code::is_pregenerated() {
2894 return kind() == STUB && IsPregeneratedField::decode(flags());
2898 void Code::set_is_pregenerated(bool value) {
2899 ASSERT(kind() == STUB);
2901 f = static_cast<Flags>(IsPregeneratedField::update(f, value));
2906 bool Code::optimizable() {
2907 ASSERT(kind() == FUNCTION);
2908 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
2912 void Code::set_optimizable(bool value) {
2913 ASSERT(kind() == FUNCTION);
2914 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
2918 bool Code::has_deoptimization_support() {
2919 ASSERT(kind() == FUNCTION);
2920 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2921 return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
2925 void Code::set_has_deoptimization_support(bool value) {
2926 ASSERT(kind() == FUNCTION);
2927 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2928 flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
2929 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2933 bool Code::has_debug_break_slots() {
2934 ASSERT(kind() == FUNCTION);
2935 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2936 return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
2940 void Code::set_has_debug_break_slots(bool value) {
2941 ASSERT(kind() == FUNCTION);
2942 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2943 flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
2944 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2948 bool Code::is_compiled_optimizable() {
2949 ASSERT(kind() == FUNCTION);
2950 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2951 return FullCodeFlagsIsCompiledOptimizable::decode(flags);
2955 void Code::set_compiled_optimizable(bool value) {
2956 ASSERT(kind() == FUNCTION);
2957 byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
2958 flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
2959 WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
2963 int Code::allow_osr_at_loop_nesting_level() {
2964 ASSERT(kind() == FUNCTION);
2965 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
2969 void Code::set_allow_osr_at_loop_nesting_level(int level) {
2970 ASSERT(kind() == FUNCTION);
2971 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
2972 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
2976 unsigned Code::stack_slots() {
2977 ASSERT(kind() == OPTIMIZED_FUNCTION);
2978 return READ_UINT32_FIELD(this, kStackSlotsOffset);
2982 void Code::set_stack_slots(unsigned slots) {
2983 ASSERT(kind() == OPTIMIZED_FUNCTION);
2984 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
2988 unsigned Code::safepoint_table_offset() {
2989 ASSERT(kind() == OPTIMIZED_FUNCTION);
2990 return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
2994 void Code::set_safepoint_table_offset(unsigned offset) {
2995 ASSERT(kind() == OPTIMIZED_FUNCTION);
2996 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2997 WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3001 unsigned Code::stack_check_table_offset() {
3002 ASSERT(kind() == FUNCTION);
3003 return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3007 void Code::set_stack_check_table_offset(unsigned offset) {
3008 ASSERT(kind() == FUNCTION);
3009 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3010 WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3014 CheckType Code::check_type() {
3015 ASSERT(is_call_stub() || is_keyed_call_stub());
3016 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3017 return static_cast<CheckType>(type);
3021 void Code::set_check_type(CheckType value) {
3022 ASSERT(is_call_stub() || is_keyed_call_stub());
3023 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3027 byte Code::unary_op_type() {
3028 ASSERT(is_unary_op_stub());
3029 return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3033 void Code::set_unary_op_type(byte value) {
3034 ASSERT(is_unary_op_stub());
3035 WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3039 byte Code::binary_op_type() {
3040 ASSERT(is_binary_op_stub());
3041 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3045 void Code::set_binary_op_type(byte value) {
3046 ASSERT(is_binary_op_stub());
3047 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3051 byte Code::binary_op_result_type() {
3052 ASSERT(is_binary_op_stub());
3053 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3057 void Code::set_binary_op_result_type(byte value) {
3058 ASSERT(is_binary_op_stub());
3059 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3063 byte Code::compare_state() {
3064 ASSERT(is_compare_ic_stub());
3065 return READ_BYTE_FIELD(this, kCompareStateOffset);
3069 void Code::set_compare_state(byte value) {
3070 ASSERT(is_compare_ic_stub());
3071 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3075 byte Code::to_boolean_state() {
3076 ASSERT(is_to_boolean_ic_stub());
3077 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3081 void Code::set_to_boolean_state(byte value) {
3082 ASSERT(is_to_boolean_ic_stub());
3083 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3087 bool Code::has_function_cache() {
3088 ASSERT(kind() == STUB);
3089 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3093 void Code::set_has_function_cache(bool flag) {
3094 ASSERT(kind() == STUB);
3095 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3099 bool Code::is_inline_cache_stub() {
3100 Kind kind = this->kind();
3101 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3105 Code::Flags Code::ComputeFlags(Kind kind,
3106 InlineCacheState ic_state,
3107 ExtraICState extra_ic_state,
3110 InlineCacheHolderFlag holder) {
3111 // Extra IC state is only allowed for call IC stubs or for store IC
3113 ASSERT(extra_ic_state == kNoExtraICState ||
3116 kind == KEYED_STORE_IC);
3117 // Compute the bit mask.
3118 int bits = KindField::encode(kind)
3119 | ICStateField::encode(ic_state)
3120 | TypeField::encode(type)
3121 | ExtraICStateField::encode(extra_ic_state)
3122 | (argc << kArgumentsCountShift)
3123 | CacheHolderField::encode(holder);
3124 return static_cast<Flags>(bits);
3128 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3130 ExtraICState extra_ic_state,
3131 InlineCacheHolderFlag holder,
3133 return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3137 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3138 return KindField::decode(flags);
3142 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3143 return ICStateField::decode(flags);
3147 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
3148 return ExtraICStateField::decode(flags);
3152 PropertyType Code::ExtractTypeFromFlags(Flags flags) {
3153 return TypeField::decode(flags);
3157 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3158 return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3162 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
3163 return CacheHolderField::decode(flags);
3167 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
3168 int bits = flags & ~TypeField::kMask;
3169 return static_cast<Flags>(bits);
3173 Code* Code::GetCodeFromTargetAddress(Address address) {
3174 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3175 // GetCodeFromTargetAddress might be called when marking objects during mark
3176 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3177 // Code::cast. Code::cast does not work when the object's map is
3179 Code* result = reinterpret_cast<Code*>(code);
3184 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3186 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3190 Object* Map::prototype() {
3191 return READ_FIELD(this, kPrototypeOffset);
3195 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3196 ASSERT(value->IsNull() || value->IsJSReceiver());
3197 WRITE_FIELD(this, kPrototypeOffset, value);
3198 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3202 DescriptorArray* Map::instance_descriptors() {
3203 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3204 if (object->IsSmi()) {
3205 return HEAP->empty_descriptor_array();
3207 return DescriptorArray::cast(object);
3212 void Map::init_instance_descriptors() {
3213 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3217 void Map::clear_instance_descriptors() {
3218 Object* object = READ_FIELD(this,
3219 kInstanceDescriptorsOrBitField3Offset);
3220 if (!object->IsSmi()) {
3223 kInstanceDescriptorsOrBitField3Offset,
3224 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3229 void Map::set_instance_descriptors(DescriptorArray* value,
3230 WriteBarrierMode mode) {
3231 Object* object = READ_FIELD(this,
3232 kInstanceDescriptorsOrBitField3Offset);
3233 Heap* heap = GetHeap();
3234 if (value == heap->empty_descriptor_array()) {
3235 clear_instance_descriptors();
3238 if (object->IsSmi()) {
3239 value->set_bit_field3_storage(Smi::cast(object)->value());
3241 value->set_bit_field3_storage(
3242 DescriptorArray::cast(object)->bit_field3_storage());
3245 ASSERT(!is_shared());
3246 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3247 CONDITIONAL_WRITE_BARRIER(
3248 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3252 int Map::bit_field3() {
3253 Object* object = READ_FIELD(this,
3254 kInstanceDescriptorsOrBitField3Offset);
3255 if (object->IsSmi()) {
3256 return Smi::cast(object)->value();
3258 return DescriptorArray::cast(object)->bit_field3_storage();
3263 void Map::set_bit_field3(int value) {
3264 ASSERT(Smi::IsValid(value));
3265 Object* object = READ_FIELD(this,
3266 kInstanceDescriptorsOrBitField3Offset);
3267 if (object->IsSmi()) {
3269 kInstanceDescriptorsOrBitField3Offset,
3270 Smi::FromInt(value));
3272 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3277 FixedArray* Map::unchecked_prototype_transitions() {
3278 return reinterpret_cast<FixedArray*>(
3279 READ_FIELD(this, kPrototypeTransitionsOffset));
3283 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3284 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3285 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3287 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3288 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3289 ACCESSORS(JSFunction,
3292 kNextFunctionLinkOffset)
3294 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3295 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3296 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3298 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3300 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3301 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3302 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3303 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3304 ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
3306 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3307 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3308 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3310 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3311 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3312 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3313 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3314 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3315 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3316 ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
3318 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3319 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3321 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3322 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3324 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3325 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3326 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3327 kPropertyAccessorsOffset)
3328 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3329 kPrototypeTemplateOffset)
3330 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3331 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3332 kNamedPropertyHandlerOffset)
3333 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3334 kIndexedPropertyHandlerOffset)
3335 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3336 kInstanceTemplateOffset)
3337 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3338 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3339 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3340 kInstanceCallHandlerOffset)
3341 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3342 kAccessCheckInfoOffset)
3343 ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
3345 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3346 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3347 kInternalFieldCountOffset)
3348 ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
3349 kHasExternalResourceOffset)
3350 ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
3351 kUseUserObjectComparisonOffset)
3353 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3354 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3356 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3358 ACCESSORS(Script, source, Object, kSourceOffset)
3359 ACCESSORS(Script, name, Object, kNameOffset)
3360 ACCESSORS(Script, id, Object, kIdOffset)
3361 ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
3362 ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
3363 ACCESSORS(Script, data, Object, kDataOffset)
3364 ACCESSORS(Script, context_data, Object, kContextOffset)
3365 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3366 ACCESSORS(Script, type, Smi, kTypeOffset)
3367 ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
3368 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3369 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3370 ACCESSORS(Script, eval_from_instructions_offset, Smi,
3371 kEvalFrominstructionsOffsetOffset)
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
3385 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3386 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3387 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3388 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3389 kInstanceClassNameOffset)
3390 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3391 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3392 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3393 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3394 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3395 kThisPropertyAssignmentsOffset)
3397 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3398 kHiddenPrototypeBit)
3399 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3400 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3401 kNeedsAccessCheckBit)
3402 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3403 kReadOnlyPrototypeBit)
3404 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3406 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3408 BOOL_GETTER(SharedFunctionInfo,
3410 has_only_simple_this_property_assignments,
3411 kHasOnlySimpleThisPropertyAssignments)
3412 BOOL_ACCESSORS(SharedFunctionInfo,
3414 allows_lazy_compilation,
3415 kAllowLazyCompilation)
3416 BOOL_ACCESSORS(SharedFunctionInfo,
3420 BOOL_ACCESSORS(SharedFunctionInfo,
3422 has_duplicate_parameters,
3423 kHasDuplicateParameters)
3426 #if V8_HOST_ARCH_32_BIT
3427 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3428 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3429 kFormalParameterCountOffset)
3430 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3431 kExpectedNofPropertiesOffset)
3432 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3433 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3434 kStartPositionAndTypeOffset)
3435 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3436 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3437 kFunctionTokenPositionOffset)
3438 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3439 kCompilerHintsOffset)
3440 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3441 kThisPropertyAssignmentsCountOffset)
3442 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
// On 64-bit hosts two int fields are packed into one pointer-sized slot.
// The LO accessor stores the int shifted left by one so that the low bit
// (kHeapObjectTag) stays clear and the GC never mistakes it for a pointer;
// the HI accessor addresses the upper, int-aligned half directly.
// NOTE(review): this macro body is truncated in this copy — the closing
// braces of name()/set_##name() and the offset argument of WRITE_INT_FIELD
// are missing; restore from upstream.
// NOTE(review): the literal 0x000000000 below has nine digits (still equal
// to zero, so harmless, but likely intended as 0x00000000).
// No comments are placed inside the macro: a // comment on a backslash-
// continued line would swallow the following line after splicing.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
  STATIC_ASSERT(holder::offset % kPointerSize == 0); \
  int holder::name() { \
    int value = READ_INT_FIELD(this, offset); \
    ASSERT(kHeapObjectTag == 1); \
    ASSERT((value & kHeapObjectTag) == 0); \
    return value >> 1; \
  void holder::set_##name(int value) { \
    ASSERT(kHeapObjectTag == 1); \
    ASSERT((value & 0xC0000000) == 0xC0000000 || \
           (value & 0xC0000000) == 0x000000000); \
    WRITE_INT_FIELD(this, \
                    (value << 1) & ~kHeapObjectTag); \

// The HI half lives at an offset that is int-aligned but not pointer-aligned.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
  INT_ACCESSORS(holder, name, offset)
// Pseudo-smi accessor instantiations; fields alternate LO/HI so that each
// consecutive pair shares one pointer-sized slot.
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
// NOTE(review): the name argument (compiler_hints) is missing from the
// invocation below in this copy.
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        kCompilerHintsOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        this_property_assignments_count,
                        kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3496 int SharedFunctionInfo::construction_count() {
3497 return READ_BYTE_FIELD(this, kConstructionCountOffset);
3501 void SharedFunctionInfo::set_construction_count(int value) {
3502 ASSERT(0 <= value && value < 256);
3503 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
// NOTE(review): the hints-field argument (compiler_hints) appears to be
// missing from this invocation in this copy.
BOOL_ACCESSORS(SharedFunctionInfo,
               live_objects_may_exist,
               kLiveObjectsMayExist)
// Slack tracking is in progress as long as an initial map is remembered
// (the field is reset to undefined when tracking finishes).
// NOTE(review): closing brace missing from this copy.
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != HEAP->undefined_value();
// Getter only: the setter below has extra side effects on the code object.
BOOL_GETTER(SharedFunctionInfo,
            optimization_disabled,
            kOptimizationDisabled)
// Set the optimization-disabled hint and mirror it in the code object.
// NOTE(review): the BooleanBit::set call below is truncated in this copy
// (the |disable| argument line and several closing braces are missing).
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
// Strict mode is a single bit in the compiler hints.
// NOTE(review): closing brace missing from this copy.
StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
  return BooleanBit::get(compiler_hints(), kStrictModeFunction)
      ? kStrictMode : kNonStrictMode;
// NOTE(review): the set_compiler_hints( line wrapping the call below is
// missing from this copy.
void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
  ASSERT(strict_mode_flag == kStrictMode ||
         strict_mode_flag == kNonStrictMode);
  bool value = strict_mode_flag == kStrictMode;
      BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
// More compiler-hint bits, plus the code-cache accessors.
BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
            kStrictModeFunction)
// NOTE(review): the bit argument (presumably kQmlModeFunction in this
// QtScript fork) is missing from the qml_mode invocation in this copy.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, qml_mode,
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
// An external string source is only valid while its external resource is
// still attached; non-string and non-external sources are always valid.
bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsAsciiRepresentation()) {
    return ExternalAsciiString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  // NOTE(review): the fall-through "return true;" and the closing braces
  // are missing from this copy.
3581 void SharedFunctionInfo::DontAdaptArguments() {
3582 ASSERT(code()->kind() == Code::BUILTIN);
3583 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3587 int SharedFunctionInfo::start_position() {
3588 return start_position_and_type() >> kStartPositionShift;
3592 void SharedFunctionInfo::set_start_position(int start_position) {
3593 set_start_position_and_type((start_position << kStartPositionShift)
3594 | (start_position_and_type() & ~kStartPositionMask));
// Code and scope-info accessors. The unchecked_ variants skip the checked
// cast. NOTE(review): closing braces of most functions in this region are
// missing from this copy.
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
SerializedScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<SerializedScopeInfo*>(
      READ_FIELD(this, kScopeInfoOffset));
void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  // NOTE(review): the CONDITIONAL_WRITE_BARRIER argument list below is
  // truncated in this copy (receiver/offset/mode argument lines missing).
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            reinterpret_cast<Object*>(value),
// The deopt counter is stored as a Smi.
Smi* SharedFunctionInfo::deopt_counter() {
  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
// A Smi store needs no write barrier.
void SharedFunctionInfo::set_deopt_counter(Smi* value) {
  WRITE_FIELD(this, kDeoptCounterOffset, value);
3641 bool SharedFunctionInfo::is_compiled() {
3643 Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3647 bool SharedFunctionInfo::IsApiFunction() {
3648 return function_data()->IsFunctionTemplateInfo();
3652 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3653 ASSERT(IsApiFunction());
3654 return FunctionTemplateInfo::cast(function_data());
3658 bool SharedFunctionInfo::HasBuiltinFunctionId() {
3659 return function_data()->IsSmi();
3663 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3664 ASSERT(HasBuiltinFunctionId());
3665 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3669 int SharedFunctionInfo::code_age() {
3670 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3674 void SharedFunctionInfo::set_code_age(int code_age) {
3675 set_compiler_hints(compiler_hints() |
3676 ((code_age & kCodeAgeMask) << kCodeAgeShift));
3680 bool SharedFunctionInfo::has_deoptimization_support() {
3681 Code* code = this->code();
3682 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3686 bool JSFunction::IsBuiltin() {
3687 return context()->global()->IsJSBuiltinsObject();
3691 bool JSFunction::NeedsArgumentsAdaption() {
3692 return shared()->formal_parameter_count() !=
3693 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3697 bool JSFunction::IsOptimized() {
3698 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3702 bool JSFunction::IsOptimizable() {
3703 return code()->kind() == Code::FUNCTION && code()->optimizable();
3707 bool JSFunction::IsMarkedForLazyRecompilation() {
3708 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
3712 Code* JSFunction::code() {
3713 return Code::cast(unchecked_code());
3717 Code* JSFunction::unchecked_code() {
3718 return reinterpret_cast<Code*>(
3719 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
// Install |value| as the function's code by writing its raw entry address.
void JSFunction::set_code(Code* value) {
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  // NOTE(review): the RecordWriteOfCodeEntry argument list below is
  // truncated in this copy (host/value argument lines and closing braces
  // are missing).
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      HeapObject::RawField(this, kCodeEntryOffset),
// Swap in new code and keep the context's optimized-function list in sync.
// NOTE(review): the statement that actually installs |code| (a set_code
// call, per upstream) is missing from this copy, as are several braces.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->global_context()->AddOptimizedFunction(this);
  if (was_optimized && !is_optimized) {
    context()->global_context()->RemoveOptimizedFunction(this);
3751 Context* JSFunction::context() {
3752 return Context::cast(READ_FIELD(this, kContextOffset));
3756 Object* JSFunction::unchecked_context() {
3757 return READ_FIELD(this, kContextOffset);
3761 SharedFunctionInfo* JSFunction::unchecked_shared() {
3762 return reinterpret_cast<SharedFunctionInfo*>(
3763 READ_FIELD(this, kSharedFunctionInfoOffset));
3767 void JSFunction::set_context(Object* value) {
3768 ASSERT(value->IsUndefined() || value->IsContext());
3769 WRITE_FIELD(this, kContextOffset, value);
3770 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3773 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3774 kPrototypeOrInitialMapOffset)
3777 Map* JSFunction::initial_map() {
3778 return Map::cast(prototype_or_initial_map());
3782 void JSFunction::set_initial_map(Map* value) {
3783 set_prototype_or_initial_map(value);
3787 bool JSFunction::has_initial_map() {
3788 return prototype_or_initial_map()->IsMap();
3792 bool JSFunction::has_instance_prototype() {
3793 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3797 bool JSFunction::has_prototype() {
3798 return map()->has_non_instance_prototype() || has_instance_prototype();
3802 Object* JSFunction::instance_prototype() {
3803 ASSERT(has_instance_prototype());
3804 if (has_initial_map()) return initial_map()->prototype();
3805 // When there is no initial map and the prototype is a JSObject, the
3806 // initial map field is used for the prototype field.
3807 return prototype_or_initial_map();
3811 Object* JSFunction::prototype() {
3812 ASSERT(has_prototype());
3813 // If the function's prototype property has been set to a non-JSObject
3814 // value, that value is stored in the constructor field of the map.
3815 if (map()->has_non_instance_prototype()) return map()->constructor();
3816 return instance_prototype();
3819 bool JSFunction::should_have_prototype() {
3820 return map()->function_with_prototype();
3824 bool JSFunction::is_compiled() {
3825 return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
3829 FixedArray* JSFunction::literals() {
3830 ASSERT(!shared()->bound());
3831 return literals_or_bindings();
3835 void JSFunction::set_literals(FixedArray* literals) {
3836 ASSERT(!shared()->bound());
3837 set_literals_or_bindings(literals);
3841 FixedArray* JSFunction::function_bindings() {
3842 ASSERT(shared()->bound());
3843 return literals_or_bindings();
3847 void JSFunction::set_function_bindings(FixedArray* bindings) {
3848 ASSERT(shared()->bound());
3849 // Bound function literal may be initialized to the empty fixed array
3850 // before the bindings are set.
3851 ASSERT(bindings == GetHeap()->empty_fixed_array() ||
3852 bindings->map() == GetHeap()->fixed_cow_array_map());
3853 set_literals_or_bindings(bindings);
3857 int JSFunction::NumberOfLiterals() {
3858 ASSERT(!shared()->bound());
3859 return literals()->length();
// Accessors for the per-id builtin function and code slots.
// NOTE(review): closing braces and the second parameter line of each setter
// ("JSFunction* value) {" / "Code* value) {" per upstream) are missing from
// this copy.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  // No write barrier: the assert below guarantees the code is not in new
  // space.
  ASSERT(!HEAP->InNewSpace(value));
3891 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
3892 ACCESSORS(JSProxy, hash, Object, kHashOffset)
3893 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
3894 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
3897 void JSProxy::InitializeBody(int object_size, Object* value) {
3898 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
3899 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
3900 WRITE_FIELD(this, offset, value);
3905 ACCESSORS(JSSet, table, Object, kTableOffset)
3906 ACCESSORS(JSMap, table, Object, kTableOffset)
3907 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
3908 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
3911 ObjectHashTable* JSWeakMap::unchecked_table() {
3912 return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
3916 Address Foreign::address() {
3917 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
3921 void Foreign::set_address(Address value) {
3922 WRITE_INTPTR_FIELD(this, kAddressOffset, OffsetFrom(value));
3926 ACCESSORS(JSValue, value, Object, kValueOffset)
3929 JSValue* JSValue::cast(Object* obj) {
3930 ASSERT(obj->IsJSValue());
3931 ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
3932 return reinterpret_cast<JSValue*>(obj);
3936 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
3937 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
3938 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
3939 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
3940 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
3941 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
3942 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
3945 JSMessageObject* JSMessageObject::cast(Object* obj) {
3946 ASSERT(obj->IsJSMessageObject());
3947 ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
3948 return reinterpret_cast<JSMessageObject*>(obj);
3952 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
3953 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
3954 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
3955 ACCESSORS(Code, next_code_flushing_candidate,
3956 Object, kNextCodeFlushingCandidateOffset)
3959 byte* Code::instruction_start() {
3960 return FIELD_ADDR(this, kHeaderSize);
3964 byte* Code::instruction_end() {
3965 return instruction_start() + instruction_size();
3969 int Code::body_size() {
3970 return RoundUp(instruction_size(), kObjectAlignment);
3974 FixedArray* Code::unchecked_deoptimization_data() {
3975 return reinterpret_cast<FixedArray*>(
3976 READ_FIELD(this, kDeoptimizationDataOffset));
3980 ByteArray* Code::unchecked_relocation_info() {
3981 return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
3985 byte* Code::relocation_start() {
3986 return unchecked_relocation_info()->GetDataStartAddress();
3990 int Code::relocation_size() {
3991 return unchecked_relocation_info()->length();
3995 byte* Code::entry() {
3996 return instruction_start();
4000 bool Code::contains(byte* inner_pointer) {
4001 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
ACCESSORS(JSArray, length, Object, kLengthOffset)
// The regexp's data field is a FixedArray holding tag, source, flags and
// per-engine implementation data; undefined until compiled.
ACCESSORS(JSRegExp, data, Object, kDataOffset)
// NOTE(review): closing braces of most functions below are missing from
// this copy.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
// NOTE(review): the switch below is truncated in this copy — the case
// labels (ATOM/IRREGEXP/default per upstream) are missing.
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
// NOTE(review): the "return pattern;" line is missing from this copy.
String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
// Unchecked raw field read, usable while the heap is being traversed.
Object* JSRegExp::DataAtUnchecked(int index) {
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  int offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(fa, offset);
void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
// NOTE(review): the else branch / closing braces are missing from this copy.
void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    fa->set_unchecked(index, Smi::cast(value));
  // We only do this during GC, so we don't need to notify the write barrier.
  fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
// The elements kind comes from the map; the asserts validate that the
// actual backing store agrees with the claimed kind.
// NOTE(review): the surrounding debug-only guards and the final
// "return kind;" are missing from this copy.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
  Map* map = fixed_array->map();
  ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
          (map == GetHeap()->fixed_array_map() ||
           map == GetHeap()->fixed_cow_array_map())) ||
         (kind == FAST_DOUBLE_ELEMENTS &&
          fixed_array->IsFixedDoubleArray()) ||
         (kind == DICTIONARY_ELEMENTS &&
          fixed_array->IsFixedArray() &&
          fixed_array->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
         (elements()->IsFixedArray() && elements()->length() >= 2));
4108 ElementsAccessor* JSObject::GetElementsAccessor() {
4109 return ElementsAccessor::ForKind(GetElementsKind());
4113 bool JSObject::HasFastElements() {
4114 return GetElementsKind() == FAST_ELEMENTS;
4118 bool JSObject::HasFastSmiOnlyElements() {
4119 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4123 bool JSObject::HasFastTypeElements() {
4124 ElementsKind elements_kind = GetElementsKind();
4125 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4126 elements_kind == FAST_ELEMENTS;
4130 bool JSObject::HasFastDoubleElements() {
4131 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4135 bool JSObject::HasDictionaryElements() {
4136 return GetElementsKind() == DICTIONARY_ELEMENTS;
4140 bool JSObject::HasNonStrictArgumentsElements() {
4141 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4145 bool JSObject::HasExternalArrayElements() {
4146 HeapObject* array = elements();
4147 ASSERT(array != NULL);
4148 return array->IsExternalArray();
// Generate HasExternal<Type>Elements() predicates by instance type.
// NOTE(review): the macro body is truncated in this copy — the statement
// after the IsHeapObject guard (a "return false;" per the guard's intent)
// and the closing braces are missing. No comments are placed inside the
// macro because // comments on backslash-continued lines swallow the next
// line after splicing.
#define EXTERNAL_ELEMENTS_CHECK(name, type) \
  bool JSObject::HasExternal##name##Elements() { \
    HeapObject* array = elements(); \
    ASSERT(array != NULL); \
    if (!array->IsHeapObject()) \
    return array->map()->instance_type() == type; \

EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4177 bool JSObject::HasNamedInterceptor() {
4178 return map()->has_named_interceptor();
4182 bool JSObject::HasIndexedInterceptor() {
4183 return map()->has_indexed_interceptor();
4187 bool JSObject::AllowsSetElementsLength() {
4188 bool result = elements()->IsFixedArray() ||
4189 elements()->IsFixedDoubleArray();
4190 ASSERT(result == !HasExternalArrayElements());
4195 MaybeObject* JSObject::EnsureWritableFastElements() {
4196 ASSERT(HasFastTypeElements());
4197 FixedArray* elems = FixedArray::cast(elements());
4198 Isolate* isolate = GetIsolate();
4199 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4200 Object* writable_elems;
4201 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4202 elems, isolate->heap()->fixed_array_map());
4203 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4204 return maybe_writable_elems;
4207 set_elements(FixedArray::cast(writable_elems));
4208 isolate->counters()->cow_arrays_converted()->Increment();
4209 return writable_elems;
4213 StringDictionary* JSObject::property_dictionary() {
4214 ASSERT(!HasFastProperties());
4215 return StringDictionary::cast(properties());
4219 NumberDictionary* JSObject::element_dictionary() {
4220 ASSERT(HasDictionaryElements());
4221 return NumberDictionary::cast(elements());
4225 bool String::IsHashFieldComputed(uint32_t field) {
4226 return (field & kHashNotComputedMask) == 0;
4230 bool String::HasHashCode() {
4231 return IsHashFieldComputed(hash_field());
4235 uint32_t String::Hash() {
4236 // Fast case: has hash code already been computed?
4237 uint32_t field = hash_field();
4238 if (IsHashFieldComputed(field)) return field >> kHashShift;
4239 // Slow case: compute hash code and set it.
4240 return ComputeAndSetHash();
// Incremental string hasher: computes the Jenkins one-at-a-time hash while
// simultaneously tracking whether the string is a valid array index.
// NOTE(review): the constructor's initializer list is truncated in this
// copy (the length_ and array_index_ initializers and the empty body are
// missing).
StringHasher::StringHasher(int length)
    raw_running_hash_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
// Strings longer than kMaxHashCalcLength get a trivial (length-based) hash.
// NOTE(review): closing brace missing from this copy.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
// NOTE(review): the body below is truncated — the digit extraction
// ("int d = c - '0';" per the uses of d) and several closing braces are
// missing from this copy.
void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    if (is_first_char_) {
      is_first_char_ = false;
      if (c == '0' && length_ > 1) {
        is_array_index_ = false;
    if (array_index_ > 429496729U - ((d + 2) >> 3)) {
      is_array_index_ = false;
    array_index_ = array_index_ * 10 + d;
// Faster variant for callers that already know the string cannot be an
// array index.
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
// NOTE(review): the zero-avoidance tail (mapping a zero result to a
// non-zero constant, per the comment below) and closing brace are missing
// from this copy.
uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
// Hash a sequential character buffer: feed characters through the
// index-tracking path until the string can no longer be an array index,
// then switch to the cheaper no-index path.
// NOTE(review): the declaration of loop variable i and several closing
// braces are missing from this copy.
template <typename schar>
uint32_t HashSequentialString(const schar* chars, int length) {
  StringHasher hasher(length);
  if (!hasher.has_trivial_hash()) {
    for (i = 0; hasher.is_array_index() && (i < length); i++) {
      hasher.AddCharacter(chars[i]);
    for (; i < length; i++) {
      hasher.AddCharacterNoIndex(chars[i]);
  return hasher.GetHashField();
// Fast path: if the hash is computed and the not-an-index bit is set, fail
// immediately; otherwise fall back to the slow conversion.
// NOTE(review): the early "return false;" branch body and closing braces
// are missing from this copy.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
  return SlowAsArrayIndex(index);
// JSReceiver methods dispatch between JSProxy (handler-based) and JSObject
// implementations.
// NOTE(review): the IsJSProxy() guard lines that select between the two
// return statements in each function below are missing from this copy, as
// are closing braces.
Object* JSReceiver::GetPrototype() {
  return HeapObject::cast(this)->map()->prototype();
bool JSReceiver::HasProperty(String* name) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  return GetPropertyAttribute(name) != ABSENT;
bool JSReceiver::HasLocalProperty(String* name) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  return GetLocalPropertyAttribute(name) != ABSENT;
PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
// TODO(504): this may be useful in other places too where JSGlobalProxy
// NOTE(review): the tail of this function (returning the prototype /
// this) is missing from this copy.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
    ? JSProxy::cast(this)->GetIdentityHash(flag)
    : JSObject::cast(this)->GetIdentityHash(flag);
bool JSReceiver::HasElement(uint32_t index) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
4387 bool AccessorInfo::all_can_read() {
4388 return BooleanBit::get(flag(), kAllCanReadBit);
4392 void AccessorInfo::set_all_can_read(bool value) {
4393 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4397 bool AccessorInfo::all_can_write() {
4398 return BooleanBit::get(flag(), kAllCanWriteBit);
4402 void AccessorInfo::set_all_can_write(bool value) {
4403 set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4407 bool AccessorInfo::prohibits_overwriting() {
4408 return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4412 void AccessorInfo::set_prohibits_overwriting(bool value) {
4413 set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4417 PropertyAttributes AccessorInfo::property_attributes() {
4418 return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4422 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
4423 set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
// Convenience overload: store an entry with default (zero) property details.
// NOTE(review): the key/value parameter lines of this overload are missing
// from this copy.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
// Write the (key, value, details) triple for |entry| into the backing
// FixedArray. A single write-barrier mode is computed once under
// AssertNoAllocation and reused for both object stores.
// NOTE(review): the key/value parameter lines and closing brace are
// missing from this copy.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::set(index+2, details.AsSmi());
4450 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4451 ASSERT(other->IsNumber());
4452 return key == static_cast<uint32_t>(other->Number());
4456 uint32_t NumberDictionaryShape::Hash(uint32_t key) {
4457 return ComputeIntegerHash(key);
4461 uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
4462 ASSERT(other->IsNumber());
4463 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
4467 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4468 return Isolate::Current()->heap()->NumberFromUint32(key);
bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
// NOTE(review): the bodies of Hash and AsObject below (delegating to
// key->Hash() / returning the key, per the pattern of the other shapes)
// are missing from this copy, as are closing braces.
uint32_t StringDictionaryShape::Hash(String* key) {
uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
MaybeObject* StringDictionaryShape::AsObject(String* key) {
// Shape for hash tables keyed by arbitrary objects; keys compare with
// SameValue and hash via their (already created) identity hash.
// NOTE(review): closing braces of the functions below are missing from
// this copy.
template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
// OMIT_CREATION: the identity hash must already exist; ToObjectChecked
// enforces that the lookup cannot fail here.
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  ASSERT(!key->IsUndefined() && !key->IsNull());
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
// NOTE(review): the second parameter line ("Object* other) {") is missing
// from this copy.
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
  ASSERT(!other->IsUndefined() && !other->IsNull());
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
// NOTE(review): the body of AsObject (returning the key, per the pattern
// of the other shapes) is missing from this copy.
template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
// Convenience overload delegating to the heap-taking variant.
void ObjectHashTable::RemoveEntry(int entry) {
  RemoveEntry(entry, GetHeap());
4529 void Map::ClearCodeCache(Heap* heap) {
4530 // No write barrier is needed since empty_fixed_array is not in new space.
4531 // Please note this function is used during marking:
4532 // - MarkCompactCollector::MarkUnmarkedObject
4533 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4534 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4538 void JSArray::EnsureSize(int required_size) {
4539 ASSERT(HasFastTypeElements());
4540 FixedArray* elts = FixedArray::cast(elements());
4541 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4542 if (elts->length() < required_size) {
4543 // Doubling in size would be overkill, but leave some slack to avoid
4544 // constantly growing.
4545 Expand(required_size + (required_size >> 3));
4546 // It's a performance benefit to keep a frequently used array in new-space.
4547 } else if (!GetHeap()->new_space()->Contains(elts) &&
4548 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4549 // Expand will allocate a new backing store in new space even if the size
4550 // we asked for isn't larger than what we had before.
4551 Expand(required_size);
4556 void JSArray::set_length(Smi* length) {
4557 // Don't need a write barrier for a Smi.
4558 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4562 MaybeObject* JSArray::SetContent(FixedArray* storage) {
4563 MaybeObject* maybe_object = EnsureCanContainElements(storage);
4564 if (maybe_object->IsFailure()) return maybe_object;
4565 set_length(Smi::FromInt(storage->length()));
4566 set_elements(storage);
4571 MaybeObject* FixedArray::Copy() {
4572 if (length() == 0) return this;
4573 return GetHeap()->CopyFixedArray(this);
4577 MaybeObject* FixedDoubleArray::Copy() {
4578 if (length() == 0) return this;
4579 return GetHeap()->CopyFixedDoubleArray(this);
4583 Relocatable::Relocatable(Isolate* isolate) {
4584 ASSERT(isolate == Isolate::Current());
4586 prev_ = isolate->relocatable_top();
4587 isolate->set_relocatable_top(this);
4591 Relocatable::~Relocatable() {
4592 ASSERT(isolate_ == Isolate::Current());
4593 ASSERT_EQ(isolate_->relocatable_top(), this);
4594 isolate_->set_relocatable_top(prev_);
4598 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
4599 return map->instance_size();
4603 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
4604 v->VisitExternalReference(
4605 reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
4609 template<typename StaticVisitor>
4610 void Foreign::ForeignIterateBody() {
4611 StaticVisitor::VisitExternalReference(
4612 reinterpret_cast<Address *>(FIELD_ADDR(this, kAddressOffset)));
4616 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
4617 typedef v8::String::ExternalAsciiStringResource Resource;
4618 v->VisitExternalAsciiString(
4619 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4623 template<typename StaticVisitor>
4624 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
4625 typedef v8::String::ExternalAsciiStringResource Resource;
4626 StaticVisitor::VisitExternalAsciiString(
4627 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4631 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
4632 typedef v8::String::ExternalStringResource Resource;
4633 v->VisitExternalTwoByteString(
4634 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
4638 template<typename StaticVisitor>
4639 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
4640 typedef v8::String::ExternalStringResource Resource;
4641 StaticVisitor::VisitExternalTwoByteString(
4642 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
// Compute the address of a tagged-pointer slot at |offset| within |obj|.
#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

// Visit the pointer fields in [start_offset, end_offset) of a fixed-size
// object.
// NOTE(review): the parameter lines (HeapObject* obj, ObjectVisitor* v per
// the body) and closing braces are missing from this copy.
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
// Same, for variable-size objects: the end is given by object_size.
// NOTE(review): the remaining parameter lines (object_size, the visitor)
// and closing brace are missing from this copy.
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
// Tear down the field-access helper macros so they do not leak to files
// that include this header.
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef SMI_ACCESSORS
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_MEMADDR_FIELD
#undef WRITE_MEMADDR_FIELD
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD

} } // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_