1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "accessors.h"
31 #include "allocation-site-scopes.h"
33 #include "arguments.h"
34 #include "bootstrapper.h"
36 #include "code-stubs.h"
37 #include "cpu-profiler.h"
39 #include "deoptimizer.h"
42 #include "execution.h"
43 #include "full-codegen.h"
45 #include "isolate-inl.h"
47 #include "objects-inl.h"
48 #include "objects-visiting-inl.h"
49 #include "macro-assembler.h"
50 #include "mark-compact.h"
51 #include "safepoint-table.h"
52 #include "string-stream.h"
55 #ifdef ENABLE_DISASSEMBLER
57 #include "disassembler.h"
64 MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
67 { MaybeObject* maybe_result =
68 constructor->GetHeap()->AllocateJSObject(constructor);
69 if (!maybe_result->ToObject(&result)) return maybe_result;
71 JSValue::cast(result)->set_value(value);
76 MaybeObject* Object::ToObject(Context* native_context) {
78 return CreateJSValue(native_context->number_function(), this);
79 } else if (IsFloat32x4()) {
80 return CreateJSValue(native_context->float32x4_function(), this);
81 } else if (IsInt32x4()) {
82 return CreateJSValue(native_context->int32x4_function(), this);
83 } else if (IsBoolean()) {
84 return CreateJSValue(native_context->boolean_function(), this);
85 } else if (IsString()) {
86 return CreateJSValue(native_context->string_function(), this);
87 } else if (IsSymbol()) {
88 return CreateJSValue(native_context->symbol_function(), this);
95 MaybeObject* Object::ToObject(Isolate* isolate) {
98 } else if (IsNumber()) {
99 Context* native_context = isolate->context()->native_context();
100 return CreateJSValue(native_context->number_function(), this);
101 } else if (IsFloat32x4()) {
102 Isolate* isolate = HeapObject::cast(this)->GetIsolate();
103 Context* native_context = isolate->context()->native_context();
104 return CreateJSValue(native_context->float32x4_function(), this);
105 } else if (IsInt32x4()) {
106 Isolate* isolate = HeapObject::cast(this)->GetIsolate();
107 Context* native_context = isolate->context()->native_context();
108 return CreateJSValue(native_context->int32x4_function(), this);
109 } else if (IsBoolean()) {
110 Context* native_context = isolate->context()->native_context();
111 return CreateJSValue(native_context->boolean_function(), this);
112 } else if (IsString()) {
113 Context* native_context = isolate->context()->native_context();
114 return CreateJSValue(native_context->string_function(), this);
115 } else if (IsSymbol()) {
116 Context* native_context = isolate->context()->native_context();
117 return CreateJSValue(native_context->symbol_function(), this);
120 // Throw a type error.
121 return Failure::InternalError();
125 bool Object::BooleanValue() {
126 if (IsBoolean()) return IsTrue();
127 if (IsSmi()) return Smi::cast(this)->value() != 0;
128 if (IsUndefined() || IsNull()) return false;
129 if (IsUndetectableObject()) return false; // Undetectable object is false.
130 if (IsString()) return String::cast(this)->length() != 0;
131 if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
136 bool Object::IsCallable() {
138 while (fun->IsJSFunctionProxy()) {
139 fun = JSFunctionProxy::cast(fun)->call_trap();
141 return fun->IsJSFunction() ||
142 (fun->IsHeapObject() &&
143 HeapObject::cast(fun)->map()->has_instance_call_handler());
147 void Object::Lookup(Name* name, LookupResult* result) {
148 Object* holder = NULL;
149 if (IsJSReceiver()) {
152 Context* native_context = result->isolate()->context()->native_context();
154 holder = native_context->number_function()->instance_prototype();
155 } else if (IsFloat32x4()) {
156 holder = native_context->float32x4_function()->instance_prototype();
157 } else if (IsInt32x4()) {
158 holder = native_context->int32x4_function()->instance_prototype();
159 } else if (IsString()) {
160 holder = native_context->string_function()->instance_prototype();
161 } else if (IsSymbol()) {
162 holder = native_context->symbol_function()->instance_prototype();
163 } else if (IsBoolean()) {
164 holder = native_context->boolean_function()->instance_prototype();
166 result->isolate()->PushStackTraceAndDie(
167 0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
170 ASSERT(holder != NULL); // Cannot handle null or undefined.
171 JSReceiver::cast(holder)->Lookup(name, result);
175 Handle<Object> Object::GetPropertyWithReceiver(
176 Handle<Object> object,
177 Handle<Object> receiver,
179 PropertyAttributes* attributes) {
180 LookupResult lookup(name->GetIsolate());
181 object->Lookup(*name, &lookup);
182 Handle<Object> result =
183 GetProperty(object, receiver, &lookup, name, attributes);
184 ASSERT(*attributes <= ABSENT);
189 MaybeObject* Object::GetPropertyWithReceiver(Object* receiver,
191 PropertyAttributes* attributes) {
192 LookupResult result(name->GetIsolate());
193 Lookup(name, &result);
194 MaybeObject* value = GetProperty(receiver, &result, name, attributes);
195 ASSERT(*attributes <= ABSENT);
200 bool Object::ToInt32(int32_t* value) {
202 *value = Smi::cast(this)->value();
205 if (IsHeapNumber()) {
206 double num = HeapNumber::cast(this)->value();
207 if (FastI2D(FastD2I(num)) == num) {
208 *value = FastD2I(num);
216 bool Object::ToUint32(uint32_t* value) {
218 int num = Smi::cast(this)->value();
220 *value = static_cast<uint32_t>(num);
224 if (IsHeapNumber()) {
225 double num = HeapNumber::cast(this)->value();
226 if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
227 *value = FastD2UI(num);
235 bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
236 if (!object->IsHeapObject()) return false;
237 return IsTemplateFor(HeapObject::cast(object)->map());
241 bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
242 // There is a constraint on the object; check.
243 if (!map->IsJSObjectMap()) return false;
244 // Fetch the constructor function of the object.
245 Object* cons_obj = map->constructor();
246 if (!cons_obj->IsJSFunction()) return false;
247 JSFunction* fun = JSFunction::cast(cons_obj);
248 // Iterate through the chain of inheriting function templates to
249 // see if the required one occurs.
250 for (Object* type = fun->shared()->function_data();
251 type->IsFunctionTemplateInfo();
252 type = FunctionTemplateInfo::cast(type)->parent_template()) {
253 if (type == this) return true;
255 // Didn't find the required type in the inheritance chain.
260 template<typename To>
261 static inline To* CheckedCast(void *from) {
262 uintptr_t temp = reinterpret_cast<uintptr_t>(from);
263 ASSERT(temp % sizeof(To) == 0);
264 return reinterpret_cast<To*>(temp);
268 static MaybeObject* PerformCompare(const BitmaskCompareDescriptor& descriptor,
271 uint32_t bitmask = descriptor.bitmask;
272 uint32_t compare_value = descriptor.compare_value;
274 switch (descriptor.size) {
276 value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
277 compare_value &= 0xff;
281 value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
282 compare_value &= 0xffff;
286 value = *CheckedCast<uint32_t>(ptr);
292 return heap->ToBoolean((bitmask & value) == (bitmask & compare_value));
296 static MaybeObject* PerformCompare(const PointerCompareDescriptor& descriptor,
299 uintptr_t compare_value =
300 reinterpret_cast<uintptr_t>(descriptor.compare_value);
301 uintptr_t value = *CheckedCast<uintptr_t>(ptr);
302 return heap->ToBoolean(compare_value == value);
306 static MaybeObject* GetPrimitiveValue(
307 const PrimitiveValueDescriptor& descriptor,
310 int32_t int32_value = 0;
311 switch (descriptor.data_type) {
312 case kDescriptorInt8Type:
313 int32_value = *CheckedCast<int8_t>(ptr);
315 case kDescriptorUint8Type:
316 int32_value = *CheckedCast<uint8_t>(ptr);
318 case kDescriptorInt16Type:
319 int32_value = *CheckedCast<int16_t>(ptr);
321 case kDescriptorUint16Type:
322 int32_value = *CheckedCast<uint16_t>(ptr);
324 case kDescriptorInt32Type:
325 int32_value = *CheckedCast<int32_t>(ptr);
327 case kDescriptorUint32Type: {
328 uint32_t value = *CheckedCast<uint32_t>(ptr);
329 return heap->NumberFromUint32(value);
331 case kDescriptorBoolType: {
332 uint8_t byte = *CheckedCast<uint8_t>(ptr);
333 return heap->ToBoolean(byte & (0x1 << descriptor.bool_offset));
335 case kDescriptorFloatType: {
336 float value = *CheckedCast<float>(ptr);
337 return heap->NumberFromDouble(value);
339 case kDescriptorDoubleType: {
340 double value = *CheckedCast<double>(ptr);
341 return heap->NumberFromDouble(value);
344 return heap->NumberFromInt32(int32_value);
348 static MaybeObject* GetDeclaredAccessorProperty(Object* receiver,
349 DeclaredAccessorInfo* info,
351 char* current = reinterpret_cast<char*>(receiver);
352 DeclaredAccessorDescriptorIterator iterator(info->descriptor());
354 const DeclaredAccessorDescriptorData* data = iterator.Next();
355 switch (data->type) {
356 case kDescriptorReturnObject: {
357 ASSERT(iterator.Complete());
358 current = *CheckedCast<char*>(current);
359 return *CheckedCast<Object*>(current);
361 case kDescriptorPointerDereference:
362 ASSERT(!iterator.Complete());
363 current = *reinterpret_cast<char**>(current);
365 case kDescriptorPointerShift:
366 ASSERT(!iterator.Complete());
367 current += data->pointer_shift_descriptor.byte_offset;
369 case kDescriptorObjectDereference: {
370 ASSERT(!iterator.Complete());
371 Object* object = CheckedCast<Object>(current);
372 int field = data->object_dereference_descriptor.internal_field;
373 Object* smi = JSObject::cast(object)->GetInternalField(field);
374 ASSERT(smi->IsSmi());
375 current = reinterpret_cast<char*>(smi);
378 case kDescriptorBitmaskCompare:
379 ASSERT(iterator.Complete());
380 return PerformCompare(data->bitmask_compare_descriptor,
383 case kDescriptorPointerCompare:
384 ASSERT(iterator.Complete());
385 return PerformCompare(data->pointer_compare_descriptor,
388 case kDescriptorPrimitiveValue:
389 ASSERT(iterator.Complete());
390 return GetPrimitiveValue(data->primitive_value_descriptor,
400 Handle<FixedArray> JSObject::EnsureWritableFastElements(
401 Handle<JSObject> object) {
402 CALL_HEAP_FUNCTION(object->GetIsolate(),
403 object->EnsureWritableFastElements(),
// Loads a property through an accessor |structure|, which may be a Foreign
// (legacy C++ accessor descriptor), an AccessorInfo (API callback) or an
// AccessorPair (__defineGetter__-style JS getter).
// NOTE(review): this listing appears truncated — several original lines are
// missing; comments below describe only the visible code.
408 Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object,
409 Handle<Object> receiver,
410 Handle<Object> structure,
412 Isolate* isolate = name->GetIsolate();
413 // To accommodate both the old and the new api we switch on the
414 // data structure used to store the callbacks. Eventually foreign
415 // callbacks should be phased out.
416 if (structure->IsForeign()) {
417 AccessorDescriptor* callback =
418 reinterpret_cast<AccessorDescriptor*>(
419 Handle<Foreign>::cast(structure)->foreign_address());
420 CALL_HEAP_FUNCTION(isolate,
421 (callback->getter)(isolate, *receiver, callback->data),
// api style callbacks.
425 // api style callbacks.
426 if (structure->IsAccessorInfo()) {
427 Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure);
// Reject receivers the accessor was not declared compatible with.
428 if (!accessor_info->IsCompatibleReceiver(*receiver)) {
429 Handle<Object> args[2] = { name, receiver };
430 Handle<Object> error =
431 isolate->factory()->NewTypeError("incompatible_method_receiver",
434 isolate->Throw(*error);
435 return Handle<Object>::null();
437 // TODO(rossberg): Handling symbols in the API requires changing the API,
438 // so we do not support it for now.
439 if (name->IsSymbol()) return isolate->factory()->undefined_value();
440 if (structure->IsDeclaredAccessorInfo()) {
443 GetDeclaredAccessorProperty(*receiver,
444 DeclaredAccessorInfo::cast(*structure),
449 Handle<ExecutableAccessorInfo> data =
450 Handle<ExecutableAccessorInfo>::cast(structure);
451 v8::AccessorGetterCallback call_fun =
452 v8::ToCData<v8::AccessorGetterCallback>(data->getter());
// A null getter means the property reads as undefined.
453 if (call_fun == NULL) return isolate->factory()->undefined_value();
455 HandleScope scope(isolate);
456 Handle<JSObject> self = Handle<JSObject>::cast(receiver);
457 Handle<String> key = Handle<String>::cast(name);
458 LOG(isolate, ApiNamedPropertyAccess("load", *self, *name));
459 PropertyCallbackArguments args(isolate, data->data(), *self, *object);
460 v8::Handle<v8::Value> result =
461 args.Call(call_fun, v8::Utils::ToLocal(key));
462 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
463 if (result.IsEmpty()) {
464 return isolate->factory()->undefined_value();
466 Handle<Object> return_value = v8::Utils::OpenHandle(*result);
467 return_value->VerifyApiCallResultType();
// CloseAndEscape keeps the result alive past this HandleScope.
468 return scope.CloseAndEscape(return_value);
471 // __defineGetter__ callback
472 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
474 if (getter->IsSpecFunction()) {
475 // TODO(rossberg): nicer would be to cast to some JSCallable here...
478 object->GetPropertyWithDefinedGetter(*receiver,
479 JSReceiver::cast(*getter)),
482 // Getter is not a function.
483 return isolate->factory()->undefined_value();
487 MaybeObject* JSProxy::GetPropertyWithHandler(Object* receiver_raw,
489 Isolate* isolate = GetIsolate();
490 HandleScope scope(isolate);
491 Handle<Object> receiver(receiver_raw, isolate);
492 Handle<Object> name(name_raw, isolate);
494 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
495 if (name->IsSymbol()) return isolate->heap()->undefined_value();
497 Handle<Object> args[] = { receiver, name };
498 Handle<Object> result = CallTrap(
499 "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
500 if (isolate->has_pending_exception()) return Failure::Exception();
506 Handle<Object> Object::GetProperty(Handle<Object> object,
508 // TODO(rossberg): The index test should not be here but in the GetProperty
509 // method (or somewhere else entirely). Needs more global clean-up.
511 Isolate* isolate = name->GetIsolate();
512 if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
513 CALL_HEAP_FUNCTION(isolate, object->GetProperty(*name), Object);
517 MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
520 MaybeObject* maybe = GetHeap()->Uint32ToString(index);
521 if (!maybe->To<String>(&name)) return maybe;
522 return GetPropertyWithHandler(receiver, name);
526 Handle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
527 Handle<JSReceiver> receiver,
529 Handle<Object> value,
530 StrictMode strict_mode) {
531 Isolate* isolate = proxy->GetIsolate();
532 Handle<String> name = isolate->factory()->Uint32ToString(index);
533 return SetPropertyWithHandler(
534 proxy, receiver, name, value, NONE, strict_mode);
538 bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
539 Isolate* isolate = proxy->GetIsolate();
540 Handle<String> name = isolate->factory()->Uint32ToString(index);
541 return HasPropertyWithHandler(proxy, name);
545 MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver,
546 JSReceiver* getter) {
547 Isolate* isolate = getter->GetIsolate();
548 HandleScope scope(isolate);
549 Handle<JSReceiver> fun(getter);
550 Handle<Object> self(receiver, isolate);
551 #ifdef ENABLE_DEBUGGER_SUPPORT
552 Debug* debug = isolate->debug();
553 // Handle stepping into a getter if step into is active.
554 // TODO(rossberg): should this apply to getters that are function proxies?
555 if (debug->StepInActive() && fun->IsJSFunction()) {
557 Handle<JSFunction>::cast(fun), Handle<Object>::null(), 0, false);
561 bool has_pending_exception;
562 Handle<Object> result = Execution::Call(
563 isolate, fun, self, 0, NULL, &has_pending_exception, true);
564 // Check for pending exception and return the result.
565 if (has_pending_exception) return Failure::Exception();
// Property load fallback after an access check failed: only ALL_CAN_READ
// accessors and interceptor-backed real properties are still readable.
// NOTE(review): this listing appears truncated — case labels and closing
// braces of the switch are missing; comments describe the visible code only.
570 // Only deal with CALLBACKS and INTERCEPTOR
571 Handle<Object> JSObject::GetPropertyWithFailedAccessCheck(
572 Handle<JSObject> object,
573 Handle<Object> receiver,
574 LookupResult* result,
576 PropertyAttributes* attributes) {
577 Isolate* isolate = name->GetIsolate();
578 if (result->IsProperty()) {
579 switch (result->type()) {
581 // Only allow API accessors.
582 Handle<Object> callback_obj(result->GetCallbackObject(), isolate);
583 if (callback_obj->IsAccessorInfo()) {
584 if (!AccessorInfo::cast(*callback_obj)->all_can_read()) break;
585 *attributes = result->GetAttributes();
586 // Fall through to GetPropertyWithCallback.
587 } else if (callback_obj->IsAccessorPair()) {
588 if (!AccessorPair::cast(*callback_obj)->all_can_read()) break;
589 // Fall through to GetPropertyWithCallback.
593 Handle<JSObject> holder(result->holder(), isolate);
594 return GetPropertyWithCallback(holder, receiver, callback_obj, name);
599 // Search ALL_CAN_READ accessors in prototype chain.
600 LookupResult r(isolate);
601 result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r);
602 if (r.IsProperty()) {
// Recurse with the prototype-chain hit.
603 return GetPropertyWithFailedAccessCheck(
604 object, receiver, &r, name, attributes);
609 // If the object has an interceptor, try real named properties.
610 // No access check in GetPropertyAttributeWithInterceptor.
611 LookupResult r(isolate);
612 result->holder()->LookupRealNamedProperty(*name, &r);
613 if (r.IsProperty()) {
614 return GetPropertyWithFailedAccessCheck(
615 object, receiver, &r, name, attributes);
624 // No accessible property found.
625 *attributes = ABSENT;
// Report the failed access check; this may schedule an exception.
626 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_GET);
627 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
628 return isolate->factory()->undefined_value();
// Attribute query fallback after a failed access check; mirrors the read
// fallback above: only ALL_CAN_READ accessors and interceptor-backed real
// properties report their attributes.
// NOTE(review): this listing appears truncated — case labels and closing
// braces are missing; comments describe the visible code only.
632 PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
633 Handle<JSObject> object,
634 LookupResult* result,
636 bool continue_search) {
637 if (result->IsProperty()) {
638 switch (result->type()) {
640 // Only allow API accessors.
641 Handle<Object> obj(result->GetCallbackObject(), object->GetIsolate());
642 if (obj->IsAccessorInfo()) {
643 Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(obj);
644 if (info->all_can_read()) {
645 return result->GetAttributes();
647 } else if (obj->IsAccessorPair()) {
648 Handle<AccessorPair> pair = Handle<AccessorPair>::cast(obj);
649 if (pair->all_can_read()) {
650 return result->GetAttributes();
659 if (!continue_search) break;
660 // Search ALL_CAN_READ accessors in prototype chain.
661 LookupResult r(object->GetIsolate());
662 result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r);
663 if (r.IsProperty()) {
664 return GetPropertyAttributeWithFailedAccessCheck(
665 object, &r, name, continue_search);
671 // If the object has an interceptor, try real named properties.
672 // No access check in GetPropertyAttributeWithInterceptor.
673 LookupResult r(object->GetIsolate());
674 if (continue_search) {
675 result->holder()->LookupRealNamedProperty(*name, &r);
// Local-only lookup when the search should not continue up the chain.
677 result->holder()->LocalLookupRealNamedProperty(*name, &r);
679 if (!r.IsFound()) break;
680 return GetPropertyAttributeWithFailedAccessCheck(
681 object, &r, name, continue_search);
// Nothing accessible: report the failed check (ACCESS_HAS).
691 object->GetIsolate()->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
696 Object* JSObject::GetNormalizedProperty(const LookupResult* result) {
697 ASSERT(!HasFastProperties());
698 Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
699 if (IsGlobalObject()) {
700 value = PropertyCell::cast(value)->value();
702 ASSERT(!value->IsPropertyCell() && !value->IsCell());
707 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
708 const LookupResult* result,
709 Handle<Object> value) {
710 ASSERT(!object->HasFastProperties());
711 NameDictionary* property_dictionary = object->property_dictionary();
712 if (object->IsGlobalObject()) {
713 Handle<PropertyCell> cell(PropertyCell::cast(
714 property_dictionary->ValueAt(result->GetDictionaryEntry())));
715 PropertyCell::SetValueInferType(cell, value);
717 property_dictionary->ValueAtPut(result->GetDictionaryEntry(), *value);
722 // TODO(mstarzinger): Temporary wrapper until handlified.
723 static Handle<NameDictionary> NameDictionaryAdd(Handle<NameDictionary> dict,
725 Handle<Object> value,
726 PropertyDetails details) {
727 CALL_HEAP_FUNCTION(dict->GetIsolate(),
728 dict->Add(*name, *value, details),
// Adds or updates property |name| with |value|/|details| on a normalized
// object, internalizing the name and preserving enumeration order for
// existing entries.
// NOTE(review): this listing appears truncated — several closing braces and
// the |name| parameter line are missing; comments describe the visible code.
733 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
735 Handle<Object> value,
736 PropertyDetails details) {
737 ASSERT(!object->HasFastProperties());
738 Handle<NameDictionary> property_dictionary(object->property_dictionary());
// Dictionary keys must be unique names; internalize plain strings first.
740 if (!name->IsUniqueName()) {
741 name = object->GetIsolate()->factory()->InternalizeString(
742 Handle<String>::cast(name));
745 int entry = property_dictionary->FindEntry(*name);
746 if (entry == NameDictionary::kNotFound) {
747 Handle<Object> store_value = value;
// Globals store their values boxed in PropertyCells.
748 if (object->IsGlobalObject()) {
749 store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
752 property_dictionary =
753 NameDictionaryAdd(property_dictionary, name, store_value, details);
754 object->set_properties(*property_dictionary);
758 PropertyDetails original_details = property_dictionary->DetailsAt(entry);
759 int enumeration_index;
760 // Preserve the enumeration index unless the property was deleted.
761 if (original_details.IsDeleted()) {
762 enumeration_index = property_dictionary->NextEnumerationIndex();
763 property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
765 enumeration_index = original_details.dictionary_index();
766 ASSERT(enumeration_index > 0);
// Rebuild details so the chosen enumeration index is carried along.
769 details = PropertyDetails(
770 details.attributes(), details.type(), enumeration_index);
772 if (object->IsGlobalObject()) {
773 Handle<PropertyCell> cell(
774 PropertyCell::cast(property_dictionary->ValueAt(entry)));
775 PropertyCell::SetValueInferType(cell, value);
776 // Please note we have to update the property details.
777 property_dictionary->DetailsAtPut(entry, details);
779 property_dictionary->SetEntry(entry, *name, *value, details);
784 // TODO(mstarzinger): Temporary wrapper until target is handlified.
785 Handle<NameDictionary> NameDictionaryShrink(Handle<NameDictionary> dict,
787 CALL_HEAP_FUNCTION(dict->GetIsolate(), dict->Shrink(*name), NameDictionary);
// Deletes property |name| from a normalized object.  Global-object
// properties are "deleted" by writing the hole into their PropertyCell;
// other properties are removed from the dictionary, which is then shrunk.
// NOTE(review): this listing appears truncated — the name/mode parameter
// lines and several closing braces are missing; comments describe the
// visible code only.
791 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
794 ASSERT(!object->HasFastProperties());
795 Isolate* isolate = object->GetIsolate();
796 Handle<NameDictionary> dictionary(object->property_dictionary());
797 int entry = dictionary->FindEntry(*name);
798 if (entry != NameDictionary::kNotFound) {
799 // If we have a global object set the cell to the hole.
800 if (object->IsGlobalObject()) {
801 PropertyDetails details = dictionary->DetailsAt(entry);
802 if (details.IsDontDelete()) {
803 if (mode != FORCE_DELETION) return isolate->factory()->false_value();
804 // When forced to delete global properties, we have to make a
805 // map change to invalidate any ICs that think they can load
806 // from the DontDelete cell without checking if it contains
808 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
809 ASSERT(new_map->is_dictionary_map());
810 object->set_map(*new_map);
// Write the hole into the cell and mark the entry as deleted.
812 Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
813 Handle<Object> value = isolate->factory()->the_hole_value();
814 PropertyCell::SetValueInferType(cell, value);
815 dictionary->DetailsAtPut(entry, details.AsDeleted());
817 Handle<Object> deleted(dictionary->DeleteProperty(entry, mode), isolate);
818 if (*deleted == isolate->heap()->true_value()) {
// Reclaim space after a successful deletion.
819 Handle<NameDictionary> new_properties =
820 NameDictionaryShrink(dictionary, name);
821 object->set_properties(*new_properties);
826 return isolate->factory()->true_value();
830 bool JSObject::IsDirty() {
831 Object* cons_obj = map()->constructor();
832 if (!cons_obj->IsJSFunction())
834 JSFunction* fun = JSFunction::cast(cons_obj);
835 if (!fun->shared()->IsApiFunction())
837 // If the object is fully fast case and has the same map it was
838 // created with then no changes can have been made to it.
839 return map() != fun->initial_map()
840 || !HasFastObjectElements()
841 || !HasFastProperties();
845 Handle<Object> Object::GetProperty(Handle<Object> object,
846 Handle<Object> receiver,
847 LookupResult* result,
849 PropertyAttributes* attributes) {
850 Isolate* isolate = result->isolate();
853 object->GetProperty(*receiver, result, *key, attributes),
858 MaybeObject* Object::GetPropertyOrFail(Handle<Object> object,
859 Handle<Object> receiver,
860 LookupResult* result,
862 PropertyAttributes* attributes) {
863 Isolate* isolate = result->isolate();
864 CALL_HEAP_FUNCTION_PASS_EXCEPTION(
866 object->GetProperty(*receiver, result, *key, attributes));
// Core raw property load: walks the prototype chain up to the result holder
// performing access checks, then dispatches on the kind of property found
// (normal, field, constant, callback, handler/proxy, interceptor).
// NOTE(review): this listing appears truncated — case labels, loop
// conditions and several closing braces are missing; comments describe the
// visible code only.
870 // TODO(yangguo): handlify this and get rid of.
871 MaybeObject* Object::GetProperty(Object* receiver,
872 LookupResult* result,
874 PropertyAttributes* attributes) {
875 Isolate* isolate = name->GetIsolate();
876 Heap* heap = isolate->heap();
879 // TODO(mstarzinger): Only because of the AssertNoContextChange, drop as soon
880 // as this method has been fully handlified.
881 HandleScope scope(isolate);
884 // Make sure that the top context does not change when doing
885 // callbacks or interceptor calls.
886 AssertNoContextChange ncc(isolate);
888 // Traverse the prototype chain from the current object (this) to
889 // the holder and check for access rights. This avoids traversing the
890 // objects more than once in case of interceptors, because the
891 // holder will always be the interceptor holder and the search may
892 // only continue with a current object just after the interceptor
893 // holder in the prototype chain.
894 // Proxy handlers do not use the proxy's prototype, so we can skip this.
895 if (!result->IsHandler()) {
896 Object* last = result->IsProperty()
898 : Object::cast(heap->null_value());
899 ASSERT(this != this->GetPrototype(isolate));
900 for (Object* current = this;
902 current = current->GetPrototype(isolate)) {
903 if (current->IsAccessCheckNeeded()) {
904 // Check if we're allowed to read from the current object. Note
905 // that even though we may not actually end up loading the named
906 // property from the current object, we still check that we have
908 JSObject* checked = JSObject::cast(current);
909 if (!isolate->MayNamedAccess(checked, name, v8::ACCESS_GET)) {
910 HandleScope scope(isolate);
911 Handle<Object> value = JSObject::GetPropertyWithFailedAccessCheck(
912 handle(checked, isolate),
913 handle(receiver, isolate),
915 handle(name, isolate),
917 RETURN_IF_EMPTY_HANDLE(isolate, value);
921 // Stop traversing the chain once we reach the last object in the
922 // chain; either the holder of the result or null in case of an
924 if (current == last) break;
// Absent property: report ABSENT and read as undefined.
928 if (!result->IsProperty()) {
929 *attributes = ABSENT;
930 return heap->undefined_value();
932 *attributes = result->GetAttributes();
// Dispatch on the property kind found by the lookup.
934 switch (result->type()) {
936 value = result->holder()->GetNormalizedProperty(result);
937 ASSERT(!value->IsTheHole() || result->IsReadOnly());
938 return value->IsTheHole() ? heap->undefined_value() : value;
940 MaybeObject* maybe_result = result->holder()->FastPropertyAt(
941 result->representation(),
942 result->GetFieldIndex().field_index());
943 if (!maybe_result->To(&value)) return maybe_result;
944 ASSERT(!value->IsTheHole() || result->IsReadOnly());
945 return value->IsTheHole() ? heap->undefined_value() : value;
948 return result->GetConstant();
950 HandleScope scope(isolate);
951 Handle<Object> value = JSObject::GetPropertyWithCallback(
952 handle(result->holder(), isolate),
953 handle(receiver, isolate),
954 handle(result->GetCallbackObject(), isolate),
955 handle(name, isolate));
956 RETURN_IF_EMPTY_HANDLE(isolate, value);
960 return result->proxy()->GetPropertyWithHandler(receiver, name);
962 HandleScope scope(isolate);
963 Handle<Object> value = JSObject::GetPropertyWithInterceptor(
964 handle(result->holder(), isolate),
965 handle(receiver, isolate),
966 handle(name, isolate),
968 RETURN_IF_EMPTY_HANDLE(isolate, value);
// Generic element load: walks the prototype chain, substituting primitive
// wrapper prototypes for non-JSObject holders, honoring proxies, access
// checks and indexed interceptors along the way.
// NOTE(review): this listing appears truncated — loop conditions and closing
// braces are missing; comments describe the visible code only.
981 Handle<Object> Object::GetElementWithReceiver(Isolate* isolate,
982 Handle<Object> object,
983 Handle<Object> receiver,
985 Handle<Object> holder;
987 // Iterate up the prototype chain until an element is found or the null
988 // prototype is encountered.
989 for (holder = object;
991 holder = Handle<Object>(holder->GetPrototype(isolate), isolate)) {
992 if (!holder->IsJSObject()) {
993 Context* native_context = isolate->context()->native_context();
// Substitute the primitive's wrapper prototype as the holder.
994 if (holder->IsNumber()) {
995 holder = Handle<Object>(
996 native_context->number_function()->instance_prototype(), isolate);
997 } else if (holder->IsFloat32x4()) {
998 holder = Handle<Object>(
999 native_context->float32x4_function()->instance_prototype(),
1001 } else if (holder->IsInt32x4()) {
1002 holder = Handle<Object>(
1003 native_context->int32x4_function()->instance_prototype(), isolate);
1004 } else if (holder->IsString()) {
1005 holder = Handle<Object>(
1006 native_context->string_function()->instance_prototype(), isolate);
1007 } else if (holder->IsSymbol()) {
1008 holder = Handle<Object>(
1009 native_context->symbol_function()->instance_prototype(), isolate);
1010 } else if (holder->IsBoolean()) {
1011 holder = Handle<Object>(
1012 native_context->boolean_function()->instance_prototype(), isolate);
1013 } else if (holder->IsJSProxy()) {
// Proxies answer element loads through their handler trap.
1014 CALL_HEAP_FUNCTION(isolate,
1015 Handle<JSProxy>::cast(holder)->GetElementWithHandler(
1019 // Undefined and null have no indexed properties.
1020 ASSERT(holder->IsUndefined() || holder->IsNull());
1021 return isolate->factory()->undefined_value();
1025 // Inline the case for JSObjects. Doing so significantly improves the
1026 // performance of fetching elements where checking the prototype chain is
1028 Handle<JSObject> js_object = Handle<JSObject>::cast(holder);
1030 // Check access rights if needed.
1031 if (js_object->IsAccessCheckNeeded()) {
1032 if (!isolate->MayIndexedAccessWrapper(js_object, index, v8::ACCESS_GET)) {
1033 isolate->ReportFailedAccessCheckWrapper(js_object, v8::ACCESS_GET);
1034 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
1035 return isolate->factory()->undefined_value();
1039 if (js_object->HasIndexedInterceptor()) {
1040 return JSObject::GetElementWithInterceptor(js_object, receiver, index);
// Fast path: read straight from the elements backing store.
1043 if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
1044 Handle<Object> result = js_object->GetElementsAccessor()->Get(
1045 receiver, js_object, index);
1046 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
1047 if (!result->IsTheHole()) return result;
1051 return isolate->factory()->undefined_value();
// Maps a value to its [[Prototype]]: JS receivers use their map's prototype;
// primitives (heap number, Float32x4, Int32x4, string, symbol, boolean) use
// the instance prototype of the matching native-context constructor function;
// anything else (undefined/null per the final return) yields null.
// NOTE(review): extraction appears to have dropped lines here (e.g. an
// IsSmi() early-dispatch before the first return and the closing braces of
// each if) — verify structure against the upstream file.
1055 Object* Object::GetPrototype(Isolate* isolate) {
1057 Context* context = isolate->context()->native_context();
1058 return context->number_function()->instance_prototype();
1061 HeapObject* heap_object = HeapObject::cast(this);
1063 // The object is either a number, a string, a boolean,
1064 // a real JS object, or a Harmony proxy.
1065 if (heap_object->IsJSReceiver()) {
1066 return heap_object->map()->prototype();
1068 Context* context = isolate->context()->native_context();
1070 if (heap_object->IsHeapNumber()) {
1071 return context->number_function()->instance_prototype();
1073 if (heap_object->IsFloat32x4()) {
1074 return context->float32x4_function()->instance_prototype();
1076 if (heap_object->IsInt32x4()) {
1077 return context->int32x4_function()->instance_prototype();
1079 if (heap_object->IsString()) {
1080 return context->string_function()->instance_prototype();
1082 if (heap_object->IsSymbol()) {
1083 return context->symbol_function()->instance_prototype();
1085 if (heap_object->IsBoolean()) {
1086 return context->boolean_function()->instance_prototype();
1088 return isolate->heap()->null_value();
// GetMarkerMap: returns the map used to mark/classify this value. Smis have
// no map of their own, so they masquerade as heap numbers; every heap object
// reports its real map.
1093 Map* Object::GetMarkerMap(Isolate* isolate) {
1094 if (IsSmi()) return isolate->heap()->heap_number_map();
1095 return HeapObject::cast(this)->map();
// GetHash: returns this value's hash as a Smi. Numbers hash their IEEE-754
// bit pattern (masked to the Smi range), names use their cached string hash,
// oddballs hash their canonical string form, and JS receivers fall through
// to their identity hash.
// NOTE(review): the type-dispatch guards (e.g. "if (IsNumber()) {") and
// closing braces appear to have been dropped in extraction — confirm against
// upstream before editing logic.
1099 Object* Object::GetHash() {
1100 // The object is either a number, a name, an odd-ball,
1101 // a real JS object, or a Harmony proxy.
1103 uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
1104 return Smi::FromInt(hash & Smi::kMaxValue);
1107 uint32_t hash = Name::cast(this)->Hash();
1108 return Smi::FromInt(hash);
1111 uint32_t hash = Oddball::cast(this)->to_string()->Hash();
1112 return Smi::FromInt(hash);
1115 ASSERT(IsJSReceiver());
1116 return JSReceiver::cast(this)->GetIdentityHash();
// Returns an existing hash for |object| if one is available; otherwise the
// object must be a JS receiver and a fresh identity hash is created for it.
// NOTE(review): the early-return on an existing Smi hash (between the two
// visible halves) appears to have been dropped in extraction.
1120 Handle<Object> Object::GetOrCreateHash(Handle<Object> object,
1122 Handle<Object> hash(object->GetHash(), isolate);
1126 ASSERT(object->IsJSReceiver());
1127 return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
// Implements the ES5 SameValue algorithm (spec section 9.12): like == but
// SameValue(NaN, NaN) is true and SameValue(+0, -0) is false (distinguished
// via the sign of 1/x). Strings compare by content; everything else falls
// back to pointer identity (checked first).
1131 bool Object::SameValue(Object* other) {
1132 if (other == this) return true;
1134 // The object is either a number, a name, an odd-ball,
1135 // a real JS object, or a Harmony proxy.
1136 if (IsNumber() && other->IsNumber()) {
1137 double this_value = Number();
1138 double other_value = other->Number();
1139 bool equal = this_value == other_value;
1140 // SameValue(NaN, NaN) is true.
1141 if (!equal) return std::isnan(this_value) && std::isnan(other_value);
1142 // SameValue(0.0, -0.0) is false.
1143 return (this_value != 0) || ((1 / this_value) == (1 / other_value));
1145 if (IsString() && other->IsString()) {
1146 return String::cast(this)->Equals(String::cast(other));
// NOTE(review): the final "return false;" and closing brace appear to have
// been dropped in extraction.
// Debug-printing helpers. The FILE* overload renders into a StringStream
// first and then dumps the accumulated text to the file; the StringStream
// overload dispatches on the value kind (Smi / Failure / heap object).
1152 void Object::ShortPrint(FILE* out) {
1153 HeapStringAllocator allocator;
1154 StringStream accumulator(&allocator);
1155 ShortPrint(&accumulator);
1156 accumulator.OutputToFile(out);
// NOTE(review): the "if (IsSmi()) {" guard preceding the first branch and
// several closing braces appear to have been dropped in extraction.
1160 void Object::ShortPrint(StringStream* accumulator) {
1162 Smi::cast(this)->SmiPrint(accumulator);
1163 } else if (IsFailure()) {
1164 Failure::cast(this)->FailurePrint(accumulator);
1166 HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
// Smi printing: the raw integer value, to FILE* or StringStream.
1171 void Smi::SmiPrint(FILE* out) {
1172 PrintF(out, "%d", value());
1176 void Smi::SmiPrint(StringStream* accumulator) {
1177 accumulator->Add("%d", value());
// Failure printing: the failure payload rendered as a pointer.
1181 void Failure::FailurePrint(StringStream* accumulator) {
1182 accumulator->Add("Failure(%p)", reinterpret_cast<void*>(value()));
1186 void Failure::FailurePrint(FILE* out) {
1187 PrintF(out, "Failure(%p)", reinterpret_cast<void*>(value()));
1191 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
1192 // English? Returns false for non-ASCII or words that don't start with
1193 // a capital letter. The a/an rule follows pronunciation in English.
1194 // We don't use the BBC's overcorrect "an historic occasion" though if
1195 // you speak a dialect you may well say "an 'istoric occasion".
1196 static bool AnWord(String* str) {
1197 if (str->length() == 0) return false; // A nothing.
1198 int c0 = str->Get(0);
1199 int c1 = str->length() > 1 ? str->Get(1) : 0;
// NOTE(review): the condition preceding this return (presumably the 'U'
// followed-by-lowercase vowel-sound case, per the comment) was dropped in
// extraction, as was the final "return false;".
1202 return true; // An Umpire, but a UTF8String, a U.
1204 } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
1205 return true; // An Ape, an ABCBook.
1206 } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
1207 (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
1208 c0 == 'S' || c0 == 'X')) {
1209 return true; // An MP3File, an M.
// Flattens a (cons) string in place: allocates a sequential one- or two-byte
// backing string, copies both halves into it via WriteToFlat, then rewires
// the cons cell so first() points at the flat result and second() is the
// empty string. Returns the allocation failure unchanged if it cannot
// allocate. NOTE(review): several lines (declarations of |len|, |object|,
// |result|, the short-second() fast path, the else between the one-/two-byte
// arms, and closing braces) were dropped in extraction — do not edit logic
// without consulting the upstream file.
1215 MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
1217 // Do not attempt to flatten in debug mode when allocation is not
1218 // allowed. This is to avoid an assertion failure when allocating.
1219 // Flattening strings is the only case where we always allow
1220 // allocation because no GC is performed if the allocation fails.
1221 if (!AllowHeapAllocation::IsAllowed()) return this;
1224 Heap* heap = GetHeap();
1225 switch (StringShape(this).representation_tag()) {
1226 case kConsStringTag: {
1227 ConsString* cs = ConsString::cast(this);
1228 if (cs->second()->length() == 0) {
1231 // There's little point in putting the flat string in new space if the
1232 // cons string is in old space. It can never get GCed until there is
1234 PretenureFlag tenure = heap->InNewSpace(this) ? pretenure : TENURED;
1238 if (IsOneByteRepresentation()) {
1239 { MaybeObject* maybe_object =
1240 heap->AllocateRawOneByteString(len, tenure);
1241 if (!maybe_object->ToObject(&object)) return maybe_object;
1243 result = String::cast(object);
1244 String* first = cs->first();
1245 int first_length = first->length();
1246 uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
1247 WriteToFlat(first, dest, 0, first_length);
1248 String* second = cs->second();
1250 dest + first_length,
1252 len - first_length);
1254 { MaybeObject* maybe_object =
1255 heap->AllocateRawTwoByteString(len, tenure);
1256 if (!maybe_object->ToObject(&object)) return maybe_object;
1258 result = String::cast(object);
1259 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
1260 String* first = cs->first();
1261 int first_length = first->length();
1262 WriteToFlat(first, dest, 0, first_length);
1263 String* second = cs->second();
1265 dest + first_length,
1267 len - first_length);
// Rewire the cons cell: first() now holds the flat string; second() becomes
// the empty string (no write barrier needed for the immortal empty string).
1269 cs->set_first(result);
1270 cs->set_second(heap->empty_string(), SKIP_WRITE_BARRIER);
// Morphs this string in place into a two-byte external string backed by
// |resource|: swaps the map (choosing short/regular and internalized/plain
// variants), installs the resource pointer, re-hashes internalized strings,
// and fills the now-unused tail of the old object with filler so the heap
// stays iterable. NOTE(review): extraction dropped lines (e.g. the
// "return false" for too-small strings, parts of the nested ?: map
// selection, and the final "return true") — verify against upstream.
1279 bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
1280 // Externalizing twice leaks the external resource, so it's
1281 // prohibited by the API.
1282 ASSERT(!this->IsExternalString());
1283 #ifdef ENABLE_SLOW_ASSERTS
1284 if (FLAG_enable_slow_asserts) {
1285 // Assert that the resource and the string are equivalent.
1286 ASSERT(static_cast<size_t>(this->length()) == resource->length());
1287 ScopedVector<uc16> smart_chars(this->length());
1288 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1289 ASSERT(memcmp(smart_chars.start(),
1291 resource->length() * sizeof(smart_chars[0])) == 0);
1294 Heap* heap = GetHeap();
1295 int size = this->Size(); // Byte size of the original string.
1296 if (size < ExternalString::kShortSize) {
1299 bool is_ascii = this->IsOneByteRepresentation();
1300 bool is_internalized = this->IsInternalizedString();
1302 // Morph the string to an external string by replacing the map and
1303 // reinitializing the fields. This won't work if
1304 // - the space the existing string occupies is too small for a regular
1306 // - the existing string is in old pointer space and the backing store of
1307 // the external string is not aligned. The GC cannot deal with a field
1308 // containing a possibly unaligned address to outside of V8's heap.
1309 // In either case we resort to a short external string instead, omitting
1310 // the field caching the address of the backing store. When we encounter
1311 // short external strings in generated code, we need to bailout to runtime.
1312 if (size < ExternalString::kSize ||
1313 heap->old_pointer_space()->Contains(this)) {
1314 this->set_map_no_write_barrier(
1318 short_external_internalized_string_with_one_byte_data_map()
1319 : heap->short_external_internalized_string_map())
1321 ? heap->short_external_string_with_one_byte_data_map()
1322 : heap->short_external_string_map()));
1324 this->set_map_no_write_barrier(
1327 ? heap->external_internalized_string_with_one_byte_data_map()
1328 : heap->external_internalized_string_map())
1330 ? heap->external_string_with_one_byte_data_map()
1331 : heap->external_string_map()));
1333 ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
1334 self->set_resource(resource);
1335 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1337 // Fill the remainder of the string with dead wood.
1338 int new_size = this->Size(); // Byte size of the external String object.
1339 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1340 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// ASCII counterpart of the two-byte MakeExternal above: morphs the string in
// place into an external ASCII string backed by |resource|, choosing the
// short variant when the object is too small or lives in old pointer space,
// then fills the unused tail with filler. The slow assert additionally
// verifies a two-byte original really contains only one-byte data.
// NOTE(review): extraction dropped lines (early "return false", closing
// braces, final "return true") — verify against upstream.
1345 bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
1346 #ifdef ENABLE_SLOW_ASSERTS
1347 if (FLAG_enable_slow_asserts) {
1348 // Assert that the resource and the string are equivalent.
1349 ASSERT(static_cast<size_t>(this->length()) == resource->length());
1350 if (this->IsTwoByteRepresentation()) {
1351 ScopedVector<uint16_t> smart_chars(this->length());
1352 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1353 ASSERT(String::IsOneByte(smart_chars.start(), this->length()));
1355 ScopedVector<char> smart_chars(this->length());
1356 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1357 ASSERT(memcmp(smart_chars.start(),
1359 resource->length() * sizeof(smart_chars[0])) == 0);
1362 Heap* heap = GetHeap();
1363 int size = this->Size(); // Byte size of the original string.
1364 if (size < ExternalString::kShortSize) {
1367 bool is_internalized = this->IsInternalizedString();
1369 // Morph the string to an external string by replacing the map and
1370 // reinitializing the fields. This won't work if
1371 // - the space the existing string occupies is too small for a regular
1373 // - the existing string is in old pointer space and the backing store of
1374 // the external string is not aligned. The GC cannot deal with a field
1375 // containing a possibly unaligned address to outside of V8's heap.
1376 // In either case we resort to a short external string instead, omitting
1377 // the field caching the address of the backing store. When we encounter
1378 // short external strings in generated code, we need to bailout to runtime.
1379 if (size < ExternalString::kSize ||
1380 heap->old_pointer_space()->Contains(this)) {
1381 this->set_map_no_write_barrier(
1382 is_internalized ? heap->short_external_ascii_internalized_string_map()
1383 : heap->short_external_ascii_string_map());
1385 this->set_map_no_write_barrier(
1386 is_internalized ? heap->external_ascii_internalized_string_map()
1387 : heap->external_ascii_string_map());
1389 ExternalAsciiString* self = ExternalAsciiString::cast(this);
1390 self->set_resource(resource);
1391 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1393 // Fill the remainder of the string with dead wood.
1394 int new_size = this->Size(); // Byte size of the external String object.
1395 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1396 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// Renders an abbreviated, printable form of the string into |accumulator|:
// "<Very long string[len]>" past kMaxShortPrintLength, "<Invalid String>"
// for strings that fail LooksValid(), otherwise "<String[len]: ...>" —
// with a backslash-escaped variant when the scan finds control characters,
// and a "..." suffix when truncated. NOTE(review): extraction dropped lines
// (the |len| declaration, the truncated=true assignment, the ascii-scan
// bookkeeping, branch braces) — verify structure upstream.
1401 void String::StringShortPrint(StringStream* accumulator) {
1403 if (len > kMaxShortPrintLength) {
1404 accumulator->Add("<Very long string[%u]>", len);
1408 if (!LooksValid()) {
1409 accumulator->Add("<Invalid String>");
1413 ConsStringIteratorOp op;
1414 StringCharacterStream stream(this, &op);
1416 bool truncated = false;
1417 if (len > kMaxShortPrintLength) {
1418 len = kMaxShortPrintLength;
// First pass: scan for characters outside printable ASCII to decide
// between the plain and the escaped rendering below.
1422 for (int i = 0; i < len; i++) {
1423 uint16_t c = stream.GetNext();
1425 if (c < 32 || c >= 127) {
// Plain rendering: characters emitted as-is.
1431 accumulator->Add("<String[%u]: ", length());
1432 for (int i = 0; i < len; i++) {
1433 accumulator->Put(static_cast<char>(stream.GetNext()));
1435 accumulator->Put('>');
1437 // Backslash indicates that the string contains control
1438 // characters and that backslashes are therefore escaped.
1439 accumulator->Add("<String[%u]\\: ", length());
1440 for (int i = 0; i < len; i++) {
1441 uint16_t c = stream.GetNext();
1443 accumulator->Add("\\n");
1444 } else if (c == '\r') {
1445 accumulator->Add("\\r");
1446 } else if (c == '\\') {
1447 accumulator->Add("\\\\");
1448 } else if (c < 32 || c > 126) {
1449 accumulator->Add("\\x%02x", c);
1451 accumulator->Put(static_cast<char>(c));
// Ellipsis marks output truncated at kMaxShortPrintLength.
1455 accumulator->Put('.');
1456 accumulator->Put('.');
1457 accumulator->Put('.');
1459 accumulator->Put('>');
// Renders a short human-readable description of a JSObject by instance
// type: arrays with their length, weak maps/sets, regexps, functions with
// their debug name and SharedFunctionInfo address, generators, modules, and
// a default branch that prints the constructor name (with a/an chosen by
// AnWord), deprecation state and map pointer, flagging constructors or
// shared infos that lie outside the heap as invalid. NOTE(review): many
// "break;" lines, case braces, and parts of the default branch were dropped
// in extraction.
1465 void JSObject::JSObjectShortPrint(StringStream* accumulator) {
1466 switch (map()->instance_type()) {
1467 case JS_ARRAY_TYPE: {
1468 double length = JSArray::cast(this)->length()->IsUndefined()
1470 : JSArray::cast(this)->length()->Number();
1471 accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
1474 case JS_WEAK_MAP_TYPE: {
1475 accumulator->Add("<JS WeakMap>");
1478 case JS_WEAK_SET_TYPE: {
1479 accumulator->Add("<JS WeakSet>");
1482 case JS_REGEXP_TYPE: {
1483 accumulator->Add("<JS RegExp>");
1486 case JS_FUNCTION_TYPE: {
1487 JSFunction* function = JSFunction::cast(this);
1488 Object* fun_name = function->shared()->DebugName();
1489 bool printed = false;
1490 if (fun_name->IsString()) {
1491 String* str = String::cast(fun_name);
1492 if (str->length() > 0) {
1493 accumulator->Add("<JS Function ");
1494 accumulator->Put(str);
1499 accumulator->Add("<JS Function");
1501 accumulator->Add(" (SharedFunctionInfo %p)",
1502 reinterpret_cast<void*>(function->shared()));
1503 accumulator->Put('>');
1506 case JS_GENERATOR_OBJECT_TYPE: {
1507 accumulator->Add("<JS Generator>");
1510 case JS_MODULE_TYPE: {
1511 accumulator->Add("<JS Module>");
1514 // All other JSObjects are rather similar to each other (JSObject,
1515 // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
1517 Map* map_of_this = map();
1518 Heap* heap = GetHeap();
1519 Object* constructor = map_of_this->constructor();
1520 bool printed = false;
// Sanity-check that the constructor actually lives in this heap before
// dereferencing it — this printer runs during crash dumps.
1521 if (constructor->IsHeapObject() &&
1522 !heap->Contains(HeapObject::cast(constructor))) {
1523 accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
1525 bool global_object = IsJSGlobalProxy();
1526 if (constructor->IsJSFunction()) {
1527 if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
1528 accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
1530 Object* constructor_name =
1531 JSFunction::cast(constructor)->shared()->name();
1532 if (constructor_name->IsString()) {
1533 String* str = String::cast(constructor_name);
1534 if (str->length() > 0) {
1535 bool vowel = AnWord(str);
1536 accumulator->Add("<%sa%s ",
1537 global_object ? "Global Object: " : "",
1539 accumulator->Put(str);
1540 accumulator->Add(" with %smap %p",
1541 map_of_this->is_deprecated() ? "deprecated " : "",
1549 accumulator->Add("<JS %sObject", global_object ? "Global " : "");
1553 accumulator->Add(" value = ");
1554 JSValue::cast(this)->value()->ShortPrint(accumulator);
1556 accumulator->Put('>');
// Trace helper: logs an elements-kind transition ("[from -> to] in <frame>
// for <object> from <old backing> to <new backing>") to |file|. No-op when
// the kinds are equal. NOTE(review): the trailing PrintF("\n") (or similar)
// and closing braces appear to have been dropped in extraction.
1563 void JSObject::PrintElementsTransition(
1564 FILE* file, Handle<JSObject> object,
1565 ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
1566 ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
1567 if (from_kind != to_kind) {
1568 PrintF(file, "elements transition [");
1569 PrintElementsKind(file, from_kind);
1570 PrintF(file, " -> ");
1571 PrintElementsKind(file, to_kind);
1572 PrintF(file, "] in ");
1573 JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
1574 PrintF(file, " for ");
1575 object->ShortPrint(file);
1576 PrintF(file, " from ");
1577 from_elements->ShortPrint(file);
1578 PrintF(file, " to ");
1579 to_elements->ShortPrint(file);
// Trace helper: logs a field-representation generalization for the
// descriptor at |modify_index| — either "c->f" for constant-to-field or the
// old/new representation mnemonics — plus the triggering reason, the number
// of invalidated maps past the split point, and the current JS frame.
// NOTE(review): several parameters (reason, modify_index, descriptors,
// split) and some separator PrintF lines were dropped in extraction.
1585 void Map::PrintGeneralization(FILE* file,
1590 bool constant_to_field,
1591 Representation old_representation,
1592 Representation new_representation) {
1593 PrintF(file, "[generalizing ");
1594 constructor_name()->PrintOn(file);
1596 Name* name = instance_descriptors()->GetKey(modify_index);
1597 if (name->IsString()) {
1598 String::cast(name)->PrintOn(file);
1600 PrintF(file, "{symbol %p}", static_cast<void*>(name));
1602 if (constant_to_field) {
1603 PrintF(file, ":c->f");
1605 PrintF(file, ":%s->%s",
1606 old_representation.Mnemonic(),
1607 new_representation.Mnemonic());
1610 if (strlen(reason) > 0) {
1611 PrintF(file, "%s", reason);
1613 PrintF(file, "+%i maps", descriptors - split);
1615 PrintF(file, ") [");
1616 JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
1617 PrintF(file, "]\n");
// Trace helper: logs which own descriptors changed during an instance
// migration from |original_map| to |new_map| — representation changes as
// "name:old->new", and CONSTANT-to-FIELD conversions by key name (or symbol
// address). NOTE(review): the map parameters on the signature, some PrintF
// separators, and the closing "]\n" line were dropped in extraction.
1621 void JSObject::PrintInstanceMigration(FILE* file,
1624 PrintF(file, "[migrating ");
1625 map()->constructor_name()->PrintOn(file);
1627 DescriptorArray* o = original_map->instance_descriptors();
1628 DescriptorArray* n = new_map->instance_descriptors();
1629 for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
1630 Representation o_r = o->GetDetails(i).representation();
1631 Representation n_r = n->GetDetails(i).representation();
1632 if (!o_r.Equals(n_r)) {
1633 String::cast(o->GetKey(i))->PrintOn(file);
1634 PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
1635 } else if (o->GetDetails(i).type() == CONSTANT &&
1636 n->GetDetails(i).type() == FIELD) {
1637 Name* name = o->GetKey(i);
1638 if (name->IsString()) {
1639 String::cast(name)->PrintOn(file);
1641 PrintF(file, "{symbol %p}", static_cast<void*>(name));
// Renders a short description of any heap object into |accumulator|,
// guarding first against pointers/maps outside the heap (this printer is
// used in crash dumps and must not fault). Strings and JSObjects delegate
// to their own short-printers; everything else is dispatched on instance
// type. NOTE(review): many "break;"/"return;" lines, case labels (e.g.
// MAP_TYPE, CODE_TYPE, SYMBOL_TYPE, CELL_TYPE, FOREIGN_TYPE, the default
// label) and closing braces were dropped in extraction.
1650 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
1651 Heap* heap = GetHeap();
1652 if (!heap->Contains(this)) {
1653 accumulator->Add("!!!INVALID POINTER!!!");
1656 if (!heap->Contains(map())) {
1657 accumulator->Add("!!!INVALID MAP!!!");
1661 accumulator->Add("%p ", this);
1664 String::cast(this)->StringShortPrint(accumulator);
1668 JSObject::cast(this)->JSObjectShortPrint(accumulator);
1671 switch (map()->instance_type()) {
1673 accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind());
1675 case FIXED_ARRAY_TYPE:
1676 accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
1678 case FIXED_DOUBLE_ARRAY_TYPE:
1679 accumulator->Add("<FixedDoubleArray[%u]>",
1680 FixedDoubleArray::cast(this)->length());
1682 case BYTE_ARRAY_TYPE:
1683 accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
1685 case FREE_SPACE_TYPE:
1686 accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size());
// Expands to one case per typed-array element type, for both the external
// and the fixed (on-heap) variants.
1688 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \
1689 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1690 accumulator->Add("<External" #Type "Array[%u]>", \
1691 External##Type##Array::cast(this)->length()); \
1693 case FIXED_##TYPE##_ARRAY_TYPE: \
1694 accumulator->Add("<Fixed" #Type "Array[%u]>", \
1695 Fixed##Type##Array::cast(this)->length()); \
1698 TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
1699 #undef TYPED_ARRAY_SHORT_PRINT
1701 case SHARED_FUNCTION_INFO_TYPE: {
1702 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1703 SmartArrayPointer<char> debug_name =
1704 shared->DebugName()->ToCString();
1705 if (debug_name[0] != 0) {
1706 accumulator->Add("<SharedFunctionInfo %s>", debug_name.get());
1708 accumulator->Add("<SharedFunctionInfo>");
1712 case JS_MESSAGE_OBJECT_TYPE:
1713 accumulator->Add("<JSMessageObject>");
// One "<Name>" case per struct type in STRUCT_LIST.
1715 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1717 accumulator->Put('<'); \
1718 accumulator->Add(#Name); \
1719 accumulator->Put('>'); \
1721 STRUCT_LIST(MAKE_STRUCT_CASE)
1722 #undef MAKE_STRUCT_CASE
1724 accumulator->Add("<Code>");
1726 case ODDBALL_TYPE: {
1728 accumulator->Add("<undefined>");
1729 else if (IsTheHole())
1730 accumulator->Add("<the hole>");
1732 accumulator->Add("<null>");
1734 accumulator->Add("<true>");
1736 accumulator->Add("<false>");
1738 accumulator->Add("<Odd Oddball>");
1742 Symbol* symbol = Symbol::cast(this);
1743 accumulator->Add("<Symbol: %d", symbol->Hash());
1744 if (!symbol->name()->IsUndefined()) {
1745 accumulator->Add(" ");
1746 String::cast(symbol->name())->StringShortPrint(accumulator);
1748 accumulator->Add(">");
1751 case HEAP_NUMBER_TYPE:
1752 accumulator->Add("<Number: ");
1753 HeapNumber::cast(this)->HeapNumberPrint(accumulator);
1754 accumulator->Put('>');
1756 case FLOAT32x4_TYPE:
1757 accumulator->Add("<Float32x4: ");
1758 Float32x4::cast(this)->Float32x4Print(accumulator);
1759 accumulator->Put('>');
1762 accumulator->Add("<Int32x4: ");
1763 Int32x4::cast(this)->Int32x4Print(accumulator);
1764 accumulator->Put('>');
1767 accumulator->Add("<JSProxy>");
1769 case JS_FUNCTION_PROXY_TYPE:
1770 accumulator->Add("<JSFunctionProxy>");
1773 accumulator->Add("<Foreign>");
1776 accumulator->Add("Cell for ");
1777 Cell::cast(this)->value()->ShortPrint(accumulator);
1779 case PROPERTY_CELL_TYPE:
1780 accumulator->Add("PropertyCell for ");
1781 PropertyCell::cast(this)->value()->ShortPrint(accumulator);
1784 accumulator->Add("<Other heap object (%d)>", map()->instance_type());
// Visits all pointer fields of this object: the map pointer first, then the
// type-specific body via IterateBody. NOTE(review): the line declaring |m|
// (presumably the map read between these statements) was dropped in
// extraction.
1790 void HeapObject::Iterate(ObjectVisitor* v) {
1792 IteratePointer(v, kMapOffset);
1793 // Handle object body
1795 IterateBody(m->instance_type(), SizeFromMap(m), v);
// Dispatches pointer-field iteration over the object body by instance type.
// Uses reinterpret_cast instead of checked casts because during GC the map
// pointer field may be encoded and <Type>::cast(this) would read it.
// Pointer-free types (numbers, SIMD values, byte arrays, free space, typed
// arrays) deliberately visit nothing. NOTE(review): many "break;" lines,
// several case labels (sequential-string tag, JS_SET/JS_MAP, ODDBALL,
// JS_PROXY, FOREIGN, MAP, CODE, CELL, SYMBOL, INT32x4, the default label)
// and closing braces were dropped in extraction.
1799 void HeapObject::IterateBody(InstanceType type, int object_size,
1801 // Avoiding <Type>::cast(this) because it accesses the map pointer field.
1802 // During GC, the map pointer field is encoded.
1803 if (type < FIRST_NONSTRING_TYPE) {
1804 switch (type & kStringRepresentationMask) {
1807 case kConsStringTag:
1808 ConsString::BodyDescriptor::IterateBody(this, v);
1810 case kSlicedStringTag:
1811 SlicedString::BodyDescriptor::IterateBody(this, v);
1813 case kExternalStringTag:
1814 if ((type & kStringEncodingMask) == kOneByteStringTag) {
1815 reinterpret_cast<ExternalAsciiString*>(this)->
1816 ExternalAsciiStringIterateBody(v);
1818 reinterpret_cast<ExternalTwoByteString*>(this)->
1819 ExternalTwoByteStringIterateBody(v);
1827 case FIXED_ARRAY_TYPE:
1828 FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
1830 case CONSTANT_POOL_ARRAY_TYPE:
1831 reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
1833 case FIXED_DOUBLE_ARRAY_TYPE:
1835 case JS_OBJECT_TYPE:
1836 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1837 case JS_GENERATOR_OBJECT_TYPE:
1838 case JS_MODULE_TYPE:
1842 case JS_ARRAY_BUFFER_TYPE:
1843 case JS_TYPED_ARRAY_TYPE:
1844 case JS_DATA_VIEW_TYPE:
1847 case JS_WEAK_MAP_TYPE:
1848 case JS_WEAK_SET_TYPE:
1849 case JS_REGEXP_TYPE:
1850 case JS_GLOBAL_PROXY_TYPE:
1851 case JS_GLOBAL_OBJECT_TYPE:
1852 case JS_BUILTINS_OBJECT_TYPE:
1853 case JS_MESSAGE_OBJECT_TYPE:
1854 JSObject::BodyDescriptor::IterateBody(this, object_size, v);
1856 case JS_FUNCTION_TYPE:
1857 reinterpret_cast<JSFunction*>(this)
1858 ->JSFunctionIterateBody(object_size, v);
1861 Oddball::BodyDescriptor::IterateBody(this, v);
1864 JSProxy::BodyDescriptor::IterateBody(this, v);
1866 case JS_FUNCTION_PROXY_TYPE:
1867 JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
1870 reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
1873 Map::BodyDescriptor::IterateBody(this, v);
1876 reinterpret_cast<Code*>(this)->CodeIterateBody(v);
1879 Cell::BodyDescriptor::IterateBody(this, v);
1881 case PROPERTY_CELL_TYPE:
1882 PropertyCell::BodyDescriptor::IterateBody(this, v);
1885 Symbol::BodyDescriptor::IterateBody(this, v);
1888 case HEAP_NUMBER_TYPE:
1889 case FLOAT32x4_TYPE:
1892 case BYTE_ARRAY_TYPE:
1893 case FREE_SPACE_TYPE:
1896 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
1897 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1898 case FIXED_##TYPE##_ARRAY_TYPE: \
1901 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1902 #undef TYPED_ARRAY_CASE
1904 case SHARED_FUNCTION_INFO_TYPE: {
1905 SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
1909 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1911 STRUCT_LIST(MAKE_STRUCT_CASE)
1912 #undef MAKE_STRUCT_CASE
// AllocationSite has weak fields, hence a dedicated body descriptor
// instead of the generic struct iteration.
1913 if (type == ALLOCATION_SITE_TYPE) {
1914 AllocationSite::BodyDescriptor::IterateBody(this, v);
1916 StructBodyDescriptor::IterateBody(this, object_size, v);
1920 PrintF("Unknown type: %d\n", type);
// ToBoolean for heap numbers: false for NaN (exponent 2047 with a nonzero
// mantissa, per IEEE-754 double encoding) and for +/-0 (exponent and
// mantissa both zero); true otherwise. Inspects the raw bits through an
// endian-specific union. NOTE(review): the line loading the value into |u|
// and the final "return true;" were dropped in extraction.
1926 bool HeapNumber::HeapNumberBooleanValue() {
1927 // NaN, +0, and -0 should return the false object
1928 #if __BYTE_ORDER == __LITTLE_ENDIAN
1929 union IeeeDoubleLittleEndianArchType u;
1930 #elif __BYTE_ORDER == __BIG_ENDIAN
1931 union IeeeDoubleBigEndianArchType u;
1934 if (u.bits.exp == 2047) {
1935 // Detect NaN for IEEE double precision floating point.
1936 if ((u.bits.man_low | u.bits.man_high) != 0) return false;
1938 if (u.bits.exp == 0) {
1939 // Detect +0, and -0 for IEEE double precision floating point.
1940 if ((u.bits.man_low | u.bits.man_high) == 0) return false;
// Printers for HeapNumber, Float32x4 and Int32x4. The StringStream variants
// deliberately format into a fixed local buffer first: Windows vsnprintf can
// allocate while formatting %g, which must not happen while producing
// post-crash stack traces.
1946 void HeapNumber::HeapNumberPrint(FILE* out) {
1947 PrintF(out, "%.16g", Number());
1951 void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
1952 // The Windows version of vsnprintf can allocate when printing a %g string
1953 // into a buffer that may not be big enough. We don't want random memory
1954 // allocation when producing post-crash stack traces, so we print into a
1955 // buffer that is plenty big enough for any floating point number, then
1956 // print that using vsnprintf (which may truncate but never allocate if
1957 // there is no more space in the buffer).
1958 EmbeddedVector<char, 100> buffer;
1959 OS::SNPrintF(buffer, "%.16g", Number());
1960 accumulator->Add("%s", buffer.start());
1964 void Float32x4::Float32x4Print(FILE* out) {
1965 PrintF(out, "%.16g %.16g %.16g %.16g", x(), y(), z(), w());
1969 void Float32x4::Float32x4Print(StringStream* accumulator) {
1970 // The Windows version of vsnprintf can allocate when printing a %g string
1971 // into a buffer that may not be big enough. We don't want random memory
1972 // allocation when producing post-crash stack traces, so we print into a
1973 // buffer that is plenty big enough for any floating point number, then
1974 // print that using vsnprintf (which may truncate but never allocate if
1975 // there is no more space in the buffer).
1976 EmbeddedVector<char, 100> buffer;
1977 OS::SNPrintF(buffer, "%.16g %.16g %.16g %.16g", x(), y(), z(), w());
1978 accumulator->Add("%s", buffer.start());
1982 void Int32x4::Int32x4Print(FILE* out) {
1983 PrintF(out, "%u %u %u %u", x(), y(), z(), w());
1987 void Int32x4::Int32x4Print(StringStream* accumulator) {
1988 // The Windows version of vsnprintf can allocate when printing a %g string
1989 // into a buffer that may not be big enough. We don't want random memory
1990 // allocation when producing post-crash stack traces, so we print into a
1991 // buffer that is plenty big enough for any floating point number, then
1992 // print that using vsnprintf (which may truncate but never allocate if
1993 // there is no more space in the buffer).
1994 EmbeddedVector<char, 100> buffer;
1995 OS::SNPrintF(buffer, "%u %u %u %u", x(), y(), z(), w());
1996 accumulator->Add("%s", buffer.start());
2000 String* JSReceiver::class_name() {
2001 if (IsJSFunction() && IsJSFunctionProxy()) {
2002 return GetHeap()->function_class_string();
2004 if (map()->constructor()->IsJSFunction()) {
2005 JSFunction* constructor = JSFunction::cast(map()->constructor());
2006 return String::cast(constructor->shared()->instance_class_name());
2008 // If the constructor is not present, return "Object".
2009 return GetHeap()->Object_string();
// Best-effort constructor name for debugging: the constructor function's
// explicit name, else its inferred name, else (recursively) the constructor
// name of the prototype when it is a JSObject, else "Object".
// NOTE(review): the closing brace of the if-block (and blank separators)
// were dropped in extraction.
2013 String* Map::constructor_name() {
2014 if (constructor()->IsJSFunction()) {
2015 JSFunction* constructor = JSFunction::cast(this->constructor());
2016 String* name = String::cast(constructor->shared()->name());
2017 if (name->length() > 0) return name;
2018 String* inferred_name = constructor->shared()->inferred_name();
2019 if (inferred_name->length() > 0) return inferred_name;
2020 Object* proto = prototype();
2021 if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
2023 // TODO(rossberg): what about proxies?
2024 // If the constructor is not present, return "Object".
2025 return GetHeap()->Object_string();
// Thin forwarder: a receiver's constructor name is its map's.
2029 String* JSReceiver::constructor_name() {
2030 return map()->constructor_name();
2034 // TODO(mstarzinger): Temporary wrapper until handlified.
// Handle-based wrapper that allocates backing storage (e.g. a mutable
// HeapNumber box for double representation) for |object|, retrying on GC
// via CALL_HEAP_FUNCTION.
2035 static Handle<Object> NewStorageFor(Isolate* isolate,
2036 Handle<Object> object,
2037 Representation representation) {
2038 Heap* heap = isolate->heap();
2039 CALL_HEAP_FUNCTION(isolate,
2040 object->AllocateNewStorageFor(heap, representation),
// Raw-pointer core: copies |map| with a new FIELD descriptor appended and
// decrements the unused-property-field budget, topping it back up by
// kFieldsAdded when it would go negative (a new properties backing store
// slab is implied). NOTE(review): the |new_map| declaration, the name/index
// parameters on the signature, and the final return were dropped in
// extraction.
2045 static MaybeObject* CopyAddFieldDescriptor(Map* map,
2048 PropertyAttributes attributes,
2049 Representation representation,
2050 TransitionFlag flag) {
2052 FieldDescriptor new_field_desc(name, index, attributes, representation);
2053 MaybeObject* maybe_map = map->CopyAddDescriptor(&new_field_desc, flag);
2054 if (!maybe_map->To(&new_map)) return maybe_map;
2055 int unused_property_fields = map->unused_property_fields() - 1;
2056 if (unused_property_fields < 0) {
2057 unused_property_fields += JSObject::kFieldsAdded;
2059 new_map->set_unused_property_fields(unused_property_fields);
// Handlified wrapper over the raw version above, GC-retry via
// CALL_HEAP_FUNCTION.
2064 static Handle<Map> CopyAddFieldDescriptor(Handle<Map> map,
2067 PropertyAttributes attributes,
2068 Representation representation,
2069 TransitionFlag flag) {
2070 CALL_HEAP_FUNCTION(map->GetIsolate(),
2071 CopyAddFieldDescriptor(
2072 *map, *name, index, attributes, representation, flag),
// Adds a new fast (in-object / descriptor-based) property. Falls back to
// dictionary mode (normalize + AddSlowProperty) when the name is not
// cacheable or the object already has too many fast properties. Otherwise
// appends a field descriptor with the value's optimal representation,
// migrates the object to the new map, and stores the value — boxing doubles
// into the pre-allocated HeapNumber unless the value is uninitialized.
// NOTE(review): a "return;" after the slow-path fallback and the else/brace
// lines around the double-vs-tagged store were dropped in extraction.
2077 void JSObject::AddFastProperty(Handle<JSObject> object,
2079 Handle<Object> value,
2080 PropertyAttributes attributes,
2081 StoreFromKeyed store_mode,
2082 ValueType value_type,
2083 TransitionFlag flag) {
2084 ASSERT(!object->IsJSGlobalProxy());
2085 ASSERT(DescriptorArray::kNotFound ==
2086 object->map()->instance_descriptors()->Search(
2087 *name, object->map()->NumberOfOwnDescriptors()));
2089 // Normalize the object if the name is an actual name (not the
2090 // hidden strings) and is not a real identifier.
2091 // Normalize the object if it will have too many fast properties.
2092 Isolate* isolate = object->GetIsolate();
2093 if (!name->IsCacheable(isolate) ||
2094 object->TooManyFastProperties(store_mode)) {
2095 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
2096 AddSlowProperty(object, name, value, attributes);
2100 // Compute the new index for new field.
2101 int index = object->map()->NextFreePropertyIndex();
2103 // Allocate new instance descriptors with (name, index) added
// Context-extension objects force tagged storage so representations stay
// trivial for the scope-lookup machinery.
2104 if (object->IsJSContextExtensionObject()) value_type = FORCE_TAGGED;
2105 Representation representation = value->OptimalRepresentation(value_type);
2106 Handle<Map> new_map = CopyAddFieldDescriptor(
2107 handle(object->map()), name, index, attributes, representation, flag);
2109 JSObject::MigrateToMap(object, new_map);
2111 if (representation.IsDouble()) {
2112 // Nothing more to be done.
2113 if (value->IsUninitialized()) return;
2114 HeapNumber* box = HeapNumber::cast(object->RawFastPropertyAt(index));
2115 box->set_value(value->Number());
2117 object->FastPropertyAtPut(index, *value);
// Raw-pointer core: copies |map| with a new CONSTANT descriptor appended.
// NOTE(review): the name/value parameters on this signature were dropped in
// extraction.
2122 static MaybeObject* CopyAddConstantDescriptor(Map* map,
2125 PropertyAttributes attributes,
2126 TransitionFlag flag) {
2127 ConstantDescriptor new_constant_desc(name, value, attributes);
2128 return map->CopyAddDescriptor(&new_constant_desc, flag);
// Handlified wrapper over the raw version, GC-retry via CALL_HEAP_FUNCTION.
2132 static Handle<Map> CopyAddConstantDescriptor(Handle<Map> map,
2134 Handle<Object> value,
2135 PropertyAttributes attributes,
2136 TransitionFlag flag) {
2137 CALL_HEAP_FUNCTION(map->GetIsolate(),
2138 CopyAddConstantDescriptor(
2139 *map, *name, *value, attributes, flag),
// Adds a CONSTANT property by appending a constant descriptor to a copy of
// the map and migrating the object to it. Transitions are suppressed for
// global objects (the condition continues on lines dropped in extraction —
// presumably covering special-attribute properties as well; verify
// upstream).
2144 void JSObject::AddConstantProperty(Handle<JSObject> object,
2146 Handle<Object> constant,
2147 PropertyAttributes attributes,
2148 TransitionFlag initial_flag) {
2149 TransitionFlag flag =
2150 // Do not add transitions to global objects.
2151 (object->IsGlobalObject() ||
2152 // Don't add transitions to special properties with non-trivial
2158 // Allocate new instance descriptors with (name, constant) added.
2159 Handle<Map> new_map = CopyAddConstantDescriptor(
2160 handle(object->map()), name, constant, attributes, flag);
2162 JSObject::MigrateToMap(object, new_map);
// Adds a property to a dictionary-mode object. Global objects store values
// in PropertyCells: an orphaned cell for the same name is reused (with a
// fresh enumeration index); otherwise a new cell is allocated and — per the
// visible tail — the (name, value, details) triple is added to the name
// dictionary, swapping in the possibly reallocated backing store.
// NOTE(review): the "return;" closing the cell-reuse branch and the lines
// routing the new cell into the dictionary path were dropped in extraction.
2166 void JSObject::AddSlowProperty(Handle<JSObject> object,
2168 Handle<Object> value,
2169 PropertyAttributes attributes) {
2170 ASSERT(!object->HasFastProperties());
2171 Isolate* isolate = object->GetIsolate();
2172 Handle<NameDictionary> dict(object->property_dictionary());
2173 if (object->IsGlobalObject()) {
2174 // In case name is an orphaned property reuse the cell.
2175 int entry = dict->FindEntry(*name);
2176 if (entry != NameDictionary::kNotFound) {
2177 Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
2178 PropertyCell::SetValueInferType(cell, value);
2179 // Assign an enumeration index to the property and update
2180 // SetNextEnumerationIndex.
2181 int index = dict->NextEnumerationIndex();
2182 PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
2183 dict->SetNextEnumerationIndex(index + 1);
2184 dict->SetEntry(entry, *name, *cell, details);
2187 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
2188 PropertyCell::SetValueInferType(cell, value);
2191 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
2192 Handle<NameDictionary> result = NameDictionaryAdd(dict, name, value, details);
2193 if (*dict != *result) object->set_properties(*result);
// Adds a brand-new property to |object| (caller has verified it does not
// exist yet). Picks constant/fast/slow representation, enforces the
// extensibility check, and enqueues an Object.observe "add" record for
// observed objects. Returns an empty handle if a TypeError was thrown.
Handle<Object> JSObject::AddProperty(Handle<JSObject> object,
                                     Handle<Object> value,
                                     PropertyAttributes attributes,
                                     StrictMode strict_mode,
                                     JSReceiver::StoreFromKeyed store_mode,
                                     ExtensibilityCheck extensibility_check,
                                     ValueType value_type,
                                     TransitionFlag transition_flag) {
  ASSERT(!object->IsJSGlobalProxy());
  Isolate* isolate = object->GetIsolate();
  // Property keys must be unique names (internalized strings or symbols).
  if (!name->IsUniqueName()) {
    name = isolate->factory()->InternalizeString(
        Handle<String>::cast(name));
  if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
      !object->map()->is_extensible()) {
    if (strict_mode == SLOPPY) {
      // Strict-mode failure path: throw and signal via an empty handle.
      Handle<Object> args[1] = { name };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "object_not_extensible", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
  if (object->HasFastProperties()) {
    // Ensure the descriptor array does not get too big.
    if (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors) {
      // TODO(verwaest): Support other constants.
      // if (mode == ALLOW_AS_CONSTANT &&
      //     !value->IsTheHole() &&
      //     !value->IsConsString()) {
      if (value->IsJSFunction()) {
        // Functions are stored as CONSTANT descriptors in the map.
        AddConstantProperty(object, name, value, attributes, transition_flag);
        AddFastProperty(object, name, value, attributes, store_mode,
                        value_type, transition_flag);
      // Normalize the object to prevent very large instance descriptors.
      // This eliminates unwanted N^2 allocation and lookup behavior.
      NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
      AddSlowProperty(object, name, value, attributes);
    AddSlowProperty(object, name, value, attributes);
  // Notify Object.observe observers, except for the internal hidden-string
  // property used to attach hidden state.
  if (object->map()->is_observed() &&
      *name != isolate->heap()->hidden_string()) {
    Handle<Object> old_value = isolate->factory()->the_hole_value();
    EnqueueChangeRecord(object, "add", name, old_value);
// Enqueues an Object.observe change record of kind |type_str| ("add",
// "update", ...) by calling the isolate's observers_notify_change JS function.
void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
                                   const char* type_str,
                                   Handle<Object> old_value) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
  if (object->IsJSGlobalObject()) {
    // Observers see the global receiver (proxy), not the bare global object.
    object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
  Handle<Object> args[] = { type, object, name, old_value };
  // Trailing arguments are optional: drop |old_value| when it is the hole,
  // and both |name| and |old_value| when |name| is null.
  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_notify_change()),
                  isolate->factory()->undefined_value(),
// Sets a property after the named interceptor declined to handle the store.
// Order: local real property / map transition, then setters on the prototype
// chain, and finally adding a fresh property.
Handle<Object> JSObject::SetPropertyPostInterceptor(
    Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode) {
  // Check local property, ignore interceptor.
  LookupResult result(object->GetIsolate());
  object->LocalLookupRealNamedProperty(*name, &result);
  if (!result.IsFound()) {
    // No own property: a transition on the map may still apply.
    object->map()->LookupTransition(*object, *name, &result);
  if (result.IsFound()) {
    // An existing property or a map transition was found. Use set property to
    // handle all these cases.
    return SetPropertyForResult(object, &result, name, value, attributes,
                                strict_mode, MAY_BE_STORE_FROM_KEYED);
  // Consult accessors / read-only properties along the prototype chain;
  // |done| is set when the prototype chain fully handled the store.
  Handle<Object> result_object = SetPropertyViaPrototypes(
      object, name, value, attributes, strict_mode, &done);
  if (done) return result_object;
  // Add a new real property.
  return AddProperty(object, name, value, attributes, strict_mode);
// Overwrites an existing dictionary-mode property with a NORMAL entry,
// preserving its enumeration order when the property already exists.
static void ReplaceSlowProperty(Handle<JSObject> object,
                                Handle<Object> value,
                                PropertyAttributes attributes) {
  NameDictionary* dictionary = object->property_dictionary();
  int old_index = dictionary->FindEntry(*name);
  int new_enumeration_index = 0;  // 0 means "Use the next available index."
  // NOTE(review): compares against -1 rather than NameDictionary::kNotFound
  // as sibling code does — same value, but worth unifying.
  if (old_index != -1) {
    // All calls to ReplaceSlowProperty have had all transitions removed.
    new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
  PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
  JSObject::SetNormalizedProperty(object, name, value, new_details);
2328 const char* Representation::Mnemonic() const {
2330 case kNone: return "v";
2331 case kTagged: return "t";
2332 case kSmi: return "s";
2333 case kDouble: return "d";
2334 case kFloat32x4: return "float32x4";
2335 case kInt32x4: return "int32x44";
2336 case kInteger32: return "i";
2337 case kHeapObject: return "h";
2338 case kExternal: return "x";
// Overwrites the trimmed-off tail of a FixedArray (starting at |new_end|)
// with Smi zeros so stale pointers are not mistaken for live references.
static void ZapEndOfFixedArray(Address new_end, int to_trim) {
  // If we are doing a big trim in old space then we zap the space.
  Object** zap = reinterpret_cast<Object**>(new_end);
  zap++;  // Header of filler must be at least one word so skip that.
  for (int i = 1; i < to_trim; i++) {
    *zap++ = Smi::FromInt(0);
// Shrinks |elms| in place by |to_trim| elements: zaps the freed tail (unless
// trimming during GC without zapping), installs a filler object over it,
// shortens the length, and keeps heap accounting/profiling consistent.
template<Heap::InvocationMode mode>
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
  // COW arrays are shared; trimming one in place would corrupt other owners.
  ASSERT(elms->map() != heap->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  ASSERT(!heap->lo_space()->Contains(elms));
  const int len = elms->length();
  ASSERT(to_trim < len);
  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
  if (mode != Heap::FROM_GC || Heap::ShouldZapGarbage()) {
    ZapEndOfFixedArray(new_end, to_trim);
  int size_delta = to_trim * kPointerSize;
  // Technically in new space this write might be omitted (except for
  // debug mode which iterates through the heap), but to play safer
  heap->CreateFillerObjectAt(new_end, size_delta);
  elms->set_length(len - to_trim);
  heap->AdjustLiveBytes(elms->address(), -size_delta, mode);
  // The array may not be moved during GC,
  // and size has to be adjusted nevertheless.
  HeapProfiler* profiler = heap->isolate()->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
// Decides whether migrating an instance from this map to |target| requires
// physically rewriting the object, or whether a simple map swap suffices.
bool Map::InstancesNeedRewriting(Map* target,
                                 int target_number_of_fields,
                                 int target_inobject,
                                 int target_unused) {
  // If fields were added (or removed), rewrite the instance.
  int number_of_fields = NumberOfFields();
  ASSERT(target_number_of_fields >= number_of_fields);
  if (target_number_of_fields != number_of_fields) return true;
  // If smi descriptors were replaced by double descriptors, rewrite.
  // (Doubles are stored boxed in HeapNumbers, so the slot contents change.)
  DescriptorArray* old_desc = instance_descriptors();
  DescriptorArray* new_desc = target->instance_descriptors();
  int limit = NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if (new_desc->GetDetails(i).representation().IsDouble() &&
        !old_desc->GetDetails(i).representation().IsDouble()) {
  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == inobject_properties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  ASSERT(target_inobject < inobject_properties());
  if (target_number_of_fields <= target_inobject) {
    ASSERT(target_number_of_fields + target_unused == target_inobject);
  // Otherwise, properties will need to be moved to the backing store.
2428 // To migrate an instance to a map:
2429 // - First check whether the instance needs to be rewritten. If not, simply
2431 // - Otherwise, allocate a fixed array large enough to hold all fields, in
2432 // addition to unused space.
2433 // - Copy all existing properties in, in the following order: backing store
2434 // properties, unused fields, inobject properties.
2435 // - If all allocation succeeded, commit the state atomically:
2436 // * Copy inobject properties from the backing store back into the object.
2437 // * Trim the difference in instance size of the object. This also cleanly
2438 // frees inobject properties that moved to the backing store.
// * If there are properties left in the backing store, trim off the space used
2440 // to temporarily store the inobject properties.
2441 // * If there are properties left in the backing store, install the backing
// Migrates |object| to |new_map|, rewriting the instance when the layouts
// differ (see the algorithm description in the comment above). All
// allocation happens before the object is mutated, so the commit phase is
// atomic with respect to GC.
void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
  Isolate* isolate = object->GetIsolate();
  Handle<Map> old_map(object->map());
  int number_of_fields = new_map->NumberOfFields();
  int inobject = new_map->inobject_properties();
  int unused = new_map->unused_property_fields();
  // Nothing to do if no functions were converted to fields and no smis were
  // converted to doubles.
  if (!old_map->InstancesNeedRewriting(
          *new_map, number_of_fields, inobject, unused)) {
    // Fast path: same layout, just swap the map pointer.
    object->set_map(*new_map);
  // Scratch array big enough for every field (inobject + external + unused);
  // inobject fields are staged at the end and copied back later.
  int total_size = number_of_fields + unused;
  int external = total_size - inobject;
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
  Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
  int old_nof = old_map->NumberOfOwnDescriptors();
  int new_nof = new_map->NumberOfOwnDescriptors();
  // This method only supports generalizing instances to at least the same
  // number of properties.
  ASSERT(old_nof <= new_nof);
  // Copy existing property values into the scratch array, boxing values
  // that become doubles in the new map.
  for (int i = 0; i < old_nof; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == CALLBACKS) {
      ASSERT(details.representation().IsTagged());
    ASSERT(old_details.type() == CONSTANT ||
           old_details.type() == FIELD);
    // CONSTANT values live in the descriptor array; FIELD values in the object.
    Object* raw_value = old_details.type() == CONSTANT
        ? old_descriptors->GetValue(i)
        : object->RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
    Handle<Object> value(raw_value, isolate);
    if (!old_details.representation().IsDouble() &&
        details.representation().IsDouble()) {
      if (old_details.representation().IsNone()) {
        // Uninitialized (None) slots are treated as smi zero before boxing.
        value = handle(Smi::FromInt(0), isolate);
      value = NewStorageFor(isolate, value, details.representation());
    ASSERT(!(details.representation().IsDouble() && value->IsSmi()));
    // Negative index means an inobject field; rotate it to the array's tail.
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  // Pre-allocate HeapNumber boxes for new double fields added by |new_map|.
  for (int i = old_nof; i < new_nof; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    if (details.representation().IsDouble()) {
      int target_index = new_descriptors->GetFieldIndex(i) - inobject;
      if (target_index < 0) target_index += total_size;
      Handle<Object> box = isolate->factory()->NewHeapNumber(0);
      array->set(target_index, *box);
  // From here on we cannot fail and we shouldn't GC anymore.
  DisallowHeapAllocation no_allocation;
  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
  // avoid overwriting |one_pointer_filler_map|.
  int limit = Min(inobject, number_of_fields);
  for (int i = 0; i < limit; i++) {
    object->FastPropertyAtPut(i, array->get(external + i));
  // Create filler object past the new instance size.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = old_map->instance_size() - new_instance_size;
  ASSERT(instance_size_delta >= 0);
  Address address = object->address() + new_instance_size;
  isolate->heap()->CreateFillerObjectAt(address, instance_size_delta);
  // If there are properties in the new backing store, trim it to the correct
  // size and install the backing store into the object.
  RightTrimFixedArray<Heap::FROM_MUTATOR>(isolate->heap(), *array, inobject);
  object->set_properties(*array);
  object->set_map(*new_map);
// Handlified wrapper: records a transition from |map| to |target| under |key|
// in |map|'s transition array, retrying on allocation failure.
Handle<TransitionArray> Map::AddTransition(Handle<Map> map,
                                           SimpleTransitionFlag flag) {
  CALL_HEAP_FUNCTION(map->GetIsolate(),
                     map->AddTransition(*key, *target, flag),
// Generalizes the representation of the field at |modify_index| on |object|'s
// map and migrates the object if that produced a different map.
void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
                                             Representation new_representation,
                                             StoreMode store_mode) {
  Handle<Map> new_map = Map::GeneralizeRepresentation(
      handle(object->map()), modify_index, new_representation, store_mode);
  // Generalization may be a no-op (already general enough).
  if (object->map() == *new_map) return;
  return MigrateToMap(object, new_map);
// Counts the own descriptors of type FIELD (properties stored in the object
// or its backing store, as opposed to constants/callbacks).
int Map::NumberOfFields() {
  DescriptorArray* descriptors = instance_descriptors();
  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
    if (descriptors->GetDetails(i).type() == FIELD) result++;
// Bail-out path of representation generalization: copies |map| and widens
// every descriptor to Tagged. |reason| is only used for tracing output.
Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
                                                  StoreMode store_mode,
                                                  PropertyAttributes attributes,
                                                  const char* reason) {
  Handle<Map> new_map = Copy(map);
  DescriptorArray* descriptors = new_map->instance_descriptors();
  descriptors->InitializeRepresentations(Representation::Tagged());
  // Unless the instance is being migrated, ensure that modify_index is a field.
  PropertyDetails details = descriptors->GetDetails(modify_index);
  if (store_mode == FORCE_FIELD && details.type() != FIELD) {
    // Replace the non-FIELD descriptor with a Tagged FIELD descriptor,
    // keeping its position in the sorted key order.
    FieldDescriptor d(descriptors->GetKey(modify_index),
                      new_map->NumberOfFields(),
                      Representation::Tagged());
    d.SetSortedKeyIndex(details.pointer());
    descriptors->Set(modify_index, &d);
    // The new field consumes one unused slot; top up in kFieldsAdded chunks.
    int unused_property_fields = new_map->unused_property_fields() - 1;
    if (unused_property_fields < 0) {
      unused_property_fields += JSObject::kFieldsAdded;
    new_map->set_unused_property_fields(unused_property_fields);
  if (FLAG_trace_generalization) {
    map->PrintGeneralization(stdout, reason, modify_index,
                             new_map->NumberOfOwnDescriptors(),
                             new_map->NumberOfOwnDescriptors(),
                             details.type() == CONSTANT && store_mode == FORCE_FIELD,
                             Representation::Tagged(), Representation::Tagged());
// Recursively marks this map and every map reachable through its transitions
// as deprecated, and deoptimizes code that depended on the transition group.
void Map::DeprecateTransitionTree() {
  // Already-deprecated subtrees have been processed before; stop here.
  if (is_deprecated()) return;
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    for (int i = 0; i < transitions->number_of_transitions(); i++) {
      transitions->GetTarget(i)->DeprecateTransitionTree();
  dependent_code()->DeoptimizeDependentCodeGroup(
      GetIsolate(), DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange();
// Invalidates a transition target at |key|, and installs |new_descriptors|
// over the current instance_descriptors to ensure proper sharing of
// descriptor arrays along the back-pointer chain.
void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    int transition = transitions->Search(key);
    if (transition != TransitionArray::kNotFound) {
      // Deprecate the entire subtree hanging off the outdated transition.
      transitions->GetTarget(transition)->DeprecateTransitionTree();
  // Don't overwrite the empty descriptor array.
  if (NumberOfOwnDescriptors() == 0) return;
  DescriptorArray* to_replace = instance_descriptors();
  Map* current = this;
  // Tell the incremental marker about the soon-unreferenced array.
  GetHeap()->incremental_marking()->RecordWrites(to_replace);
  // Walk the back-pointer chain, swapping in |new_descriptors| on every map
  // that shared the old descriptor array.
  while (current->instance_descriptors() == to_replace) {
    current->SetEnumLength(kInvalidEnumCacheSentinel);
    current->set_instance_descriptors(new_descriptors);
    Object* next = current->GetBackPointer();
    if (next->IsUndefined()) break;
    current = Map::cast(next);
  set_owns_descriptors(false);
// Walks the back-pointer chain to the root of this map's transition tree
// (the map with no back pointer).
Map* Map::FindRootMap() {
    Object* back = result->GetBackPointer();
    // An undefined back pointer marks the root of the transition tree.
    if (back->IsUndefined()) return result;
    result = Map::cast(back);
// Walks the transition tree from this root, following the keys of
// |descriptors| starting at |verbatim|, and returns the deepest matching map.
// Returns NULL if the updated map is incompatible (differing attributes, or
// mismatched CALLBACKS descriptors).
Map* Map::FindUpdatedMap(int verbatim,
                         DescriptorArray* descriptors) {
  // This can only be called on roots of transition trees.
  ASSERT(GetBackPointer()->IsUndefined());
  Map* current = this;
  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;
    current = transitions->GetTarget(transition);
    // Compatibility checks between the requested and the found descriptor.
    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails target_details =
        current->instance_descriptors()->GetDetails(i);
    if (details.attributes() != target_details.attributes()) return NULL;
    if (details.type() == CALLBACKS) {
      // Accessors must match exactly (same accessor object).
      if (target_details.type() != CALLBACKS) return NULL;
      if (descriptors->GetValue(i) !=
          current->instance_descriptors()->GetValue(i)) {
    } else if (target_details.type() == CALLBACKS) {
// Walks the transition tree from this root along the keys of |descriptors|
// and returns the last map whose descriptors still exactly match (value,
// type, attributes, and representation) — i.e. the split point.
Map* Map::FindLastMatchMap(int verbatim,
                           DescriptorArray* descriptors) {
  // This can only be called on roots of transition trees.
  ASSERT(GetBackPointer()->IsUndefined());
  Map* current = this;
  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;
    Map* next = transitions->GetTarget(transition);
    DescriptorArray* next_descriptors = next->instance_descriptors();
    // Stop at the first descriptor that differs in any respect.
    if (next_descriptors->GetValue(i) != descriptors->GetValue(i)) break;
    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails next_details = next_descriptors->GetDetails(i);
    if (details.type() != next_details.type()) break;
    if (details.attributes() != next_details.attributes()) break;
    if (!details.representation().Equals(next_details.representation())) break;
2727 // Generalize the representation of the descriptor at |modify_index|.
2728 // This method rewrites the transition tree to reflect the new change. To avoid
// high degrees of polymorphism, and to stabilize quickly, on every rewrite
2730 // the new type is deduced by merging the current type with any potential new
2731 // (partial) version of the type in the transition tree.
2732 // To do this, on each rewrite:
2733 // - Search the root of the transition tree using FindRootMap.
2734 // - Find |updated|, the newest matching version of this map using
2735 // FindUpdatedMap. This uses the keys in the own map's descriptor array to
2736 // walk the transition tree.
2737 // - Merge/generalize the descriptor array of the current map and |updated|.
2738 // - Generalize the |modify_index| descriptor using |new_representation|.
2739 // - Walk the tree again starting from the root towards |updated|. Stop at
// |split_map|, the first map whose descriptor array does not match the merged
2741 // descriptor array.
2742 // - If |updated| == |split_map|, |updated| is in the expected state. Return it.
2743 // - Otherwise, invalidate the outdated transition target from |updated|, and
2744 // replace its transition tree with a new branch for the updated descriptors.
// Generalizes the representation of the descriptor at |modify_index| (see the
// algorithm description in the comment block above). Returns either an
// existing more-general map, or a newly built branch of the transition tree.
Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
                                          Representation new_representation,
                                          StoreMode store_mode) {
  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
  PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
  Representation old_representation = old_details.representation();
  // It's fine to transition from None to anything but double without any
  // modification to the object, because the default uninitialized value for
  // representation None can be overwritten by both smi and tagged values.
  // Doubles, however, would require a box allocation.
  if (old_representation.IsNone() &&
      !new_representation.IsNone() &&
      !new_representation.IsDouble()) {
    // In-place update: no new map needed.
    old_descriptors->SetRepresentation(modify_index, new_representation);
  int descriptors = old_map->NumberOfOwnDescriptors();
  Handle<Map> root_map(old_map->FindRootMap());
  // Check the state of the root map.
  if (!old_map->EquivalentToForTransition(*root_map)) {
    return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
        old_details.attributes(), "not equivalent");
  int verbatim = root_map->NumberOfOwnDescriptors();
  // Modifying a descriptor owned by the root would affect all its children.
  if (store_mode != ALLOW_AS_CONSTANT && modify_index < verbatim) {
    return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
        old_details.attributes(), "root modification");
  Map* raw_updated = root_map->FindUpdatedMap(
      verbatim, descriptors, *old_descriptors);
  if (raw_updated == NULL) {
    return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
        old_details.attributes(), "incompatible");
  Handle<Map> updated(raw_updated);
  Handle<DescriptorArray> updated_descriptors(updated->instance_descriptors());
  int valid = updated->NumberOfOwnDescriptors();
  // Directly change the map if the target map is more general. Ensure that the
  // target type of the modify_index is a FIELD, unless we are migrating.
  if (updated_descriptors->IsMoreGeneralThan(
          verbatim, valid, descriptors, *old_descriptors) &&
      (store_mode == ALLOW_AS_CONSTANT ||
       updated_descriptors->GetDetails(modify_index).type() == FIELD)) {
    Representation updated_representation =
        updated_descriptors->GetDetails(modify_index).representation();
    if (new_representation.fits_into(updated_representation)) return updated;
  // Merge the descriptors of the current map with |updated|'s.
  Handle<DescriptorArray> new_descriptors = DescriptorArray::Merge(
      updated_descriptors, verbatim, valid, descriptors, modify_index,
      store_mode, old_descriptors);
  ASSERT(store_mode == ALLOW_AS_CONSTANT ||
         new_descriptors->GetDetails(modify_index).type() == FIELD);
  // Widen the modified descriptor to the join of old and new representations.
  old_representation =
      new_descriptors->GetDetails(modify_index).representation();
  Representation updated_representation =
      new_representation.generalize(old_representation);
  if (!updated_representation.Equals(old_representation)) {
    new_descriptors->SetRepresentation(modify_index, updated_representation);
  // Find where the merged descriptors diverge from the existing tree.
  Handle<Map> split_map(root_map->FindLastMatchMap(
      verbatim, descriptors, *new_descriptors));
  int split_descriptors = split_map->NumberOfOwnDescriptors();
  // This is shadowed by |updated_descriptors| being more general than
  // |old_descriptors|.
  ASSERT(descriptors != split_descriptors);
  int descriptor = split_descriptors;
  // Invalidate the outdated branch before grafting on the new one.
  split_map->DeprecateTarget(
      old_descriptors->GetKey(descriptor), *new_descriptors);
  if (FLAG_trace_generalization) {
    old_map->PrintGeneralization(
        stdout, "", modify_index, descriptor, descriptors,
        old_descriptors->GetDetails(modify_index).type() == CONSTANT &&
            store_mode == FORCE_FIELD,
        old_representation, updated_representation);
  // Add missing transitions.
  Handle<Map> new_map = split_map;
  for (; descriptor < descriptors; descriptor++) {
    new_map = Map::CopyInstallDescriptors(new_map, descriptor, new_descriptors);
  new_map->set_owns_descriptors(true);
// Generalize the representation of all FIELD descriptors.
// Repeatedly applies GeneralizeRepresentation; each call may return a new
// map, which is then used for the subsequent indices.
Handle<Map> Map::GeneralizeAllFieldRepresentations(
    Representation new_representation) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    if (details.type() == FIELD) {
      map = GeneralizeRepresentation(map, i, new_representation, FORCE_FIELD);
// Migrates deprecated maps along |map|'s prototype chain first, then resolves
// |map| itself to its current (non-deprecated) equivalent.
Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
  Handle<Map> proto_map(map);
  while (proto_map->prototype()->IsJSObject()) {
    Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
    if (holder->map()->is_deprecated()) {
      // Best-effort migration of the deprecated prototype instance.
      JSObject::TryMigrateInstance(holder);
    proto_map = Handle<Map>(holder->map());
  return CurrentMapForDeprecatedInternal(map);
// Returns the up-to-date map that a deprecated |map| should migrate to, or an
// empty handle if no compatible, more general map exists in the tree.
Handle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> map) {
  if (!map->is_deprecated()) return map;
  // Raw pointers below are only safe because nothing here allocates.
  DisallowHeapAllocation no_allocation;
  DescriptorArray* old_descriptors = map->instance_descriptors();
  int descriptors = map->NumberOfOwnDescriptors();
  Map* root_map = map->FindRootMap();
  // Check the state of the root map.
  if (!map->EquivalentToForTransition(root_map)) return Handle<Map>();
  int verbatim = root_map->NumberOfOwnDescriptors();
  Map* updated = root_map->FindUpdatedMap(
      verbatim, descriptors, old_descriptors);
  if (updated == NULL) return Handle<Map>();
  // The found map must be at least as general as the deprecated one.
  DescriptorArray* updated_descriptors = updated->instance_descriptors();
  int valid = updated->NumberOfOwnDescriptors();
  if (!updated_descriptors->IsMoreGeneralThan(
          verbatim, valid, descriptors, old_descriptors)) {
    return Handle<Map>();
  return handle(updated);
// Routes a named store through the object's named interceptor; if the
// interceptor declines (empty result), falls back to the post-interceptor
// path. Returns an empty handle when an exception is scheduled.
Handle<Object> JSObject::SetPropertyWithInterceptor(
    Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return value;
  Isolate* isolate = object->GetIsolate();
  Handle<String> name_string = Handle<String>::cast(name);
  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    ApiNamedPropertyAccess("interceptor-named-set", *object, *name));
    PropertyCallbackArguments args(
        isolate, interceptor->data(), *object, *object);
    v8::NamedPropertySetterCallback setter =
        v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
    // The API must never see the hole; substitute undefined.
    Handle<Object> value_unhole = value->IsTheHole()
        ? Handle<Object>(isolate->factory()->undefined_value()) : value;
    v8::Handle<v8::Value> result = args.Call(setter,
                                             v8::Utils::ToLocal(name_string),
                                             v8::Utils::ToLocal(value_unhole));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // A non-empty result means the interceptor handled the store.
    if (!result.IsEmpty()) return value;
  Handle<Object> result =
      SetPropertyPostInterceptor(object, name, value, attributes, strict_mode);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Generic entry point for named stores on any JSReceiver: performs the local
// lookup (including transitions) and delegates to the result-based setter.
Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
                                       Handle<Object> value,
                                       PropertyAttributes attributes,
                                       StrictMode strict_mode,
                                       StoreFromKeyed store_mode) {
  LookupResult result(object->GetIsolate());
  object->LocalLookup(*name, &result, true);
  if (!result.IsFound()) {
    // No own property; a map transition may still describe the store target.
    object->map()->LookupTransition(JSObject::cast(*object), *name, &result);
  return SetProperty(object, &result, name, value, attributes, strict_mode,
// Invokes the setter side of a callback-style property (|structure|), which
// may be a Foreign-wrapped native accessor, an ExecutableAccessorInfo (API
// callback), an AccessorPair (JS getter/setter), or a DeclaredAccessorInfo.
// Returns |value| on success, an empty handle on thrown/scheduled exceptions.
Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object,
                                                 Handle<Object> structure,
                                                 Handle<Object> value,
                                                 Handle<JSObject> holder,
                                                 StrictMode strict_mode) {
  Isolate* isolate = object->GetIsolate();
  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());
  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks. Eventually foreign
  // callbacks should be phased out.
  if (structure->IsForeign()) {
    AccessorDescriptor* callback =
        reinterpret_cast<AccessorDescriptor*>(
            Handle<Foreign>::cast(structure)->foreign_address());
    CALL_AND_RETRY_OR_DIE(isolate,
                              isolate, *object, *value, callback->data),
                          return Handle<Object>());
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure);
    if (!data->IsCompatibleReceiver(*object)) {
      // Receiver check failed: throw and signal via an empty handle.
      Handle<Object> args[2] = { name, object };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
      isolate->Throw(*error);
      return Handle<Object>();
    // TODO(rossberg): Support symbols in the API.
    if (name->IsSymbol()) return value;
    Object* call_obj = data->setter();
    v8::AccessorSetterCallback call_fun =
        v8::ToCData<v8::AccessorSetterCallback>(call_obj);
    // A missing setter makes the store a silent no-op (value is "accepted").
    if (call_fun == NULL) return value;
    Handle<String> key = Handle<String>::cast(name);
    LOG(isolate, ApiNamedPropertyAccess("store", *object, *name));
    PropertyCallbackArguments args(
        isolate, data->data(), *object, JSObject::cast(*holder));
        v8::Utils::ToLocal(key),
        v8::Utils::ToLocal(value));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          object, Handle<JSReceiver>::cast(setter), value);
    // No setter: sloppy mode ignores the store, strict mode throws.
    if (strict_mode == SLOPPY) return value;
    Handle<Object> args[2] = { name, holder };
    Handle<Object> error =
        isolate->factory()->NewTypeError("no_setter_in_callback",
                                         HandleVector(args, 2));
    isolate->Throw(*error);
    return Handle<Object>();
  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) {
  return Handle<Object>();
// Calls a JavaScript-defined setter function with |object| as receiver and
// |value| as the single argument. Returns an empty handle on exception.
Handle<Object> JSReceiver::SetPropertyWithDefinedSetter(
    Handle<JSReceiver> object,
    Handle<JSReceiver> setter,
    Handle<Object> value) {
  Isolate* isolate = object->GetIsolate();
#ifdef ENABLE_DEBUGGER_SUPPORT
  Debug* debug = isolate->debug();
  // Handle stepping into a setter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && setter->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
  bool has_pending_exception;
  Handle<Object> argv[] = { value };
      isolate, setter, object, ARRAY_SIZE(argv), argv, &has_pending_exception);
  // Check for pending exception and return the result.
  if (has_pending_exception) return Handle<Object>();
// Walks |object|'s prototype chain looking for an indexed CALLBACKS property
// (or a JSProxy) that should intercept the element store at |index|. Returns
// the_hole to signal "no interceptor found; proceed with a normal store".
Handle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
    Handle<JSObject> object,
    Handle<Object> value,
    StrictMode strict_mode) {
  Isolate *isolate = object->GetIsolate();
  for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
       proto = handle(proto->GetPrototype(isolate), isolate)) {
    if (proto->IsJSProxy()) {
      // Proxies handle the store through their JS-level handler.
      return JSProxy::SetPropertyViaPrototypesWithHandler(
          Handle<JSProxy>::cast(proto),
          isolate->factory()->Uint32ToString(index),  // name
    Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
    // Only dictionary-mode elements can carry CALLBACKS entries.
    if (!js_proto->HasDictionaryElements()) {
    Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
    int entry = dictionary->FindEntry(index);
    if (entry != SeededNumberDictionary::kNotFound) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.type() == CALLBACKS) {
        Handle<Object> structure(dictionary->ValueAt(entry), isolate);
        return SetElementWithCallback(object, structure, index, value, js_proto,
  return isolate->factory()->the_hole_value();
// Consults the prototype chain for a named property store: an accessor or
// proxy handler there may intercept the store, or a read-only property may
// block it. Sets |*done| when the store has been fully handled (or rejected);
// otherwise returns the hole so the caller performs a local store.
3104 Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object,
3106                                                   Handle<Object> value,
3107                                                   PropertyAttributes attributes,
3108                                                   StrictMode strict_mode,
3110   Isolate* isolate = object->GetIsolate();
3113   // We could not find a local property so let's check whether there is an
3114   // accessor that wants to handle the property, or whether the property is
3115   // read-only on the prototype chain.
3116   LookupResult result(isolate);
3117   object->LookupRealNamedPropertyInPrototypes(*name, &result);
3118   if (result.IsFound()) {
3119     switch (result.type()) {
3123         *done = result.IsReadOnly();
3126         PropertyAttributes attr = GetPropertyAttributeWithInterceptor(
3127             handle(result.holder()), object, name, true);
3128         *done = !!(attr & READ_ONLY);
3133         Handle<Object> callback_object(result.GetCallbackObject(), isolate);
3134         return SetPropertyWithCallback(object, callback_object, name, value,
3135                                        handle(result.holder()), strict_mode);
3138         Handle<JSProxy> proxy(result.proxy());
3139         return JSProxy::SetPropertyViaPrototypesWithHandler(
3140             proxy, object, name, value, attributes, strict_mode, done);
3149   // If we get here with *done true, we have encountered a read-only property.
      // Sloppy mode silently ignores the write; strict mode throws a TypeError.
3151     if (strict_mode == SLOPPY) return value;
3152     Handle<Object> args[] = { name, object };
3153     Handle<Object> error = isolate->factory()->NewTypeError(
3154         "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3155     isolate->Throw(*error);
3156     return Handle<Object>();
3158   return isolate->factory()->the_hole_value();
// Ensures |map|'s descriptor array has at least |slack| unused slots, by
// allocating a larger DescriptorArray and copying the existing descriptors
// into it. No-op when the current array already has enough slack.
3162 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3163   Handle<DescriptorArray> descriptors(map->instance_descriptors());
3164   if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3165   int number_of_descriptors = descriptors->number_of_descriptors();
3166   Isolate* isolate = map->GetIsolate();
3167   Handle<DescriptorArray> new_descriptors =
3168       isolate->factory()->NewDescriptorArray(number_of_descriptors, slack);
      // The witness certifies the new array is still white (unmarked) so the
      // copies below may skip write barriers.
3169   DescriptorArray::WhitenessWitness witness(*new_descriptors);
3171   for (int i = 0; i < number_of_descriptors; ++i) {
3172     new_descriptors->CopyFrom(i, *descriptors, i, witness);
3175   map->set_instance_descriptors(*new_descriptors);
// Appends AccessorInfo callbacks from |callbacks| into |array| (policy class T
// supplies Contains/Insert — see DescriptorArrayAppender / FixedArrayAppender),
// skipping names already present. Returns the updated count of valid entries.
// NOTE(review): the leading "template <class T>" line is elided in this
// excerpt (internal numbering skips 3179).
3180 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3181                                  Handle<typename T::Array> array,
3182                                  int valid_descriptors) {
3183   int nof_callbacks = callbacks->length();
3185   Isolate* isolate = array->GetIsolate();
3186   // Ensure the keys are unique names before writing them into the
3187   // instance descriptor. Since it may cause a GC, it has to be done before we
3188   // temporarily put the heap in an invalid state while appending descriptors.
3189   for (int i = 0; i < nof_callbacks; ++i) {
3190     Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3191     if (entry->name()->IsUniqueName()) continue;
3192     Handle<String> key =
3193         isolate->factory()->InternalizeString(
3194             Handle<String>(String::cast(entry->name())));
3195     entry->set_name(*key);
3198   // Fill in new callback descriptors.  Process the callbacks from
3199   // back to front so that the last callback with a given name takes
3200   // precedence over previously added callbacks with that name.
3201   for (int i = nof_callbacks - 1; i >= 0; i--) {
3202     AccessorInfo* entry = AccessorInfo::cast(callbacks->get(i));
3203     Name* key = Name::cast(entry->name());
3204     // Check if a descriptor with this name already exists before writing.
3205     if (!T::Contains(key, entry, valid_descriptors, array)) {
3206       T::Insert(key, entry, valid_descriptors, array);
3207       valid_descriptors++;
3211   return valid_descriptors;
// Policy for AppendUniqueCallbacks: targets a DescriptorArray. Duplicate
// detection uses the array's own Search(); insertion appends a CALLBACKS
// descriptor carrying the accessor's property attributes.
3214 struct DescriptorArrayAppender {
3215   typedef DescriptorArray Array;
3216   static bool Contains(Name* key,
3217                        AccessorInfo* entry,
3218                        int valid_descriptors,
3219                        Handle<DescriptorArray> array) {
3220     return array->Search(key, valid_descriptors) != DescriptorArray::kNotFound;
3222   static void Insert(Name* key,
3223                      AccessorInfo* entry,
3224                      int valid_descriptors,
3225                      Handle<DescriptorArray> array) {
3226     CallbacksDescriptor desc(key, entry, entry->property_attributes());
3227     array->Append(&desc);
// Policy for AppendUniqueCallbacks: targets a plain FixedArray of AccessorInfo
// objects. Duplicate detection is a linear scan over the valid prefix;
// insertion stores the entry at the next free index.
3232 struct FixedArrayAppender {
3233   typedef FixedArray Array;
3234   static bool Contains(Name* key,
3235                        AccessorInfo* entry,
3236                        int valid_descriptors,
3237                        Handle<FixedArray> array) {
3238     for (int i = 0; i < valid_descriptors; i++) {
3239       if (key == AccessorInfo::cast(array->get(i))->name()) return true;
3243   static void Insert(Name* key,
3244                      AccessorInfo* entry,
3245                      int valid_descriptors,
3246                      Handle<FixedArray> array) {
3247     array->set(valid_descriptors, entry);
// Appends the accessor callbacks in |descriptors| (a NeanderArray) to |map|'s
// instance descriptors, deduplicating by name, then updates the map's own
// descriptor count. Requires pre-reserved slack (asserted).
3252 void Map::AppendCallbackDescriptors(Handle<Map> map,
3253                                     Handle<Object> descriptors) {
3254   int nof = map->NumberOfOwnDescriptors();
3255   Handle<DescriptorArray> array(map->instance_descriptors());
3256   NeanderArray callbacks(descriptors);
3257   ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length());
3258   nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3259   map->SetNumberOfOwnDescriptors(nof);
// Appends the callbacks in |descriptors| to a FixedArray, skipping names
// already present among the first |valid_descriptors| entries. Returns the
// new count of valid entries. The array must be pre-sized (asserted).
3263 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3264                                Handle<FixedArray> array,
3265                                int valid_descriptors) {
3266   NeanderArray callbacks(descriptors);
3267   ASSERT(array->length() >= callbacks.length() + valid_descriptors);
3268   return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
// Returns true when |maps| contains a (non-null) handle identical to |map|.
// Identity is handle-location identity via is_identical_to, not map equality.
3274 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3275   ASSERT(!map.is_null());
3276   for (int i = 0; i < maps->length(); ++i) {
3277     if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
// Wraps a possibly-NULL raw pointer in a Handle, mapping NULL to the null
// handle. (The "template <class T>" header line is elided in this excerpt.)
3284 static Handle<T> MaybeNull(T* p) {
3285   if (p == NULL) return Handle<T>::null();
3286   return Handle<T>(p);
// Walks this map's elements-kind transition chain toward more general fast
// kinds and returns the most general transitioned map that appears in
// |candidates| (null handle if none). A packed start may generalize to holey
// candidates; once a holey kind is chosen, packed-ness is given up for good.
3290 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3291   ElementsKind kind = elements_kind();
3292   Handle<Map> transitioned_map = Handle<Map>::null();
3293   Handle<Map> current_map(this);
3294   bool packed = IsFastPackedElementsKind(kind);
3295   if (IsTransitionableFastElementsKind(kind)) {
3296     while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3297       kind = GetNextMoreGeneralFastElementsKind(kind, false);
3298       Handle<Map> maybe_transitioned_map =
3299           MaybeNull(current_map->LookupElementsTransitionMap(kind));
        // Stop at the first missing link; transitions form a single chain.
3300       if (maybe_transitioned_map.is_null()) break;
3301       if (ContainsMap(candidates, maybe_transitioned_map) &&
3302           (packed || !IsFastPackedElementsKind(kind))) {
3303         transitioned_map = maybe_transitioned_map;
3304         if (!IsFastPackedElementsKind(kind)) packed = false;
3306       current_map = maybe_transitioned_map;
3309   return transitioned_map;
// Follows |map|'s elements-kind transition chain as far as possible toward
// |to_kind| and returns the last map reached (which may still have a
// different kind if the chain ends early). A trailing non-fast transition
// (e.g. to DICTIONARY_ELEMENTS) is also followed when it matches |to_kind|.
3313 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3314   Map* current_map = map;
3316       IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3318           : TERMINAL_FAST_ELEMENTS_KIND;
3320   // Support for legacy API.
3321   if (IsExternalArrayElementsKind(to_kind) &&
3322       !IsFixedTypedArrayElementsKind(map->elements_kind())) {
3326   ElementsKind kind = map->elements_kind();
3327   while (kind != target_kind) {
3328     kind = GetNextTransitionElementsKind(kind);
3329     if (!current_map->HasElementsTransition()) return current_map;
3330     current_map = current_map->elements_transition_map();
3333   if (to_kind != kind && current_map->HasElementsTransition()) {
3334     ASSERT(to_kind == DICTIONARY_ELEMENTS);
3335     Map* next_map = current_map->elements_transition_map();
3336     if (next_map->elements_kind() == to_kind) return next_map;
3339   ASSERT(current_map->elements_kind() == target_kind);
// Returns the existing transition map with exactly |to_kind| elements, or
// falls through (elided line) when the closest reachable map does not match.
3344 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3345   Map* to_map = FindClosestElementsTransition(this, to_kind);
3346   if (to_map->elements_kind() == to_kind) return to_map;
// Tests whether this map is the map of the isolate's initial Array.prototype
// or initial Object.prototype (the two maps on the default array prototype
// chain). The branch bodies are elided in this excerpt.
3351 bool Map::IsMapInArrayPrototypeChain() {
3352   Isolate* isolate = GetIsolate();
3353   if (isolate->initial_array_prototype()->map() == this) {
3357   if (isolate->initial_object_prototype()->map() == this) {
// Creates (and inserts as transitions) the chain of maps needed to get from
// |map|'s elements kind to |to_kind|, one CopyAsElementsKind step per missing
// link, finishing with a direct step when |to_kind| lies outside the fast
// transition sequence. Propagates any allocation failure immediately.
// NOTE(review): "¤t_map" on the To(...) lines below is mojibake for
// "&current_map" (the "&curren" prefix was decoded as the HTML entity for
// the currency sign) — confirm against upstream V8 objects.cc before use.
3365 static MaybeObject* AddMissingElementsTransitions(Map* map,
3366                                                   ElementsKind to_kind) {
3367   ASSERT(IsTransitionElementsKind(map->elements_kind()));
3369   Map* current_map = map;
3371   ElementsKind kind = map->elements_kind();
3372   while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3373     kind = GetNextTransitionElementsKind(kind);
3374     MaybeObject* maybe_next_map =
3375         current_map->CopyAsElementsKind(kind, INSERT_TRANSITION);
3376     if (!maybe_next_map->To(¤t_map)) return maybe_next_map;
3379   // In case we are exiting the fast elements kind system, just add the map in
3381   if (kind != to_kind) {
3382     MaybeObject* maybe_next_map =
3383         current_map->CopyAsElementsKind(to_kind, INSERT_TRANSITION);
3384     if (!maybe_next_map->To(¤t_map)) return maybe_next_map;
3387   ASSERT(current_map->elements_kind() == to_kind);
// Handlified wrapper: obtains the map for |object| with elements kind
// |to_kind|, retrying through CALL_HEAP_FUNCTION on allocation failure.
3392 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3393                                                ElementsKind to_kind) {
3394   Isolate* isolate = object->GetIsolate();
3395   CALL_HEAP_FUNCTION(isolate,
3396                      object->GetElementsTransitionMap(isolate, to_kind),
// Returns a map for this object with elements kind |to_kind|. When a
// remembered transition is not allowed (shared map, non-transition source
// kind, or a non-generalizing fast-kind change), a fresh map copy without a
// transition link is returned instead of extending the transition tree.
3401 MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
3402   Map* start_map = map();
3403   ElementsKind from_kind = start_map->elements_kind();
3405   if (from_kind == to_kind) {
3409   bool allow_store_transition =
3410       // Only remember the map transition if there is not an already existing
3411       // non-matching element transition.
3412       !start_map->IsUndefined() && !start_map->is_shared() &&
3413       IsTransitionElementsKind(from_kind);
3415   // Only store fast element maps in ascending generality.
3416   if (IsFastElementsKind(to_kind)) {
3417     allow_store_transition &=
3418         IsTransitionableFastElementsKind(from_kind) &&
3419         IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3422   if (!allow_store_transition) {
3423     return start_map->CopyAsElementsKind(to_kind, OMIT_TRANSITION);
3426   return start_map->AsElementsKind(to_kind);
3430 // TODO(ishell): Temporary wrapper until handlified.
// Retries the raw AsElementsKind through CALL_HEAP_FUNCTION so allocation
// failures trigger a GC-and-retry instead of propagating a MaybeObject.
3432 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3433   CALL_HEAP_FUNCTION(map->GetIsolate(),
3434                      map->AsElementsKind(kind),
// Returns a map with elements kind |kind|: the existing transition target if
// one already matches, otherwise builds the missing transition chain.
3439 MaybeObject* Map::AsElementsKind(ElementsKind kind) {
3440   Map* closest_map = FindClosestElementsTransition(this, kind);
3442   if (closest_map->elements_kind() == kind) {
3446   return AddMissingElementsTransitions(closest_map, kind);
// Looks up |name| as a real (non-interceptor) own property of this object,
// storing the outcome in |result|. Global proxies forward to their hidden
// global object. Uninitialized-constant holes disable result caching so the
// IC does not memoize a value that is about to change.
3450 void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) {
3451   DisallowHeapAllocation no_gc;
3452   if (IsJSGlobalProxy()) {
3453     Object* proto = GetPrototype();
3454     if (proto->IsNull()) return result->NotFound();
3455     ASSERT(proto->IsJSGlobalObject());
3456     return JSObject::cast(proto)->LocalLookupRealNamedProperty(name, result);
3459   if (HasFastProperties()) {
3460     map()->LookupDescriptor(this, name, result);
3461     // A property or a map transition was found. We return all of these result
3462     // types because LocalLookupRealNamedProperty is used when setting
3463     // properties where map transitions are handled.
3464     ASSERT(!result->IsFound() ||
3465            (result->holder() == this && result->IsFastPropertyType()));
3466     // Disallow caching for uninitialized constants. These can only
3468     if (result->IsField() &&
3469         result->IsReadOnly() &&
3470         RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
3471       result->DisallowCaching();
      // Slow (dictionary) properties path.
3476   int entry = property_dictionary()->FindEntry(name);
3477   if (entry != NameDictionary::kNotFound) {
3478     Object* value = property_dictionary()->ValueAt(entry);
3479     if (IsGlobalObject()) {
        // Global object values are boxed in PropertyCells; unwrap and skip
        // deleted entries.
3480       PropertyDetails d = property_dictionary()->DetailsAt(entry);
3481       if (d.IsDeleted()) {
3485       value = PropertyCell::cast(value)->value();
3487     // Make sure to disallow caching for uninitialized constants
3488     // found in the dictionary-mode objects.
3489     if (value->IsTheHole()) result->DisallowCaching();
3490     result->DictionaryResult(this, entry);
// Looks up |name| first on this object, then along the prototype chain.
3498 void JSObject::LookupRealNamedProperty(Name* name, LookupResult* result) {
3499   LocalLookupRealNamedProperty(name, result);
3500   if (result->IsFound()) return;
3502   LookupRealNamedPropertyInPrototypes(name, result);
// Searches the prototype chain (excluding this object) for a real property
// named |name|. A proxy on the chain yields a handler result; the walk stops
// at the first hit or at the null prototype.
3506 void JSObject::LookupRealNamedPropertyInPrototypes(Name* name,
3507                                                    LookupResult* result) {
3508   Isolate* isolate = GetIsolate();
3509   Heap* heap = isolate->heap();
3510   for (Object* pt = GetPrototype();
3511        pt != heap->null_value();
3512        pt = pt->GetPrototype(isolate)) {
3513     if (pt->IsJSProxy()) {
3514       return result->HandlerResult(JSProxy::cast(pt));
3516     JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
3517     ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
3518     if (result->IsFound()) return;
3524 // We only need to deal with CALLBACKS and INTERCEPTORS
// Store path taken after an access check failed: only all-can-write
// accessors (AccessorInfo / AccessorPair) may still receive the store.
// Otherwise the failed access is reported to the embedder and any scheduled
// exception is propagated.
3525 Handle<Object> JSObject::SetPropertyWithFailedAccessCheck(
3526     Handle<JSObject> object,
3527     LookupResult* result,
3529     Handle<Object> value,
3530     bool check_prototype,
3531     StrictMode strict_mode) {
3532   if (check_prototype && !result->IsProperty()) {
3533     object->LookupRealNamedPropertyInPrototypes(*name, result);
3536   if (result->IsProperty()) {
3537     if (!result->IsReadOnly()) {
3538       switch (result->type()) {
3540           Object* obj = result->GetCallbackObject();
3541           if (obj->IsAccessorInfo()) {
3542             Handle<AccessorInfo> info(AccessorInfo::cast(obj));
3543             if (info->all_can_write()) {
3544               return SetPropertyWithCallback(object,
3548                                              handle(result->holder()),
3551           } else if (obj->IsAccessorPair()) {
3552             Handle<AccessorPair> pair(AccessorPair::cast(obj));
              // NOTE(review): checks all_can_read() on the *store* path here;
              // looks asymmetric with the all_can_write() branch above —
              // confirm against upstream before relying on it.
3553             if (pair->all_can_read()) {
3554               return SetPropertyWithCallback(object,
3558                                              handle(result->holder()),
3565           // Try lookup real named properties. Note that only property can be
3566           // set is callbacks marked as ALL_CAN_WRITE on the prototype chain.
3567           LookupResult r(object->GetIsolate());
3568           object->LookupRealNamedProperty(*name, &r);
3569           if (r.IsProperty()) {
3570             return SetPropertyWithFailedAccessCheck(object,
3586   Isolate* isolate = object->GetIsolate();
3587   isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET);
3588   RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Dispatches a named property store: proxies (handler lookup results) go
// through the proxy trap machinery, everything else through the ordinary
// JSObject store path.
3593 Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
3594                                        LookupResult* result,
3596                                        Handle<Object> value,
3597                                        PropertyAttributes attributes,
3598                                        StrictMode strict_mode,
3599                                        StoreFromKeyed store_mode) {
3600   if (result->IsHandler()) {
3601     return JSProxy::SetPropertyWithHandler(handle(result->proxy()),
3602         object, key, value, attributes, strict_mode);
3604   return JSObject::SetPropertyForResult(Handle<JSObject>::cast(object),
3605       result, key, value, attributes, strict_mode, store_mode);
// Implements the proxy "has" trap. Symbols are never reported as present
// (pre-symbol-proxy semantics); a pending exception from the trap yields
// false.
3610 bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) {
3611   Isolate* isolate = proxy->GetIsolate();
3613   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3614   if (name->IsSymbol()) return false;
3616   Handle<Object> args[] = { name };
3617   Handle<Object> result = proxy->CallTrap(
3618       "has", isolate->derived_has_trap(), ARRAY_SIZE(args), args);
3619   if (isolate->has_pending_exception()) return false;
3621   return result->BooleanValue();
// Implements the proxy "set" trap. Symbol keys are a silent no-op; a pending
// exception from the trap returns the empty handle.
3625 Handle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
3626                                                Handle<JSReceiver> receiver,
3628                                                Handle<Object> value,
3629                                                PropertyAttributes attributes,
3630                                                StrictMode strict_mode) {
3631   Isolate* isolate = proxy->GetIsolate();
3633   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3634   if (name->IsSymbol()) return value;
3636   Handle<Object> args[] = { receiver, name, value };
3637   proxy->CallTrap("set", isolate->derived_set_trap(), ARRAY_SIZE(args), args);
3638   if (isolate->has_pending_exception()) return Handle<Object>();
// Store through a proxy found on the receiver's prototype chain. Calls the
// "getPropertyDescriptor" trap, normalizes the result via
// ToCompletePropertyDescriptor, then emulates [[CanPut]]: non-configurable
// checks, data-descriptor writability (read-only throws in strict mode),
// or an accessor descriptor's setter invocation. |*done| is cleared only
// when the store should fall back to the caller.
3644 Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3645     Handle<JSProxy> proxy,
3646     Handle<JSReceiver> receiver,
3648     Handle<Object> value,
3649     PropertyAttributes attributes,
3650     StrictMode strict_mode,
3652   Isolate* isolate = proxy->GetIsolate();
3653   Handle<Object> handler(proxy->handler(), isolate);  // Trap might morph proxy.
3655   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3656   if (name->IsSymbol()) {
3658     return isolate->factory()->the_hole_value();
3661   *done = true;  // except where redefined...
3662   Handle<Object> args[] = { name };
3663   Handle<Object> result = proxy->CallTrap(
3664       "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
3665   if (isolate->has_pending_exception()) return Handle<Object>();
3667   if (result->IsUndefined()) {
      // No descriptor: the proxy does not intercept; caller stores locally.
3669     return isolate->factory()->the_hole_value();
3672   // Emulate [[GetProperty]] semantics for proxies.
3673   bool has_pending_exception;
3674   Handle<Object> argv[] = { result };
3675   Handle<Object> desc = Execution::Call(
3676       isolate, isolate->to_complete_property_descriptor(), result,
3677       ARRAY_SIZE(argv), argv, &has_pending_exception);
3678   if (has_pending_exception) return Handle<Object>();
3680   // [[GetProperty]] requires to check that all properties are configurable.
3681   Handle<String> configurable_name =
3682       isolate->factory()->InternalizeOneByteString(
3683           STATIC_ASCII_VECTOR("configurable_"));
3684   Handle<Object> configurable(
3685       v8::internal::GetProperty(isolate, desc, configurable_name));
3686   ASSERT(!isolate->has_pending_exception());
3687   ASSERT(configurable->IsTrue() || configurable->IsFalse());
3688   if (configurable->IsFalse()) {
3689     Handle<String> trap =
3690         isolate->factory()->InternalizeOneByteString(
3691             STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3692     Handle<Object> args[] = { handler, trap, name };
3693     Handle<Object> error = isolate->factory()->NewTypeError(
3694         "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3695     isolate->Throw(*error);
3696     return Handle<Object>();
3698   ASSERT(configurable->IsTrue());
3700   // Check for DataDescriptor.
3701   Handle<String> hasWritable_name =
3702       isolate->factory()->InternalizeOneByteString(
3703           STATIC_ASCII_VECTOR("hasWritable_"));
3704   Handle<Object> hasWritable(
3705       v8::internal::GetProperty(isolate, desc, hasWritable_name));
3706   ASSERT(!isolate->has_pending_exception());
3707   ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse());
3708   if (hasWritable->IsTrue()) {
3709     Handle<String> writable_name =
3710         isolate->factory()->InternalizeOneByteString(
3711             STATIC_ASCII_VECTOR("writable_"));
3712     Handle<Object> writable(
3713         v8::internal::GetProperty(isolate, desc, writable_name));
3714     ASSERT(!isolate->has_pending_exception());
3715     ASSERT(writable->IsTrue() || writable->IsFalse());
      // Writable data property: defer to the caller (*done = false).
      // Non-writable: sloppy mode silently keeps the value, strict throws.
3716     *done = writable->IsFalse();
3717     if (!*done) return isolate->factory()->the_hole_value();
3718     if (strict_mode == SLOPPY) return value;
3719     Handle<Object> args[] = { name, receiver };
3720     Handle<Object> error = isolate->factory()->NewTypeError(
3721         "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3722     isolate->Throw(*error);
3723     return Handle<Object>();
3726   // We have an AccessorDescriptor.
3727   Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
3728       STATIC_ASCII_VECTOR("set_"));
3729   Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_name));
3730   ASSERT(!isolate->has_pending_exception());
3731   if (!setter->IsUndefined()) {
3732     // TODO(rossberg): nicer would be to cast to some JSCallable here...
3733     return SetPropertyWithDefinedSetter(
3734         receiver, Handle<JSReceiver>::cast(setter), value);
    // Accessor without a setter: sloppy ignores, strict throws.
3737   if (strict_mode == SLOPPY) return value;
3738   Handle<Object> args2[] = { name, proxy };
3739   Handle<Object> error = isolate->factory()->NewTypeError(
3740       "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
3741   isolate->Throw(*error);
3742   return Handle<Object>();
// Implements the proxy "delete" trap. Symbols report false without calling
// the trap. Under STRICT_DELETION a falsy trap result raises a
// "handler_failed" TypeError instead of returning false.
3746 Handle<Object> JSProxy::DeletePropertyWithHandler(
3747     Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3748   Isolate* isolate = proxy->GetIsolate();
3750   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3751   if (name->IsSymbol()) return isolate->factory()->false_value();
3753   Handle<Object> args[] = { name };
3754   Handle<Object> result = proxy->CallTrap(
3755       "delete", Handle<Object>(), ARRAY_SIZE(args), args);
3756   if (isolate->has_pending_exception()) return Handle<Object>();
3758   bool result_bool = result->BooleanValue();
3759   if (mode == STRICT_DELETION && !result_bool) {
3760     Handle<Object> handler(proxy->handler(), isolate);
3761     Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3762         STATIC_ASCII_VECTOR("delete"));
3763     Handle<Object> args[] = { handler, trap_name };
3764     Handle<Object> error = isolate->factory()->NewTypeError(
3765         "handler_failed", HandleVector(args, ARRAY_SIZE(args)));
3766     isolate->Throw(*error);
3767     return Handle<Object>();
3769   return isolate->factory()->ToBoolean(result_bool);
// Element variant of DeletePropertyWithHandler: converts |index| to a string
// name and delegates.
3773 Handle<Object> JSProxy::DeleteElementWithHandler(
3774     Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3775   Isolate* isolate = proxy->GetIsolate();
3776   Handle<String> name = isolate->factory()->Uint32ToString(index);
3777   return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
// Derives PropertyAttributes for a proxy property from its
// "getPropertyDescriptor" trap result: enumerable_/configurable_/writable_
// fields of the completed descriptor map to DONT_ENUM / DONT_DELETE /
// READ_ONLY. An accessor with a setter counts as writable. Returns ABSENT
// for symbols or an undefined descriptor, NONE on any pending exception.
3781 PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
3782     Handle<JSProxy> proxy,
3783     Handle<JSReceiver> receiver,
3784     Handle<Name> name) {
3785   Isolate* isolate = proxy->GetIsolate();
3786   HandleScope scope(isolate);
3788   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3789   if (name->IsSymbol()) return ABSENT;
3791   Handle<Object> args[] = { name };
3792   Handle<Object> result = proxy->CallTrap(
3793       "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
3794   if (isolate->has_pending_exception()) return NONE;
3796   if (result->IsUndefined()) return ABSENT;
3798   bool has_pending_exception;
3799   Handle<Object> argv[] = { result };
3800   Handle<Object> desc = Execution::Call(
3801       isolate, isolate->to_complete_property_descriptor(), result,
3802       ARRAY_SIZE(argv), argv, &has_pending_exception);
3803   if (has_pending_exception) return NONE;
3805   // Convert result to PropertyAttributes.
3806   Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
3807       STATIC_ASCII_VECTOR("enumerable_"));
3808   Handle<Object> enumerable(v8::internal::GetProperty(isolate, desc, enum_n));
3809   if (isolate->has_pending_exception()) return NONE;
3810   Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
3811       STATIC_ASCII_VECTOR("configurable_"));
3812   Handle<Object> configurable(v8::internal::GetProperty(isolate, desc, conf_n));
3813   if (isolate->has_pending_exception()) return NONE;
3814   Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
3815       STATIC_ASCII_VECTOR("writable_"));
3816   Handle<Object> writable(v8::internal::GetProperty(isolate, desc, writ_n));
3817   if (isolate->has_pending_exception()) return NONE;
3818   if (!writable->BooleanValue()) {
    // Treat an accessor descriptor with a setter as writable.
3819     Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
3820         STATIC_ASCII_VECTOR("set_"));
3821     Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_n));
3822     if (isolate->has_pending_exception()) return NONE;
3823     writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
3826   if (configurable->IsFalse()) {
    // Non-configurable descriptors from a proxy are a TypeError.
3827     Handle<Object> handler(proxy->handler(), isolate);
3828     Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3829         STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3830     Handle<Object> args[] = { handler, trap, name };
3831     Handle<Object> error = isolate->factory()->NewTypeError(
3832         "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3833     isolate->Throw(*error);
3837   int attributes = NONE;
3838   if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
3839   if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
3840   if (!writable->BooleanValue()) attributes |= READ_ONLY;
3841   return static_cast<PropertyAttributes>(attributes);
// Element variant: converts |index| to a string name and delegates to
// GetPropertyAttributeWithHandler.
3845 PropertyAttributes JSProxy::GetElementAttributeWithHandler(
3846     Handle<JSProxy> proxy,
3847     Handle<JSReceiver> receiver,
3849   Isolate* isolate = proxy->GetIsolate();
3850   Handle<String> name = isolate->factory()->Uint32ToString(index);
3851   return GetPropertyAttributeWithHandler(proxy, receiver, name);
// Converts a (function) proxy in place into an ordinary JSFunction/JSObject,
// preserving any previously computed identity hash across the conversion.
3855 void JSProxy::Fix(Handle<JSProxy> proxy) {
3856   Isolate* isolate = proxy->GetIsolate();
3858   // Save identity hash.
3859   Handle<Object> hash(proxy->GetIdentityHash(), isolate);
3861   if (proxy->IsJSFunctionProxy()) {
3862     isolate->factory()->BecomeJSFunction(proxy);
3863     // Code will be set on the JavaScript side.
3865     isolate->factory()->BecomeJSObject(proxy);
3867   ASSERT(proxy->IsJSObject());
3869   // Inherit identity, if it was present.
3870   if (hash->IsSmi()) {
3871     JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
3872                               Handle<Smi>::cast(hash));
// Looks up trap |name| on the proxy's handler and calls it with |argv|.
// When the trap is absent, falls back to |derived| (the default derived-trap
// implementation) or throws "handler_trap_missing" if no fallback is given.
3877 MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name,
3878                                                  Handle<Object> derived,
3880                                                  Handle<Object> argv[]) {
3881   Isolate* isolate = GetIsolate();
3882   Handle<Object> handler(this->handler(), isolate);
3884   Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
3885   Handle<Object> trap(v8::internal::GetProperty(isolate, handler, trap_name));
3886   if (isolate->has_pending_exception()) return trap;
3888   if (trap->IsUndefined()) {
3889     if (derived.is_null()) {
3890       Handle<Object> args[] = { handler, trap_name };
3891       Handle<Object> error = isolate->factory()->NewTypeError(
3892           "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
3893       isolate->Throw(*error);
3894       return Handle<Object>();
3896     trap = Handle<Object>(derived);
3900   return Execution::Call(isolate, trap, handler, argc, argv, &threw);
3904 // TODO(mstarzinger): Temporary wrapper until handlified.
// File-local retry wrapper around the raw Map::AsElementsKind.
3905 static Handle<Map> MapAsElementsKind(Handle<Map> map, ElementsKind kind) {
3906   CALL_HEAP_FUNCTION(map->GetIsolate(), map->AsElementsKind(kind), Map);
// Prepares |object| to adopt |map|: reconciles differing elements kinds
// (normalizing to dictionary elements or transitioning, as appropriate),
// then migrates the object to the (possibly adjusted) map. Requires matching
// in-object property counts (asserted).
3910 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
3911   ASSERT(object->map()->inobject_properties() == map->inobject_properties());
3912   ElementsKind obj_kind = object->map()->elements_kind();
3913   ElementsKind map_kind = map->elements_kind();
3914   if (map_kind != obj_kind) {
3915     ElementsKind to_kind = map_kind;
3916     if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
3917         IsDictionaryElementsKind(obj_kind)) {
3920     if (IsDictionaryElementsKind(to_kind)) {
3921       NormalizeElements(object);
3923       TransitionElementsKind(object, to_kind);
3925       map = MapAsElementsKind(map, to_kind);
3927   JSObject::MigrateToMap(object, map);
// Migrates |object| off a deprecated map by generalizing field 0 to the most
// specific representation, which drives the object onto the most general
// existing transition; marks the new map as a migration target and optionally
// traces the migration.
3931 void JSObject::MigrateInstance(Handle<JSObject> object) {
3932   // Converting any field to the most specific type will cause the
3933   // GeneralizeFieldRepresentation algorithm to create the most general existing
3934   // transition that matches the object. This achieves what is needed.
3935   Handle<Map> original_map(object->map());
3936   GeneralizeFieldRepresentation(
3937       object, 0, Representation::None(), ALLOW_AS_CONSTANT);
3938   object->map()->set_migration_target(true);
3939   if (FLAG_trace_migration) {
3940     object->PrintInstanceMigration(stdout, *original_map, object->map());
// Attempts a migration without allocation of new maps: only succeeds when an
// up-to-date replacement for the deprecated map already exists. Returns the
// empty handle when no such map is available.
3945 Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) {
3946   Handle<Map> original_map(object->map());
3947   Handle<Map> new_map = Map::CurrentMapForDeprecatedInternal(original_map);
3948   if (new_map.is_null()) return Handle<Object>();
3949   JSObject::MigrateToMap(object, new_map);
3950   if (FLAG_trace_migration) {
3951     object->PrintInstanceMigration(stdout, *original_map, object->map());
// Stores |value| by taking the map transition recorded in |lookup|:
// bails out to AddProperty when the transition's descriptor is a callback or
// the attributes differ; generalizes the representation when the value does
// not fit (or a CONSTANT target holds a different value); then migrates to
// the transition map and writes the field.
3957 Handle<Object> JSObject::SetPropertyUsingTransition(
3958     Handle<JSObject> object,
3959     LookupResult* lookup,
3961     Handle<Object> value,
3962     PropertyAttributes attributes) {
3963   Handle<Map> transition_map(lookup->GetTransitionTarget());
3964   int descriptor = transition_map->LastAdded();
3966   DescriptorArray* descriptors = transition_map->instance_descriptors();
3967   PropertyDetails details = descriptors->GetDetails(descriptor);
3969   if (details.type() == CALLBACKS || attributes != details.attributes()) {
3970     // AddProperty will either normalize the object, or create a new fast copy
3971     // of the map. If we get a fast copy of the map, all field representations
3972     // will be tagged since the transition is omitted.
3973     return JSObject::AddProperty(
3974         object, name, value, attributes, SLOPPY,
3975         JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED,
3976         JSReceiver::OMIT_EXTENSIBILITY_CHECK,
3977         JSObject::FORCE_TAGGED, FORCE_FIELD, OMIT_TRANSITION);
3980   // Keep the target CONSTANT if the same value is stored.
3981   // TODO(verwaest): Also support keeping the placeholder
3982   // (value->IsUninitialized) as constant.
3983   if (!value->FitsRepresentation(details.representation()) ||
3984       (details.type() == CONSTANT &&
3985        descriptors->GetValue(descriptor) != *value)) {
3986     transition_map = Map::GeneralizeRepresentation(transition_map,
3987         descriptor, value->OptimalRepresentation(), FORCE_FIELD);
3990   JSObject::MigrateToMap(object, transition_map);
    // Re-fetch: GeneralizeRepresentation may have produced a new descriptor
    // array.
3993   descriptors = transition_map->instance_descriptors();
3994   details = descriptors->GetDetails(descriptor);
3996   if (details.type() != FIELD) return value;
3998   int field_index = descriptors->GetFieldIndex(descriptor);
3999   if (details.representation().IsDouble()) {
4000     // Nothing more to be done.
4001     if (value->IsUninitialized()) return value;
      // Double fields store into a pre-allocated HeapNumber box in place.
4002     HeapNumber* box = HeapNumber::cast(object->RawFastPropertyAt(field_index));
4003     box->set_value(value->Number());
4005     object->FastPropertyAtPut(field_index, *value);
// Writes |value| into the existing field described by |lookup|, generalizing
// the field's representation first when the value does not fit (or the
// descriptor is CONSTANT). Double representations write into the in-place
// HeapNumber box rather than replacing the slot.
4012 static void SetPropertyToField(LookupResult* lookup,
4014                                Handle<Object> value) {
4015   Representation representation = lookup->representation();
4016   if (!value->FitsRepresentation(representation) ||
4017       lookup->type() == CONSTANT) {
4018     JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()),
4019                                             lookup->GetDescriptorIndex(),
4020                                             value->OptimalRepresentation(),
4022     DescriptorArray* desc = lookup->holder()->map()->instance_descriptors();
4023     int descriptor = lookup->GetDescriptorIndex();
4024     representation = desc->GetDetails(descriptor).representation();
4027   if (representation.IsDouble()) {
4028     HeapNumber* storage = HeapNumber::cast(lookup->holder()->RawFastPropertyAt(
4029         lookup->GetFieldIndex().field_index()));
4030     storage->set_value(value->Number());
4034   lookup->holder()->FastPropertyAtPut(
4035       lookup->GetFieldIndex().field_index(), *value);
// Re-stores a local property with possibly new |attributes|: normalizes the
// object when it has too many fast properties; on the slow path replaces the
// dictionary entry; on the fast path either generalizes to a tagged field
// (same attributes) or copies the map with the new attributes, then writes
// the field value.
4039 static void ConvertAndSetLocalProperty(LookupResult* lookup,
4041                                        Handle<Object> value,
4042                                        PropertyAttributes attributes) {
4043   Handle<JSObject> object(lookup->holder());
4044   if (object->TooManyFastProperties()) {
4045     JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
4048   if (!object->HasFastProperties()) {
4049     ReplaceSlowProperty(object, name, value, attributes);
4053   int descriptor_index = lookup->GetDescriptorIndex();
4054   if (lookup->GetAttributes() == attributes) {
4055     JSObject::GeneralizeFieldRepresentation(
4056         object, descriptor_index, Representation::Tagged(), FORCE_FIELD);
4058     Handle<Map> old_map(object->map());
4059     Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map,
4060         descriptor_index, FORCE_FIELD, attributes, "attributes mismatch");
4061     JSObject::MigrateToMap(object, new_map);
4064   DescriptorArray* descriptors = object->map()->instance_descriptors();
4065   int index = descriptors->GetDetails(descriptor_index).field_index();
4066   object->FastPropertyAtPut(index, *value);
// Field store that also honors |attributes|: unchanged attributes take the
// plain field path (skipping uninitialized sentinel values); changed
// attributes go through the convert-and-set path.
4070 static void SetPropertyToFieldWithAttributes(LookupResult* lookup,
4072                                              Handle<Object> value,
4073                                              PropertyAttributes attributes) {
4074   if (lookup->GetAttributes() == attributes) {
4075     if (value->IsUninitialized()) return;
4076     SetPropertyToField(lookup, name, value);
4078     ConvertAndSetLocalProperty(lookup, name, value, attributes);
// Generic named-property store: handles access checks, global-proxy
// forwarding, prototype-chain setters, read-only errors (in strict mode),
// and Object.observe change records, then dispatches on the lookup type.
// Returns an empty handle when an exception has been thrown.
// NOTE(review): many embedded line numbers are missing throughout this body
// (e.g. 4085, 4091, 4160-4188 case labels); this listing has elided lines —
// confirm against the complete file before relying on control flow.
4083 Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object,
4084 LookupResult* lookup,
4086 Handle<Object> value,
4087 PropertyAttributes attributes,
4088 StrictMode strict_mode,
4089 StoreFromKeyed store_mode) {
4090 Isolate* isolate = object->GetIsolate();
4092 // Make sure that the top context does not change when doing callbacks or
4093 // interceptor calls.
4094 AssertNoContextChange ncc(isolate);
4096 // Optimization for 2-byte strings often used as keys in a decompression
4097 // dictionary. We internalize these short keys to avoid constantly
4098 // reallocating them.
4099 if (name->IsString() && !name->IsInternalizedString() &&
4100 Handle<String>::cast(name)->length() <= 2) {
4101 name = isolate->factory()->InternalizeString(Handle<String>::cast(name));
4104 // Check access rights if needed.
4105 if (object->IsAccessCheckNeeded()) {
4106 if (!isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
4107 return SetPropertyWithFailedAccessCheck(object, lookup, name, value,
// A global proxy delegates the store to its (global-object) prototype;
// a detached proxy (null prototype) silently succeeds with |value|.
4112 if (object->IsJSGlobalProxy()) {
4113 Handle<Object> proto(object->GetPrototype(), isolate);
4114 if (proto->IsNull()) return value;
4115 ASSERT(proto->IsJSGlobalObject());
4116 return SetPropertyForResult(Handle<JSObject>::cast(proto),
4117 lookup, name, value, attributes, strict_mode, store_mode);
4120 ASSERT(!lookup->IsFound() || lookup->holder() == *object ||
4121 lookup->holder()->map()->is_hidden_prototype());
// Give setters in the prototype chain a chance to intercept the store.
4123 if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) {
4125 Handle<Object> result_object = SetPropertyViaPrototypes(
4126 object, name, value, attributes, strict_mode, &done);
4127 if (done) return result_object;
4130 if (!lookup->IsFound()) {
4131 // Neither properties nor transitions found.
4133 object, name, value, attributes, strict_mode, store_mode);
// Read-only property: throw in strict mode, otherwise the store is ignored.
4136 if (lookup->IsProperty() && lookup->IsReadOnly()) {
4137 if (strict_mode == STRICT) {
4138 Handle<Object> args[] = { name, object };
4139 Handle<Object> error = isolate->factory()->NewTypeError(
4140 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
4141 isolate->Throw(*error);
4142 return Handle<Object>();
// Capture the old value so an Object.observe "update" record can be
// emitted only when the value actually changed.
4148 Handle<Object> old_value = isolate->factory()->the_hole_value();
4149 bool is_observed = object->map()->is_observed() &&
4150 *name != isolate->heap()->hidden_string();
4151 if (is_observed && lookup->IsDataProperty()) {
4152 old_value = Object::GetProperty(object, name);
4153 CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
4156 // This is a real property that is not read-only, or it is a
4157 // transition or null descriptor and there are no setters in the prototypes.
4158 Handle<Object> result = value;
// NOTE(review): the case labels of this switch appear elided in this
// listing; the arms below presumably correspond to NORMAL, FIELD,
// CONSTANT, CALLBACKS, INTERCEPTOR and TRANSITION — confirm.
4159 switch (lookup->type()) {
4161 SetNormalizedProperty(handle(lookup->holder()), lookup, value);
4164 SetPropertyToField(lookup, name, value);
4167 // Only replace the constant if necessary.
4168 if (*value == lookup->GetConstant()) return value;
4169 SetPropertyToField(lookup, name, value);
4172 Handle<Object> callback_object(lookup->GetCallbackObject(), isolate);
4173 return SetPropertyWithCallback(object, callback_object, name, value,
4174 handle(lookup->holder()), strict_mode);
4177 result = SetPropertyWithInterceptor(handle(lookup->holder()), name, value,
4178 attributes, strict_mode);
4181 result = SetPropertyUsingTransition(handle(lookup->holder()), lookup,
4182 name, value, attributes);
4189 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
// Emit Object.observe records: "add" for a transition, "update" when a
// data property's value changed (SameValue comparison).
4192 if (lookup->IsTransition()) {
4193 EnqueueChangeRecord(object, "add", name, old_value);
4195 LookupResult new_lookup(isolate);
4196 object->LocalLookup(*name, &new_lookup, true);
4197 if (new_lookup.IsDataProperty()) {
4198 Handle<Object> new_value = Object::GetProperty(object, name);
4199 CHECK_NOT_EMPTY_HANDLE(isolate, new_value);
4200 if (!new_value->SameValue(*old_value)) {
4201 EnqueueChangeRecord(object, "update", name, old_value);
4211 // Set a real local property, even if it is READ_ONLY. If the property is not
4212 // present, add it with attributes NONE. This code is an exact clone of
4213 // SetProperty, with the check for IsReadOnly and the check for a
4214 // callback setter removed. The two lines looking up the LookupResult
4215 // result are also added. If one of the functions is changed, the other
4216 // should be changed too.
4217 // Note that this method cannot be used to set the prototype of a function
4218 // because ConvertDescriptorToField() which is called in "case CALLBACKS:"
4219 // doesn't handle function prototypes correctly.
// Defines/overwrites a local (own) property regardless of READ_ONLY, also
// reconfiguring its attributes. See the comment block above for how this
// mirrors SetProperty. Emits Object.observe "add"/"reconfigure"/"update"
// records as appropriate.
// NOTE(review): embedded-number gaps (e.g. 4222, 4226, 4285-4313 case
// labels and braces) indicate elided source lines in this listing; confirm
// control flow against the complete file.
4220 Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
4221 Handle<JSObject> object,
4223 Handle<Object> value,
4224 PropertyAttributes attributes,
4225 ValueType value_type,
4227 ExtensibilityCheck extensibility_check) {
4228 Isolate* isolate = object->GetIsolate();
4230 // Make sure that the top context does not change when doing callbacks or
4231 // interceptor calls.
4232 AssertNoContextChange ncc(isolate);
// Look up the own property; fall back to a map transition if not found.
4234 LookupResult lookup(isolate);
4235 object->LocalLookup(*name, &lookup, true);
4236 if (!lookup.IsFound()) {
4237 object->map()->LookupTransition(*object, *name, &lookup);
4240 // Check access rights if needed.
4241 if (object->IsAccessCheckNeeded()) {
4242 if (!isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
4243 return SetPropertyWithFailedAccessCheck(object, &lookup, name, value,
// Global proxy: delegate to the global object; detached proxy is a no-op.
4248 if (object->IsJSGlobalProxy()) {
4249 Handle<Object> proto(object->GetPrototype(), isolate);
4250 if (proto->IsNull()) return value;
4251 ASSERT(proto->IsJSGlobalObject());
4252 return SetLocalPropertyIgnoreAttributes(Handle<JSObject>::cast(proto),
4253 name, value, attributes, value_type, mode, extensibility_check);
// Interceptors/callbacks are ignored here: redo the lookup on the real
// named properties only.
4256 if (lookup.IsFound() &&
4257 (lookup.type() == INTERCEPTOR || lookup.type() == CALLBACKS)) {
4258 object->LocalLookupRealNamedProperty(*name, &lookup);
4261 // Check for accessor in prototype chain removed here in clone.
4262 if (!lookup.IsFound()) {
4263 object->map()->LookupTransition(*object, *name, &lookup);
4264 TransitionFlag flag = lookup.IsFound()
4265 ? OMIT_TRANSITION : INSERT_TRANSITION;
4266 // Neither properties nor transitions found.
4267 return AddProperty(object, name, value, attributes, SLOPPY,
4268 MAY_BE_STORE_FROM_KEYED, extensibility_check, value_type, mode, flag);
// Record old value and attributes for Object.observe notifications.
4271 Handle<Object> old_value = isolate->factory()->the_hole_value();
4272 PropertyAttributes old_attributes = ABSENT;
4273 bool is_observed = object->map()->is_observed() &&
4274 *name != isolate->heap()->hidden_string();
4275 if (is_observed && lookup.IsProperty()) {
4276 if (lookup.IsDataProperty()) {
4277 old_value = Object::GetProperty(object, name);
4278 CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
4280 old_attributes = lookup.GetAttributes();
4283 // Check of IsReadOnly removed from here in clone.
// NOTE(review): case labels for this switch are elided in this listing.
4284 switch (lookup.type()) {
4286 ReplaceSlowProperty(object, name, value, attributes);
4289 SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4292 // Only replace the constant if necessary.
4293 if (lookup.GetAttributes() != attributes ||
4294 *value != lookup.GetConstant()) {
4295 SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4299 ConvertAndSetLocalProperty(&lookup, name, value, attributes);
4302 Handle<Object> result = SetPropertyUsingTransition(
4303 handle(lookup.holder()), &lookup, name, value, attributes);
4304 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
// Emit the appropriate change record: "add" for transitions,
// "reconfigure" when attributes changed, "update" when only the value did.
4314 if (lookup.IsTransition()) {
4315 EnqueueChangeRecord(object, "add", name, old_value);
4316 } else if (old_value->IsTheHole()) {
4317 EnqueueChangeRecord(object, "reconfigure", name, old_value);
4319 LookupResult new_lookup(isolate);
4320 object->LocalLookup(*name, &new_lookup, true);
4321 bool value_changed = false;
4322 if (new_lookup.IsDataProperty()) {
4323 Handle<Object> new_value = Object::GetProperty(object, name);
4324 CHECK_NOT_EMPTY_HANDLE(isolate, new_value);
4325 value_changed = !old_value->SameValue(*new_value);
4327 if (new_lookup.GetAttributes() != old_attributes) {
4328 if (!value_changed) old_value = isolate->factory()->the_hole_value();
4329 EnqueueChangeRecord(object, "reconfigure", name, old_value);
4330 } else if (value_changed) {
4331 EnqueueChangeRecord(object, "update", name, old_value);
// Returns the attributes of a local property, bypassing interceptors; if
// |continue_search| is set and the property is absent, continues up the
// prototype chain.
// NOTE(review): embedded-number gaps (4343, 4350, 4357-4362) indicate
// elided lines (the |name| parameter, final return, closing braces).
4340 PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
4341 Handle<JSObject> object,
4342 Handle<JSObject> receiver,
4344 bool continue_search) {
4345 // Check local property, ignore interceptor.
4346 Isolate* isolate = object->GetIsolate();
4347 LookupResult result(isolate);
4348 object->LocalLookupRealNamedProperty(*name, &result);
4349 if (result.IsFound()) return result.GetAttributes();
4351 if (continue_search) {
4352 // Continue searching via the prototype chain.
4353 Handle<Object> proto(object->GetPrototype(), isolate);
4354 if (!proto->IsNull()) {
4355 return JSReceiver::GetPropertyAttributeWithReceiver(
4356 Handle<JSObject>::cast(proto), receiver, name);
// Queries property attributes through the object's named interceptor:
// prefers the interceptor's query callback, falls back to probing the
// getter (a non-empty result is reported as DONT_ENUM), and finally
// defers to GetPropertyAttributePostInterceptor.
// NOTE(review): embedded-number gaps (4366, 4384, 4395, 4400) indicate
// elided lines (the |name| parameter and LOG(...) call sites).
4363 PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
4364 Handle<JSObject> object,
4365 Handle<JSObject> receiver,
4367 bool continue_search) {
4368 // TODO(rossberg): Support symbols in the API.
4369 if (name->IsSymbol()) return ABSENT;
4371 Isolate* isolate = object->GetIsolate();
4372 HandleScope scope(isolate);
4374 // Make sure that the top context does not change when doing
4375 // callbacks or interceptor calls.
4376 AssertNoContextChange ncc(isolate);
4378 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
4379 PropertyCallbackArguments args(
4380 isolate, interceptor->data(), *receiver, *object);
4381 if (!interceptor->query()->IsUndefined()) {
4382 v8::NamedPropertyQueryCallback query =
4383 v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
4385 ApiNamedPropertyAccess("interceptor-named-has", *object, *name));
4386 v8::Handle<v8::Integer> result =
4387 args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
4388 if (!result.IsEmpty()) {
// The query callback returns the attributes encoded as an integer.
4389 ASSERT(result->IsInt32());
4390 return static_cast<PropertyAttributes>(result->Int32Value());
4392 } else if (!interceptor->getter()->IsUndefined()) {
4393 v8::NamedPropertyGetterCallback getter =
4394 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
4396 ApiNamedPropertyAccess("interceptor-named-get-has", *object, *name));
4397 v8::Handle<v8::Value> result =
4398 args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
// Getter-only interceptors cannot report attributes; existence maps to
// DONT_ENUM.
4399 if (!result.IsEmpty()) return DONT_ENUM;
4401 return GetPropertyAttributePostInterceptor(
4402 object, receiver, name, continue_search);
// Attribute lookup entry point for an arbitrary key: routes array indices
// to the element path and everything else through a full Lookup.
// NOTE(review): embedded-number gaps (4409-4410, 4414-4415) indicate
// elided lines (the |key| parameter and the |index| declaration).
4406 PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver(
4407 Handle<JSReceiver> object,
4408 Handle<JSReceiver> receiver,
4411 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
4412 return JSObject::GetElementAttributeWithReceiver(
4413 Handle<JSObject>::cast(object), receiver, index, true);
4416 LookupResult lookup(object->GetIsolate());
4417 object->Lookup(*key, &lookup);
4418 return GetPropertyAttributeForResult(object, receiver, &lookup, key, true);
// Converts a finished LookupResult into PropertyAttributes, handling access
// checks and dispatching proxies and interceptors to their own paths.
// NOTE(review): embedded-number gaps (4426, 4440-4442, 4444, 4448,
// 4452-4462) indicate elided lines (the |name| parameter, several case
// labels, and the function tail).
4422 PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
4423 Handle<JSReceiver> object,
4424 Handle<JSReceiver> receiver,
4425 LookupResult* lookup,
4427 bool continue_search) {
4428 // Check access rights if needed.
4429 if (object->IsAccessCheckNeeded()) {
4430 Heap* heap = object->GetHeap();
4431 Handle<JSObject> obj = Handle<JSObject>::cast(object);
4432 if (!heap->isolate()->MayNamedAccessWrapper(obj, name, v8::ACCESS_HAS)) {
4433 return JSObject::GetPropertyAttributeWithFailedAccessCheck(
4434 obj, lookup, name, continue_search);
4437 if (lookup->IsFound()) {
4438 switch (lookup->type()) {
4439 case NORMAL: // fall through
4443 return lookup->GetAttributes();
4445 return JSProxy::GetPropertyAttributeWithHandler(
4446 handle(lookup->proxy()), receiver, name);
4449 return JSObject::GetPropertyAttributeWithInterceptor(
4450 handle(lookup->holder()),
4451 Handle<JSObject>::cast(receiver),
// Own-property attribute lookup (prototype chain is not searched:
// continue_search is false). Array-index names take the element path.
// NOTE(review): the |index| declaration line (4466) is elided here.
4463 PropertyAttributes JSReceiver::GetLocalPropertyAttribute(
4464 Handle<JSReceiver> object, Handle<Name> name) {
4465 // Check whether the name is an array index.
4467 if (object->IsJSObject() && name->AsArrayIndex(&index)) {
4468 return GetLocalElementAttribute(object, index);
4471 LookupResult lookup(object->GetIsolate());
4472 object->LocalLookup(*name, &lookup, true);
4473 return GetPropertyAttributeForResult(object, object, &lookup, name, false);
// Element attribute lookup: performs the indexed access check, forwards
// global proxies to their prototype, then dispatches through the indexed
// interceptor (unless bootstrapping) or directly past it.
// NOTE(review): embedded-number gaps (4480, 4483, 4488-4491) indicate
// elided lines (the |index| parameter and the failed-access-check return).
4477 PropertyAttributes JSObject::GetElementAttributeWithReceiver(
4478 Handle<JSObject> object,
4479 Handle<JSReceiver> receiver,
4481 bool continue_search) {
4482 Isolate* isolate = object->GetIsolate();
4484 // Check access rights if needed.
4485 if (object->IsAccessCheckNeeded()) {
4486 if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS)) {
4487 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
4492 if (object->IsJSGlobalProxy()) {
4493 Handle<Object> proto(object->GetPrototype(), isolate);
4494 if (proto->IsNull()) return ABSENT;
4495 ASSERT(proto->IsJSGlobalObject());
4496 return JSObject::GetElementAttributeWithReceiver(
4497 Handle<JSObject>::cast(proto), receiver, index, continue_search);
4500 // Check for lookup interceptor except when bootstrapping.
4501 if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4502 return JSObject::GetElementAttributeWithInterceptor(
4503 object, receiver, index, continue_search);
4506 return GetElementAttributeWithoutInterceptor(
4507 object, receiver, index, continue_search);
// Queries element attributes through the indexed interceptor: query
// callback first; otherwise a non-empty getter result is reported as NONE;
// otherwise falls through to the interceptor-free path.
// NOTE(review): embedded-number gaps (4514, 4529, 4537) indicate elided
// lines (the |index| parameter and LOG(...) call sites).
4511 PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
4512 Handle<JSObject> object,
4513 Handle<JSReceiver> receiver,
4515 bool continue_search) {
4516 Isolate* isolate = object->GetIsolate();
4517 HandleScope scope(isolate);
4519 // Make sure that the top context does not change when doing
4520 // callbacks or interceptor calls.
4521 AssertNoContextChange ncc(isolate);
4523 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4524 PropertyCallbackArguments args(
4525 isolate, interceptor->data(), *receiver, *object);
4526 if (!interceptor->query()->IsUndefined()) {
4527 v8::IndexedPropertyQueryCallback query =
4528 v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
4530 ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4531 v8::Handle<v8::Integer> result = args.Call(query, index);
4532 if (!result.IsEmpty())
4533 return static_cast<PropertyAttributes>(result->Int32Value());
4534 } else if (!interceptor->getter()->IsUndefined()) {
4535 v8::IndexedPropertyGetterCallback getter =
4536 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4538 ApiIndexedPropertyAccess(
4539 "interceptor-indexed-get-has", *object, index));
4540 v8::Handle<v8::Value> result = args.Call(getter, index);
4541 if (!result.IsEmpty()) return NONE;
4544 return GetElementAttributeWithoutInterceptor(
4545 object, receiver, index, continue_search);
// Element attribute lookup that ignores interceptors: asks the elements
// accessor, special-cases String-wrapper character access, and optionally
// walks the prototype chain (simulating [[GetOwnProperty]] on proxies).
// NOTE(review): embedded-number gaps (4552, 4557, 4561-4564, 4570)
// indicate elided lines (the |index| parameter and closing braces).
4549 PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
4550 Handle<JSObject> object,
4551 Handle<JSReceiver> receiver,
4553 bool continue_search) {
4554 PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4555 *receiver, *object, index);
4556 if (attr != ABSENT) return attr;
4558 // Handle [] on String objects.
4559 if (object->IsStringObjectWithCharacterAt(index)) {
// Indexed characters of a wrapped string are read-only, non-deletable.
4560 return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
4563 if (!continue_search) return ABSENT;
4565 Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
4566 if (proto->IsJSProxy()) {
4567 // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4568 return JSProxy::GetElementAttributeWithHandler(
4569 Handle<JSProxy>::cast(proto), receiver, index);
4571 if (proto->IsNull()) return ABSENT;
4572 return GetElementAttributeWithReceiver(
4573 Handle<JSObject>::cast(proto), receiver, index, true);
// Returns a shared normalized (dictionary-mode) map for |obj|: probes a
// small hash-indexed cache and, on a miss, creates a fresh normalized map
// and installs it in the cache slot.
// NOTE(review): embedded-number gaps (4584-4585, 4588-4589, 4597,
// 4607-4611, 4618-4622) indicate elided lines (part of the cache-hit
// condition, #ifdef guards, closing braces, and the final return).
4577 Handle<Map> NormalizedMapCache::Get(Handle<NormalizedMapCache> cache,
4578 Handle<JSObject> obj,
4579 PropertyNormalizationMode mode) {
4580 int index = obj->map()->Hash() % kEntries;
4581 Handle<Object> result = handle(cache->get(index), cache->GetIsolate());
// Cache hit: the stored map must be equivalent to |obj|'s map under the
// requested normalization mode.
4582 if (result->IsMap() &&
4583 Handle<Map>::cast(result)->EquivalentToForNormalization(obj->map(),
4586 if (FLAG_verify_heap) {
4587 Handle<Map>::cast(result)->SharedMapVerify();
4590 #ifdef ENABLE_SLOW_ASSERTS
4591 if (FLAG_enable_slow_asserts) {
4592 // The cached map should match newly created normalized map bit-by-bit,
4593 // except for the code cache, which can contain some ics which can be
4594 // applied to the shared map.
4595 Handle<Map> fresh = Map::CopyNormalized(handle(obj->map()), mode,
4596 SHARED_NORMALIZED_MAP);
4598 ASSERT(memcmp(fresh->address(),
4599 Handle<Map>::cast(result)->address(),
4600 Map::kCodeCacheOffset) == 0);
4601 STATIC_ASSERT(Map::kDependentCodeOffset ==
4602 Map::kCodeCacheOffset + kPointerSize);
4603 int offset = Map::kDependentCodeOffset + kPointerSize;
4604 ASSERT(memcmp(fresh->address() + offset,
4605 Handle<Map>::cast(result)->address() + offset,
4606 Map::kSize - offset) == 0);
4609 return Handle<Map>::cast(result);
// Cache miss: build a new shared normalized map and remember it.
4612 Isolate* isolate = cache->GetIsolate();
4613 Handle<Map> map = Map::CopyNormalized(handle(obj->map()), mode,
4614 SHARED_NORMALIZED_MAP);
4615 ASSERT(map->is_dictionary_map());
4616 cache->set(index, *map);
4617 isolate->counters()->normalized_maps()->Increment();
// Empties the normalized-map cache.
// NOTE(review): the loop body and closing braces (4626-4630) are elided in
// this listing.
4623 void NormalizedMapCache::Clear() {
4624 int entries = length();
4625 for (int i = 0; i != entries; i++) {
// Records |code| under |name| in the code cache of the object's map.
// NOTE(review): the |name| parameter line (4632) and closing brace are
// elided in this listing.
4631 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4633 Handle<Code> code) {
4634 Handle<Map> map(object->map());
4635 Map::UpdateCodeCache(map, name, code);
// Migrates a fast-properties object to dictionary (slow) properties:
// copies every own descriptor into a fresh NameDictionary, fetches a
// shared normalized map, shrinks the object in place, and installs the
// new map and dictionary.
// NOTE(review): embedded-number gaps (e.g. 4668, 4674-4676, 4691-4703,
// 4736-4741) indicate elided lines (case labels, closing braces, the
// trace-output tail); confirm against the complete file.
4639 void JSObject::NormalizeProperties(Handle<JSObject> object,
4640 PropertyNormalizationMode mode,
4641 int expected_additional_properties) {
4642 if (!object->HasFastProperties()) return;
4644 // The global object is always normalized.
4645 ASSERT(!object->IsGlobalObject());
4646 // JSGlobalProxy must never be normalized
4647 ASSERT(!object->IsJSGlobalProxy());
4649 Isolate* isolate = object->GetIsolate();
4650 HandleScope scope(isolate);
4651 Handle<Map> map(object->map());
4653 // Allocate new content.
4654 int real_size = map->NumberOfOwnDescriptors();
4655 int property_count = real_size;
4656 if (expected_additional_properties > 0) {
4657 property_count += expected_additional_properties;
4659 property_count += 2; // Make space for two more properties.
4661 Handle<NameDictionary> dictionary =
4662 isolate->factory()->NewNameDictionary(property_count);
// Copy each descriptor into the dictionary; enumeration order is
// preserved via the i + 1 enumeration index.
// NOTE(review): the case labels of this switch are elided; the arms below
// presumably handle CONSTANT, FIELD and CALLBACKS respectively — confirm.
4664 Handle<DescriptorArray> descs(map->instance_descriptors());
4665 for (int i = 0; i < real_size; i++) {
4666 PropertyDetails details = descs->GetDetails(i);
4667 switch (details.type()) {
4669 Handle<Name> key(descs->GetKey(i));
4670 Handle<Object> value(descs->GetConstant(i), isolate);
4671 PropertyDetails d = PropertyDetails(
4672 details.attributes(), NORMAL, i + 1);
4673 dictionary = NameDictionaryAdd(dictionary, key, value, d);
4677 Handle<Name> key(descs->GetKey(i));
4678 Handle<Object> value(
4679 object->RawFastPropertyAt(descs->GetFieldIndex(i)), isolate);
4681 PropertyDetails(details.attributes(), NORMAL, i + 1);
4682 dictionary = NameDictionaryAdd(dictionary, key, value, d);
4686 Handle<Name> key(descs->GetKey(i));
4687 Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4688 PropertyDetails d = PropertyDetails(
4689 details.attributes(), CALLBACKS, i + 1);
4690 dictionary = NameDictionaryAdd(dictionary, key, value, d);
4704 // Copy the next enumeration index from instance descriptor.
4705 dictionary->SetNextEnumerationIndex(real_size + 1);
4707 Handle<NormalizedMapCache> cache(
4708 isolate->context()->native_context()->normalized_map_cache());
4709 Handle<Map> new_map = NormalizedMapCache::Get(cache, object, mode);
4710 ASSERT(new_map->is_dictionary_map());
4712 // From here on we cannot fail and we shouldn't GC anymore.
4713 DisallowHeapAllocation no_allocation;
4715 // Resize the object in the heap if necessary.
4716 int new_instance_size = new_map->instance_size();
4717 int instance_size_delta = map->instance_size() - new_instance_size;
4718 ASSERT(instance_size_delta >= 0);
// Fill the now-unused tail with a filler object and fix live-byte counts
// so the heap stays iterable.
4719 Heap* heap = isolate->heap();
4720 heap->CreateFillerObjectAt(object->address() + new_instance_size,
4721 instance_size_delta);
4722 heap->AdjustLiveBytes(object->address(),
4723 -instance_size_delta,
4724 Heap::FROM_MUTATOR);
4726 object->set_map(*new_map);
4727 map->NotifyLeafMapLayoutChange();
4729 object->set_properties(*dictionary);
4731 isolate->counters()->props_to_dictionary()->Increment();
4734 if (FLAG_trace_normalization) {
4735 PrintF("Object properties have been normalized:\n");
// Converts a dictionary-mode object back to fast properties (no-op if the
// object is already fast; global objects are never converted).
4742 void JSObject::TransformToFastProperties(Handle<JSObject> object,
4743 int unused_property_fields) {
4744 if (object->HasFastProperties()) return;
4745 ASSERT(!object->IsGlobalObject());
// Wraps the raw-allocation call so GC retries are handled by the macro.
4746 CALL_HEAP_FUNCTION_VOID(
4747 object->GetIsolate(),
4748 object->property_dictionary()->TransformPropertiesToFastFor(
4749 *object, unused_property_fields));
// Copies the first |length| fast elements of |array| into |dictionary|,
// skipping holes; double elements are boxed into HeapNumbers.
// NOTE(review): embedded-number gaps (4755, 4767, 4769-4770, 4772, 4775,
// 4777-4782) indicate elided lines (the |length| parameter, else branches,
// assignment of the AddNumberEntry result, final return).
4753 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
4754 Handle<FixedArrayBase> array,
4756 Handle<SeededNumberDictionary> dictionary) {
4757 Isolate* isolate = array->GetIsolate();
4758 Factory* factory = isolate->factory();
4759 bool has_double_elements = array->IsFixedDoubleArray();
4760 for (int i = 0; i < length; i++) {
4761 Handle<Object> value;
4762 if (has_double_elements) {
4763 Handle<FixedDoubleArray> double_array =
4764 Handle<FixedDoubleArray>::cast(array);
4765 if (double_array->is_the_hole(i)) {
4766 value = factory->the_hole_value();
// Box the raw double into a HeapNumber for dictionary storage.
4768 value = factory->NewHeapNumber(double_array->get_scalar(i));
4771 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
4773 if (!value->IsTheHole()) {
4774 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4776 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
// Converts the object's fast elements backing store to a
// SeededNumberDictionary and returns it; sloppy-arguments stores keep
// their parameter map and get the dictionary installed in slot 1.
// NOTE(review): embedded-number gaps (4789, 4792, 4794, 4806, 4816, 4818,
// 4833-4842) indicate elided lines (the sloppy-arguments condition, the
// non-array length branch, closing braces and final return).
4783 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4784 Handle<JSObject> object) {
4785 ASSERT(!object->HasExternalArrayElements() &&
4786 !object->HasFixedTypedArrayElements());
4787 Isolate* isolate = object->GetIsolate();
4788 Factory* factory = isolate->factory();
4790 // Find the backing store.
4791 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
4793 (array->map() == isolate->heap()->sloppy_arguments_elements_map());
// Sloppy arguments: the real elements live in slot 1 of the wrapper.
4795 array = handle(FixedArrayBase::cast(
4796 Handle<FixedArray>::cast(array)->get(1)));
4798 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
4800 ASSERT(object->HasFastSmiOrObjectElements() ||
4801 object->HasFastDoubleElements() ||
4802 object->HasFastArgumentsElements());
4803 // Compute the effective length and allocate a new backing store.
4804 int length = object->IsJSArray()
4805 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
4807 int old_capacity = 0;
4808 int used_elements = 0;
4809 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
4810 Handle<SeededNumberDictionary> dictionary =
4811 factory->NewSeededNumberDictionary(used_elements);
4813 dictionary = CopyFastElementsToDictionary(array, length, dictionary);
4815 // Switch to using the dictionary as the backing storage for elements.
4817 FixedArray::cast(object->elements())->set(1, *dictionary);
4819 // Set the new map first to satify the elements type assert in
4821 Handle<Map> new_map =
4822 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
4824 JSObject::MigrateToMap(object, new_map);
4825 object->set_elements(*dictionary);
4828 isolate->counters()->elements_to_dictionary()->Increment();
4831 if (FLAG_trace_normalization) {
4832 PrintF("Object elements have been normalized:\n");
// Produces a random non-zero Smi-ranged identity hash; retries a bounded
// number of times and falls back to 1 if the generator keeps yielding 0.
// NOTE(review): the loop head and attempt counter (lines 4845-4848, 4852)
// are elided in this listing.
4843 Smi* JSReceiver::GenerateIdentityHash() {
4844 Isolate* isolate = GetIsolate();
4849 // Generate a random 32-bit hash value but limit range to fit
4851 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
4853 } while (hash_value == 0 && attempts < 30);
4854 hash_value = hash_value != 0 ? hash_value : 1; // never return 0
4856 return Smi::FromInt(hash_value);
// Stores |hash| as the object's identity hash under the hidden
// identity-hash key.
4860 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
4861 Isolate* isolate = object->GetIsolate();
4862 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// Reads the stored identity hash; returns undefined when no Smi hash has
// been assigned yet.
4866 Object* JSObject::GetIdentityHash() {
4867 Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_string());
4868 return stored_value->IsSmi() ? stored_value : GetHeap()->undefined_value();
// Returns the object's identity hash, generating and storing one on first
// use; a detached global proxy yields Smi 0.
// NOTE(review): embedded-number gaps (4874-4876, 4878, 4882, 4886-4891)
// indicate elided lines (the early-return for an existing hash, closing
// braces, final return).
4872 Handle<Object> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
4873 Handle<Object> hash(object->GetIdentityHash(), object->GetIsolate());
4877 Isolate* isolate = object->GetIsolate();
4879 hash = handle(object->GenerateIdentityHash(), isolate);
4880 Handle<Object> result = SetHiddenProperty(object,
4881 isolate->factory()->identity_hash_string(), hash);
4883 if (result->IsUndefined()) {
4884 // Trying to get hash of detached proxy.
4885 return handle(Smi::FromInt(0), isolate);
// A proxy stores its identity hash directly in its hash field.
4892 Object* JSProxy::GetIdentityHash() {
4893 return this->hash();
// Returns the proxy's identity hash, generating and caching one on first
// use.
// NOTE(review): embedded-number gaps (4899, 4901-4903, 4906-4909) indicate
// elided lines (early return for an existing hash, final return).
4897 Handle<Object> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
4898 Isolate* isolate = proxy->GetIsolate();
4900 Handle<Object> hash(proxy->GetIdentityHash(), isolate);
4904 hash = handle(proxy->GenerateIdentityHash(), isolate);
4905 proxy->set_hash(*hash);
// Looks up |key| in the object's hidden-properties store. Global proxies
// delegate to their prototype. An inline Smi store only answers the
// identity-hash key; returns the_hole when the key is absent.
// NOTE(review): embedded-number gaps (4919, 4922, 4927, 4929-4931, 4933,
// 4936-4939) indicate elided lines (closing braces and the final return of
// the hashtable entry).
4910 Object* JSObject::GetHiddenProperty(Name* key) {
4911 ASSERT(key->IsUniqueName());
4912 if (IsJSGlobalProxy()) {
4913 // For a proxy, use the prototype as target object.
4914 Object* proxy_parent = GetPrototype();
4915 // If the proxy is detached, return undefined.
4916 if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
4917 ASSERT(proxy_parent->IsJSGlobalObject());
4918 return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
4920 ASSERT(!IsJSGlobalProxy());
4921 Object* inline_value = GetHiddenPropertiesHashTable();
4923 if (inline_value->IsSmi()) {
4924 // Handle inline-stored identity hash.
4925 if (key == GetHeap()->identity_hash_string()) {
4926 return inline_value;
4928 return GetHeap()->the_hole_value();
4932 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4934 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4935 Object* entry = hashtable->Lookup(key);
// Stores |value| under the hidden key |key|. The identity hash is kept
// inline (as a Smi) until a real hidden-properties hashtable is needed.
// Returns undefined for a detached global proxy, otherwise the object.
// NOTE(review): embedded-number gaps (4941, 4944, 4953, 4955, 4957,
// 4963-4964, 4967, 4970, 4975-4976, 4978-4981) indicate elided lines (the
// |key| parameter, closing braces, the Put value argument, final return).
4940 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
4942 Handle<Object> value) {
4943 Isolate* isolate = object->GetIsolate();
4945 ASSERT(key->IsUniqueName());
4946 if (object->IsJSGlobalProxy()) {
4947 // For a proxy, use the prototype as target object.
4948 Handle<Object> proxy_parent(object->GetPrototype(), isolate);
4949 // If the proxy is detached, return undefined.
4950 if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
4951 ASSERT(proxy_parent->IsJSGlobalObject());
4952 return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
4954 ASSERT(!object->IsJSGlobalProxy());
4956 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4958 // If there is no backing store yet, store the identity hash inline.
4959 if (value->IsSmi() &&
4960 *key == *isolate->factory()->identity_hash_string() &&
4961 (inline_value->IsUndefined() || inline_value->IsSmi())) {
4962 return JSObject::SetHiddenPropertiesHashTable(object, value);
4965 Handle<ObjectHashTable> hashtable =
4966 GetOrCreateHiddenPropertiesHashtable(object);
4968 // If it was found, check if the key is already in the dictionary.
4969 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
4971 if (*new_table != *hashtable) {
4972 // If adding the key expanded the dictionary (i.e., Add returned a new
4973 // dictionary), store it back to the object.
4974 SetHiddenPropertiesHashTable(object, new_table);
4977 // Return this to mark success.
// Removes the hidden property |key| by overwriting its hashtable slot with
// the_hole; inline-stored identity hashes are never deleted.
// NOTE(review): embedded-number gaps (4985, 4991-4992, 4994, 4998,
// 5001-5003) indicate elided lines (closing braces).
4982 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
4983 Isolate* isolate = object->GetIsolate();
4984 ASSERT(key->IsUniqueName());
4986 if (object->IsJSGlobalProxy()) {
4987 Handle<Object> proto(object->GetPrototype(), isolate);
4988 if (proto->IsNull()) return;
4989 ASSERT(proto->IsJSGlobalObject());
4990 return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
4993 Object* inline_value = object->GetHiddenPropertiesHashTable();
4995 // We never delete (inline-stored) identity hashes.
4996 ASSERT(*key != *isolate->factory()->identity_hash_string());
// No table (or only an inline hash) means there is nothing to delete.
4997 if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
4999 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
5000 ObjectHashTable::Put(hashtable, key, isolate->factory()->the_hole_value());
// True when the object owns the special hidden-properties backing slot
// (checked via an interceptor-free attribute lookup on the hidden string).
5004 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
5005 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
5006 return GetPropertyAttributePostInterceptor(
5007 object, object, hidden, false) != ABSENT;
// Returns the raw value stored under the hidden string: either an inline
// Smi identity hash, an ObjectHashTable, or undefined when absent. Fast
// objects exploit the hidden string's zero hash code to find it in the
// first sorted descriptor slot.
// NOTE(review): embedded-number gaps (5028, 5030-5031, 5033-5034, 5038,
// 5042-5044) indicate elided lines (else branches and closing braces).
5011 Object* JSObject::GetHiddenPropertiesHashTable() {
5012 ASSERT(!IsJSGlobalProxy());
5013 if (HasFastProperties()) {
5014 // If the object has fast properties, check whether the first slot
5015 // in the descriptor array matches the hidden string. Since the
5016 // hidden strings hash code is zero (and no other name has hash
5017 // code zero) it will always occupy the first entry if present.
5018 DescriptorArray* descriptors = this->map()->instance_descriptors();
5019 if (descriptors->number_of_descriptors() > 0) {
5020 int sorted_index = descriptors->GetSortedKeyIndex(0);
5021 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
5022 sorted_index < map()->NumberOfOwnDescriptors()) {
5023 ASSERT(descriptors->GetType(sorted_index) == FIELD);
5024 ASSERT(descriptors->GetDetails(sorted_index).representation().
5025 IsCompatibleForLoad(Representation::Tagged()));
5026 return this->RawFastPropertyAt(
5027 descriptors->GetFieldIndex(sorted_index));
5029 return GetHeap()->undefined_value();
5032 return GetHeap()->undefined_value();
// Slow path: read the value through the interceptor-free property lookup.
5035 PropertyAttributes attributes;
5036 // You can't install a getter on a property indexed by the hidden string,
5037 // so we can be sure that GetLocalPropertyPostInterceptor returns a real
5039 return GetLocalPropertyPostInterceptor(this,
5040 GetHeap()->hidden_string(),
5041 &attributes)->ToObjectUnchecked();
// Returns the hidden-properties hashtable, allocating one (initial
// capacity 4) on first use; an inline identity hash is migrated into the
// new table, which is then installed under the hidden string.
// NOTE(review): embedded-number gaps (5048, 5053-5054, 5056, 5058,
// 5064-5066, 5068, 5070-5071, 5073, 5075-5077) indicate elided lines (the
// Put value argument, the property value/attribute arguments, the final
// return).
5045 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
5046 Handle<JSObject> object) {
5047 Isolate* isolate = object->GetIsolate();
5049 static const int kInitialCapacity = 4;
5050 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
5051 if (inline_value->IsHashTable()) {
5052 return Handle<ObjectHashTable>::cast(inline_value);
5055 Handle<ObjectHashTable> hashtable = isolate->factory()->NewObjectHashTable(
5057 USE_CUSTOM_MINIMUM_CAPACITY);
5059 if (inline_value->IsSmi()) {
5060 // We were storing the identity hash inline and now allocated an actual
5061 // dictionary. Put the identity hash into the new dictionary.
5062 hashtable = ObjectHashTable::Put(hashtable,
5063 isolate->factory()->identity_hash_string(),
// Install the table under the hidden string without triggering
// extensibility checks or attribute restrictions.
5067 JSObject::SetLocalPropertyIgnoreAttributes(
5069 isolate->factory()->hidden_string(),
5072 OPTIMAL_REPRESENTATION,
5074 OMIT_EXTENSIBILITY_CHECK);
// Writes |value| (an inline Smi identity hash or a hashtable) into the
// hidden-string slot, using the fast first-descriptor shortcut when
// possible and falling back to SetLocalPropertyIgnoreAttributes.
// NOTE(review): embedded-number gaps (5083, 5085, 5101-5106, 5109-5110,
// 5112, 5114-5117) indicate elided lines (the fast-path store argument,
// the slow-path value/attribute arguments, the final return).
5080 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
5081 Handle<Object> value) {
5082 ASSERT(!object->IsJSGlobalProxy());
5084 Isolate* isolate = object->GetIsolate();
5086 // We can store the identity hash inline iff there is no backing store
5087 // for hidden properties yet.
5088 ASSERT(JSObject::HasHiddenProperties(object) != value->IsSmi());
5089 if (object->HasFastProperties()) {
5090 // If the object has fast properties, check whether the first slot
5091 // in the descriptor array matches the hidden string. Since the
5092 // hidden strings hash code is zero (and no other name has hash
5093 // code zero) it will always occupy the first entry if present.
5094 DescriptorArray* descriptors = object->map()->instance_descriptors();
5095 if (descriptors->number_of_descriptors() > 0) {
5096 int sorted_index = descriptors->GetSortedKeyIndex(0);
5097 if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
5098 && sorted_index < object->map()->NumberOfOwnDescriptors()) {
5099 ASSERT(descriptors->GetType(sorted_index) == FIELD);
5100 object->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index),
5107 SetLocalPropertyIgnoreAttributes(object,
5108 isolate->factory()->hidden_string(),
5111 OPTIMAL_REPRESENTATION,
5113 OMIT_EXTENSIBILITY_CHECK);
// Deletes a local property while ignoring interceptors: looks the property up
// directly, normalizes the object, and removes the normalized entry.
// Returns true_value() when the property was not found (delete succeeds).
// NOTE(review): parameter lines (name, mode) are elided from this excerpt.
5118 Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
5121 // Check local property, ignore interceptor.
5122 Isolate* isolate = object->GetIsolate();
5123 LookupResult result(isolate);
5124 object->LocalLookupRealNamedProperty(*name, &result);
5125 if (!result.IsFound()) return isolate->factory()->true_value();
5127 // Normalize object if needed.
5128 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5130 return DeleteNormalizedProperty(object, name, mode);
// Deletes a named property by first consulting the object's named-property
// deleter interceptor; falls through to DeletePropertyPostInterceptor when the
// interceptor does not handle the deletion.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5134 Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object,
5135 Handle<Name> name) {
5136 Isolate* isolate = object->GetIsolate();
5138 // TODO(rossberg): Support symbols in the API.
5139 if (name->IsSymbol()) return isolate->factory()->false_value();
5141 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
5142 if (!interceptor->deleter()->IsUndefined()) {
5143 v8::NamedPropertyDeleterCallback deleter =
5144 v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
5146 ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
5147 PropertyCallbackArguments args(
5148 isolate, interceptor->data(), *object, *object);
// Invoke the embedder callback; it may schedule an exception.
5149 v8::Handle<v8::Boolean> result =
5150 args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
5151 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the delete.
5152 if (!result.IsEmpty()) {
5153 ASSERT(result->IsBoolean());
5154 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5155 result_internal->VerifyApiCallResultType();
5156 // Rebox CustomArguments::kReturnValueOffset before returning.
5157 return handle(*result_internal, isolate);
// Interceptor declined: perform the normal deletion.
5160 Handle<Object> result =
5161 DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
5162 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Deletes an indexed element by first consulting the indexed-property deleter
// interceptor; falls back to the elements accessor when the interceptor does
// not handle it.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5167 Handle<Object> JSObject::DeleteElementWithInterceptor(Handle<JSObject> object,
5169 Isolate* isolate = object->GetIsolate();
5170 Factory* factory = isolate->factory();
5172 // Make sure that the top context does not change when doing
5173 // callbacks or interceptor calls.
5174 AssertNoContextChange ncc(isolate);
5176 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
5177 if (interceptor->deleter()->IsUndefined()) return factory->false_value();
5178 v8::IndexedPropertyDeleterCallback deleter =
5179 v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
5181 ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
5182 PropertyCallbackArguments args(
5183 isolate, interceptor->data(), *object, *object);
5184 v8::Handle<v8::Boolean> result = args.Call(deleter, index);
5185 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the delete.
5186 if (!result.IsEmpty()) {
5187 ASSERT(result->IsBoolean());
5188 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5189 result_internal->VerifyApiCallResultType();
5190 // Rebox CustomArguments::kReturnValueOffset before returning.
5191 return handle(*result_internal, isolate);
// Interceptor declined: delete through the elements accessor.
5193 Handle<Object> delete_result = object->GetElementsAccessor()->Delete(
5194 object, index, NORMAL_DELETION);
5195 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5196 return delete_result;
// Implements [[Delete]] for an indexed element on a JSObject: access checks,
// string-wrapper character slots, global-proxy unwrapping, interceptors,
// and Object.observe change records.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5200 Handle<Object> JSObject::DeleteElement(Handle<JSObject> object,
5203 Isolate* isolate = object->GetIsolate();
5204 Factory* factory = isolate->factory();
5206 // Check access rights if needed.
5207 if (object->IsAccessCheckNeeded() &&
5208 !isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_DELETE)) {
5209 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_DELETE);
5210 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5211 return factory->false_value();
// Characters of a String wrapper are non-configurable; strict mode throws.
5214 if (object->IsStringObjectWithCharacterAt(index)) {
5215 if (mode == STRICT_DELETION) {
5216 // Deleting a non-configurable property in strict mode.
5217 Handle<Object> name = factory->NewNumberFromUint(index);
5218 Handle<Object> args[2] = { name, object };
5219 Handle<Object> error =
5220 factory->NewTypeError("strict_delete_property",
5221 HandleVector(args, 2));
5222 isolate->Throw(*error);
// Empty handle signals a pending exception to the caller.
5223 return Handle<Object>();
5225 return factory->false_value();
// Forward through a global proxy to the actual global object.
5228 if (object->IsJSGlobalProxy()) {
5229 Handle<Object> proto(object->GetPrototype(), isolate);
5230 if (proto->IsNull()) return factory->false_value();
5231 ASSERT(proto->IsJSGlobalObject());
5232 return DeleteElement(Handle<JSObject>::cast(proto), index, mode);
// For observed objects, capture the old value so a "delete" change record
// can be enqueued after the deletion succeeds.
5235 Handle<Object> old_value;
5236 bool should_enqueue_change_record = false;
5237 if (object->map()->is_observed()) {
5238 should_enqueue_change_record = HasLocalElement(object, index);
5239 if (should_enqueue_change_record) {
5240 if (object->GetLocalElementAccessorPair(index) != NULL) {
5241 old_value = Handle<Object>::cast(factory->the_hole_value());
5243 old_value = Object::GetElementNoExceptionThrown(isolate, object, index);
5248 // Skip interceptor if forcing deletion.
5249 Handle<Object> result;
5250 if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
5251 result = DeleteElementWithInterceptor(object, index);
5253 result = object->GetElementsAccessor()->Delete(object, index, mode);
// Only notify observers if the element is actually gone now.
5256 if (should_enqueue_change_record && !HasLocalElement(object, index)) {
5257 Handle<String> name = factory->Uint32ToString(index);
5258 EnqueueChangeRecord(object, "delete", name, old_value);
// Implements [[Delete]] for a named property on a JSObject (ECMA-262 3rd,
// 8.6.2.5): access checks, global-proxy unwrapping, array-index names,
// DONT_DELETE handling, interceptors, and Object.observe change records.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5265 Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
5268 Isolate* isolate = object->GetIsolate();
5269 // ECMA-262, 3rd, 8.6.2.5
5270 ASSERT(name->IsName());
5272 // Check access rights if needed.
5273 if (object->IsAccessCheckNeeded() &&
5274 !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_DELETE)) {
5275 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_DELETE);
5276 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5277 return isolate->factory()->false_value();
// Forward through a global proxy to the actual global object.
5280 if (object->IsJSGlobalProxy()) {
5281 Object* proto = object->GetPrototype();
5282 if (proto->IsNull()) return isolate->factory()->false_value();
5283 ASSERT(proto->IsJSGlobalObject());
5284 return JSGlobalObject::DeleteProperty(
5285 handle(JSGlobalObject::cast(proto)), name, mode);
// Names that parse as array indices are handled as element deletions.
5289 if (name->AsArrayIndex(&index)) {
5290 return DeleteElement(object, index, mode);
5293 LookupResult lookup(isolate);
5294 object->LocalLookup(*name, &lookup, true);
5295 if (!lookup.IsFound()) return isolate->factory()->true_value();
5296 // Ignore attributes if forcing a deletion.
5297 if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
5298 if (mode == STRICT_DELETION) {
5299 // Deleting a non-configurable property in strict mode.
5300 Handle<Object> args[2] = { name, object };
5301 Handle<Object> error = isolate->factory()->NewTypeError(
5302 "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
5303 isolate->Throw(*error);
// Empty handle signals a pending exception to the caller.
5304 return Handle<Object>();
5306 return isolate->factory()->false_value();
// Observation: capture the old value for the "delete" change record. The
// hidden string is internal bookkeeping and is never reported to observers.
5309 Handle<Object> old_value = isolate->factory()->the_hole_value();
5310 bool is_observed = object->map()->is_observed() &&
5311 *name != isolate->heap()->hidden_string();
5312 if (is_observed && lookup.IsDataProperty()) {
5313 old_value = Object::GetProperty(object, name);
5314 CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
5316 Handle<Object> result;
5318 // Check for interceptor.
5319 if (lookup.IsInterceptor()) {
5320 // Skip interceptor if forcing a deletion.
5321 if (mode == FORCE_DELETION) {
5322 result = DeletePropertyPostInterceptor(object, name, mode);
5324 result = DeletePropertyWithInterceptor(object, name);
5327 // Normalize object if needed.
5328 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5329 // Make sure the properties are normalized before removing the entry.
5330 result = DeleteNormalizedProperty(object, name, mode);
// Only notify observers if the property is actually gone now.
5333 if (is_observed && !HasLocalProperty(object, name)) {
5334 EnqueueChangeRecord(object, "delete", name, old_value);
// Dispatches element deletion: proxies go through their handler trap,
// ordinary objects through JSObject::DeleteElement.
// NOTE(review): parameter lines (index, mode) are elided from this excerpt.
5341 Handle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5344 if (object->IsJSProxy()) {
5345 return JSProxy::DeleteElementWithHandler(
5346 Handle<JSProxy>::cast(object), index, mode);
5348 return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
// Dispatches named-property deletion: proxies go through their handler trap,
// ordinary objects through JSObject::DeleteProperty.
// NOTE(review): parameter lines (name, mode) are elided from this excerpt.
5352 Handle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5355 if (object->IsJSProxy()) {
5356 return JSProxy::DeletePropertyWithHandler(
5357 Handle<JSProxy>::cast(object), name, mode);
5359 return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
// Returns true if |object| occurs in the given elements backing store.
// Fast object elements are scanned linearly; dictionary elements use a
// reverse lookup.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5363 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5366 ASSERT(IsFastObjectElementsKind(kind) ||
5367 kind == DICTIONARY_ELEMENTS);
5368 if (IsFastObjectElementsKind(kind)) {
// For JSArrays only the part up to length is live, not the whole capacity.
5369 int length = IsJSArray()
5370 ? Smi::cast(JSArray::cast(this)->length())->value()
5371 : elements->length();
5372 for (int i = 0; i < length; ++i) {
5373 Object* element = elements->get(i);
5374 if (!element->IsTheHole() && element == object) return true;
5378 SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5379 if (!key->IsUndefined()) return true;
5385 // Check whether this object references another object.
// Checks, in order: the constructor, the prototype, named properties,
// indexed elements (by elements kind), and — for functions — the context,
// including mapped-arguments objects and the context extension.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5386 bool JSObject::ReferencesObject(Object* obj) {
5387 Map* map_of_this = map();
5388 Heap* heap = GetHeap();
// Raw pointers are compared below, so allocation must be disallowed.
5389 DisallowHeapAllocation no_allocation;
5391 // Is the object the constructor for this object?
5392 if (map_of_this->constructor() == obj) {
5396 // Is the object the prototype for this object?
5397 if (map_of_this->prototype() == obj) {
5401 // Check if the object is among the named properties.
5402 Object* key = SlowReverseLookup(obj);
5403 if (!key->IsUndefined()) {
5407 // Check if the object is among the indexed properties.
5408 ElementsKind kind = GetElementsKind();
5410 // Raw pixels and external arrays do not reference other
5412 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5413 case EXTERNAL_##TYPE##_ELEMENTS: \
5414 case TYPE##_ELEMENTS: \
5417 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5418 #undef TYPED_ARRAY_CASE
5420 case FAST_DOUBLE_ELEMENTS:
5421 case FAST_HOLEY_DOUBLE_ELEMENTS:
5423 case FAST_SMI_ELEMENTS:
5424 case FAST_HOLEY_SMI_ELEMENTS:
5427 case FAST_HOLEY_ELEMENTS:
5428 case DICTIONARY_ELEMENTS: {
5429 FixedArray* elements = FixedArray::cast(this->elements());
5430 if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5433 case SLOPPY_ARGUMENTS_ELEMENTS: {
5434 FixedArray* parameter_map = FixedArray::cast(elements());
5435 // Check the mapped parameters.
5436 int length = parameter_map->length();
// Slots 0 and 1 of a parameter map hold the context and the arguments
// backing store; mapped entries start at index 2.
5437 for (int i = 2; i < length; ++i) {
5438 Object* value = parameter_map->get(i);
5439 if (!value->IsTheHole() && value == obj) return true;
5441 // Check the arguments.
5442 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5443 kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5444 FAST_HOLEY_ELEMENTS;
5445 if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5450 // For functions check the context.
5451 if (IsJSFunction()) {
5452 // Get the constructor function for arguments array.
5453 JSObject* arguments_boilerplate =
5454 heap->isolate()->context()->native_context()->
5455 sloppy_arguments_boilerplate();
5456 JSFunction* arguments_function =
5457 JSFunction::cast(arguments_boilerplate->map()->constructor());
5459 // Get the context and don't check if it is the native context.
5460 JSFunction* f = JSFunction::cast(this);
5461 Context* context = f->context();
5462 if (context->IsNativeContext()) {
5466 // Check the non-special context slots.
5467 for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5468 // Only check JS objects.
5469 if (context->get(i)->IsJSObject()) {
5470 JSObject* ctxobj = JSObject::cast(context->get(i));
5471 // If it is an arguments array check the content.
5472 if (ctxobj->map()->constructor() == arguments_function) {
5473 if (ctxobj->ReferencesObject(obj)) {
5476 } else if (ctxobj == obj) {
5482 // Check the context extension (if any) if it can have references.
5483 if (context->has_extension() && !context->IsCatchContext()) {
5484 // With harmony scoping, a JSFunction may have a global context.
5485 // TODO(mvstanton): walk into the ScopeInfo.
5486 if (FLAG_harmony_scoping && context->IsGlobalContext()) {
5490 return JSObject::cast(context->extension())->ReferencesObject(obj);
5494 // No references to object.
// Implements Object.preventExtensions: normalizes elements to dictionary
// mode, marks them slow-only, and installs a copy of the map with the
// extensible bit cleared. Enqueues a change record for observed objects.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5499 Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
5500 Isolate* isolate = object->GetIsolate();
// Already non-extensible: nothing to do.
5502 if (!object->map()->is_extensible()) return object;
5504 if (object->IsAccessCheckNeeded() &&
5505 !isolate->MayNamedAccessWrapper(object,
5506 isolate->factory()->undefined_value(),
5508 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_KEYS);
5509 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5510 return isolate->factory()->false_value();
// Forward through a global proxy to the actual global object.
5513 if (object->IsJSGlobalProxy()) {
5514 Handle<Object> proto(object->GetPrototype(), isolate);
5515 if (proto->IsNull()) return object;
5516 ASSERT(proto->IsJSGlobalObject());
5517 return PreventExtensions(Handle<JSObject>::cast(proto));
5520 // It's not possible to seal objects with external array elements
5521 if (object->HasExternalArrayElements() ||
5522 object->HasFixedTypedArrayElements()) {
5523 Handle<Object> error =
5524 isolate->factory()->NewTypeError(
5525 "cant_prevent_ext_external_array_elements",
5526 HandleVector(&object, 1));
5527 isolate->Throw(*error);
// Empty handle signals a pending exception to the caller.
5528 return Handle<Object>();
5531 // If there are fast elements we normalize.
5532 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
5533 ASSERT(object->HasDictionaryElements() ||
5534 object->HasDictionaryArgumentsElements());
5536 // Make sure that we never go back to fast case.
5537 dictionary->set_requires_slow_elements();
5539 // Do a map transition, other objects with this map may still
5541 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5542 Handle<Map> new_map = Map::Copy(handle(object->map()));
5544 new_map->set_is_extensible(false);
5545 JSObject::MigrateToMap(object, new_map);
5546 ASSERT(!object->map()->is_extensible());
5548 if (object->map()->is_observed()) {
5549 EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
5550 isolate->factory()->the_hole_value());
// Marks every entry of a property/element dictionary as non-configurable
// (DONT_DELETE) and, except for accessor pairs, non-writable (READ_ONLY),
// as required by Object.freeze.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5556 template<typename Dictionary>
5557 static void FreezeDictionary(Dictionary* dictionary) {
5558 int capacity = dictionary->Capacity();
5559 for (int i = 0; i < capacity; i++) {
5560 Object* k = dictionary->KeyAt(i);
// Skip empty/deleted slots in the hash table.
5561 if (dictionary->IsKey(k)) {
5562 PropertyDetails details = dictionary->DetailsAt(i);
5563 int attrs = DONT_DELETE;
5564 // READ_ONLY is an invalid attribute for JS setters/getters.
5565 if (details.type() != CALLBACKS ||
5566 !dictionary->ValueAt(i)->IsAccessorPair()) {
5569 details = details.CopyAddAttributes(
5570 static_cast<PropertyAttributes>(attrs));
5571 dictionary->DetailsAtPut(i, details);
// Implements Object.freeze: moves elements to dictionary mode, transitions
// to (or builds) a frozen, non-extensible map with all properties marked
// READ_ONLY|DONT_DELETE, then freezes the element dictionary.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5577 Handle<Object> JSObject::Freeze(Handle<JSObject> object) {
5578 // Freezing sloppy arguments should be handled elsewhere.
5579 ASSERT(!object->HasSloppyArgumentsElements());
5580 ASSERT(!object->map()->is_observed());
// Already frozen: nothing to do.
5582 if (object->map()->is_frozen()) return object;
5584 Isolate* isolate = object->GetIsolate();
5585 if (object->IsAccessCheckNeeded() &&
5586 !isolate->MayNamedAccessWrapper(object,
5587 isolate->factory()->undefined_value(),
5589 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_KEYS);
5590 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5591 return isolate->factory()->false_value();
// Forward through a global proxy to the actual global object.
5594 if (object->IsJSGlobalProxy()) {
5595 Handle<Object> proto(object->GetPrototype(), isolate);
5596 if (proto->IsNull()) return object;
5597 ASSERT(proto->IsJSGlobalObject());
5598 return Freeze(Handle<JSObject>::cast(proto));
5601 // It's not possible to freeze objects with external array elements
5602 if (object->HasExternalArrayElements() ||
5603 object->HasFixedTypedArrayElements()) {
5604 Handle<Object> error =
5605 isolate->factory()->NewTypeError(
5606 "cant_prevent_ext_external_array_elements",
5607 HandleVector(&object, 1));
5608 isolate->Throw(*error);
// Empty handle signals a pending exception to the caller.
5609 return Handle<Object>();
// Build a dictionary backing store for the elements up front, so it is
// installed only after the map transition below has succeeded.
5612 Handle<SeededNumberDictionary> new_element_dictionary;
5613 if (!object->elements()->IsDictionary()) {
5614 int length = object->IsJSArray()
5615 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5616 : object->elements()->length();
5620 object->GetElementsCapacityAndUsage(&capacity, &used);
5621 new_element_dictionary =
5622 isolate->factory()->NewSeededNumberDictionary(used);
5624 // Move elements to a dictionary; avoid calling NormalizeElements to avoid
5625 // unnecessary transitions.
5626 new_element_dictionary = CopyFastElementsToDictionary(
5627 handle(object->elements()), length, new_element_dictionary);
5629 // No existing elements, use a pre-allocated empty backing store
5630 new_element_dictionary =
5631 isolate->factory()->empty_slow_element_dictionary();
// Prefer reusing an existing "frozen" map transition to creating new maps.
5635 LookupResult result(isolate);
5636 Handle<Map> old_map(object->map());
5637 old_map->LookupTransition(*object, isolate->heap()->frozen_symbol(), &result);
5638 if (result.IsTransition()) {
5639 Handle<Map> transition_map(result.GetTransitionTarget());
5640 ASSERT(transition_map->has_dictionary_elements());
5641 ASSERT(transition_map->is_frozen());
5642 ASSERT(!transition_map->is_extensible());
5643 JSObject::MigrateToMap(object, transition_map);
5644 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5645 // Create a new descriptor array with fully-frozen properties
5646 int num_descriptors = old_map->NumberOfOwnDescriptors();
5647 Handle<DescriptorArray> new_descriptors =
5648 DescriptorArray::CopyUpToAddAttributes(
5649 handle(old_map->instance_descriptors()), num_descriptors, FROZEN);
5650 Handle<Map> new_map = Map::CopyReplaceDescriptors(
5651 old_map, new_descriptors, INSERT_TRANSITION,
5652 isolate->factory()->frozen_symbol());
5654 new_map->set_is_extensible(false);
5655 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5656 JSObject::MigrateToMap(object, new_map);
5658 // Slow path: need to normalize properties for safety
5659 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5661 // Create a new map, since other objects with this map may be extensible.
5662 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5663 Handle<Map> new_map = Map::Copy(handle(object->map()));
5665 new_map->set_is_extensible(false);
5666 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5667 JSObject::MigrateToMap(object, new_map);
5669 // Freeze dictionary-mode properties
5670 FreezeDictionary(object->property_dictionary());
5673 ASSERT(object->map()->has_dictionary_elements());
5674 if (!new_element_dictionary.is_null()) {
5675 object->set_elements(*new_element_dictionary);
// The shared pre-allocated empty dictionary must never be mutated.
5678 if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
5679 SeededNumberDictionary* dictionary = object->element_dictionary();
5680 // Make sure we never go back to the fast case
5681 dictionary->set_requires_slow_elements();
5682 // Freeze all elements in the dictionary
5683 FreezeDictionary(dictionary);
// Marks the object as observed (Object.observe), migrating it to an
// observed map — reusing an existing transition when one is available.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5690 void JSObject::SetObserved(Handle<JSObject> object) {
5691 Isolate* isolate = object->GetIsolate();
// Already observed: nothing to do.
5693 if (object->map()->is_observed())
5696 LookupResult result(isolate);
5697 object->map()->LookupTransition(*object,
5698 isolate->heap()->observed_symbol(),
5701 Handle<Map> new_map;
5702 if (result.IsTransition()) {
// Reuse the existing "observed" map transition.
5703 new_map = handle(result.GetTransitionTarget());
5704 ASSERT(new_map->is_observed());
5705 } else if (object->map()->CanHaveMoreTransitions()) {
5706 new_map = Map::CopyForObserved(handle(object->map()));
// No transition slot left: fall back to a plain map copy.
5708 new_map = Map::Copy(handle(object->map()));
5709 new_map->set_is_observed();
5711 JSObject::MigrateToMap(object, new_map);
// Shallow-copies a JSObject via Heap::CopyJSObject, retrying on allocation
// failure through the CALL_HEAP_FUNCTION machinery.
5715 Handle<JSObject> JSObject::Copy(Handle<JSObject> object) {
5716 Isolate* isolate = object->GetIsolate();
5717 CALL_HEAP_FUNCTION(isolate,
5718 isolate->heap()->CopyJSObject(*object), JSObject);
// Recursive walker over a JSObject literal graph, parameterized over an
// allocation-site context. With copying=true it deep-copies; with
// copying=false it only walks (e.g. to create allocation sites).
// NOTE(review): embedded numbering is non-contiguous — access specifiers and
// some member lines are elided in this excerpt.
5722 template<class ContextObject>
5723 class JSObjectWalkVisitor {
5725 JSObjectWalkVisitor(ContextObject* site_context, bool copying,
5726 JSObject::DeepCopyHints hints)
5727 : site_context_(site_context),
// Walks |object| recursively; the workhorse defined below the class.
5731 Handle<JSObject> StructureWalk(Handle<JSObject> object);
// Walks one property/element value inside a fresh allocation-site scope.
5734 inline Handle<JSObject> VisitElementOrProperty(Handle<JSObject> object,
5735 Handle<JSObject> value) {
5736 Handle<AllocationSite> current_site = site_context()->EnterNewScope();
5737 Handle<JSObject> copy_of_value = StructureWalk(value);
5738 site_context()->ExitScope(current_site, value);
5739 return copy_of_value;
5742 inline ContextObject* site_context() { return site_context_; }
5743 inline Isolate* isolate() { return site_context()->isolate(); }
5745 inline bool copying() const { return copying_; }
5748 ContextObject* site_context_;
5749 const bool copying_;
5750 const JSObject::DeepCopyHints hints_;
// Walks (and, when copying, deep-copies) |object|: first the object itself,
// then its local properties, then its elements, recursing into JSObject
// values. Returns a null handle on stack overflow or pending exception.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
5754 template <class ContextObject>
5755 Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
5756 Handle<JSObject> object) {
5757 Isolate* isolate = this->isolate();
5758 bool copying = this->copying();
5759 bool shallow = hints_ == JSObject::kObjectIsShallowArray;
// Guard the recursion against runaway literal nesting.
5762 StackLimitCheck check(isolate);
5764 if (check.HasOverflowed()) {
5765 isolate->StackOverflow();
5766 return Handle<JSObject>::null();
// Bring deprecated-map objects up to date before reading their fields.
5770 if (object->map()->is_deprecated()) {
5771 JSObject::MigrateInstance(object);
5774 Handle<JSObject> copy;
5776 Handle<AllocationSite> site_to_pass;
5777 if (site_context()->ShouldCreateMemento(object)) {
5778 site_to_pass = site_context()->current();
5780 CALL_AND_RETRY_OR_DIE(isolate,
5781 isolate->heap()->CopyJSObject(*object,
5782 site_to_pass.is_null() ? NULL : *site_to_pass),
5783 { copy = Handle<JSObject>(JSObject::cast(__object__),
5787 return Handle<JSObject>());
// When not copying, the walk must return the object itself.
5792 ASSERT(copying || copy.is_identical_to(object));
5794 ElementsKind kind = copy->GetElementsKind();
5795 if (copying && IsFastSmiOrObjectElementsKind(kind) &&
5796 FixedArray::cast(copy->elements())->map() ==
5797 isolate->heap()->fixed_cow_array_map()) {
5798 isolate->counters()->cow_arrays_created_runtime()->Increment();
5802 HandleScope scope(isolate);
5804 // Deep copy local properties.
5805 if (copy->HasFastProperties()) {
5806 Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
5807 int limit = copy->map()->NumberOfOwnDescriptors();
5808 for (int i = 0; i < limit; i++) {
5809 PropertyDetails details = descriptors->GetDetails(i);
// Only in-object/backing-store fields need walking; constants and
// callbacks hold no literal sub-objects here.
5810 if (details.type() != FIELD) continue;
5811 int index = descriptors->GetFieldIndex(i);
5812 Handle<Object> value(object->RawFastPropertyAt(index), isolate);
5813 if (value->IsJSObject()) {
5814 value = VisitElementOrProperty(copy, Handle<JSObject>::cast(value));
5815 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, value, Handle<JSObject>());
5817 Representation representation = details.representation();
5818 value = NewStorageFor(isolate, value, representation);
5821 copy->FastPropertyAtPut(index, *value);
// Slow (dictionary) properties: iterate local property names instead.
5825 Handle<FixedArray> names =
5826 isolate->factory()->NewFixedArray(copy->NumberOfLocalProperties());
5827 copy->GetLocalPropertyNames(*names, 0);
5828 for (int i = 0; i < names->length(); i++) {
5829 ASSERT(names->get(i)->IsString());
5830 Handle<String> key_string(String::cast(names->get(i)));
5831 PropertyAttributes attributes =
5832 JSReceiver::GetLocalPropertyAttribute(copy, key_string);
5833 // Only deep copy fields from the object literal expression.
5834 // In particular, don't try to copy the length attribute of
5836 if (attributes != NONE) continue;
5837 Handle<Object> value(
5838 copy->GetProperty(*key_string, &attributes)->ToObjectUnchecked(),
5840 if (value->IsJSObject()) {
5841 Handle<JSObject> result = VisitElementOrProperty(
5842 copy, Handle<JSObject>::cast(value));
5843 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
5845 // Creating object copy for literals. No strict mode needed.
5846 CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetProperty(
5847 copy, key_string, result, NONE, SLOPPY));
5853 // Deep copy local elements.
5854 // Pixel elements cannot be created using an object literal.
5855 ASSERT(!copy->HasExternalArrayElements());
5857 case FAST_SMI_ELEMENTS:
5859 case FAST_HOLEY_SMI_ELEMENTS:
5860 case FAST_HOLEY_ELEMENTS: {
5861 Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
// COW arrays can only hold primitives, so no recursion is needed.
5862 if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
5864 for (int i = 0; i < elements->length(); i++) {
5865 ASSERT(!elements->get(i)->IsJSObject());
5869 for (int i = 0; i < elements->length(); i++) {
5870 Handle<Object> value(elements->get(i), isolate);
5871 ASSERT(value->IsSmi() ||
5872 value->IsTheHole() ||
5873 (IsFastObjectElementsKind(copy->GetElementsKind())));
5874 if (value->IsJSObject()) {
5875 Handle<JSObject> result = VisitElementOrProperty(
5876 copy, Handle<JSObject>::cast(value));
5877 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
5879 elements->set(i, *result);
5886 case DICTIONARY_ELEMENTS: {
5887 Handle<SeededNumberDictionary> element_dictionary(
5888 copy->element_dictionary());
5889 int capacity = element_dictionary->Capacity();
5890 for (int i = 0; i < capacity; i++) {
5891 Object* k = element_dictionary->KeyAt(i);
5892 if (element_dictionary->IsKey(k)) {
5893 Handle<Object> value(element_dictionary->ValueAt(i), isolate);
5894 if (value->IsJSObject()) {
5895 Handle<JSObject> result = VisitElementOrProperty(
5896 copy, Handle<JSObject>::cast(value));
5897 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
5899 element_dictionary->ValueAtPut(i, *result);
5906 case SLOPPY_ARGUMENTS_ELEMENTS:
5911 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5912 case EXTERNAL_##TYPE##_ELEMENTS: \
5913 case TYPE##_ELEMENTS: \
5915 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5916 #undef TYPED_ARRAY_CASE
5918 case FAST_DOUBLE_ELEMENTS:
5919 case FAST_HOLEY_DOUBLE_ELEMENTS:
5920 // No contained objects, nothing to do.
// Walks an object literal graph without copying, so the site context can
// create allocation sites; the result is the walked object itself (or null
// on failure).
5929 Handle<JSObject> JSObject::DeepWalk(
5930 Handle<JSObject> object,
5931 AllocationSiteCreationContext* site_context) {
5932 JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
5934 Handle<JSObject> result = v.StructureWalk(object);
5935 ASSERT(result.is_null() || result.is_identical_to(object));
// Deep-copies an object literal graph, consulting |site_context| for
// allocation-site mementos; |hints| may request a shallow array copy.
5940 Handle<JSObject> JSObject::DeepCopy(Handle<JSObject> object,
5941 AllocationSiteUsageContext* site_context,
5942 DeepCopyHints hints) {
5943 JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
5944 Handle<JSObject> copy = v.StructureWalk(object);
// A successful copy is always a fresh object, never the input.
5945 ASSERT(!copy.is_identical_to(object));
5950 // Tests for the fast common case for property enumeration:
5951 // - This object and all prototypes has an enum cache (which means that
5952 //   it is no proxy, has no interceptors and needs no access checks).
5953 // - This object has no elements.
5954 // - No prototype has enumerable properties/elements.
5955 bool JSReceiver::IsSimpleEnum() {
5956 Heap* heap = GetHeap();
// Walk the prototype chain until null; each link must satisfy all the
// fast-enumeration conditions above.
5957 for (Object* o = this;
5958 o != heap->null_value();
5959 o = JSObject::cast(o)->GetPrototype()) {
5960 if (!o->IsJSObject()) return false;
5961 JSObject* curr = JSObject::cast(o);
5962 int enum_length = curr->map()->EnumLength();
// kInvalidEnumCacheSentinel means no valid enum cache for this map.
5963 if (enum_length == kInvalidEnumCacheSentinel) return false;
5964 if (curr->IsAccessCheckNeeded()) return false;
5965 ASSERT(!curr->HasNamedInterceptor());
5966 ASSERT(!curr->HasIndexedInterceptor());
5967 if (curr->NumberOfEnumElements() > 0) return false;
// Prototypes (everything past the receiver) must be empty of enumerables.
5968 if (curr != this && enum_length != 0) return false;
// Returns whether |key| should be filtered out according to the SYMBOLIC /
// PRIVATE_SYMBOL / STRING bits of |filter|.
// NOTE(review): the return statements between the conditions are elided in
// this excerpt.
5974 static bool FilterKey(Object* key, PropertyAttributes filter) {
5975 if ((filter & SYMBOLIC) && key->IsSymbol()) {
5979 if ((filter & PRIVATE_SYMBOL) &&
5980 key->IsSymbol() && Symbol::cast(key)->is_private()) {
5984 if ((filter & STRING) && !key->IsSymbol()) {
// Counts descriptors that pass both the attribute filter and FilterKey,
// considering either all descriptors or only own ones depending on |which|.
// NOTE(review): the counter declaration/increment lines are elided here.
5992 int Map::NumberOfDescribedProperties(DescriptorFlag which,
5993 PropertyAttributes filter) {
5995 DescriptorArray* descs = instance_descriptors();
5996 int limit = which == ALL_DESCRIPTORS
5997 ? descs->number_of_descriptors()
5998 : NumberOfOwnDescriptors();
5999 for (int i = 0; i < limit; i++) {
6000 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
6001 !FilterKey(descs->GetKey(i), filter)) {
// Returns the first unused field index: one past the maximum field index
// used by this map's own FIELD descriptors.
6009 int Map::NextFreePropertyIndex() {
6011 int number_of_own_descriptors = NumberOfOwnDescriptors();
6012 DescriptorArray* descs = instance_descriptors();
6013 for (int i = 0; i < number_of_own_descriptors; i++) {
6014 if (descs->GetType(i) == FIELD) {
6015 int current_index = descs->GetFieldIndex(i);
6016 if (current_index > max_index) max_index = current_index;
6019 return max_index + 1;
// Finds the callback accessor registered for |name| among this map's own
// descriptors, or falls through (return elided here) when none matches.
6023 AccessorDescriptor* Map::FindAccessor(Name* name) {
6024 DescriptorArray* descs = instance_descriptors();
6025 int number_of_own_descriptors = NumberOfOwnDescriptors();
6026 for (int i = 0; i < number_of_own_descriptors; i++) {
6027 if (descs->GetType(i) == CALLBACKS && name->Equals(descs->GetKey(i))) {
6028 return descs->GetCallbacks(i);
// Looks up |name| on the receiver itself, handling global proxies, JSProxy
// handlers, access-check objects and named interceptors; optionally
// continues into hidden prototypes when nothing is found locally.
// NOTE(review): embedded numbering is non-contiguous — lines elided here.
6035 void JSReceiver::LocalLookup(
6036 Name* name, LookupResult* result, bool search_hidden_prototypes) {
6037 ASSERT(name->IsName());
6039 Heap* heap = GetHeap();
// Forward through a global proxy to the actual global object.
6041 if (IsJSGlobalProxy()) {
6042 Object* proto = GetPrototype();
6043 if (proto->IsNull()) return result->NotFound();
6044 ASSERT(proto->IsJSGlobalObject());
6045 return JSReceiver::cast(proto)->LocalLookup(
6046 name, result, search_hidden_prototypes);
// Proxies report a handler result instead of a concrete property.
6050 result->HandlerResult(JSProxy::cast(this));
6054 // Do not use inline caching if the object is a non-global object
6055 // that requires access checks.
6056 if (IsAccessCheckNeeded()) {
6057 result->DisallowCaching();
6060 JSObject* js_object = JSObject::cast(this);
6062 // Check for lookup interceptor except when bootstrapping.
6063 if (js_object->HasNamedInterceptor() &&
6064 !heap->isolate()->bootstrapper()->IsActive()) {
6065 result->InterceptorResult(js_object);
6069 js_object->LocalLookupRealNamedProperty(name, result);
6070 if (result->IsFound() || !search_hidden_prototypes) return;
// Not found locally: recurse into a hidden prototype if present.
6072 Object* proto = js_object->GetPrototype();
6073 if (!proto->IsJSReceiver()) return;
6074 JSReceiver* receiver = JSReceiver::cast(proto);
6075 if (receiver->map()->is_hidden_prototype()) {
6076 receiver->LocalLookup(name, result, search_hidden_prototypes);
// Full prototype-chain lookup (Ecma-262 3rd 8.6.2.4): performs a local
// lookup on each object in the chain until a result is found or the chain
// ends at null.
6081 void JSReceiver::Lookup(Name* name, LookupResult* result) {
6082 // Ecma-262 3rd 8.6.2.4
6083 Heap* heap = GetHeap();
6084 for (Object* current = this;
6085 current != heap->null_value();
6086 current = JSObject::cast(current)->GetPrototype()) {
6087 JSReceiver::cast(current)->LocalLookup(name, result, false);
6088 if (result->IsFound()) return;
6094 // Search object and its prototype chain for callback properties.
// Walks the chain (JSObjects only) and stops at the first property whose
// lookup result is a callbacks-type property.
6095 void JSObject::LookupCallbackProperty(Name* name, LookupResult* result) {
6096 Heap* heap = GetHeap();
6097 for (Object* current = this;
6098 current != heap->null_value() && current->IsJSObject();
6099 current = JSObject::cast(current)->GetPrototype()) {
6100 JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
6101 if (result->IsPropertyCallbacks()) return;
6107 // Try to update an accessor in an elements dictionary. Return true if the
6108 // update succeeded, and false otherwise.
// Only applies when the existing entry is a CALLBACKS-type AccessorPair;
// updates its attributes if they differ, then swaps in the new components.
// NOTE(review): parameter lines (index, getter, setter) are elided here.
6109 static bool UpdateGetterSetterInDictionary(
6110 SeededNumberDictionary* dictionary,
6114 PropertyAttributes attributes) {
6115 int entry = dictionary->FindEntry(index);
6116 if (entry != SeededNumberDictionary::kNotFound) {
6117 Object* result = dictionary->ValueAt(entry);
6118 PropertyDetails details = dictionary->DetailsAt(entry);
6119 if (details.type() == CALLBACKS && result->IsAccessorPair()) {
6120 ASSERT(!details.IsDontDelete());
6121 if (details.attributes() != attributes) {
6122 dictionary->DetailsAtPut(
6124 PropertyDetails(attributes, CALLBACKS, index));
6126 AccessorPair::cast(result)->SetComponents(getter, setter);
// Installs a getter/setter pair for an indexed (element) property.
// Fast, typed-array and external element kinds either fall through to
// the slow path or ignore the request; dictionary-backed elements are
// first probed for an existing pair that can be updated in place.
// Falls through to SetElementCallback with a fresh AccessorPair when no
// in-place update succeeded.
6134 void JSObject::DefineElementAccessor(Handle<JSObject> object,
6136 Handle<Object> getter,
6137 Handle<Object> setter,
6138 PropertyAttributes attributes,
6139 v8::AccessControl access_control) {
6140 switch (object->GetElementsKind()) {
6141 case FAST_SMI_ELEMENTS:
6143 case FAST_DOUBLE_ELEMENTS:
6144 case FAST_HOLEY_SMI_ELEMENTS:
6145 case FAST_HOLEY_ELEMENTS:
6146 case FAST_HOLEY_DOUBLE_ELEMENTS:
6149 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6150 case EXTERNAL_##TYPE##_ELEMENTS: \
6151 case TYPE##_ELEMENTS: \
6153 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6154 #undef TYPED_ARRAY_CASE
6155 // Ignore getters and setters on pixel and external array elements.
6158 case DICTIONARY_ELEMENTS:
// Reuse an existing AccessorPair in the dictionary when possible.
6159 if (UpdateGetterSetterInDictionary(object->element_dictionary(),
6167 case SLOPPY_ARGUMENTS_ELEMENTS: {
6168 // Ascertain whether we have read-only properties or an existing
6169 // getter/setter pair in an arguments elements dictionary backing
6171 FixedArray* parameter_map = FixedArray::cast(object->elements());
6172 uint32_t length = parameter_map->length();
// Slots 0 and 1 of a parameter map hold context and arguments store;
// aliased parameters start at offset 2.
6174 index < (length - 2) ? parameter_map->get(index + 2) : NULL;
6175 if (probe == NULL || probe->IsTheHole()) {
6176 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
6177 if (arguments->IsDictionary()) {
6178 SeededNumberDictionary* dictionary =
6179 SeededNumberDictionary::cast(arguments);
6180 if (UpdateGetterSetterInDictionary(dictionary,
// Slow path: allocate a new AccessorPair and install it as a
// CALLBACKS element via SetElementCallback.
6193 Isolate* isolate = object->GetIsolate();
6194 Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
6195 accessors->SetComponents(*getter, *setter);
6196 accessors->set_access_flags(access_control);
6198 SetElementCallback(object, index, accessors, attributes);
// Returns an AccessorPair to use when (re)defining an accessor named
// `name` on `object`: a copy of the existing pair if the property is
// already a CALLBACKS property holding one, otherwise a brand-new pair.
// Copying (rather than mutating) avoids changing pairs shared via maps.
6202 Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object,
6203 Handle<Name> name) {
6204 Isolate* isolate = object->GetIsolate();
6205 LookupResult result(isolate);
6206 object->LocalLookupRealNamedProperty(*name, &result);
6207 if (result.IsPropertyCallbacks()) {
6208 // Note that the result can actually have IsDontDelete() == true when we
6209 // e.g. have to fall back to the slow case while adding a setter after
6210 // successfully reusing a map transition for a getter. Nevertheless, this is
6211 // OK, because the assertion only holds for the whole addition of both
6212 // accessors, not for the addition of each part. See first comment in
6213 // DefinePropertyAccessor below.
6214 Object* obj = result.GetCallbackObject();
6215 if (obj->IsAccessorPair()) {
6216 return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate));
6219 return isolate->factory()->NewAccessorPair();
// Installs a getter/setter pair for a named property. Tries the fast
// path (map transitions via DefineFastAccessor) first when the object
// has fast properties, at least one accessor component is non-null,
// access control is DEFAULT, and there is descriptor headroom; otherwise
// falls back to normalizing and storing an AccessorPair as a CALLBACKS
// property.
6223 void JSObject::DefinePropertyAccessor(Handle<JSObject> object,
6225 Handle<Object> getter,
6226 Handle<Object> setter,
6227 PropertyAttributes attributes,
6228 v8::AccessControl access_control) {
6229 // We could assert that the property is configurable here, but we would need
6230 // to do a lookup, which seems to be a bit of overkill.
6231 bool only_attribute_changes = getter->IsNull() && setter->IsNull();
6232 if (object->HasFastProperties() && !only_attribute_changes &&
6233 access_control == v8::DEFAULT &&
6234 (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors)) {
// If installing the getter fails we must not attempt the setter on
// the fast path, or the two could end up in different maps; hence
// `!getterOk ||` short-circuits setter installation.
6235 bool getterOk = getter->IsNull() ||
6236 DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes);
6237 bool setterOk = !getterOk || setter->IsNull() ||
6238 DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes);
6239 if (getterOk && setterOk) return;
// Slow path: copy-or-create an AccessorPair and define it as a
// normalized CALLBACKS property.
6242 Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name);
6243 accessors->SetComponents(*getter, *setter);
6244 accessors->set_access_flags(access_control);
6246 SetPropertyCallback(object, name, accessors, attributes);
// Returns whether a callback (accessor) may be installed for `name` on
// `object`. Callers must already have passed any required access check
// (asserted below). The answer is false only when an existing callback
// anywhere on the prototype chain has prohibits_overwriting() set.
6250 bool JSObject::CanSetCallback(Handle<JSObject> object, Handle<Name> name) {
6251 Isolate* isolate = object->GetIsolate();
6252 ASSERT(!object->IsAccessCheckNeeded() ||
6253 isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET));
6255 // Check if there is an API defined callback object which prohibits
6256 // callback overwriting in this object or its prototype chain.
6257 // This mechanism is needed for instance in a browser setting, where
6258 // certain accessors such as window.location should not be allowed
6259 // to be overwritten because allowing overwriting could potentially
6260 // cause security problems.
6261 LookupResult callback_result(isolate);
6262 object->LookupCallbackProperty(*name, &callback_result);
6263 if (callback_result.IsFound()) {
6264 Object* callback_obj = callback_result.GetCallbackObject();
6265 if (callback_obj->IsAccessorInfo()) {
6266 return !AccessorInfo::cast(callback_obj)->prohibits_overwriting();
6268 if (callback_obj->IsAccessorPair()) {
6269 return !AccessorPair::cast(callback_obj)->prohibits_overwriting();
// Returns whether dictionary (slow) elements occur only in the prototype
// chain and not on objects of this map itself. If this map already has
// dictionary elements, the answer is trivially not "prototype-only".
6276 bool Map::DictionaryElementsInPrototypeChainOnly() {
6277 Heap* heap = GetHeap();
6279 if (IsDictionaryElementsKind(elements_kind())) {
6283 for (Object* prototype = this->prototype();
6284 prototype != heap->null_value();
6285 prototype = prototype->GetPrototype(GetIsolate())) {
6286 if (prototype->IsJSProxy()) {
6287 // Be conservative, don't walk into proxies.
6291 if (IsDictionaryElementsKind(
6292 JSObject::cast(prototype)->map()->elements_kind())) {
// Stores `structure` (an AccessorPair or AccessorInfo) as a CALLBACKS
// element at `index`. Normalizes the elements to a dictionary first,
// marks the dictionary slow, and — for sloppy-arguments objects —
// removes any parameter alias for the index so the callback is not
// shadowed by a mapped argument.
6301 void JSObject::SetElementCallback(Handle<JSObject> object,
6303 Handle<Object> structure,
6304 PropertyAttributes attributes) {
6305 Heap* heap = object->GetHeap();
6306 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6308 // Normalize elements to make this operation simple.
6309 bool had_dictionary_elements = object->HasDictionaryElements();
6310 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
6311 ASSERT(object->HasDictionaryElements() ||
6312 object->HasDictionaryArgumentsElements());
6313 // Update the dictionary with the new CALLBACKS property.
6314 dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
// Accessor elements force every element access through the slow path.
6316 dictionary->set_requires_slow_elements();
6318 // Update the dictionary backing store on the object.
6319 if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
6320 // Also delete any parameter alias.
6322 // TODO(kmillikin): when deleting the last parameter alias we could
6323 // switch to a direct backing store without the parameter map. This
6324 // would allow GC of the context.
6325 FixedArray* parameter_map = FixedArray::cast(object->elements());
// Aliased parameters live at map slots [2, length); slot 1 holds the
// backing arguments store.
6326 if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
6327 parameter_map->set(index + 2, heap->the_hole_value());
6329 parameter_map->set(1, *dictionary);
6331 object->set_elements(*dictionary);
6333 if (!had_dictionary_elements) {
6334 // KeyedStoreICs (at least the non-generic ones) need a reset.
6335 heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Stores `structure` (an AccessorPair or AccessorInfo) as a CALLBACKS
// named property. Normalizes the object's properties first; for global
// objects also swaps in a fresh map so global-property-cell-based inline
// caches and optimized code are invalidated.
6341 void JSObject::SetPropertyCallback(Handle<JSObject> object,
6343 Handle<Object> structure,
6344 PropertyAttributes attributes) {
6345 // Normalize object to make this operation simple.
6346 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
6348 // For the global object allocate a new map to invalidate the global inline
6349 // caches which have a global property cell reference directly in the code.
6350 if (object->IsGlobalObject()) {
6351 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
6352 ASSERT(new_map->is_dictionary_map());
6353 object->set_map(*new_map);
6355 // When running crankshaft, changing the map is not enough. We
6356 // need to deoptimize all functions that rely on this global
6358 Deoptimizer::DeoptimizeGlobalObject(*object);
6361 // Update the dictionary with the new CALLBACKS property.
6362 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6363 SetNormalizedProperty(object, name, structure, details);
// Public entry point for defining a getter/setter on `object`.
// Performs the access check, unwraps global proxies, flattens the name,
// rejects targets protected by prohibits_overwriting, then dispatches to
// DefineElementAccessor or DefinePropertyAccessor depending on whether
// the name parses as an array index. Also records an Object.observe
// change record ("add"/"reconfigure") when the object is observed.
6367 void JSObject::DefineAccessor(Handle<JSObject> object,
6369 Handle<Object> getter,
6370 Handle<Object> setter,
6371 PropertyAttributes attributes,
6372 v8::AccessControl access_control) {
6373 Isolate* isolate = object->GetIsolate();
6374 // Check access rights if needed.
6375 if (object->IsAccessCheckNeeded() &&
6376 !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
6377 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET);
// Global proxies delegate to the real global object behind them.
6381 if (object->IsJSGlobalProxy()) {
6382 Handle<Object> proto(object->GetPrototype(), isolate);
6383 if (proto->IsNull()) return;
6384 ASSERT(proto->IsJSGlobalObject());
6385 DefineAccessor(Handle<JSObject>::cast(proto),
6394 // Make sure that the top context does not change when doing callbacks or
6395 // interceptor calls.
6396 AssertNoContextChange ncc(isolate);
6398 // Try to flatten before operating on the string.
6399 if (name->IsString()) String::cast(*name)->TryFlatten();
6401 if (!JSObject::CanSetCallback(object, name)) return;
6404 bool is_element = name->AsArrayIndex(&index);
// Capture the pre-existing value for the Object.observe change record;
// the hidden-string property is never observed.
6406 Handle<Object> old_value = isolate->factory()->the_hole_value();
6407 bool is_observed = object->map()->is_observed() &&
6408 *name != isolate->heap()->hidden_string();
6409 bool preexists = false;
6412 preexists = HasLocalElement(object, index);
6413 if (preexists && object->GetLocalElementAccessorPair(index) == NULL) {
6414 old_value = Object::GetElementNoExceptionThrown(isolate, object, index);
6417 LookupResult lookup(isolate);
6418 object->LocalLookup(*name, &lookup, true);
6419 preexists = lookup.IsProperty();
6420 if (preexists && lookup.IsDataProperty()) {
6421 old_value = Object::GetProperty(object, name);
6422 CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
6428 DefineElementAccessor(
6429 object, index, getter, setter, attributes, access_control);
6431 DefinePropertyAccessor(
6432 object, name, getter, setter, attributes, access_control);
6436 const char* type = preexists ? "reconfigure" : "add";
6437 EnqueueChangeRecord(object, type, name, old_value);
// Attempts to reuse an existing map transition for an accessor
// definition: if the transition target's descriptor at
// `target_descriptor` is a CALLBACKS AccessorPair whose `component`
// equals `accessor` with matching attributes, migrate `self` to the
// transitioned map and return true; otherwise return false so the
// caller falls back to the slow path.
6442 static bool TryAccessorTransition(Handle<JSObject> self,
6443 Handle<Map> transitioned_map,
6444 int target_descriptor,
6445 AccessorComponent component,
6446 Handle<Object> accessor,
6447 PropertyAttributes attributes) {
6448 DescriptorArray* descs = transitioned_map->instance_descriptors();
6449 PropertyDetails details = descs->GetDetails(target_descriptor);
6451 // If the transition target was not callbacks, fall back to the slow case.
6452 if (details.type() != CALLBACKS) return false;
6453 Object* descriptor = descs->GetCallbacksObject(target_descriptor);
6454 if (!descriptor->IsAccessorPair()) return false;
6456 Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
6457 PropertyAttributes target_attributes = details.attributes();
6459 // Reuse transition if adding same accessor with same attributes.
// Identity comparison is intentional: only the exact same closure may
// share the transition.
6460 if (target_accessor == *accessor && target_attributes == attributes) {
6461 JSObject::MigrateToMap(self, transitioned_map);
6465 // If either not the same accessor, or not the same attributes, fall back to
// Raw (MaybeObject) helper: builds a CALLBACKS descriptor for
// name/accessors/attributes and inserts it into a copy of `map`,
// recording a transition. Used by the handlified wrapper below.
6471 static MaybeObject* CopyInsertDescriptor(Map* map,
6473 AccessorPair* accessors,
6474 PropertyAttributes attributes) {
6475 CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
6476 return map->CopyInsertDescriptor(&new_accessors_desc, INSERT_TRANSITION);
// Handlified wrapper around the raw CopyInsertDescriptor above; retries
// on allocation failure via CALL_HEAP_FUNCTION.
6480 static Handle<Map> CopyInsertDescriptor(Handle<Map> map,
6482 Handle<AccessorPair> accessors,
6483 PropertyAttributes attributes) {
6484 CALL_HEAP_FUNCTION(map->GetIsolate(),
6485 CopyInsertDescriptor(*map, *name, *accessors, attributes),
// Fast-path accessor definition that keeps the object in fast-property
// mode by reusing or creating map transitions. Returns false when the
// property exists but is not a callback (caller must use the slow
// path); returns true when the accessor was installed (or already
// present with identical attributes). Elided lines in this view hide
// some early returns and the exact branch structure.
6490 bool JSObject::DefineFastAccessor(Handle<JSObject> object,
6492 AccessorComponent component,
6493 Handle<Object> accessor,
6494 PropertyAttributes attributes) {
6495 ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
6496 Isolate* isolate = object->GetIsolate();
6497 LookupResult result(isolate);
6498 object->LocalLookup(*name, &result);
// A found non-callback property cannot be upgraded on the fast path.
6500 if (result.IsFound() && !result.IsPropertyCallbacks()) {
6504 // Return success if the same accessor with the same attributes already exist.
6505 AccessorPair* source_accessors = NULL;
6506 if (result.IsPropertyCallbacks()) {
6507 Object* callback_value = result.GetCallbackObject();
6508 if (callback_value->IsAccessorPair()) {
6509 source_accessors = AccessorPair::cast(callback_value);
6510 Object* entry = source_accessors->get(component);
6511 if (entry == *accessor && result.GetAttributes() == attributes) {
6518 int descriptor_number = result.GetDescriptorIndex();
// Look for an existing transition keyed on this name.
6520 object->map()->LookupTransition(*object, *name, &result);
6522 if (result.IsFound()) {
6523 Handle<Map> target(result.GetTransitionTarget());
6524 ASSERT(target->NumberOfOwnDescriptors() ==
6525 object->map()->NumberOfOwnDescriptors());
6526 // This works since descriptors are sorted in order of addition.
6527 ASSERT(object->map()->instance_descriptors()->
6528 GetKey(descriptor_number) == *name);
6529 return TryAccessorTransition(object, target, descriptor_number,
6530 component, accessor, attributes);
6533 // If not, lookup a transition.
6534 object->map()->LookupTransition(*object, *name, &result);
6536 // If there is a transition, try to follow it.
6537 if (result.IsFound()) {
6538 Handle<Map> target(result.GetTransitionTarget());
// A new accessor is always the most recently added descriptor on the
// transition target.
6539 int descriptor_number = target->LastAdded();
6540 ASSERT(target->instance_descriptors()->GetKey(descriptor_number)
6542 return TryAccessorTransition(object, target, descriptor_number,
6543 component, accessor, attributes);
6547 // If there is no transition yet, add a transition to the a new accessor pair
6548 // containing the accessor. Allocate a new pair if there were no source
6549 // accessors. Otherwise, copy the pair and modify the accessor.
6550 Handle<AccessorPair> accessors = source_accessors != NULL
6551 ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
6552 : isolate->factory()->NewAccessorPair();
6553 accessors->set(component, *accessor);
6554 Handle<Map> new_map = CopyInsertDescriptor(Handle<Map>(object->map()),
6555 name, accessors, attributes);
6556 JSObject::MigrateToMap(object, new_map);
// Installs an API-defined AccessorInfo callback on `object` under the
// name carried by `info`. Returns undefined when the accessor cannot be
// set (failed access check, prohibited overwrite, non-configurable
// existing property, or unsupported element kind); otherwise installs
// the callback. Mirrors DefineAccessor's structure: access check,
// global-proxy unwrap, flatten, then element vs. named dispatch.
6561 Handle<Object> JSObject::SetAccessor(Handle<JSObject> object,
6562 Handle<AccessorInfo> info) {
6563 Isolate* isolate = object->GetIsolate();
6564 Factory* factory = isolate->factory();
6565 Handle<Name> name(Name::cast(info->name()));
6567 // Check access rights if needed.
6568 if (object->IsAccessCheckNeeded() &&
6569 !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
6570 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET);
6571 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
6572 return factory->undefined_value();
6575 if (object->IsJSGlobalProxy()) {
6576 Handle<Object> proto(object->GetPrototype(), isolate);
6577 if (proto->IsNull()) return object;
6578 ASSERT(proto->IsJSGlobalObject());
6579 return SetAccessor(Handle<JSObject>::cast(proto), info);
6582 // Make sure that the top context does not change when doing callbacks or
6583 // interceptor calls.
6584 AssertNoContextChange ncc(isolate);
6586 // Try to flatten before operating on the string.
6587 if (name->IsString()) FlattenString(Handle<String>::cast(name));
6589 if (!JSObject::CanSetCallback(object, name)) {
6590 return factory->undefined_value();
6594 bool is_element = name->AsArrayIndex(&index);
// API accessors on array indices of JSArrays are not supported.
6597 if (object->IsJSArray()) return factory->undefined_value();
6599 // Accessors overwrite previous callbacks (cf. with getters/setters).
6600 switch (object->GetElementsKind()) {
6601 case FAST_SMI_ELEMENTS:
6603 case FAST_DOUBLE_ELEMENTS:
6604 case FAST_HOLEY_SMI_ELEMENTS:
6605 case FAST_HOLEY_ELEMENTS:
6606 case FAST_HOLEY_DOUBLE_ELEMENTS:
6609 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6610 case EXTERNAL_##TYPE##_ELEMENTS: \
6611 case TYPE##_ELEMENTS: \
6613 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6614 #undef TYPED_ARRAY_CASE
6615 // Ignore getters and setters on pixel and external array
6617 return factory->undefined_value();
6619 case DICTIONARY_ELEMENTS:
6621 case SLOPPY_ARGUMENTS_ELEMENTS:
6626 SetElementCallback(object, index, info, info->property_attributes());
6629 LookupResult result(isolate);
6630 object->LocalLookup(*name, &result, true);
6631 // ES5 forbids turning a property into an accessor if it's not
6632 // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
6633 if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
6634 return factory->undefined_value();
6637 SetPropertyCallback(object, name, info, info->property_attributes());
// Retrieves the getter or setter (per `component`) defined for `name`
// on `object` or anywhere on its prototype chain. Returns undefined if
// no accessor pair is found or the access check fails. Element (array
// index) names are looked up in dictionary element stores; named
// properties via LocalLookup on each receiver in the chain.
6644 Handle<Object> JSObject::GetAccessor(Handle<JSObject> object,
6646 AccessorComponent component) {
6647 Isolate* isolate = object->GetIsolate();
6649 // Make sure that the top context does not change when doing callbacks or
6650 // interceptor calls.
6651 AssertNoContextChange ncc(isolate);
6653 // Check access rights if needed.
6654 if (object->IsAccessCheckNeeded() &&
6655 !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_HAS)) {
6656 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
6657 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
6658 return isolate->factory()->undefined_value();
6661 // Make the lookup and include prototypes.
6663 if (name->AsArrayIndex(&index)) {
6664 for (Handle<Object> obj = object;
6666 obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
// Only dictionary-mode element stores can hold accessor elements.
6667 if (obj->IsJSObject() && JSObject::cast(*obj)->HasDictionaryElements()) {
6668 JSObject* js_object = JSObject::cast(*obj);
6669 SeededNumberDictionary* dictionary = js_object->element_dictionary();
6670 int entry = dictionary->FindEntry(index);
6671 if (entry != SeededNumberDictionary::kNotFound) {
6672 Object* element = dictionary->ValueAt(entry);
6673 if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
6674 element->IsAccessorPair()) {
6675 return handle(AccessorPair::cast(element)->GetComponent(component),
6682 for (Handle<Object> obj = object;
6684 obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
6685 LookupResult result(isolate);
// Inner `obj` (the callback object) intentionally shadows the loop
// variable here.
6686 JSReceiver::cast(*obj)->LocalLookup(*name, &result);
6687 if (result.IsFound()) {
6688 if (result.IsReadOnly()) return isolate->factory()->undefined_value();
6689 if (result.IsPropertyCallbacks()) {
6690 Object* obj = result.GetCallbackObject();
6691 if (obj->IsAccessorPair()) {
6692 return handle(AccessorPair::cast(obj)->GetComponent(component),
6699 return isolate->factory()->undefined_value();
// Reverse lookup: given a property value, find the name of an own
// property that holds it. Scans the descriptor array (FIELD and
// CONSTANT descriptors) for fast-property objects, otherwise delegates
// to the property dictionary. Returns undefined when no match is found.
6703 Object* JSObject::SlowReverseLookup(Object* value) {
6704 if (HasFastProperties()) {
6705 int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
6706 DescriptorArray* descs = map()->instance_descriptors();
6707 for (int i = 0; i < number_of_own_descriptors; i++) {
6708 if (descs->GetType(i) == FIELD) {
6709 Object* property = RawFastPropertyAt(descs->GetFieldIndex(i));
// Double fields are stored boxed as HeapNumbers; compare numeric
// values rather than object identity.
6710 if (descs->GetDetails(i).representation().IsDouble()) {
6711 ASSERT(property->IsHeapNumber());
6712 if (value->IsNumber() && property->Number() == value->Number()) {
6713 return descs->GetKey(i);
6715 } else if (property == value) {
6716 return descs->GetKey(i);
6718 } else if (descs->GetType(i) == CONSTANT) {
6719 if (descs->GetConstant(i) == value) {
6720 return descs->GetKey(i);
6724 return GetHeap()->undefined_value();
6726 return property_dictionary()->SlowReverseLookup(value);
// Handlified wrapper for Map::RawCopy below; retries on allocation
// failure via CALL_HEAP_FUNCTION.
6731 Handle<Map> Map::RawCopy(Handle<Map> map,
6732 int instance_size) {
6733 CALL_HEAP_FUNCTION(map->GetIsolate(),
6734 map->RawCopy(instance_size),
// Allocates a bare copy of this map with the given instance size.
// Copies prototype, constructor and bit fields, but resets descriptor
// ownership/count, the enum cache length, and the deprecated flag —
// the caller is responsible for installing descriptors afterwards.
6739 MaybeObject* Map::RawCopy(int instance_size) {
6741 MaybeObject* maybe_result =
6742 GetHeap()->AllocateMap(instance_type(), instance_size);
6743 if (!maybe_result->To(&result)) return maybe_result;
6745 result->set_prototype(prototype());
6746 result->set_constructor(constructor());
6747 result->set_bit_field(bit_field());
6748 result->set_bit_field2(bit_field2());
6749 int new_bit_field3 = bit_field3();
// Fresh copies own their (not-yet-installed) descriptors and start
// with zero own descriptors and an invalid enum cache.
6750 new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
6751 new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
6752 new_bit_field3 = EnumLengthBits::update(new_bit_field3,
6753 kInvalidEnumCacheSentinel);
6754 new_bit_field3 = Deprecated::update(new_bit_field3, false);
6755 if (!is_dictionary_map()) {
6756 new_bit_field3 = IsUnstable::update(new_bit_field3, false);
6758 result->set_bit_field3(new_bit_field3);
// Produces a dictionary-mode (normalized) copy of `map`. With
// CLEAR_INOBJECT_PROPERTIES the in-object property slots are dropped
// from the instance size; `sharing` controls whether the result is
// marked shared (for the normalized-map cache).
6763 Handle<Map> Map::CopyNormalized(Handle<Map> map,
6764 PropertyNormalizationMode mode,
6765 NormalizedMapSharingMode sharing) {
6766 int new_instance_size = map->instance_size();
6767 if (mode == CLEAR_INOBJECT_PROPERTIES) {
6768 new_instance_size -= map->inobject_properties() * kPointerSize;
6771 Handle<Map> result = Map::RawCopy(map, new_instance_size);
6773 if (mode != CLEAR_INOBJECT_PROPERTIES) {
6774 result->set_inobject_properties(map->inobject_properties());
6777 result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
6778 result->set_dictionary_map(true);
6779 result->set_migration_target(false);
// Heap-verification-only check of the shared-map invariants.
6782 if (FLAG_verify_heap && result->is_shared()) {
6783 result->SharedMapVerify();
// Handlified wrapper for Map::CopyDropDescriptors below.
6791 Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
6792 CALL_HEAP_FUNCTION(map->GetIsolate(), map->CopyDropDescriptors(), Map);
// Copies this map without its descriptor array. Property-layout counts
// are carried over, the code cache is cleared, and dependent code is
// notified that this map is no longer a leaf.
6796 MaybeObject* Map::CopyDropDescriptors() {
6798 MaybeObject* maybe_result = RawCopy(instance_size());
6799 if (!maybe_result->To(&result)) return maybe_result;
6801 // Please note instance_type and instance_size are set when allocated.
6802 result->set_inobject_properties(inobject_properties());
6803 result->set_unused_property_fields(unused_property_fields());
6805 result->set_pre_allocated_property_fields(pre_allocated_property_fields());
6806 result->set_is_shared(false);
6807 result->ClearCodeCache(GetHeap());
6808 NotifyLeafMapLayoutChange();
// Adds `descriptor` by creating a child map that SHARES this map's
// descriptor array (extended by one entry) instead of copying it.
// Only legal when this map owns its descriptors. Installs a simple
// transition to the child and transfers descriptor ownership to it.
6813 MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors,
6814 Descriptor* descriptor) {
6815 // Sanity check. This path is only to be taken if the map owns its descriptor
6816 // array, implying that its NumberOfOwnDescriptors equals the number of
6817 // descriptors in the descriptor array.
6818 ASSERT(NumberOfOwnDescriptors() ==
6819 instance_descriptors()->number_of_descriptors());
6821 MaybeObject* maybe_result = CopyDropDescriptors();
6822 if (!maybe_result->To(&result)) return maybe_result;
6824 Name* name = descriptor->GetKey();
6826 TransitionArray* transitions;
6827 MaybeObject* maybe_transitions =
6828 AddTransition(name, result, SIMPLE_TRANSITION);
6829 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
6831 int old_size = descriptors->number_of_descriptors();
6833 DescriptorArray* new_descriptors;
// If the array has slack, append in place; otherwise reallocate with
// ~50% growth and copy.
6835 if (descriptors->NumberOfSlackDescriptors() > 0) {
6836 new_descriptors = descriptors;
6837 new_descriptors->Append(descriptor);
6839 // Descriptor arrays grow by 50%.
6840 MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
6841 GetIsolate(), old_size, old_size < 4 ? 1 : old_size / 2);
6842 if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
6844 DescriptorArray::WhitenessWitness witness(new_descriptors);
6846 // Copy the descriptors, inserting a descriptor.
6847 for (int i = 0; i < old_size; ++i) {
6848 new_descriptors->CopyFrom(i, descriptors, i, witness);
6851 new_descriptors->Append(descriptor, witness);
6854 // If the source descriptors had an enum cache we copy it. This ensures
6855 // that the maps to which we push the new descriptor array back can rely
6856 // on a cache always being available once it is set. If the map has more
6857 // enumerated descriptors than available in the original cache, the cache
6858 // will be lazily replaced by the extended cache when needed.
6859 if (descriptors->HasEnumCache()) {
6860 new_descriptors->CopyEnumCacheFrom(descriptors);
6864 // Replace descriptors by new_descriptors in all maps that share it.
6866 GetHeap()->incremental_marking()->RecordWrites(descriptors);
// Walk back-pointers to repoint every ancestor map that shared the old
// descriptor array at the reallocated one.
6867 for (Object* current = GetBackPointer();
6868 !current->IsUndefined();
6869 current = map->GetBackPointer()) {
6870 map = Map::cast(current);
6871 if (map->instance_descriptors() != descriptors) break;
6872 map->set_instance_descriptors(new_descriptors);
6875 set_instance_descriptors(new_descriptors);
6879 result->SetBackPointer(this);
6880 result->InitializeDescriptors(new_descriptors);
6881 ASSERT(result->NumberOfOwnDescriptors() == NumberOfOwnDescriptors() + 1);
6883 set_transitions(transitions);
// Ownership of the shared descriptor array moves to the child map.
6884 set_owns_descriptors(false);
// Handlified wrapper for the raw CopyReplaceDescriptors below.
6890 Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
6891 Handle<DescriptorArray> descriptors,
6892 TransitionFlag flag,
6893 Handle<Name> name) {
6894 CALL_HEAP_FUNCTION(map->GetIsolate(),
6895 map->CopyReplaceDescriptors(*descriptors, flag, *name),
// Copies this map and installs the given (sorted, duplicate-free)
// descriptor array on the copy. With INSERT_TRANSITION (and room for
// more transitions) a transition keyed on `name` links this map to the
// copy; otherwise the copy is free-floating and representations are
// generalized to Tagged.
6900 MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors,
6901 TransitionFlag flag,
6903 SimpleTransitionFlag simple_flag) {
6904 ASSERT(descriptors->IsSortedNoDuplicates());
6907 MaybeObject* maybe_result = CopyDropDescriptors();
6908 if (!maybe_result->To(&result)) return maybe_result;
6910 result->InitializeDescriptors(descriptors);
6912 if (flag == INSERT_TRANSITION && CanHaveMoreTransitions()) {
6913 TransitionArray* transitions;
6914 MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag);
6915 if (!maybe_transitions->To(&transitions)) return maybe_transitions;
6916 set_transitions(transitions);
6917 result->SetBackPointer(this);
// No transition: the detached map cannot rely on field-representation
// tracking, so fall back to Tagged for all descriptors.
6919 descriptors->InitializeRepresentations(Representation::Tagged());
6926 // Since this method is used to rewrite an existing transition tree, it can
6927 // always insert transitions without checking.
// Creates a child map with `descriptors` installed and own-descriptor
// count new_descriptor + 1, adjusting unused_property_fields when the
// new descriptor is a FIELD. Always links parent -> child with a
// transition (see comment above).
6928 Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
6930 Handle<DescriptorArray> descriptors) {
6931 ASSERT(descriptors->IsSortedNoDuplicates());
6933 Handle<Map> result = Map::CopyDropDescriptors(map);
6935 result->InitializeDescriptors(*descriptors);
6936 result->SetNumberOfOwnDescriptors(new_descriptor + 1);
6938 int unused_property_fields = map->unused_property_fields();
6939 if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
6940 unused_property_fields = map->unused_property_fields() - 1;
// Exhausted the slack: a new out-of-object property backing-store
// chunk of kFieldsAdded slots will be allocated.
6941 if (unused_property_fields < 0) {
6942 unused_property_fields += JSObject::kFieldsAdded;
6946 result->set_unused_property_fields(unused_property_fields);
6947 result->set_owns_descriptors(false);
6949 Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
6950 Handle<TransitionArray> transitions = Map::AddTransition(map, name, result,
6953 map->set_transitions(*transitions);
6954 result->SetBackPointer(*map);
// Copies this map with a different elements kind. When inserting a
// transition and this map owns its descriptors, the descriptors are
// shared with the new map (ownership transferred); otherwise a full
// copy forces a descriptor-array split.
6960 MaybeObject* Map::CopyAsElementsKind(ElementsKind kind, TransitionFlag flag) {
6961 if (flag == INSERT_TRANSITION) {
// Only one elements transition per map: a pre-existing one is only
// tolerated for dictionary/external kinds.
6962 ASSERT(!HasElementsTransition() ||
6963 ((elements_transition_map()->elements_kind() == DICTIONARY_ELEMENTS ||
6964 IsExternalArrayElementsKind(
6965 elements_transition_map()->elements_kind())) &&
6966 (kind == DICTIONARY_ELEMENTS ||
6967 IsExternalArrayElementsKind(kind))));
6968 ASSERT(!IsFastElementsKind(kind) ||
6969 IsMoreGeneralElementsKindTransition(elements_kind(), kind));
6970 ASSERT(kind != elements_kind());
6973 bool insert_transition =
6974 flag == INSERT_TRANSITION && !HasElementsTransition();
6976 if (insert_transition && owns_descriptors()) {
6977 // In case the map owned its own descriptors, share the descriptors and
6978 // transfer ownership to the new map.
6980 MaybeObject* maybe_new_map = CopyDropDescriptors();
6981 if (!maybe_new_map->To(&new_map)) return maybe_new_map;
6983 MaybeObject* added_elements = set_elements_transition_map(new_map);
6984 if (added_elements->IsFailure()) return added_elements;
6986 new_map->set_elements_kind(kind);
6987 new_map->InitializeDescriptors(instance_descriptors());
6988 new_map->SetBackPointer(this);
6989 set_owns_descriptors(false);
6993 // In case the map did not own its own descriptors, a split is forced by
6994 // copying the map; creating a new descriptor array cell.
6995 // Create a new free-floating map only if we are not allowed to store it.
6997 MaybeObject* maybe_new_map = Copy();
6998 if (!maybe_new_map->To(&new_map)) return maybe_new_map;
7000 new_map->set_elements_kind(kind);
7002 if (insert_transition) {
7003 MaybeObject* added_elements = set_elements_transition_map(new_map);
7004 if (added_elements->IsFailure()) return added_elements;
7005 new_map->SetBackPointer(this);
// Creates the "observed" variant of `map` (for Object.observe),
// reachable via a transition keyed on the observed_symbol. Shares the
// descriptor array (with ownership transfer) when `map` owns it,
// otherwise makes a full copy.
7012 Handle<Map> Map::CopyForObserved(Handle<Map> map) {
7013 ASSERT(!map->is_observed());
7015 Isolate* isolate = map->GetIsolate();
7017 // In case the map owned its own descriptors, share the descriptors and
7018 // transfer ownership to the new map.
7019 Handle<Map> new_map;
7020 if (map->owns_descriptors()) {
7021 new_map = Map::CopyDropDescriptors(map);
7023 new_map = Map::Copy(map);
7026 Handle<TransitionArray> transitions =
7027 Map::AddTransition(map, isolate->factory()->observed_symbol(), new_map,
7030 map->set_transitions(*transitions);
7032 new_map->set_is_observed();
// Descriptor sharing is finalized only after the transition is in
// place, so a failed allocation above leaves `map` consistent.
7034 if (map->owns_descriptors()) {
7035 new_map->InitializeDescriptors(map->instance_descriptors());
7036 map->set_owns_descriptors(false);
7039 new_map->SetBackPointer(*map);
// Copies this map, seeding the copy's descriptor array with the
// constructor's initial-map descriptors that describe the
// pre-allocated property fields. With no pre-allocated fields this
// degenerates to CopyDropDescriptors.
7044 MaybeObject* Map::CopyWithPreallocatedFieldDescriptors() {
7045 if (pre_allocated_property_fields() == 0) return CopyDropDescriptors();
7047 // If the map has pre-allocated properties always start out with a descriptor
7048 // array describing these properties.
7049 ASSERT(constructor()->IsJSFunction());
7050 JSFunction* ctor = JSFunction::cast(constructor());
7051 Map* map = ctor->initial_map();
7052 DescriptorArray* descriptors = map->instance_descriptors();
7054 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
7055 DescriptorArray* new_descriptors;
7056 MaybeObject* maybe_descriptors =
7057 descriptors->CopyUpTo(number_of_own_descriptors);
7058 if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
// OMIT_TRANSITION: the copy is detached from the transition tree.
7060 return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
// Handlified wrapper for Map::Copy below.
7064 Handle<Map> Map::Copy(Handle<Map> map) {
7065 CALL_HEAP_FUNCTION(map->GetIsolate(), map->Copy(), Map);
// Full copy of this map with its own copy of the (own) descriptors,
// detached from the transition tree (OMIT_TRANSITION).
7069 MaybeObject* Map::Copy() {
7070 DescriptorArray* descriptors = instance_descriptors();
7071 DescriptorArray* new_descriptors;
7072 int number_of_own_descriptors = NumberOfOwnDescriptors();
7073 MaybeObject* maybe_descriptors =
7074 descriptors->CopyUpTo(number_of_own_descriptors);
7075 if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
7077 return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
// Copies this map with `descriptor` appended. Fast path: when inserting
// a transition on a descriptor-owning map, share the array via
// ShareDescriptor. Slow path: allocate a fresh descriptor array of
// old_size + 1, copy, append (sorting if the source array was larger
// than this map's own count).
7081 MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor,
7082 TransitionFlag flag) {
7083 DescriptorArray* descriptors = instance_descriptors();
7085 // Ensure the key is unique.
7086 MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
7087 if (maybe_failure->IsFailure()) return maybe_failure;
7089 int old_size = NumberOfOwnDescriptors();
7090 int new_size = old_size + 1;
7092 if (flag == INSERT_TRANSITION &&
7093 owns_descriptors() &&
7094 CanHaveMoreTransitions()) {
7095 return ShareDescriptor(descriptors, descriptor);
7098 DescriptorArray* new_descriptors;
7099 MaybeObject* maybe_descriptors =
7100 DescriptorArray::Allocate(GetIsolate(), old_size, 1);
7101 if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
7103 DescriptorArray::WhitenessWitness witness(new_descriptors);
7105 // Copy the descriptors, inserting a descriptor.
7106 for (int i = 0; i < old_size; ++i) {
7107 new_descriptors->CopyFrom(i, descriptors, i, witness);
// When the shared source array held more descriptors than this map
// owns, the appended entry may be out of order — Set + Sort restores
// the sorted invariant; otherwise a plain Append suffices.
7110 if (old_size != descriptors->number_of_descriptors()) {
7111 new_descriptors->SetNumberOfDescriptors(new_size);
7112 new_descriptors->Set(old_size, descriptor, witness);
7113 new_descriptors->Sort();
7115 new_descriptors->Append(descriptor, witness);
7118 Name* key = descriptor->GetKey();
7119 return CopyReplaceDescriptors(new_descriptors, flag, key, SIMPLE_TRANSITION);
// Copies this map with `descriptor` inserted: replaces an existing
// descriptor with the same key, or appends when the key is new.
7123 MaybeObject* Map::CopyInsertDescriptor(Descriptor* descriptor,
7124 TransitionFlag flag) {
7125 DescriptorArray* old_descriptors = instance_descriptors();
7127 // Ensure the key is unique.
7128 MaybeObject* maybe_result = descriptor->KeyToUniqueName();
7129 if (maybe_result->IsFailure()) return maybe_result;
7131 // We replace the key if it is already present.
7132 int index = old_descriptors->SearchWithCache(descriptor->GetKey(), this);
7133 if (index != DescriptorArray::kNotFound) {
7134 return CopyReplaceDescriptor(old_descriptors, descriptor, index, flag);
7136 return CopyAddDescriptor(descriptor, flag);
// Handlified wrapper around the raw CopyUpToAddAttributes below.
// CALL_HEAP_FUNCTION runs the raw allocation in a GC-retry loop and
// returns the result as a Handle (trailing macro arguments elided here).
7140 Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
7141 Handle<DescriptorArray> desc,
7142 int enumeration_index,
7143 PropertyAttributes attributes) {
7144 CALL_HEAP_FUNCTION(desc->GetIsolate(),
7145 desc->CopyUpToAddAttributes(enumeration_index, attributes),
// Copies the first |enumeration_index| descriptors into a new array,
// OR-ing |attributes| (masked) into each copied entry's details. With
// attributes == NONE the entries are copied verbatim.
// NOTE(review): listing is elided — the branch that widens |mask| for
// non-accessor properties (READ_ONLY) and the final return are missing.
7150 MaybeObject* DescriptorArray::CopyUpToAddAttributes(
7151 int enumeration_index, PropertyAttributes attributes) {
// Copying zero descriptors yields the canonical empty array.
7152 if (enumeration_index == 0) return GetHeap()->empty_descriptor_array();
7154 int size = enumeration_index;
7156 DescriptorArray* descriptors;
7157 MaybeObject* maybe_descriptors = Allocate(GetIsolate(), size);
7158 if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
7159 DescriptorArray::WhitenessWitness witness(descriptors);
7161 if (attributes != NONE) {
7162 for (int i = 0; i < size; ++i) {
7163 Object* value = GetValue(i);
7164 PropertyDetails details = GetDetails(i);
7165 int mask = DONT_DELETE | DONT_ENUM;
7166 // READ_ONLY is an invalid attribute for JS setters/getters.
7167 if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
7170 details = details.CopyAddAttributes(
7171 static_cast<PropertyAttributes>(attributes & mask));
7172 Descriptor desc(GetKey(i), value, details);
7173 descriptors->Set(i, &desc, witness);
// attributes == NONE: plain verbatim copy (else-branch header elided).
7176 for (int i = 0; i < size; ++i) {
7177 descriptors->CopyFrom(i, this, i, witness);
// Re-sort if only a prefix of the source array was copied.
7181 if (number_of_descriptors() != enumeration_index) descriptors->Sort();
// Copies this map, substituting |descriptor| for the entry at
// |insertion_index| (the key must be unchanged). A transition is simple
// only when the replaced entry is the last descriptor.
// NOTE(review): listing is elided; else-branches and braces are missing.
7187 MaybeObject* Map::CopyReplaceDescriptor(DescriptorArray* descriptors,
7188 Descriptor* descriptor,
7189 int insertion_index,
7190 TransitionFlag flag) {
7191 // Ensure the key is unique.
7192 MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
7193 if (maybe_failure->IsFailure()) return maybe_failure;
// Replacement must not change the key at the insertion slot.
7195 Name* key = descriptor->GetKey();
7196 ASSERT(key == descriptors->GetKey(insertion_index));
7198 int new_size = NumberOfOwnDescriptors();
7199 ASSERT(0 <= insertion_index && insertion_index < new_size);
7201 ASSERT_LT(insertion_index, new_size);
7203 DescriptorArray* new_descriptors;
7204 MaybeObject* maybe_descriptors =
7205 DescriptorArray::Allocate(GetIsolate(), new_size);
7206 if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
7207 DescriptorArray::WhitenessWitness witness(new_descriptors);
// Copy every descriptor, swapping in the replacement at insertion_index.
7209 for (int i = 0; i < new_size; ++i) {
7210 if (i == insertion_index) {
7211 new_descriptors->Set(i, descriptor, witness);
7213 new_descriptors->CopyFrom(i, descriptors, i, witness);
7217 // Re-sort if descriptors were removed.
7218 if (new_size != descriptors->length()) new_descriptors->Sort();
7220 SimpleTransitionFlag simple_flag =
7221 (insertion_index == descriptors->number_of_descriptors() - 1)
7224 return CopyReplaceDescriptors(new_descriptors, flag, key, simple_flag);
// Handlified wrapper: delegates to the raw UpdateCodeCache below inside a
// GC-retry loop (CALL_HEAP_FUNCTION_VOID). |name| parameter line elided.
7228 void Map::UpdateCodeCache(Handle<Map> map,
7230 Handle<Code> code) {
7231 Isolate* isolate = map->GetIsolate();
7232 CALL_HEAP_FUNCTION_VOID(isolate,
7233 map->UpdateCodeCache(*name, *code));
// Inserts |code| under |name| into this map's code cache, lazily
// allocating the CodeCache object on first use. A plain FixedArray in the
// code_cache slot means "no cache allocated yet".
// NOTE(review): the declaration of |result| (elided line) is missing here.
7237 MaybeObject* Map::UpdateCodeCache(Name* name, Code* code) {
7238 // Allocate the code cache if not present.
7239 if (code_cache()->IsFixedArray()) {
7241 { MaybeObject* maybe_result = GetHeap()->AllocateCodeCache();
7242 if (!maybe_result->ToObject(&result)) return maybe_result;
7244 set_code_cache(result);
7247 // Update the code cache.
7248 return CodeCache::cast(code_cache())->Update(name, code);
// Looks up |name|/|flags| in this map's code cache; returns undefined when
// no cache has been allocated (a FixedArray in the slot — see
// UpdateCodeCache above) or when the entry is absent.
7252 Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
7253 // Do a lookup if a code cache exists.
7254 if (!code_cache()->IsFixedArray()) {
7255 return CodeCache::cast(code_cache())->Lookup(name, flags);
7257 return GetHeap()->undefined_value();
// Returns the internal cache index of |code| for later RemoveFromCodeCache.
// NOTE(review): the "no cache" fallback return is elided from this listing.
7262 int Map::IndexInCodeCache(Object* name, Code* code) {
7263 // Get the internal index if a code cache exists.
7264 if (!code_cache()->IsFixedArray()) {
7265 return CodeCache::cast(code_cache())->GetIndex(name, code);
// Removes the cache entry previously located via IndexInCodeCache. The
// cache must still exist — callers must not allow GC in between.
7271 void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
7272 // No GC is supposed to happen between a call to IndexInCodeCache and
7273 // RemoveFromCodeCache so the code cache must be there.
7274 ASSERT(!code_cache()->IsFixedArray());
7275 CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
7279 // An iterator over all map transitions in a transition array, reusing the map
7280 // field of the contents array while it is running.
// While iterating, the array's map slot temporarily holds a Smi cursor;
// Next() restores the fixed_array_map once the iteration is exhausted.
// NOTE(review): listing is elided — the Start()/Next() method headers and
// several braces are missing from this view.
7281 class IntrusiveMapTransitionIterator {
7283 explicit IntrusiveMapTransitionIterator(TransitionArray* transition_array)
7284 : transition_array_(transition_array) { }
// Start(): stash cursor 0 in the map slot (must not already be iterating).
7287 ASSERT(!IsIterating());
7288 *TransitionArrayHeader() = Smi::FromInt(0);
// Iteration is in progress exactly while the map slot holds a Smi.
7291 bool IsIterating() {
7292 return (*TransitionArrayHeader())->IsSmi();
// Next(): return the next transition target, or restore the real map and
// (in elided code) signal exhaustion.
7296 ASSERT(IsIterating());
7297 int index = Smi::cast(*TransitionArrayHeader())->value();
7298 int number_of_transitions = transition_array_->number_of_transitions();
7299 while (index < number_of_transitions) {
7300 *TransitionArrayHeader() = Smi::FromInt(index + 1);
7301 return transition_array_->GetTarget(index);
7304 *TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map();
// Raw pointer to the transition array's map slot used as the cursor.
7309 Object** TransitionArrayHeader() {
7310 return HeapObject::RawField(transition_array_, TransitionArray::kMapOffset);
7313 TransitionArray* transition_array_;
7317 // An iterator over all prototype transitions, reusing the map field of the
7318 // underlying array while it is running.
// Same intrusive-cursor trick as IntrusiveMapTransitionIterator above: the
// array's map slot holds a Smi index while iterating and is restored to
// fixed_array_map when done. Several method headers are elided here.
7319 class IntrusivePrototypeTransitionIterator {
7321 explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans)
7322 : proto_trans_(proto_trans) { }
// Start(): initialize the cursor to 0.
7325 ASSERT(!IsIterating());
7326 *Header() = Smi::FromInt(0);
7329 bool IsIterating() {
7330 return (*Header())->IsSmi();
// Next(): return the next prototype-transition map, restoring the real map
// when exhausted (return of the sentinel is elided).
7334 ASSERT(IsIterating());
7335 int transitionNumber = Smi::cast(*Header())->value();
7336 if (transitionNumber < NumberOfTransitions()) {
7337 *Header() = Smi::FromInt(transitionNumber + 1);
7338 return GetTransition(transitionNumber);
7340 *Header() = proto_trans_->GetHeap()->fixed_array_map();
// Header(): raw pointer to the array's map slot used as the cursor.
7346 return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset);
7349 int NumberOfTransitions() {
7350 FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7351 Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
7352 return Smi::cast(num)->value();
7355 Map* GetTransition(int transitionNumber) {
7356 FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7357 return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
// Translate a transition ordinal into its slot index in the backing array.
7360 int IndexFor(int transitionNumber) {
7361 return Map::kProtoTransitionHeaderSize +
7362 Map::kProtoTransitionMapOffset +
7363 transitionNumber * Map::kProtoTransitionElementsPerEntry;
7366 HeapObject* proto_trans_;
7370 // To traverse the transition tree iteratively, we have to store two kinds of
7371 // information in a map: The parent map in the traversal and which children of a
7372 // node have already been visited. To do this without additional memory, we
7373 // temporarily reuse two maps with known values:
7375 // (1) The map of the map temporarily holds the parent, and is restored to the
7376 // meta map afterwards.
7378 // (2) The info which children have already been visited depends on which part
7379 // of the map we currently iterate:
7381 // (a) If we currently follow normal map transitions, we temporarily store
7382 // the current index in the map of the FixedArray of the descriptor
7383 // array's contents, and restore it to the fixed array map afterwards.
7384 // Note that a single descriptor can have 0, 1, or 2 transitions.
7386 // (b) If we currently follow prototype transitions, we temporarily store
7387 // the current index in the map of the FixedArray holding the prototype
7388 // transitions, and restore it to the fixed array map afterwards.
7390 // Note that the child iterator is just a concatenation of two iterators: One
7391 // iterating over map transitions and one iterating over prototype transitions.
// Helper view over Map used by Map::TraverseTransitionTree for pointer-
// reversal traversal. NOTE(review): listing is elided; several braces and
// return statements are missing from this view.
7392 class TraversableMap : public Map {
7394 // Record the parent in the traversal within this map. Note that this destroys
// the map's real map pointer until GetAndResetParent() is called.
7396 void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }
7398 // Reset the current map's map, returning the parent previously stored in it.
7399 TraversableMap* GetAndResetParent() {
7400 TraversableMap* old_parent = static_cast<TraversableMap*>(map());
7401 set_map_no_write_barrier(GetHeap()->meta_map());
7405 // Start iterating over this map's children, possibly destroying a FixedArray
7406 // map (see explanation above).
7407 void ChildIteratorStart() {
7408 if (HasTransitionArray()) {
7409 if (HasPrototypeTransitions()) {
7410 IntrusivePrototypeTransitionIterator(GetPrototypeTransitions()).Start();
7413 IntrusiveMapTransitionIterator(transitions()).Start();
7417 // If we have an unvisited child map, return that one and advance. If we have
7418 // none, return NULL and reset any destroyed FixedArray maps.
7419 TraversableMap* ChildIteratorNext() {
7420 TransitionArray* transition_array = unchecked_transition_array();
// Bail out early when there is no (intact or in-iteration) transition array.
7421 if (!transition_array->map()->IsSmi() &&
7422 !transition_array->IsTransitionArray()) {
// Drain prototype transitions first, then regular map transitions.
7426 if (transition_array->HasPrototypeTransitions()) {
7427 HeapObject* proto_transitions =
7428 transition_array->UncheckedPrototypeTransitions();
7429 IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions);
7430 if (proto_iterator.IsIterating()) {
7431 Map* next = proto_iterator.Next();
7432 if (next != NULL) return static_cast<TraversableMap*>(next);
7436 IntrusiveMapTransitionIterator transition_iterator(transition_array);
7437 if (transition_iterator.IsIterating()) {
7438 Map* next = transition_iterator.Next();
7439 if (next != NULL) return static_cast<TraversableMap*>(next);
7447 // Traverse the transition tree in postorder without using the C++ stack by
7448 // doing pointer reversal.
// The callback runs on each map after all of its children (postorder); the
// surrounding loop construct is elided from this listing.
7449 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
7450 TraversableMap* current = static_cast<TraversableMap*>(this);
7451 current->ChildIteratorStart();
// Descend into an unvisited child, stashing the parent in the child's map
// slot (pointer reversal).
7453 TraversableMap* child = current->ChildIteratorNext();
7454 if (child != NULL) {
7455 child->ChildIteratorStart();
7456 child->SetParent(current);
// No children left: visit the node, restore its map, and climb back up.
7459 TraversableMap* parent = current->GetAndResetParent();
7460 callback(current, data);
7461 if (current == this) break;
// Adds |name| -> |code| to this CodeCache. NORMAL stubs go into a lazily
// allocated hash table (they can be numerous); everything else goes into
// the linear default cache. NOTE(review): |result| declaration elided.
7468 MaybeObject* CodeCache::Update(Name* name, Code* code) {
7469 // The number of monomorphic stubs for normal load/store/call IC's can grow to
7470 // a large number and therefore they need to go into a hash table. They are
7471 // used to load global properties from cells.
7472 if (code->type() == Code::NORMAL) {
7473 // Make sure that a hash table is allocated for the normal load code cache.
7474 if (normal_type_cache()->IsUndefined()) {
7476 { MaybeObject* maybe_result =
7477 CodeCacheHashTable::Allocate(GetHeap(),
7478 CodeCacheHashTable::kInitialSize);
7479 if (!maybe_result->ToObject(&result)) return maybe_result;
7481 set_normal_type_cache(result);
7483 return UpdateNormalTypeCache(name, code);
7485 ASSERT(default_cache()->IsFixedArray());
7486 return UpdateDefaultCache(name, code);
// Inserts (name, code) into the linear default cache: reuses a deleted
// (null) slot or the first undefined slot, replaces an entry with matching
// name and type-stripped flags, and otherwise grows the backing array.
// NOTE(review): listing is elided — several returns, braces, and the
// declaration of |result| are missing from this view.
7491 MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) {
7492 // When updating the default code cache we disregard the type encoded in the
7493 // flags. This allows call constant stubs to overwrite call field
7495 Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());
7497 // First check whether we can update existing code cache without
7499 FixedArray* cache = default_cache();
7500 int length = cache->length();
7501 int deleted_index = -1;
// Entries are (name, code) pairs laid out kCodeCacheEntrySize apart.
7502 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7503 Object* key = cache->get(i);
// Null key marks a deleted entry; remember the first one for reuse.
7504 if (key->IsNull()) {
7505 if (deleted_index < 0) deleted_index = i;
// Undefined key marks the end of used entries: insert here (or at the
// earlier deleted slot if one was found).
7508 if (key->IsUndefined()) {
7509 if (deleted_index >= 0) i = deleted_index;
7510 cache->set(i + kCodeCacheEntryNameOffset, name);
7511 cache->set(i + kCodeCacheEntryCodeOffset, code);
// Same name and same type-stripped flags: overwrite in place.
7514 if (name->Equals(Name::cast(key))) {
7516 Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
7517 if (Code::RemoveTypeFromFlags(found) == flags) {
7518 cache->set(i + kCodeCacheEntryCodeOffset, code);
7524 // Reached the end of the code cache. If there were deleted
7525 // elements, reuse the space for the first of them.
7526 if (deleted_index >= 0) {
7527 cache->set(deleted_index + kCodeCacheEntryNameOffset, name);
7528 cache->set(deleted_index + kCodeCacheEntryCodeOffset, code);
7532 // Extend the code cache with some new entries (at least one). Must be a
7533 // multiple of the entry size.
7534 int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
7535 new_length = new_length - new_length % kCodeCacheEntrySize;
7536 ASSERT((new_length % kCodeCacheEntrySize) == 0);
7538 { MaybeObject* maybe_result = cache->CopySize(new_length);
7539 if (!maybe_result->ToObject(&result)) return maybe_result;
7542 // Add the (name, code) pair to the new cache.
7543 cache = FixedArray::cast(result);
7544 cache->set(length + kCodeCacheEntryNameOffset, name);
7545 cache->set(length + kCodeCacheEntryCodeOffset, code);
7546 set_default_cache(cache);
// Puts (name, code) into the normal-type hash table, re-installing the
// table since Put may have grown it. |new_cache| declaration elided.
7551 MaybeObject* CodeCache::UpdateNormalTypeCache(Name* name, Code* code) {
7552 // Adding a new entry can cause a new cache to be allocated.
7553 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7555 { MaybeObject* maybe_new_cache = cache->Put(name, code);
7556 if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
7558 set_normal_type_cache(new_cache);
// Looks up |name|/|flags|: tries the linear default cache first (which is
// keyed on type-stripped flags, so the full flags are re-checked), then
// falls back to the normal-type hash table.
7563 Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
7564 Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
7565 if (result->IsCode()) {
7566 if (Code::cast(result)->flags() == flags) return result;
7567 return GetHeap()->undefined_value();
7569 return LookupNormalTypeCache(name, flags);
// Linear scan of the default cache for a matching name with matching
// type-stripped flags. Undefined key terminates the scan early.
7573 Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
7574 FixedArray* cache = default_cache();
7575 int length = cache->length();
7576 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7577 Object* key = cache->get(i + kCodeCacheEntryNameOffset);
7578 // Skip deleted elements.
7579 if (key->IsNull()) continue;
7580 if (key->IsUndefined()) return key;
7581 if (name->Equals(Name::cast(key))) {
7582 Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
7583 if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
// (return of the matched code object elided from this listing)
7588 return GetHeap()->undefined_value();
// Looks up in the normal-type hash table; undefined when no table exists.
7592 Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
7593 if (!normal_type_cache()->IsUndefined()) {
7594 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7595 return cache->Lookup(name, flags);
7597 return GetHeap()->undefined_value();
// Returns the removable index of |code|: hash-table entry index for NORMAL
// stubs, otherwise the code-slot index (i + 1) in the default cache.
// NOTE(review): the not-found fallback return is elided from this listing.
7602 int CodeCache::GetIndex(Object* name, Code* code) {
7603 if (code->type() == Code::NORMAL) {
7604 if (normal_type_cache()->IsUndefined()) return -1;
7605 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7606 return cache->GetIndex(Name::cast(name), code->flags());
7609 FixedArray* array = default_cache();
7610 int len = array->length();
7611 for (int i = 0; i < len; i += kCodeCacheEntrySize) {
// i + 1 is the code-offset slot within the (name, code) entry.
7612 if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
// Deletes the entry found via GetIndex: hash-table removal for NORMAL
// stubs, otherwise nulling out the (name, code) pair in the default cache.
7618 void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
7619 if (code->type() == Code::NORMAL) {
7620 ASSERT(!normal_type_cache()->IsUndefined());
7621 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7622 ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
7623 cache->RemoveByIndex(index);
// Default-cache case (else-branch header elided): |index| is the code slot,
// |index - 1| the name slot — see the layout assert below.
7625 FixedArray* array = default_cache();
7626 ASSERT(array->length() >= index && array->get(index)->IsCode());
7627 // Use null instead of undefined for deleted elements to distinguish
7628 // deleted elements from unused elements. This distinction is used
7629 // when looking up in the cache and when updating the cache.
7630 ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
7631 array->set_null(index - 1); // Name.
7632 array->set_null(index); // Code.
7637 // The key in the code cache hash table consists of the property name and the
7638 // code object. The actual match is on the name and the code flags. If a key
7639 // is created using the flags and not a code object it can only be used for
7640 // lookup, not to create a new entry.
// NOTE(review): listing is elided; access specifiers, braces, and member
// declarations (name_, flags_, code_) are missing from this view.
7641 class CodeCacheHashTableKey : public HashTableKey {
// Lookup-only key: no code object, so AsObject() must not be called.
7643 CodeCacheHashTableKey(Name* name, Code::Flags flags)
7644 : name_(name), flags_(flags), code_(NULL) { }
// Insertion key: flags are derived from the code object itself.
7646 CodeCacheHashTableKey(Name* name, Code* code)
7647 : name_(name), flags_(code->flags()), code_(code) { }
// Stored entries are (name, code) FixedArray pairs; match on name + flags.
7650 bool IsMatch(Object* other) {
7651 if (!other->IsFixedArray()) return false;
7652 FixedArray* pair = FixedArray::cast(other);
7653 Name* name = Name::cast(pair->get(0));
7654 Code::Flags flags = Code::cast(pair->get(1))->flags();
7655 if (flags != flags_) {
7658 return name_->Equals(name);
// Hash combines the name hash with the code flags.
7661 static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
7662 return name->Hash() ^ flags;
7665 uint32_t Hash() { return NameFlagsHashHelper(name_, flags_); }
7667 uint32_t HashForObject(Object* obj) {
7668 FixedArray* pair = FixedArray::cast(obj);
7669 Name* name = Name::cast(pair->get(0));
7670 Code* code = Code::cast(pair->get(1));
7671 return NameFlagsHashHelper(name, code->flags());
// Materialize the key as a 2-element (name, code) FixedArray for storage.
7674 MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
7675 ASSERT(code_ != NULL);
7677 { MaybeObject* maybe_obj = heap->AllocateFixedArray(2);
7678 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7680 FixedArray* pair = FixedArray::cast(obj);
7681 pair->set(0, name_);
7682 pair->set(1, code_);
7689 // TODO(jkummerow): We should be able to get by without this.
// Returns the cached code for |name|/|flags|, or undefined. Entries store
// the code object at key-index + 1.
7694 Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
7695 CodeCacheHashTableKey key(name, flags);
7696 int entry = FindEntry(&key);
7697 if (entry == kNotFound) return GetHeap()->undefined_value();
7698 return get(EntryToIndex(entry) + 1);
// Inserts (name, code), growing the table if needed; returns the (possibly
// new) table. NOTE(review): declarations of |obj| and |k| are elided.
7702 MaybeObject* CodeCacheHashTable::Put(Name* name, Code* code) {
7703 CodeCacheHashTableKey key(name, code);
7705 { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
7706 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7709 // Don't use |this|, as the table might have grown.
7710 CodeCacheHashTable* cache = reinterpret_cast<CodeCacheHashTable*>(obj);
7712 int entry = cache->FindInsertionEntry(key.Hash());
// Materialize the key as a (name, code) pair object.
7714 { MaybeObject* maybe_k = key.AsObject(GetHeap());
7715 if (!maybe_k->ToObject(&k)) return maybe_k;
7718 cache->set(EntryToIndex(entry), k);
7719 cache->set(EntryToIndex(entry) + 1, code);
7720 cache->ElementAdded();
// Returns the raw entry index for |name|/|flags|, or -1 when absent.
7725 int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
7726 CodeCacheHashTableKey key(name, flags);
7727 int entry = FindEntry(&key);
7728 return (entry == kNotFound) ? -1 : entry;
// Deletes the entry at |index| by writing the hole into its key and value
// slots (bookkeeping such as ElementRemoved is elided from this listing).
7732 void CodeCacheHashTable::RemoveByIndex(int index) {
7734 Heap* heap = GetHeap();
7735 set(EntryToIndex(index), heap->the_hole_value());
7736 set(EntryToIndex(index) + 1, heap->the_hole_value());
// Handlified wrapper around the raw Update below, run in a GC-retry loop.
// The |flags| parameter line is elided from this listing.
7741 void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> cache,
7742 MapHandleList* maps,
7744 Handle<Code> code) {
7745 Isolate* isolate = cache->GetIsolate();
7746 CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code));
// Inserts |code| keyed on the (maps, flags) combination, lazily allocating
// the hash table. NOTE(review): parameter lines and the declarations of
// |result| / |new_cache| are elided from this listing.
7750 MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps,
7753 // Initialize cache if necessary.
7754 if (cache()->IsUndefined()) {
7756 { MaybeObject* maybe_result =
7757 PolymorphicCodeCacheHashTable::Allocate(
7759 PolymorphicCodeCacheHashTable::kInitialSize);
7760 if (!maybe_result->ToObject(&result)) return maybe_result;
7764 // This entry shouldn't be contained in the cache yet.
7765 ASSERT(PolymorphicCodeCacheHashTable::cast(cache())
7766 ->Lookup(maps, flags)->IsUndefined());
7768 PolymorphicCodeCacheHashTable* hash_table =
7769 PolymorphicCodeCacheHashTable::cast(cache());
// Put may grow the table; re-install whatever it returns.
7771 { MaybeObject* maybe_new_cache = hash_table->Put(maps, flags, code);
7772 if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
7774 set_cache(new_cache);
// Looks up code for the (maps, flags) combination; undefined when the
// cache has never been allocated or the entry is absent.
7779 Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
7780 Code::Flags flags) {
7781 if (!cache()->IsUndefined()) {
7782 PolymorphicCodeCacheHashTable* hash_table =
7783 PolymorphicCodeCacheHashTable::cast(cache());
7784 return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
7786 return GetIsolate()->factory()->undefined_value();
7791 // Despite their name, objects of this class are not stored in the actual
7792 // hash table; instead they're temporarily used for lookups. It is therefore
7793 // safe to have a weak (non-owning) pointer to a MapList as a member field.
// NOTE(review): listing is elided; access specifiers, braces, and some
// declarations are missing from this view.
7794 class PolymorphicCodeCacheHashTableKey : public HashTableKey {
7796 // Callers must ensure that |maps| outlives the newly constructed object.
7797 PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
7799 code_flags_(code_flags) {}
// Match on identical flags and set-equality of the map lists.
7801 bool IsMatch(Object* other) {
7802 MapHandleList other_maps(kDefaultListAllocationSize);
7804 FromObject(other, &other_flags, &other_maps);
7805 if (code_flags_ != other_flags) return false;
7806 if (maps_->length() != other_maps.length()) return false;
7807 // Compare just the hashes first because it's faster.
7808 int this_hash = MapsHashHelper(maps_, code_flags_);
7809 int other_hash = MapsHashHelper(&other_maps, other_flags);
7810 if (this_hash != other_hash) return false;
7812 // Full comparison: for each map in maps_, look for an equivalent map in
7813 // other_maps. This implementation is slow, but probably good enough for
7814 // now because the lists are short (<= 4 elements currently).
7815 for (int i = 0; i < maps_->length(); ++i) {
7816 bool match_found = false;
7817 for (int j = 0; j < other_maps.length(); ++j) {
7818 if (*(maps_->at(i)) == *(other_maps.at(j))) {
7823 if (!match_found) return false;
// Order-independent hash: XOR the map hashes into the flags.
7828 static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
7829 uint32_t hash = code_flags;
7830 for (int i = 0; i < maps->length(); ++i) {
7831 hash ^= maps->at(i)->Hash();
7837 return MapsHashHelper(maps_, code_flags_);
7840 uint32_t HashForObject(Object* obj) {
7841 MapHandleList other_maps(kDefaultListAllocationSize);
7843 FromObject(obj, &other_flags, &other_maps);
7844 return MapsHashHelper(&other_maps, other_flags);
// Materialize the key as a FixedArray: [flags, map0, map1, ...].
7847 MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
7849 // The maps in |maps_| must be copied to a newly allocated FixedArray,
7850 // both because the referenced MapList is short-lived, and because C++
7851 // objects can't be stored in the heap anyway.
7852 { MaybeObject* maybe_obj =
7853 heap->AllocateUninitializedFixedArray(maps_->length() + 1);
7854 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7856 FixedArray* list = FixedArray::cast(obj);
7857 list->set(0, Smi::FromInt(code_flags_));
7858 for (int i = 0; i < maps_->length(); ++i) {
7859 list->set(i + 1, *maps_->at(i));
// Decode a stored [flags, maps...] FixedArray back into flags + map list.
7865 static MapHandleList* FromObject(Object* obj,
7867 MapHandleList* maps) {
7868 FixedArray* list = FixedArray::cast(obj);
7870 *code_flags = Smi::cast(list->get(0))->value();
7871 for (int i = 1; i < list->length(); ++i) {
7872 maps->Add(Handle<Map>(Map::cast(list->get(i))));
7877 MapHandleList* maps_; // weak.
7879 static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
// Returns the cached code for (maps, code_flags), or undefined.
7883 Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
7885 PolymorphicCodeCacheHashTableKey key(maps, code_flags);
7886 int entry = FindEntry(&key);
7887 if (entry == kNotFound) return GetHeap()->undefined_value();
7888 return get(EntryToIndex(entry) + 1);
// Inserts code under the (maps, code_flags) key, growing the table if
// needed; returns the (possibly new) table. NOTE(review): parameter lines
// and the declaration of |obj| are elided from this listing.
7892 MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps,
7895 PolymorphicCodeCacheHashTableKey key(maps, code_flags);
7897 { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
7898 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
// Use the returned object, not |this| — the table may have been reallocated.
7900 PolymorphicCodeCacheHashTable* cache =
7901 reinterpret_cast<PolymorphicCodeCacheHashTable*>(obj);
7902 int entry = cache->FindInsertionEntry(key.Hash());
7903 { MaybeObject* maybe_obj = key.AsObject(GetHeap());
7904 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7906 cache->set(EntryToIndex(entry), obj);
7907 cache->set(EntryToIndex(entry) + 1, code);
7908 cache->ElementAdded();
// Trims this array in place to |new_length| by right-trimming the excess
// elements from the mutator side; no-op when already at that length.
7913 void FixedArray::Shrink(int new_length) {
7914 ASSERT(0 <= new_length && new_length <= length());
7915 if (new_length < length()) {
7916 RightTrimFixedArray<Heap::FROM_MUTATOR>(
7917 GetHeap(), this, length() - new_length);
// Adds |array|'s element keys to this array via its elements accessor;
// slow-asserts that every resulting key is a number or a name.
// NOTE(review): the declaration of |result| is elided from this listing.
7922 MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
7923 ElementsAccessor* accessor = array->GetElementsAccessor();
7924 MaybeObject* maybe_result =
7925 accessor->AddElementsToFixedArray(array, array, this);
7927 if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
7928 #ifdef ENABLE_SLOW_ASSERTS
7929 if (FLAG_enable_slow_asserts) {
7930 for (int i = 0; i < result->length(); i++) {
7931 Object* current = result->get(i);
7932 ASSERT(current->IsNumber() || current->IsName());
// Returns the union of this array's keys and |other|'s, computed via
// |other|'s elements accessor; same slow-assert as AddKeysFromJSArray.
7940 MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
7941 ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
7942 MaybeObject* maybe_result =
7943 accessor->AddElementsToFixedArray(NULL, NULL, this, other);
7945 if (!maybe_result->To(&result)) return maybe_result;
7946 #ifdef ENABLE_SLOW_ASSERTS
7947 if (FLAG_enable_slow_asserts) {
7948 for (int i = 0; i < result->length(); i++) {
7949 Object* current = result->get(i);
7950 ASSERT(current->IsNumber() || current->IsName());
// Allocates a new FixedArray of |new_length| and copies min(length,
// new_length) elements into it; length 0 yields the canonical empty array.
// NOTE(review): declarations of |obj| and |len| are elided from this view.
7958 MaybeObject* FixedArray::CopySize(int new_length, PretenureFlag pretenure) {
7959 Heap* heap = GetHeap();
7960 if (new_length == 0) return heap->empty_fixed_array();
7962 { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length, pretenure);
7963 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7965 FixedArray* result = FixedArray::cast(obj);
// No allocation below this point, so write-barrier mode can be cached.
7967 DisallowHeapAllocation no_gc;
7969 if (new_length < len) len = new_length;
7970 // We are taking the map from the old fixed array so the map is sure to
7971 // be an immortal immutable object.
7972 result->set_map_no_write_barrier(map());
7973 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
7974 for (int i = 0; i < len; i++) {
7975 result->set(i, get(i), mode);
// Copies |len| elements from this[pos..] into dest[dest_pos..] with an
// appropriately relaxed write-barrier mode (no GC can occur meanwhile).
7981 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
7982 DisallowHeapAllocation no_gc;
7983 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
7984 for (int index = 0; index < len; index++) {
7985 dest->set(dest_pos+index, get(pos+index), mode);
// Element-wise identity comparison (pointer/Smi equality, not deep
// equality). The final "return true" is elided from this listing.
7991 bool FixedArray::IsEqualTo(FixedArray* other) {
7992 if (length() != other->length()) return false;
7993 for (int i = 0 ; i < length(); ++i) {
7994 if (get(i) != other->get(i)) return false;
// Allocates a DescriptorArray with room for |number_of_descriptors| plus
// |slack| extra slots; size 0 yields the canonical empty array.
// NOTE(review): the |slack| parameter line and |result| declaration are
// elided from this listing.
8001 MaybeObject* DescriptorArray::Allocate(Isolate* isolate,
8002 int number_of_descriptors,
8004 Heap* heap = isolate->heap();
8005 // Do not use DescriptorArray::cast on incomplete object.
8006 int size = number_of_descriptors + slack;
8007 if (size == 0) return heap->empty_descriptor_array();
8009 // Allocate the array of keys.
8010 MaybeObject* maybe_array = heap->AllocateFixedArray(LengthFor(size));
8011 if (!maybe_array->To(&result)) return maybe_array;
// Record the logical descriptor count and clear the enum cache slot.
8013 result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
8014 result->set(kEnumCacheIndex, Smi::FromInt(0));
// Resets the enum cache slot to the "no cache" sentinel (Smi 0).
8019 void DescriptorArray::ClearEnumCache() {
8020 set(kEnumCacheIndex, Smi::FromInt(0));
// Installs an enum cache: |bridge_storage| holds the cache and the index
// cache, and the bridge itself is stored in this array's enum-cache slot.
8024 void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
8025 FixedArray* new_cache,
8026 Object* new_index_cache) {
8027 ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
8028 ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
// A replacement cache must be strictly larger than the one it replaces.
8030 ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
8031 FixedArray::cast(bridge_storage)->
8032 set(kEnumCacheBridgeCacheIndex, new_cache);
8033 FixedArray::cast(bridge_storage)->
8034 set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
8035 set(kEnumCacheIndex, bridge_storage);
// Copies the descriptor at src[src_index] into this[dst_index]
// (key, value, and details). The |src_index| parameter line is elided.
8039 void DescriptorArray::CopyFrom(int dst_index,
8040 DescriptorArray* src,
8042 const WhitenessWitness& witness) {
8043 Object* value = src->GetValue(src_index);
8044 PropertyDetails details = src->GetDetails(src_index);
8045 Descriptor desc(src->GetKey(src_index), value, details);
8046 Set(dst_index, &desc, witness);
// Handlified wrapper around the raw Merge below, run in a GC-retry loop.
// The |verbatim|/|valid|/|new_size|/|modify_index| parameter lines and the
// trailing macro argument are elided from this listing.
8050 Handle<DescriptorArray> DescriptorArray::Merge(Handle<DescriptorArray> desc,
8055 StoreMode store_mode,
8056 Handle<DescriptorArray> other) {
8057 CALL_HEAP_FUNCTION(desc->GetIsolate(),
8058 desc->Merge(verbatim, valid, new_size, modify_index,
8059 store_mode, *other),
8064 // Generalize the |other| descriptor array by merging it into the (at least
8065 // partly) updated |this| descriptor array.
8066 // The method merges two descriptor arrays in three parts. Both descriptor
8067 // arrays are identical up to |verbatim|. They also overlap in keys up to
8068 // |valid|. Between |verbatim| and |valid|, the resulting descriptor type as
8069 // well as the representation are generalized from both |this| and |other|.
8070 // Beyond |valid|, the descriptors are copied verbatim from |other| up to
8071 // |new_size|. In case of incompatible types, the type and representation of
8072 // |other| is used.
// NOTE(review): listing is elided — several parameter lines, the field
// descriptor's offset argument, and closing braces are missing from view.
8073 MaybeObject* DescriptorArray::Merge(int verbatim,
8077 StoreMode store_mode,
8078 DescriptorArray* other) {
8079 ASSERT(verbatim <= valid);
8080 ASSERT(valid <= new_size);
8082 DescriptorArray* result;
8083 // Allocate a new descriptor array large enough to hold the required
8084 // descriptors, with minimally the exact same size as this descriptor array.
8085 MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
8086 GetIsolate(), new_size,
8087 Max(new_size, other->number_of_descriptors()) - new_size);
8088 if (!maybe_descriptors->To(&result)) return maybe_descriptors;
8089 ASSERT(result->length() > length() ||
8090 result->NumberOfSlackDescriptors() > 0 ||
8091 result->number_of_descriptors() == other->number_of_descriptors());
8092 ASSERT(result->number_of_descriptors() == new_size);
8094 DescriptorArray::WhitenessWitness witness(result);
// Part 1: 0 -> |verbatim| — identical prefix, copied as-is while tracking
// how many in-object field slots it consumes.
8099 int current_offset = 0;
8100 for (descriptor = 0; descriptor < verbatim; descriptor++) {
8101 if (GetDetails(descriptor).type() == FIELD) current_offset++;
8102 result->CopyFrom(descriptor, other, descriptor, witness);
8105 // |verbatim| -> |valid|
// Part 2: keys match but details may differ — generalize to a FIELD with
// the joined representation when either side is a field, the modified
// index is forced to a field, or two CONSTANTs disagree on value.
8106 for (; descriptor < valid; descriptor++) {
8107 Name* key = GetKey(descriptor);
8108 PropertyDetails details = GetDetails(descriptor);
8109 PropertyDetails other_details = other->GetDetails(descriptor);
8111 if (details.type() == FIELD || other_details.type() == FIELD ||
8112 (store_mode == FORCE_FIELD && descriptor == modify_index) ||
8113 (details.type() == CONSTANT &&
8114 other_details.type() == CONSTANT &&
8115 GetValue(descriptor) != other->GetValue(descriptor))) {
8116 Representation representation =
8117 details.representation().generalize(other_details.representation());
8118 FieldDescriptor d(key,
8120 other_details.attributes(),
8122 result->Set(descriptor, &d, witness);
8124 result->CopyFrom(descriptor, other, descriptor, witness);
8128 // |valid| -> |new_size|
// Part 3: tail exists only in |other| — copy it, converting to FIELD where
// required by type or by FORCE_FIELD on the modified index.
8129 for (; descriptor < new_size; descriptor++) {
8130 PropertyDetails details = other->GetDetails(descriptor);
8131 if (details.type() == FIELD ||
8132 (store_mode == FORCE_FIELD && descriptor == modify_index)) {
8133 Name* key = other->GetKey(descriptor);
8134 FieldDescriptor d(key,
8136 details.attributes(),
8137 details.representation());
8138 result->Set(descriptor, &d, witness);
8140 result->CopyFrom(descriptor, other, descriptor, witness);
8149 // Checks whether a merge of |other| into |this| would return a copy of |this|.
// True iff every overlapping descriptor of |other| fits into the
// corresponding descriptor here (representation fits; CONSTANTs match).
// NOTE(review): parameter lines and the final "return true" are elided.
8150 bool DescriptorArray::IsMoreGeneralThan(int verbatim,
8153 DescriptorArray* other) {
8154 ASSERT(verbatim <= valid);
8155 ASSERT(valid <= new_size);
// A longer |other| always requires a real merge.
8156 if (valid != new_size) return false;
8158 for (int descriptor = verbatim; descriptor < valid; descriptor++) {
8159 PropertyDetails details = GetDetails(descriptor);
8160 PropertyDetails other_details = other->GetDetails(descriptor);
8161 if (!other_details.representation().fits_into(details.representation())) {
8164 if (details.type() == CONSTANT) {
8165 if (other_details.type() != CONSTANT) return false;
8166 if (GetValue(descriptor) != other->GetValue(descriptor)) return false;
8174 // We need the whiteness witness since sort will reshuffle the entries in the
8175 // descriptor array. If the descriptor array were to be black, the shuffling
8176 // would move a slot that was already recorded as pointing into an evacuation
8177 // candidate. This would result in missing updates upon evacuation.
8178 void DescriptorArray::Sort() {
8179 // In-place heap sort.
8180 int len = number_of_descriptors();
8181 // Reset sorting since the descriptor array might contain invalid pointers.
8182 for (int i = 0; i < len; ++i) SetSortedKey(i, i);
8183 // Bottom-up max-heap construction.
8184 // Index of the last node with children
8185 const int max_parent_index = (len / 2) - 1;
8186 for (int i = max_parent_index; i >= 0; --i) {
8187 int parent_index = i;
8188 const uint32_t parent_hash = GetSortedKey(i)->Hash();
8189 while (parent_index <= max_parent_index) {
8190 int child_index = 2 * parent_index + 1;
8191 uint32_t child_hash = GetSortedKey(child_index)->Hash();
8192 if (child_index + 1 < len) {
8193 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8194 if (right_child_hash > child_hash) {
8196 child_hash = right_child_hash;
8199 if (child_hash <= parent_hash) break;
8200 SwapSortedKeys(parent_index, child_index);
8201 // Now element at child_index could be < its children.
8202 parent_index = child_index; // parent_hash remains correct.
8206 // Extract elements and create sorted array.
8207 for (int i = len - 1; i > 0; --i) {
8208 // Put max element at the back of the array.
8209 SwapSortedKeys(0, i);
8210 // Shift down the new top element.
8211 int parent_index = 0;
8212 const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
8213 const int max_parent_index = (i / 2) - 1;
8214 while (parent_index <= max_parent_index) {
8215 int child_index = parent_index * 2 + 1;
8216 uint32_t child_hash = GetSortedKey(child_index)->Hash();
8217 if (child_index + 1 < i) {
8218 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8219 if (right_child_hash > child_hash) {
8221 child_hash = right_child_hash;
8224 if (child_hash <= parent_hash) break;
8225 SwapSortedKeys(parent_index, child_index);
8226 parent_index = child_index;
8229 ASSERT(IsSortedNoDuplicates());
8233 Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
8234 Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
8235 copy->set_getter(pair->getter());
8236 copy->set_setter(pair->setter());
8241 Object* AccessorPair::GetComponent(AccessorComponent component) {
8242 Object* accessor = get(component);
8243 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
8247 MaybeObject* DeoptimizationInputData::Allocate(Isolate* isolate,
8248 int deopt_entry_count,
8249 PretenureFlag pretenure) {
8250 ASSERT(deopt_entry_count > 0);
8251 return isolate->heap()->AllocateFixedArray(LengthFor(deopt_entry_count),
8256 MaybeObject* DeoptimizationOutputData::Allocate(Isolate* isolate,
8257 int number_of_deopt_points,
8258 PretenureFlag pretenure) {
8259 if (number_of_deopt_points == 0) return isolate->heap()->empty_fixed_array();
8260 return isolate->heap()->AllocateFixedArray(
8261 LengthOfFixedArray(number_of_deopt_points), pretenure);
8266 bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
8267 if (IsEmpty()) return other->IsEmpty();
8268 if (other->IsEmpty()) return false;
8269 if (length() != other->length()) return false;
8270 for (int i = 0; i < length(); ++i) {
8271 if (get(i) != other->get(i)) return false;
8278 static bool IsIdentifier(UnicodeCache* cache, Name* name) {
8279 // Checks whether the buffer contains an identifier (no escape).
8280 if (!name->IsString()) return false;
8281 String* string = String::cast(name);
8282 if (string->length() == 0) return true;
8283 ConsStringIteratorOp op;
8284 StringCharacterStream stream(string, &op);
8285 if (!cache->IsIdentifierStart(stream.GetNext())) {
8288 while (stream.HasMore()) {
8289 if (!cache->IsIdentifierPart(stream.GetNext())) {
8297 bool Name::IsCacheable(Isolate* isolate) {
8298 return IsSymbol() || IsIdentifier(isolate->unicode_cache(), this);
8302 bool String::LooksValid() {
8303 if (!GetIsolate()->heap()->Contains(this)) return false;
8308 String::FlatContent String::GetFlatContent() {
8309 ASSERT(!AllowHeapAllocation::IsAllowed());
8310 int length = this->length();
8311 StringShape shape(this);
8312 String* string = this;
8314 if (shape.representation_tag() == kConsStringTag) {
8315 ConsString* cons = ConsString::cast(string);
8316 if (cons->second()->length() != 0) {
8317 return FlatContent();
8319 string = cons->first();
8320 shape = StringShape(string);
8322 if (shape.representation_tag() == kSlicedStringTag) {
8323 SlicedString* slice = SlicedString::cast(string);
8324 offset = slice->offset();
8325 string = slice->parent();
8326 shape = StringShape(string);
8327 ASSERT(shape.representation_tag() != kConsStringTag &&
8328 shape.representation_tag() != kSlicedStringTag);
8330 if (shape.encoding_tag() == kOneByteStringTag) {
8331 const uint8_t* start;
8332 if (shape.representation_tag() == kSeqStringTag) {
8333 start = SeqOneByteString::cast(string)->GetChars();
8335 start = ExternalAsciiString::cast(string)->GetChars();
8337 return FlatContent(Vector<const uint8_t>(start + offset, length));
8339 ASSERT(shape.encoding_tag() == kTwoByteStringTag);
8341 if (shape.representation_tag() == kSeqStringTag) {
8342 start = SeqTwoByteString::cast(string)->GetChars();
8344 start = ExternalTwoByteString::cast(string)->GetChars();
8346 return FlatContent(Vector<const uc16>(start + offset, length));
8351 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8352 RobustnessFlag robust_flag,
8355 int* length_return) {
8356 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8357 return SmartArrayPointer<char>(NULL);
8359 Heap* heap = GetHeap();
8361 // Negative length means the to the end of the string.
8362 if (length < 0) length = kMaxInt - offset;
8364 // Compute the size of the UTF-8 string. Start at the specified offset.
8365 Access<ConsStringIteratorOp> op(
8366 heap->isolate()->objects_string_iterator());
8367 StringCharacterStream stream(this, op.value(), offset);
8368 int character_position = offset;
8370 int last = unibrow::Utf16::kNoPreviousCharacter;
8371 while (stream.HasMore() && character_position++ < offset + length) {
8372 uint16_t character = stream.GetNext();
8373 utf8_bytes += unibrow::Utf8::Length(character, last);
8377 if (length_return) {
8378 *length_return = utf8_bytes;
8381 char* result = NewArray<char>(utf8_bytes + 1);
8383 // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
8384 stream.Reset(this, offset);
8385 character_position = offset;
8386 int utf8_byte_position = 0;
8387 last = unibrow::Utf16::kNoPreviousCharacter;
8388 while (stream.HasMore() && character_position++ < offset + length) {
8389 uint16_t character = stream.GetNext();
8390 if (allow_nulls == DISALLOW_NULLS && character == 0) {
8393 utf8_byte_position +=
8394 unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
8397 result[utf8_byte_position] = 0;
8398 return SmartArrayPointer<char>(result);
8402 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8403 RobustnessFlag robust_flag,
8404 int* length_return) {
8405 return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
8409 const uc16* String::GetTwoByteData(unsigned start) {
8410 ASSERT(!IsOneByteRepresentationUnderneath());
8411 switch (StringShape(this).representation_tag()) {
8413 return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8414 case kExternalStringTag:
8415 return ExternalTwoByteString::cast(this)->
8416 ExternalTwoByteStringGetData(start);
8417 case kSlicedStringTag: {
8418 SlicedString* slice = SlicedString::cast(this);
8419 return slice->parent()->GetTwoByteData(start + slice->offset());
8421 case kConsStringTag:
8430 SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
8431 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8432 return SmartArrayPointer<uc16>();
8434 Heap* heap = GetHeap();
8436 Access<ConsStringIteratorOp> op(
8437 heap->isolate()->objects_string_iterator());
8438 StringCharacterStream stream(this, op.value());
8440 uc16* result = NewArray<uc16>(length() + 1);
8443 while (stream.HasMore()) {
8444 uint16_t character = stream.GetNext();
8445 result[i++] = character;
8448 return SmartArrayPointer<uc16>(result);
8452 const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
8453 return reinterpret_cast<uc16*>(
8454 reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
8458 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8459 Relocatable* current = isolate->relocatable_top();
8460 while (current != NULL) {
8461 current->PostGarbageCollection();
8462 current = current->prev_;
8467 // Reserve space for statics needing saving and restoring.
8468 int Relocatable::ArchiveSpacePerThread() {
8469 return sizeof(Relocatable*); // NOLINT
8473 // Archive statics that are thread local.
8474 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8475 *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8476 isolate->set_relocatable_top(NULL);
8477 return to + ArchiveSpacePerThread();
8481 // Restore statics that are thread local.
8482 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8483 isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8484 return from + ArchiveSpacePerThread();
8488 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8489 Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8491 return thread_storage + ArchiveSpacePerThread();
8495 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8496 Iterate(v, isolate->relocatable_top());
8500 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8501 Relocatable* current = top;
8502 while (current != NULL) {
8503 current->IterateInstance(v);
8504 current = current->prev_;
8509 FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
8510 : Relocatable(isolate),
8511 str_(str.location()),
8512 length_(str->length()) {
8513 PostGarbageCollection();
8517 FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
8518 : Relocatable(isolate),
8521 length_(input.length()),
8522 start_(input.start()) { }
8525 void FlatStringReader::PostGarbageCollection() {
8526 if (str_ == NULL) return;
8527 Handle<String> str(str_);
8528 ASSERT(str->IsFlat());
8529 DisallowHeapAllocation no_gc;
8530 // This does not actually prevent the vector from being relocated later.
8531 String::FlatContent content = str->GetFlatContent();
8532 ASSERT(content.IsFlat());
8533 is_ascii_ = content.IsAscii();
8535 start_ = content.ToOneByteVector().start();
8537 start_ = content.ToUC16Vector().start();
8542 String* ConsStringIteratorOp::Operate(String* string,
8543 unsigned* offset_out,
8545 unsigned* length_out) {
8546 ASSERT(string->IsConsString());
8547 ConsString* cons_string = ConsString::cast(string);
8548 // Set up search data.
8549 root_ = cons_string;
8550 consumed_ = *offset_out;
8552 return Search(offset_out, type_out, length_out);
// Descends from root_ to the leaf string containing the target offset
// (consumed_), pushing visited cons nodes on the frames_ stack so later
// NextLeaf() calls can continue the traversal.
// NOTE(review): this excerpt appears truncated — parameter lines, loop
// braces and several statements are missing; verify against the full file
// before editing.
String* ConsStringIteratorOp::Search(unsigned* offset_out,
unsigned* length_out) {
ConsString* cons_string = root_;
// Reset the stack, pushing the root string.
frames_[0] = cons_string;
const unsigned consumed = consumed_;
unsigned offset = 0;
// Loop until the string is found which contains the target offset.
String* string = cons_string->first();
unsigned length = string->length();
if (consumed < offset + length) {
// Target offset is in the left branch.
// Keep going if we're still in a ConString.
type = string->map()->instance_type();
if ((type & kStringRepresentationMask) == kConsStringTag) {
cons_string = ConsString::cast(string);
PushLeft(cons_string);
// Tell the stack we're done decending.
AdjustMaximumDepth();
// Update progress through the string.
// Keep going if we're still in a ConString.
string = cons_string->second();
type = string->map()->instance_type();
if ((type & kStringRepresentationMask) == kConsStringTag) {
cons_string = ConsString::cast(string);
PushRight(cons_string);
// TODO(dcarney) Add back root optimization.
// Need this to be updated for the current string.
length = string->length();
// Account for the possibility of an empty right leaf.
// This happens only if we have asked for an offset outside the string.
// Reset depth so future operations will return null immediately.
// Tell the stack we're done decending.
AdjustMaximumDepth();
// Pop stack so next iteration is in correct place.
ASSERT(length != 0);
// Adjust return values and exit.
// consumed_ marks the end of the returned leaf within the root string.
consumed_ = offset + length;
*offset_out = consumed - offset;
*length_out = length;
// Returns the next non-empty leaf string in the in-order traversal of the
// cons tree, or signals completion / stack overflow via *blew_stack.
// NOTE(review): this excerpt appears truncated — loop/brace structure and
// several return statements are missing; verify against the full file.
String* ConsStringIteratorOp::NextLeaf(bool* blew_stack,
unsigned* length_out) {
// Tree traversal complete.
*blew_stack = false;
// We've lost track of higher nodes.
// The fixed-size frames_ ring buffer can only remember kStackSize levels.
if (maximum_depth_ - depth_ == kStackSize) {
// Go right: take the second child of the deepest remembered frame.
ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
String* string = cons_string->second();
int32_t type = string->map()->instance_type();
if ((type & kStringRepresentationMask) != kConsStringTag) {
// Pop stack so next iteration is in correct place.
unsigned length = static_cast<unsigned>(string->length());
// Could be a flattened ConsString.
if (length == 0) continue;
*length_out = length;
consumed_ += length;
cons_string = ConsString::cast(string);
// TODO(dcarney) Add back root optimization.
PushRight(cons_string);
// Need to traverse all the way left.
string = cons_string->first();
type = string->map()->instance_type();
if ((type & kStringRepresentationMask) != kConsStringTag) {
// Found a leaf on the leftmost path; record it and return.
AdjustMaximumDepth();
unsigned length = static_cast<unsigned>(string->length());
ASSERT(length != 0);
*length_out = length;
consumed_ += length;
cons_string = ConsString::cast(string);
PushLeft(cons_string);
8677 uint16_t ConsString::ConsStringGet(int index) {
8678 ASSERT(index >= 0 && index < this->length());
8680 // Check for a flattened cons string
8681 if (second()->length() == 0) {
8682 String* left = first();
8683 return left->Get(index);
8686 String* string = String::cast(this);
8689 if (StringShape(string).IsCons()) {
8690 ConsString* cons_string = ConsString::cast(string);
8691 String* left = cons_string->first();
8692 if (left->length() > index) {
8695 index -= left->length();
8696 string = cons_string->second();
8699 return string->Get(index);
8708 uint16_t SlicedString::SlicedStringGet(int index) {
8709 return parent()->Get(offset() + index);
// Copies the characters [from, to) of |src| into the flat |sink| buffer,
// recursing/iterating through cons and sliced strings. Templated on the
// sink character width so one routine serves one- and two-byte targets.
// NOTE(review): this excerpt appears truncated — the enclosing while-loop,
// CopyChars call sites and several braces are missing; verify against the
// full file before editing.
template <typename sinkchar>
void String::WriteToFlat(String* src,
String* source = src;
ASSERT(0 <= from && from <= to && to <= source->length());
switch (StringShape(source).full_representation_tag()) {
case kOneByteStringTag | kExternalStringTag: {
ExternalAsciiString::cast(source)->GetChars() + from,
case kTwoByteStringTag | kExternalStringTag: {
ExternalTwoByteString::cast(source)->GetChars();
case kOneByteStringTag | kSeqStringTag: {
SeqOneByteString::cast(source)->GetChars() + from,
case kTwoByteStringTag | kSeqStringTag: {
SeqTwoByteString::cast(source)->GetChars() + from,
case kOneByteStringTag | kConsStringTag:
case kTwoByteStringTag | kConsStringTag: {
ConsString* cons_string = ConsString::cast(source);
String* first = cons_string->first();
int boundary = first->length();
// Recurse over the shorter side, iterate (tail-call style) on the longer
// side to bound stack depth on unbalanced cons trees.
if (to - boundary >= boundary - from) {
// Right hand side is longer. Recurse over left.
if (from < boundary) {
WriteToFlat(first, sink, from, boundary);
sink += boundary - from;
source = cons_string->second();
// Left hand side is longer. Recurse over right.
if (to > boundary) {
String* second = cons_string->second();
// When repeatedly appending to a string, we get a cons string that
// is unbalanced to the left, a list, essentially. We inline the
// common case of sequential ascii right child.
if (to - boundary == 1) {
sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
} else if (second->IsSeqOneByteString()) {
CopyChars(sink + boundary - from,
SeqOneByteString::cast(second)->GetChars(),
sink + boundary - from,
case kOneByteStringTag | kSlicedStringTag:
case kTwoByteStringTag | kSlicedStringTag: {
// Slices delegate to the parent with the offset applied to both bounds.
SlicedString* slice = SlicedString::cast(source);
unsigned offset = slice->offset();
WriteToFlat(slice->parent(), sink, from + offset, to + offset);
// Compares the contents of two strings by reading and comparing
// int-sized blocks of characters.
// NOTE(review): this excerpt appears truncated — the #else/#endif structure,
// `int i = 0;` and several braces/returns are missing; verify against the
// full file before editing.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
const Char* const b,
#ifndef V8_HOST_CAN_READ_UNALIGNED
// If this architecture isn't comfortable reading unaligned ints
// then we have to check that the strings are aligned before
// comparing them blockwise.
const int kAlignmentMask = sizeof(uint32_t) - 1;  // NOLINT
uint32_t pa_addr = reinterpret_cast<uint32_t>(a);
uint32_t pb_addr = reinterpret_cast<uint32_t>(b);
if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
// Number of characters that fit into one 32-bit word.
const int kStepSize = sizeof(int) / sizeof(Char);  // NOLINT
int endpoint = length - kStepSize;
// Compare blocks until we reach near the end of the string.
for (; i <= endpoint; i += kStepSize) {
uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
#ifndef V8_HOST_CAN_READ_UNALIGNED
// Compare the remaining characters that didn't fit into a block.
for (; i < length; i++) {
8842 template<typename Chars1, typename Chars2>
8843 class RawStringComparator : public AllStatic {
8845 static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8846 ASSERT(sizeof(Chars1) != sizeof(Chars2));
8847 for (int i = 0; i < len; i++) {
8858 class RawStringComparator<uint16_t, uint16_t> {
8860 static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
8861 return CompareRawStringContents(a, b, len);
8867 class RawStringComparator<uint8_t, uint8_t> {
8869 static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
8870 return CompareRawStringContents(a, b, len);
// Streaming equality comparison of two (possibly cons) strings. Each State
// tracks a window of flat characters for one string; Equals() repeatedly
// compares the overlapping windows and advances both sides.
// NOTE(review): this excerpt appears truncated — access specifiers, member
// declarations (length_, is_one_byte_, state_1_/state_2_, ConsStringIteratorOp
// members) and many braces are missing; verify against the full file.
class StringComparator {
// Per-string cursor over the flat chunks produced by the iterator op.
explicit inline State(ConsStringIteratorOp* op)
: op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}
inline void Init(String* string, unsigned len) {
int32_t type = string->map()->instance_type();
String::Visit(string, 0, *this, *op_, type, len);
// Visitor callbacks: capture the current flat chunk and its encoding.
inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
is_one_byte_ = true;
inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
is_one_byte_ = false;
// Consume |consumed| characters; fetch the next leaf when exhausted.
void Advance(unsigned consumed) {
ASSERT(consumed <= length_);
if (length_ != consumed) {
buffer8_ += consumed;
buffer16_ += consumed;
length_ -= consumed;
ASSERT(op_->HasMore());
unsigned length = 0;
String* next = op_->ContinueOperation(&type, &length);
ASSERT(next != NULL);
ConsStringNullOp null_op;
String::Visit(next, 0, *this, null_op, type, length);
ConsStringIteratorOp* const op_;
const uint8_t* buffer8_;
const uint16_t* buffer16_;
DISALLOW_IMPLICIT_CONSTRUCTORS(State);
inline StringComparator(ConsStringIteratorOp* op_1,
ConsStringIteratorOp* op_2)
// Dispatch to the width-appropriate RawStringComparator; buffer8_/buffer16_
// share storage, hence the reinterpret_casts.
template<typename Chars1, typename Chars2>
static inline bool Equals(State* state_1, State* state_2, unsigned to_check) {
const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
bool Equals(unsigned length, String* string_1, String* string_2) {
ASSERT(length != 0);
state_1_.Init(string_1, length);
state_2_.Init(string_2, length);
// Compare only as many characters as both windows currently hold.
unsigned to_check = Min(state_1_.length_, state_2_.length_);
ASSERT(to_check > 0 && to_check <= length);
if (state_1_.is_one_byte_) {
if (state_2_.is_one_byte_) {
is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
if (state_2_.is_one_byte_) {
is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
if (!is_equal) return false;
// Exit condition. Strings are equal.
if (length == 0) return true;
state_1_.Advance(to_check);
state_2_.Advance(to_check);
DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
// Full content comparison of two strings after the identity fast path has
// failed: cheap negative checks (length, hash, first char) first, then a
// raw compare for flat sequential ASCII, otherwise the streaming
// StringComparator.
// NOTE(review): this excerpt appears truncated — `int len = length();`, the
// #endif, several closing braces and the `else` arms are missing; verify
// against the full file before editing.
bool String::SlowEquals(String* other) {
// Fast check: negative check with lengths.
if (len != other->length()) return false;
if (len == 0) return true;
// Fast check: if hash code is computed for both strings
// a fast negative check can be performed.
if (HasHashCode() && other->HasHashCode()) {
#ifdef ENABLE_SLOW_ASSERTS
// Slow-assert mode: when hashes differ, verify the contents really do
// differ somewhere.
if (FLAG_enable_slow_asserts) {
if (Hash() != other->Hash()) {
bool found_difference = false;
for (int i = 0; i < len; i++) {
if (Get(i) != other->Get(i)) {
found_difference = true;
ASSERT(found_difference);
if (Hash() != other->Hash()) return false;
// We know the strings are both non-empty. Compare the first chars
// before we try to flatten the strings.
if (this->Get(0) != other->Get(0)) return false;
String* lhs = this->TryFlattenGetString();
String* rhs = other->TryFlattenGetString();
// TODO(dcarney): Compare all types of flat strings with a Visitor.
if (StringShape(lhs).IsSequentialAscii() &&
StringShape(rhs).IsSequentialAscii()) {
const uint8_t* str1 = SeqOneByteString::cast(lhs)->GetChars();
const uint8_t* str2 = SeqOneByteString::cast(rhs)->GetChars();
return CompareRawStringContents(str1, str2, len);
Isolate* isolate = GetIsolate();
StringComparator comparator(isolate->objects_string_compare_iterator_a(),
isolate->objects_string_compare_iterator_b());
return comparator.Equals(static_cast<unsigned>(len), lhs, rhs);
9034 bool String::MarkAsUndetectable() {
9035 if (StringShape(this).IsInternalized()) return false;
9037 Map* map = this->map();
9038 Heap* heap = GetHeap();
9039 if (map == heap->string_map()) {
9040 this->set_map(heap->undetectable_string_map());
9042 } else if (map == heap->ascii_string_map()) {
9043 this->set_map(heap->undetectable_ascii_string_map());
9046 // Rest cannot be marked as undetectable
// Compares this (UTF-16) string against a UTF-8 byte vector, decoding the
// UTF-8 on the fly and matching surrogate pairs for supplementary-plane
// characters. With |allow_prefix_match| set, |str| may match a prefix.
// NOTE(review): this excerpt appears truncated — the length-bound condition
// continuation, `int i;`, and several braces/returns are missing; verify
// against the full file before editing.
bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
int slen = length();
// Can't check exact length equality, but we can check bounds.
int str_len = str.length();
if (!allow_prefix_match &&
str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
unsigned remaining_in_str = static_cast<unsigned>(str_len);
const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
for (i = 0; i < slen && remaining_in_str > 0; i++) {
unsigned cursor = 0;
uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
ASSERT(cursor > 0 && cursor <= remaining_in_str);
// Supplementary-plane characters occupy two UTF-16 code units.
if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
if (i > slen - 1) return false;
if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
if (Get(i) != r) return false;
utf8_data += cursor;
remaining_in_str -= cursor;
// Equal only if both inputs were fully consumed (or prefix allowed).
return (allow_prefix_match || i == slen) && remaining_in_str == 0;
9081 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
9082 int slen = length();
9083 if (str.length() != slen) return false;
9084 DisallowHeapAllocation no_gc;
9085 FlatContent content = GetFlatContent();
9086 if (content.IsAscii()) {
9087 return CompareChars(content.ToOneByteVector().start(),
9088 str.start(), slen) == 0;
9090 for (int i = 0; i < slen; i++) {
9091 if (Get(i) != static_cast<uint16_t>(str[i])) return false;
9097 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
9098 int slen = length();
9099 if (str.length() != slen) return false;
9100 DisallowHeapAllocation no_gc;
9101 FlatContent content = GetFlatContent();
9102 if (content.IsTwoByte()) {
9103 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
9105 for (int i = 0; i < slen; i++) {
9106 if (Get(i) != str[i]) return false;
// StringHasher that walks any string shape (flat or cons) via the String
// visitor machinery, accumulating characters chunk by chunk until the whole
// length has been consumed.
// NOTE(review): this excerpt appears truncated — access specifiers, the
// while-loop around ContinueOperation, and the consumed_ member declaration
// are missing; verify against the full file before editing.
class IteratingStringHasher: public StringHasher {
static inline uint32_t Hash(String* string, uint32_t seed) {
const unsigned len = static_cast<unsigned>(string->length());
IteratingStringHasher hasher(len, seed);
// Very short strings have a trivial, precomputed hash.
if (hasher.has_trivial_hash()) {
return hasher.GetHashField();
int32_t type = string->map()->instance_type();
ConsStringNullOp null_op;
String::Visit(string, 0, hasher, null_op, type, len);
// Flat strings terminate immediately.
if (hasher.consumed_ == len) {
ASSERT(!string->IsConsString());
return hasher.GetHashField();
ASSERT(string->IsConsString());
// This is a ConsString, iterate across it.
ConsStringIteratorOp op;
unsigned offset = 0;
unsigned leaf_length = len;
string = op.Operate(string, &offset, &type, &leaf_length);
ASSERT(hasher.consumed_ < len);
String::Visit(string, 0, hasher, null_op, type, leaf_length);
if (hasher.consumed_ == len) break;
string = op.ContinueOperation(&type, &leaf_length);
// This should be taken care of by the length check.
ASSERT(string != NULL);
return hasher.GetHashField();
// Visitor callbacks: fold each flat chunk into the running hash.
inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
AddCharacters(chars, static_cast<int>(length));
consumed_ += length;
inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
AddCharacters(chars, static_cast<int>(length));
consumed_ += length;
inline IteratingStringHasher(int len, uint32_t seed)
: StringHasher(len, seed),
DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
9162 uint32_t String::ComputeAndSetHash() {
9163 // Should only be called if hash code has not yet been computed.
9164 ASSERT(!HasHashCode());
9166 // Store the hash code in the object.
9167 uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
9168 set_hash_field(field);
9170 // Check the hash code is there.
9171 ASSERT(HasHashCode());
9172 uint32_t result = field >> kHashShift;
9173 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
9178 bool String::ComputeArrayIndex(uint32_t* index) {
9179 int length = this->length();
9180 if (length == 0 || length > kMaxArrayIndexSize) return false;
9181 ConsStringIteratorOp op;
9182 StringCharacterStream stream(this, &op);
9183 uint16_t ch = stream.GetNext();
9185 // If the string begins with a '0' character, it must only consist
9186 // of it to be a legal array index.
9192 // Convert string to uint32 array index; character by character.
9194 if (d < 0 || d > 9) return false;
9195 uint32_t result = d;
9196 while (stream.HasMore()) {
9197 d = stream.GetNext() - '0';
9198 if (d < 0 || d > 9) return false;
9199 // Check that the new result is below the 32 bit limit.
9200 if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
9201 result = (result * 10) + d;
9209 bool String::SlowAsArrayIndex(uint32_t* index) {
9210 if (length() <= kMaxCachedArrayIndexLength) {
9211 Hash(); // force computation of hash code
9212 uint32_t field = hash_field();
9213 if ((field & kIsNotArrayIndexMask) != 0) return false;
9214 // Isolate the array index form the full hash field.
9215 *index = (kArrayIndexHashMask & field) >> kHashShift;
9218 return ComputeArrayIndex(index);
// Shrinks a sequential string in place to |new_length| characters. If the
// string is the most recent new-space allocation the allocation top is
// simply lowered; otherwise the freed tail is replaced with a filler object
// so the heap stays iterable.
// NOTE(review): this excerpt appears truncated — the else-branch braces, the
// DisallowHeapAllocation scope and the final return are missing; verify
// against the full file before editing.
Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
int new_size, old_size;
int old_length = string->length();
if (old_length <= new_length) return string;
if (string->IsSeqOneByteString()) {
old_size = SeqOneByteString::SizeFor(old_length);
new_size = SeqOneByteString::SizeFor(new_length);
ASSERT(string->IsSeqTwoByteString());
old_size = SeqTwoByteString::SizeFor(old_length);
new_size = SeqTwoByteString::SizeFor(new_length);
int delta = old_size - new_size;
string->set_length(new_length);
Address start_of_string = string->address();
ASSERT_OBJECT_ALIGNED(start_of_string);
ASSERT_OBJECT_ALIGNED(start_of_string + new_size);
Heap* heap = string->GetHeap();
NewSpace* newspace = heap->new_space();
if (newspace->Contains(start_of_string) &&
newspace->top() == start_of_string + old_size) {
// Last allocated object in new space. Simply lower allocation top.
newspace->set_top(start_of_string + new_size);
// Sizes are pointer size aligned, so that we can use filler objects
// that are a multiple of pointer size.
heap->CreateFillerObjectAt(start_of_string + new_size, delta);
heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
if (new_length == 0) return heap->isolate()->factory()->empty_string();
9262 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
9263 // For array indexes mix the length into the hash as an array index could
9266 ASSERT(length <= String::kMaxArrayIndexSize);
9267 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
9268 (1 << String::kArrayIndexValueBits));
9270 value <<= String::kHashShift;
9271 value |= length << String::kArrayIndexHashLengthShift;
9273 ASSERT((value & String::kIsNotArrayIndexMask) == 0);
9274 ASSERT((length > String::kMaxCachedArrayIndexLength) ||
9275 (value & String::kContainsCachedArrayIndexMask) == 0);
9280 uint32_t StringHasher::GetHashField() {
9281 if (length_ <= String::kMaxHashCalcLength) {
9282 if (is_array_index_) {
9283 return MakeArrayIndexHash(array_index_, length_);
9285 return (GetHashCore(raw_running_hash_) << String::kHashShift) |
9286 String::kIsNotArrayIndexMask;
9288 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
9293 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
9295 int* utf16_length_out) {
9296 int vector_length = chars.length();
9297 // Handle some edge cases
9298 if (vector_length <= 1) {
9299 ASSERT(vector_length == 0 ||
9300 static_cast<uint8_t>(chars.start()[0]) <=
9301 unibrow::Utf8::kMaxOneByteChar);
9302 *utf16_length_out = vector_length;
9303 return HashSequentialString(chars.start(), vector_length, seed);
9305 // Start with a fake length which won't affect computation.
9306 // It will be updated later.
9307 StringHasher hasher(String::kMaxArrayIndexSize, seed);
9308 unsigned remaining = static_cast<unsigned>(vector_length);
9309 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
9310 int utf16_length = 0;
9311 bool is_index = true;
9312 ASSERT(hasher.is_array_index_);
9313 while (remaining > 0) {
9314 unsigned consumed = 0;
9315 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
9316 ASSERT(consumed > 0 && consumed <= remaining);
9318 remaining -= consumed;
9319 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
9320 utf16_length += is_two_characters ? 2 : 1;
9321 // No need to keep hashing. But we do need to calculate utf16_length.
9322 if (utf16_length > String::kMaxHashCalcLength) continue;
9323 if (is_two_characters) {
9324 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
9325 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
9326 hasher.AddCharacter(c1);
9327 hasher.AddCharacter(c2);
9328 if (is_index) is_index = hasher.UpdateIndex(c1);
9329 if (is_index) is_index = hasher.UpdateIndex(c2);
9331 hasher.AddCharacter(c);
9332 if (is_index) is_index = hasher.UpdateIndex(c);
9335 *utf16_length_out = static_cast<int>(utf16_length);
9336 // Must set length here so that hash computation is correct.
9337 hasher.length_ = utf16_length;
9338 return hasher.GetHashField();
9342 void String::PrintOn(FILE* file) {
9343 int length = this->length();
9344 for (int i = 0; i < length; i++) {
9345 PrintF(file, "%c", Get(i));
9350 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
9351 int live_enum = map->EnumLength();
9352 if (live_enum == kInvalidEnumCacheSentinel) {
9353 live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
9355 if (live_enum == 0) return descriptors->ClearEnumCache();
9357 FixedArray* enum_cache = descriptors->GetEnumCache();
9359 int to_trim = enum_cache->length() - live_enum;
9360 if (to_trim <= 0) return;
9361 RightTrimFixedArray<Heap::FROM_GC>(
9362 heap, descriptors->GetEnumCache(), to_trim);
9364 if (!descriptors->HasEnumIndicesCache()) return;
9365 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
9366 RightTrimFixedArray<Heap::FROM_GC>(heap, enum_indices_cache, to_trim);
9370 static void TrimDescriptorArray(Heap* heap,
9372 DescriptorArray* descriptors,
9373 int number_of_own_descriptors) {
9374 int number_of_descriptors = descriptors->number_of_descriptors_storage();
9375 int to_trim = number_of_descriptors - number_of_own_descriptors;
9376 if (to_trim == 0) return;
9378 RightTrimFixedArray<Heap::FROM_GC>(
9379 heap, descriptors, to_trim * DescriptorArray::kDescriptorSize);
9380 descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
9382 if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
9383 descriptors->Sort();
9387 // Clear a possible back pointer in case the transition leads to a dead map.
9388 // Return true in case a back pointer has been cleared and false otherwise.
9389 static bool ClearBackPointer(Heap* heap, Map* target) {
9390 if (Marking::MarkBitFrom(target).Get()) return false;
9391 target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
9396 // TODO(mstarzinger): This method should be moved into MarkCompactCollector,
9397 // because it cannot be called from outside the GC and we already have methods
9398 // depending on the transitions layout in the GC anyways.
9399 void Map::ClearNonLiveTransitions(Heap* heap) {
9400 // If there are no transitions to be cleared, return.
9401 // TODO(verwaest) Should be an assert, otherwise back pointers are not
9402 // properly cleared.
9403 if (!HasTransitionArray()) return;
9405 TransitionArray* t = transitions();
9406 MarkCompactCollector* collector = heap->mark_compact_collector();
9408 int transition_index = 0;
9410 DescriptorArray* descriptors = instance_descriptors();
9411 bool descriptors_owner_died = false;
9413 // Compact all live descriptors to the left.
9414 for (int i = 0; i < t->number_of_transitions(); ++i) {
9415 Map* target = t->GetTarget(i);
9416 if (ClearBackPointer(heap, target)) {
9417 if (target->instance_descriptors() == descriptors) {
9418 descriptors_owner_died = true;
9421 if (i != transition_index) {
9422 Name* key = t->GetKey(i);
9423 t->SetKey(transition_index, key);
9424 Object** key_slot = t->GetKeySlot(transition_index);
9425 collector->RecordSlot(key_slot, key_slot, key);
9426 // Target slots do not need to be recorded since maps are not compacted.
9427 t->SetTarget(transition_index, t->GetTarget(i));
9433 // If there are no transitions to be cleared, return.
9434 // TODO(verwaest) Should be an assert, otherwise back pointers are not
9435 // properly cleared.
9436 if (transition_index == t->number_of_transitions()) return;
9438 int number_of_own_descriptors = NumberOfOwnDescriptors();
9440 if (descriptors_owner_died) {
9441 if (number_of_own_descriptors > 0) {
9442 TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
9443 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
9444 set_owns_descriptors(true);
9446 ASSERT(descriptors == GetHeap()->empty_descriptor_array());
9450 int trim = t->number_of_transitions() - transition_index;
9452 RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition()
9453 ? trim : trim * TransitionArray::kTransitionSize);
9459 // For performance reasons we only hash the 3 most variable fields of a map:
9460 // constructor, prototype and bit_field2.
9462 // Shift away the tag.
9463 int hash = (static_cast<uint32_t>(
9464 reinterpret_cast<uintptr_t>(constructor())) >> 2);
9466 // XOR-ing the prototype and constructor directly yields too many zero bits
9467 // when the two pointers are close (which is fairly common).
9468 // To avoid this we shift the prototype 4 bits relatively to the constructor.
9469 hash ^= (static_cast<uint32_t>(
9470 reinterpret_cast<uintptr_t>(prototype())) << 2);
9472 return hash ^ (hash >> 16) ^ bit_field2();
9476 static bool CheckEquivalent(Map* first, Map* second) {
9478 first->constructor() == second->constructor() &&
9479 first->prototype() == second->prototype() &&
9480 first->instance_type() == second->instance_type() &&
9481 first->bit_field() == second->bit_field() &&
9482 first->bit_field2() == second->bit_field2() &&
9483 first->is_observed() == second->is_observed() &&
9484 first->function_with_prototype() == second->function_with_prototype();
9488 bool Map::EquivalentToForTransition(Map* other) {
9489 return CheckEquivalent(this, other);
9493 bool Map::EquivalentToForNormalization(Map* other,
9494 PropertyNormalizationMode mode) {
9495 int properties = mode == CLEAR_INOBJECT_PROPERTIES
9496 ? 0 : other->inobject_properties();
9497 return CheckEquivalent(this, other) && inobject_properties() == properties;
9501 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9502 for (int i = 0; i < count_of_code_ptr_entries(); i++) {
9503 int index = first_code_ptr_index() + i;
9504 v->VisitCodeEntry(reinterpret_cast<Address>(RawFieldOfElementAt(index)));
9506 for (int i = 0; i < count_of_heap_ptr_entries(); i++) {
9507 int index = first_heap_ptr_index() + i;
9508 v->VisitPointer(RawFieldOfElementAt(index));
9513 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9514 // Iterate over all fields in the body but take care in dealing with
9516 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9517 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9518 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
9522 void JSFunction::MarkForOptimization() {
9523 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9524 ASSERT(!IsOptimized());
9525 ASSERT(shared()->allows_lazy_compilation() ||
9526 code()->optimizable());
9527 ASSERT(!shared()->is_generator());
9528 set_code_no_write_barrier(
9529 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9530 // No write barrier required, since the builtin is part of the root set.
9534 void JSFunction::MarkForConcurrentOptimization() {
9535 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9536 ASSERT(!IsOptimized());
9537 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9538 ASSERT(!shared()->is_generator());
9539 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9540 if (FLAG_trace_concurrent_recompilation) {
9541 PrintF(" ** Marking ");
9543 PrintF(" for concurrent recompilation.\n");
9545 set_code_no_write_barrier(
9546 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9547 // No write barrier required, since the builtin is part of the root set.
9551 void JSFunction::MarkInOptimizationQueue() {
9552 // We can only arrive here via the concurrent-recompilation builtin. If
9553 // break points were set, the code would point to the lazy-compile builtin.
9554 ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
9555 ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
9556 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9557 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9558 if (FLAG_trace_concurrent_recompilation) {
9559 PrintF(" ** Queueing ");
9561 PrintF(" for concurrent recompilation.\n");
9563 set_code_no_write_barrier(
9564 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9565 // No write barrier required, since the builtin is part of the root set.
9569 void SharedFunctionInfo::AddToOptimizedCodeMap(
9570 Handle<SharedFunctionInfo> shared,
9571 Handle<Context> native_context,
9573 Handle<FixedArray> literals,
9574 BailoutId osr_ast_id) {
9575 CALL_HEAP_FUNCTION_VOID(
9576 shared->GetIsolate(),
9577 shared->AddToOptimizedCodeMap(
9578 *native_context, *code, *literals, osr_ast_id));
9582 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
9584 FixedArray* literals,
9585 BailoutId osr_ast_id) {
9586 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
9587 ASSERT(native_context->IsNativeContext());
9588 STATIC_ASSERT(kEntryLength == 4);
9589 Heap* heap = GetHeap();
9590 FixedArray* new_code_map;
9591 Object* value = optimized_code_map();
9592 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
9593 if (value->IsSmi()) {
9594 // No optimized code map.
9595 ASSERT_EQ(0, Smi::cast(value)->value());
9596 // Create 3 entries per context {context, code, literals}.
9597 MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength);
9598 if (!maybe->To(&new_code_map)) return maybe;
9599 new_code_map->set(kEntriesStart + kContextOffset, native_context);
9600 new_code_map->set(kEntriesStart + kCachedCodeOffset, code);
9601 new_code_map->set(kEntriesStart + kLiteralsOffset, literals);
9602 new_code_map->set(kEntriesStart + kOsrAstIdOffset, osr_ast_id_smi);
9604 // Copy old map and append one new entry.
9605 FixedArray* old_code_map = FixedArray::cast(value);
9606 ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context, osr_ast_id));
9607 int old_length = old_code_map->length();
9608 int new_length = old_length + kEntryLength;
9609 MaybeObject* maybe = old_code_map->CopySize(new_length);
9610 if (!maybe->To(&new_code_map)) return maybe;
9611 new_code_map->set(old_length + kContextOffset, native_context);
9612 new_code_map->set(old_length + kCachedCodeOffset, code);
9613 new_code_map->set(old_length + kLiteralsOffset, literals);
9614 new_code_map->set(old_length + kOsrAstIdOffset, osr_ast_id_smi);
9615 // Zap the old map for the sake of the heap verifier.
9616 if (Heap::ShouldZapGarbage()) {
9617 Object** data = old_code_map->data_start();
9618 MemsetPointer(data, heap->the_hole_value(), old_length);
9622 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9623 ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
9624 ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9625 ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9626 Code::OPTIMIZED_FUNCTION);
9627 ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9628 ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9631 set_optimized_code_map(new_code_map);
9632 return new_code_map;
9636 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9637 ASSERT(index > kEntriesStart);
9638 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9640 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9641 ASSERT_NE(NULL, cached_literals);
9642 return cached_literals;
9648 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9649 ASSERT(index > kEntriesStart);
9650 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9651 Code* code = Code::cast(code_map->get(index));
9652 ASSERT_NE(NULL, code);
9657 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9658 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9660 // If the next map link slot is already used then the function was
9661 // enqueued with code flushing and we remove it now.
9662 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9663 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9664 flusher->EvictOptimizedCodeMap(this);
9667 ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
9668 set_optimized_code_map(Smi::FromInt(0));
9672 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
9673 const char* reason) {
9674 if (optimized_code_map()->IsSmi()) return;
9676 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9677 int dst = kEntriesStart;
9678 int length = code_map->length();
9679 for (int src = kEntriesStart; src < length; src += kEntryLength) {
9680 ASSERT(code_map->get(src)->IsNativeContext());
9681 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
9682 // Evict the src entry by not copying it to the dst entry.
9683 if (FLAG_trace_opt) {
9684 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9686 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
9690 PrintF(" (osr ast id %d)]\n", osr.ToInt());
9694 // Keep the src entry by copying it to the dst entry.
9696 code_map->set(dst + kContextOffset,
9697 code_map->get(src + kContextOffset));
9698 code_map->set(dst + kCachedCodeOffset,
9699 code_map->get(src + kCachedCodeOffset));
9700 code_map->set(dst + kLiteralsOffset,
9701 code_map->get(src + kLiteralsOffset));
9702 code_map->set(dst + kOsrAstIdOffset,
9703 code_map->get(src + kOsrAstIdOffset));
9705 dst += kEntryLength;
9708 if (dst != length) {
9709 // Always trim even when array is cleared because of heap verifier.
9710 RightTrimFixedArray<Heap::FROM_MUTATOR>(GetHeap(), code_map, length - dst);
9711 if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
9716 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9717 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9718 ASSERT(shrink_by % kEntryLength == 0);
9719 ASSERT(shrink_by <= code_map->length() - kEntriesStart);
9720 // Always trim even when array is cleared because of heap verifier.
9721 RightTrimFixedArray<Heap::FROM_GC>(GetHeap(), code_map, shrink_by);
9722 if (code_map->length() == kEntriesStart) {
9723 ClearOptimizedCodeMap();
9728 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
9729 if (object->IsGlobalObject()) return;
9731 // Make sure prototypes are fast objects and their maps have the bit set
9732 // so they remain fast.
9733 if (!object->HasFastProperties()) {
9734 TransformToFastProperties(object, 0);
9739 static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
9740 Context* native_context, Map* initial_map) {
9741 // Replace all of the cached initial array maps in the native context with
9742 // the appropriate transitioned elements kind maps.
9743 Heap* heap = native_context->GetHeap();
9744 MaybeObject* maybe_maps =
9745 heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED);
9747 if (!maybe_maps->To(&maps)) return maybe_maps;
9749 Map* current_map = initial_map;
9750 ElementsKind kind = current_map->elements_kind();
9751 ASSERT(kind == GetInitialFastElementsKind());
9752 maps->set(kind, current_map);
9753 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
9754 i < kFastElementsKindCount; ++i) {
9756 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
9757 if (current_map->HasElementsTransition()) {
9758 new_map = current_map->elements_transition_map();
9759 ASSERT(new_map->elements_kind() == next_kind);
9761 MaybeObject* maybe_new_map =
9762 current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
9763 if (!maybe_new_map->To(&new_map)) return maybe_new_map;
9765 maps->set(next_kind, new_map);
9766 current_map = new_map;
9768 native_context->set_js_array_maps(maps);
9773 Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context,
9774 Handle<Map> initial_map) {
9775 CALL_HEAP_FUNCTION(native_context->GetIsolate(),
9776 CacheInitialJSArrayMaps(*native_context, *initial_map),
9781 void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
9782 Handle<Object> value) {
9783 ASSERT(value->IsJSReceiver());
9785 // First some logic for the map of the prototype to make sure it is in fast
9787 if (value->IsJSObject()) {
9788 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
9791 // Now some logic for the maps of the objects that are created by using this
9792 // function as a constructor.
9793 if (function->has_initial_map()) {
9794 // If the function has allocated the initial map replace it with a
9795 // copy containing the new prototype. Also complete any in-object
9796 // slack tracking that is in progress at this point because it is
9797 // still tracking the old copy.
9798 if (function->shared()->IsInobjectSlackTrackingInProgress()) {
9799 function->shared()->CompleteInobjectSlackTracking();
9801 Handle<Map> new_map = Map::Copy(handle(function->initial_map()));
9802 new_map->set_prototype(*value);
9804 // If the function is used as the global Array function, cache the
9805 // initial map (and transitioned versions) in the native context.
9806 Context* native_context = function->context()->native_context();
9807 Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
9808 if (array_function->IsJSFunction() &&
9809 *function == JSFunction::cast(array_function)) {
9810 CacheInitialJSArrayMaps(handle(native_context), new_map);
9813 function->set_initial_map(*new_map);
9815 // Put the value in the initial map field until an initial map is
9816 // needed. At that point, a new initial map is created and the
9817 // prototype is put into the initial map where it belongs.
9818 function->set_prototype_or_initial_map(*value);
9820 function->GetHeap()->ClearInstanceofCache();
9824 void JSFunction::SetPrototype(Handle<JSFunction> function,
9825 Handle<Object> value) {
9826 ASSERT(function->should_have_prototype());
9827 Handle<Object> construct_prototype = value;
9829 // If the value is not a JSReceiver, store the value in the map's
9830 // constructor field so it can be accessed. Also, set the prototype
9831 // used for constructing objects to the original object prototype.
9832 // See ECMA-262 13.2.2.
9833 if (!value->IsJSReceiver()) {
9834 // Copy the map so this does not affect unrelated functions.
9835 // Remove map transitions because they point to maps with a
9836 // different prototype.
9837 Handle<Map> new_map = Map::Copy(handle(function->map()));
9839 JSObject::MigrateToMap(function, new_map);
9840 new_map->set_constructor(*value);
9841 new_map->set_non_instance_prototype(true);
9842 Isolate* isolate = new_map->GetIsolate();
9843 construct_prototype = handle(
9844 isolate->context()->native_context()->initial_object_prototype(),
9847 function->map()->set_non_instance_prototype(false);
9850 return SetInstancePrototype(function, construct_prototype);
9854 void JSFunction::RemovePrototype() {
9855 Context* native_context = context()->native_context();
9856 Map* no_prototype_map = shared()->strict_mode() == SLOPPY
9857 ? native_context->sloppy_function_without_prototype_map()
9858 : native_context->strict_function_without_prototype_map();
9860 if (map() == no_prototype_map) return;
9862 ASSERT(map() == (shared()->strict_mode() == SLOPPY
9863 ? native_context->sloppy_function_map()
9864 : native_context->strict_function_map()));
9866 set_map(no_prototype_map);
9867 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
9871 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
9872 if (function->has_initial_map()) return;
9873 Isolate* isolate = function->GetIsolate();
9875 // First create a new map with the size and number of in-object properties
9876 // suggested by the function.
9877 InstanceType instance_type;
9879 int in_object_properties;
9880 if (function->shared()->is_generator()) {
9881 instance_type = JS_GENERATOR_OBJECT_TYPE;
9882 instance_size = JSGeneratorObject::kSize;
9883 in_object_properties = 0;
9885 instance_type = JS_OBJECT_TYPE;
9886 instance_size = function->shared()->CalculateInstanceSize();
9887 in_object_properties = function->shared()->CalculateInObjectProperties();
9889 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
9891 // Fetch or allocate prototype.
9892 Handle<Object> prototype;
9893 if (function->has_instance_prototype()) {
9894 prototype = handle(function->instance_prototype(), isolate);
9896 prototype = isolate->factory()->NewFunctionPrototype(function);
9898 map->set_inobject_properties(in_object_properties);
9899 map->set_unused_property_fields(in_object_properties);
9900 map->set_prototype(*prototype);
9901 ASSERT(map->has_fast_object_elements());
9903 if (!function->shared()->is_generator()) {
9904 function->shared()->StartInobjectSlackTracking(*map);
9907 // Finally link initial map and constructor function.
9908 function->set_initial_map(*map);
9909 map->set_constructor(*function);
9913 void JSFunction::SetInstanceClassName(String* name) {
9914 shared()->set_instance_class_name(name);
9918 void JSFunction::PrintName(FILE* out) {
9919 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9920 PrintF(out, "%s", name.get());
9924 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
9925 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
9929 // The filter is a pattern that matches function names in this way:
9930 // "*" all; the default
9931 // "-" all but the top-level function
9932 // "-name" all but the function "name"
9933 // "" only the top-level function
9934 // "name" only the function "name"
9935 // "name*" only functions starting with "name"
9936 bool JSFunction::PassesFilter(const char* raw_filter) {
9937 if (*raw_filter == '*') return true;
9938 String* name = shared()->DebugName();
9939 Vector<const char> filter = CStrVector(raw_filter);
9940 if (filter.length() == 0) return name->length() == 0;
9941 if (filter[0] == '-') {
9943 if (filter.length() == 1) {
9944 return (name->length() != 0);
9945 } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
9948 if (filter[filter.length() - 1] == '*' &&
9949 name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
9954 } else if (name->IsUtf8EqualTo(filter)) {
9957 if (filter[filter.length() - 1] == '*' &&
9958 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
9965 MaybeObject* Oddball::Initialize(Heap* heap,
9966 const char* to_string,
9969 String* internalized_to_string;
9970 { MaybeObject* maybe_string =
9971 heap->InternalizeUtf8String(
9972 CStrVector(to_string));
9973 if (!maybe_string->To(&internalized_to_string)) return maybe_string;
9975 set_to_string(internalized_to_string);
9976 set_to_number(to_number);
9982 String* SharedFunctionInfo::DebugName() {
9984 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
9985 return String::cast(n);
9989 bool SharedFunctionInfo::HasSourceCode() {
9990 return !script()->IsUndefined() &&
9991 !reinterpret_cast<Script*>(script())->source()->IsUndefined();
9995 Handle<Object> SharedFunctionInfo::GetSourceCode() {
9996 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
9997 Handle<String> source(String::cast(Script::cast(script())->source()));
9998 return GetIsolate()->factory()->NewSubString(
9999 source, start_position(), end_position());
10003 bool SharedFunctionInfo::IsInlineable() {
10004 // Check that the function has a script associated with it.
10005 if (!script()->IsScript()) return false;
10006 if (optimization_disabled()) return false;
10007 // If we never ran this (unlikely) then lets try to optimize it.
10008 if (code()->kind() != Code::FUNCTION) return true;
10009 return code()->optimizable();
10013 int SharedFunctionInfo::SourceSize() {
10014 return end_position() - start_position();
10018 int SharedFunctionInfo::CalculateInstanceSize() {
10019 int instance_size =
10020 JSObject::kHeaderSize +
10021 expected_nof_properties() * kPointerSize;
10022 if (instance_size > JSObject::kMaxInstanceSize) {
10023 instance_size = JSObject::kMaxInstanceSize;
10025 return instance_size;
10029 int SharedFunctionInfo::CalculateInObjectProperties() {
10030 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
10034 // Support function for printing the source code to a StringStream
10035 // without any allocation in the heap.
10036 void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator,
10038 // For some native functions there is no source.
10039 if (!HasSourceCode()) {
10040 accumulator->Add("<No Source>");
10044 // Get the source for the script which this function came from.
10045 // Don't use String::cast because we don't want more assertion errors while
10046 // we are already creating a stack dump.
10047 String* script_source =
10048 reinterpret_cast<String*>(Script::cast(script())->source());
10050 if (!script_source->LooksValid()) {
10051 accumulator->Add("<Invalid Source>");
10055 if (!is_toplevel()) {
10056 accumulator->Add("function ");
10057 Object* name = this->name();
10058 if (name->IsString() && String::cast(name)->length() > 0) {
10059 accumulator->PrintName(name);
10063 int len = end_position() - start_position();
10064 if (len <= max_length || max_length < 0) {
10065 accumulator->Put(script_source, start_position(), end_position());
10067 accumulator->Put(script_source,
10069 start_position() + max_length);
10070 accumulator->Add("...\n");
10075 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
10076 if (code->instruction_size() != recompiled->instruction_size()) return false;
10077 ByteArray* code_relocation = code->relocation_info();
10078 ByteArray* recompiled_relocation = recompiled->relocation_info();
10079 int length = code_relocation->length();
10080 if (length != recompiled_relocation->length()) return false;
10081 int compare = memcmp(code_relocation->GetDataStartAddress(),
10082 recompiled_relocation->GetDataStartAddress(),
10084 return compare == 0;
10088 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
10089 ASSERT(!has_deoptimization_support());
10090 DisallowHeapAllocation no_allocation;
10091 Code* code = this->code();
10092 if (IsCodeEquivalent(code, recompiled)) {
10093 // Copy the deoptimization data from the recompiled code.
10094 code->set_deoptimization_data(recompiled->deoptimization_data());
10095 code->set_has_deoptimization_support(true);
10097 // TODO(3025757): In case the recompiled isn't equivalent to the
10098 // old code, we have to replace it. We should try to avoid this
10099 // altogether because it flushes valuable type feedback by
10100 // effectively resetting all IC state.
10101 ReplaceCode(recompiled);
10103 ASSERT(has_deoptimization_support());
10107 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
10108 // Disable optimization for the shared function info and mark the
10109 // code as non-optimizable. The marker on the shared function info
10110 // is there because we flush non-optimized code thereby loosing the
10111 // non-optimizable information for the code. When the code is
10112 // regenerated and set on the shared function info it is marked as
10113 // non-optimizable if optimization is disabled for the shared
10115 set_optimization_disabled(true);
10116 set_bailout_reason(reason);
10117 // Code should be the lazy compilation stub or else unoptimized. If the
10118 // latter, disable optimization for the code too.
10119 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
10120 if (code()->kind() == Code::FUNCTION) {
10121 code()->set_optimizable(false);
10123 PROFILE(GetIsolate(),
10124 LogExistingFunction(Handle<SharedFunctionInfo>(this),
10125 Handle<Code>(code())));
10126 if (FLAG_trace_opt) {
10127 PrintF("[disabled optimization for ");
10129 PrintF(", reason: %s]\n", GetBailoutReason(reason));
10134 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
10135 ASSERT(!id.IsNone());
10136 Code* unoptimized = code();
10137 DeoptimizationOutputData* data =
10138 DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
10139 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
10141 return true; // Return true if there was no ASSERT.
10145 void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
10146 ASSERT(!IsInobjectSlackTrackingInProgress());
10148 if (!FLAG_clever_optimizations) return;
10150 // Only initiate the tracking the first time.
10151 if (live_objects_may_exist()) return;
10152 set_live_objects_may_exist(true);
10154 // No tracking during the snapshot construction phase.
10155 if (Serializer::enabled()) return;
10157 if (map->unused_property_fields() == 0) return;
10159 // Nonzero counter is a leftover from the previous attempt interrupted
10161 if (construction_count() == 0) {
10162 set_construction_count(kGenerousAllocationCount);
10164 set_initial_map(map);
10165 Builtins* builtins = map->GetHeap()->isolate()->builtins();
10166 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
10168 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
10172 // Called from GC, hence reinterpret_cast and unchecked accessors.
10173 void SharedFunctionInfo::DetachInitialMap() {
10174 Map* map = reinterpret_cast<Map*>(initial_map());
10176 // Make the map remember to restore the link if it survives the GC.
10177 map->set_bit_field2(
10178 map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
10180 // Undo state changes made by StartInobjectTracking (except the
10181 // construction_count). This way if the initial map does not survive the GC
10182 // then StartInobjectTracking will be called again the next time the
10183 // constructor is called. The countdown will continue and (possibly after
10184 // several more GCs) CompleteInobjectSlackTracking will eventually be called.
10185 Heap* heap = map->GetHeap();
10186 set_initial_map(heap->undefined_value());
10187 Builtins* builtins = heap->isolate()->builtins();
10188 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
10189 *RawField(this, kConstructStubOffset));
10190 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
10191 // It is safe to clear the flag: it will be set again if the map is live.
10192 set_live_objects_may_exist(false);
10196 // Called from GC, hence reinterpret_cast and unchecked accessors.
10197 void SharedFunctionInfo::AttachInitialMap(Map* map) {
10198 map->set_bit_field2(
10199 map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
10201 // Resume inobject slack tracking.
10202 set_initial_map(map);
10203 Builtins* builtins = map->GetHeap()->isolate()->builtins();
10204 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
10205 *RawField(this, kConstructStubOffset));
10206 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
10207 // The map survived the gc, so there may be objects referencing it.
10208 set_live_objects_may_exist(true);
// Resets per-context optimization state: clears inline caches, records the
// new IC age, and re-enables optimization if it was previously disabled only
// because the opt_count limit was reached.
// NOTE(review): line numbers jump after 10221 and 10224 — closing braces and
// possibly additional statements are missing from this listing.
10212 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
10213 code()->ClearInlineCaches();
10214 set_ic_age(new_ic_age);
10215 if (code()->kind() == Code::FUNCTION) {
10216 code()->set_profiler_ticks(0);
10217 if (optimization_disabled() &&
10218 opt_count() >= FLAG_max_opt_count) {
10219 // Re-enable optimizations if they were disabled due to opt_count limit.
10220 set_optimization_disabled(false);
10221 code()->set_optimizable(true);
10224 set_deopt_count(0);
// Transition-tree visitor callback: folds the minimum unused-property slack
// over all visited maps into *data (treated as an int accumulator).
10229 static void GetMinInobjectSlack(Map* map, void* data) {
10230 int slack = map->unused_property_fields();
10231 if (*reinterpret_cast<int*>(data) > slack) {
10232 *reinterpret_cast<int*>(data) = slack;
// Transition-tree visitor callback: removes |*data| (the computed slack) from
// each visited map's in-object property count, unused fields, and instance
// size, then refreshes size-derived cached state.
10237 static void ShrinkInstanceSize(Map* map, void* data) {
10238 int slack = *reinterpret_cast<int*>(data);
10239 map->set_inobject_properties(map->inobject_properties() - slack);
10240 map->set_unused_property_fields(map->unused_property_fields() - slack);
10241 map->set_instance_size(map->instance_size() - slack * kPointerSize);
10243 // Visitor id might depend on the instance size, recalculate it.
10244 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
// Finishes in-object slack tracking: detaches the initial map, restores the
// generic construct stub, computes the minimum slack across the map's whole
// transition tree, and shrinks every map in the tree by that amount.
// NOTE(review): line 10256 (the second ASSERT_EQ argument) is missing from
// this listing — the embedded numbering jumps 10255 -> 10257; confirm upstream.
10248 void SharedFunctionInfo::CompleteInobjectSlackTracking() {
10249 ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
10250 Map* map = Map::cast(initial_map());
10252 Heap* heap = map->GetHeap();
10253 set_initial_map(heap->undefined_value());
10254 Builtins* builtins = heap->isolate()->builtins();
10255 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
10257 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
// Seed the fold with this map's own slack, then take the minimum over the
// transition tree so no map is shrunk below what its instances need.
10259 int slack = map->unused_property_fields();
10260 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
10262 // Resize the initial map and all maps in its transition tree.
10263 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
10265 // Give the correct expected_nof_properties to initial maps created later.
10266 ASSERT(expected_nof_properties() >= slack);
10267 set_expected_nof_properties(expected_nof_properties() - slack);
// Linearly scans the optimized code map for an entry matching the given
// native context and OSR AST id; returns the index of the cached code slot,
// or (per the visible early-return) -1 when caching is disabled.
// NOTE(review): the function's tail (the trace PrintF continuation and the
// final return) is missing from this listing — numbering jumps past 10288.
10272 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10273 BailoutId osr_ast_id) {
10274 ASSERT(native_context->IsNativeContext());
10275 if (!FLAG_cache_optimized_code) return -1;
// A Smi value means "no map installed yet"; otherwise it is a FixedArray of
// (context, code, literals, osr-ast-id) tuples laid out kEntryLength apart.
10276 Object* value = optimized_code_map();
10277 if (!value->IsSmi()) {
10278 FixedArray* optimized_code_map = FixedArray::cast(value);
10279 int length = optimized_code_map->length();
10280 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
10281 for (int i = kEntriesStart; i < length; i += kEntryLength) {
10282 if (optimized_code_map->get(i + kContextOffset) == native_context &&
10283 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10284 return i + kCachedCodeOffset;
10287 if (FLAG_trace_opt) {
10288 PrintF("[didn't find optimized code in optimized code map for ");
// Table of visitor-synchronization tag identifiers, generated from the
// VISITOR_SYNCHRONIZATION_TAGS_LIST X-macro (second column: tag enum name).
10297 #define DECLARE_TAG(ignore1, name, ignore2) name,
10298 const char* const VisitorSynchronization::kTags[
10299 VisitorSynchronization::kNumberOfSyncTags] = {
10300 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Human-readable names for the same tags, generated from the third column of
// the VISITOR_SYNCHRONIZATION_TAGS_LIST X-macro.
10305 #define DECLARE_TAG(ignore1, ignore2, name) name,
10306 const char* const VisitorSynchronization::kTagNames[
10307 VisitorSynchronization::kNumberOfSyncTags] = {
10308 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Visits the Code object referenced by a CODE_TARGET reloc entry. Code
// targets are not expected to move here, so the pointer must be unchanged
// after the visit (enforced by the CHECK_EQ).
10313 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10314 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
10315 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10316 Object* old_target = target;
10317 VisitPointer(&target);
10318 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits the code-age stub referenced by a CODE_AGE_SEQUENCE reloc entry.
10322 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10323 ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10324 Object* stub = rinfo->code_age_stub();
10326 VisitPointer(&stub);
// Visits a code-entry slot (a raw entry-point address). If the visitor moved
// the Code object, the slot is rewritten with the new entry address.
10331 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10332 Object* code = Code::GetObjectFromEntryAddress(entry_address);
10333 Object* old_code = code;
10334 VisitPointer(&code);
10335 if (code != old_code) {
10336 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
// Visits the Cell referenced by a CELL reloc entry; if the visitor moved the
// cell, the relocation target is updated to the new location.
10341 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10342 ASSERT(rinfo->rmode() == RelocInfo::CELL);
10343 Object* cell = rinfo->target_cell();
10344 Object* old_cell = cell;
10345 VisitPointer(&cell);
10346 if (cell != old_cell) {
10347 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
// Visits the Code target of a patched debug break (JS return site or debug
// break slot). Like VisitCodeTarget, the pointer must not move.
10352 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10353 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10354 rinfo->IsPatchedReturnSequence()) ||
10355 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10356 rinfo->IsPatchedDebugBreakSlotSequence()));
10357 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10358 Object* old_target = target;
10359 VisitPointer(&target);
10360 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits an object embedded directly in code (EMBEDDED_OBJECT reloc mode).
// NOTE(review): the line that forwards |p| to the visitor (after 10366) is
// missing from this listing; confirm against upstream.
10364 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10365 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10366 Object* p = rinfo->target_object();
// Visits an external (non-heap) reference embedded in code by forwarding the
// raw address to the address-taking overload.
10371 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10372 Address p = rinfo->target_reference();
10373 VisitExternalReference(&p);
// Drops all relocation information by pointing at the shared empty byte
// array, effectively invalidating this code object's reloc data.
10377 void Code::InvalidateRelocation() {
10378 set_relocation_info(GetHeap()->empty_byte_array());
// Overwrites every embedded object with undefined and every embedded cell
// with the undefined cell, severing this code's references into the heap.
// Write barriers are skipped because the targets are immortal immovable
// roots.
10382 void Code::InvalidateEmbeddedObjects() {
10383 Object* undefined = GetHeap()->undefined_value();
10384 Cell* undefined_cell = GetHeap()->undefined_cell();
10385 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10386 RelocInfo::ModeMask(RelocInfo::CELL);
10387 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10388 RelocInfo::Mode mode = it.rinfo()->rmode();
10389 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10390 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
10391 } else if (mode == RelocInfo::CELL) {
10392 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
// Applies |delta| to every relocatable entry after the code object moved,
// then flushes the instruction cache for the rewritten range.
10398 void Code::Relocate(intptr_t delta) {
10399 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10400 it.rinfo()->apply(delta);
10402 CPU::FlushICache(instruction_start(), instruction_size());
// Copies freshly assembled code and relocation info from |desc| into this
// (white, not-yet-visible) Code object, then fixes up every relocation entry:
// handles are unboxed to raw heap pointers, code targets become direct entry
// addresses, and pc-relative entries are shifted by the copy delta. Finishes
// with an instruction-cache flush.
10406 void Code::CopyFrom(const CodeDesc& desc) {
10407 ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);
// Instructions live at the front of the buffer; reloc info is written
// backwards from the end of the buffer.
10410 CopyBytes(instruction_start(), desc.buffer,
10411 static_cast<size_t>(desc.instr_size));
10414 CopyBytes(relocation_start(),
10415 desc.buffer + desc.buffer_size - desc.reloc_size,
10416 static_cast<size_t>(desc.reloc_size));
10418 // unbox handles and relocate
10419 intptr_t delta = instruction_start() - desc.buffer;
10420 int mode_mask = RelocInfo::kCodeTargetMask |
10421 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10422 RelocInfo::ModeMask(RelocInfo::CELL) |
10423 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
10424 RelocInfo::kApplyMask;
10425 // Needed to find target_object and runtime_entry on X64
10426 Assembler* origin = desc.origin;
// Handle dereference is safe here: the handles were created during assembly
// and the code object is not yet reachable by the GC.
10427 AllowDeferredHandleDereference embedding_raw_address;
10428 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10429 RelocInfo::Mode mode = it.rinfo()->rmode();
10430 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10431 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10432 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER);
10433 } else if (mode == RelocInfo::CELL) {
10434 Handle<Cell> cell = it.rinfo()->target_cell_handle();
10435 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER);
10436 } else if (RelocInfo::IsCodeTarget(mode)) {
10437 // rewrite code handles in inline cache targets to direct
10438 // pointers to the first instruction in the code object
10439 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10440 Code* code = Code::cast(*p);
10441 it.rinfo()->set_target_address(code->instruction_start(),
10442 SKIP_WRITE_BARRIER);
10443 } else if (RelocInfo::IsRuntimeEntry(mode)) {
10444 Address p = it.rinfo()->target_runtime_entry(origin);
10445 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER);
10446 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
10447 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
10448 Code* code = Code::cast(*p);
10449 it.rinfo()->set_code_age_stub(code);
// All remaining modes in kApplyMask are pc-relative; shift them by delta.
10451 it.rinfo()->apply(delta);
10454 CPU::FlushICache(instruction_start(), instruction_size());
10458 // Locate the source position which is closest to the address in the code. This
10459 // is using the source position information embedded in the relocation info.
10460 // The position returned is relative to the beginning of the script where the
10461 // source for this function is found.
// NOTE(review): the visible tail ends inside the candidate-update branch —
// the assignment to |position|/|distance|, the it.next() advance, and the
// final return are missing from this listing (numbering jumps past 10481).
10462 int Code::SourcePosition(Address pc) {
10463 int distance = kMaxInt;
10464 int position = RelocInfo::kNoPosition; // Initially no position found.
10465 // Run through all the relocation info to find the best matching source
10466 // position. All the code needs to be considered as the sequence of the
10467 // instructions in the code does not necessarily follow the same order as the
10469 RelocIterator it(this, RelocInfo::kPositionMask);
10470 while (!it.done()) {
10471 // Only look at positions after the current pc.
10472 if (it.rinfo()->pc() < pc) {
10473 // Get position and distance.
10475 int dist = static_cast<int>(pc - it.rinfo()->pc());
10476 int pos = static_cast<int>(it.rinfo()->data());
10477 // If this position is closer than the current candidate or if it has the
10478 // same distance as the current candidate and the position is higher then
10479 // this position is the new candidate.
10480 if ((dist < distance) ||
10481 (dist == distance && pos > position)) {
10492 // Same as Code::SourcePosition above except it only looks for statement
// positions: returns the largest statement position that is <= the plain
// source position found for |pc| (0 if none precede it).
10494 int Code::SourceStatementPosition(Address pc) {
10495 // First find the position as close as possible using all position
10497 int position = SourcePosition(pc);
10498 // Now find the closest statement position before the position.
10499 int statement_position = 0;
10500 RelocIterator it(this, RelocInfo::kPositionMask);
10501 while (!it.done()) {
10502 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10503 int p = static_cast<int>(it.rinfo()->data());
10504 if (statement_position < p && p <= position) {
10505 statement_position = p;
// NOTE(review): the iterator advance (it.next()) between 10505 and the
// return appears to have been dropped from this listing; confirm upstream.
10510 return statement_position;
// Looks up the safepoint entry for program counter |pc| in this code's
// safepoint table.
10514 SafepointEntry Code::GetSafepointEntry(Address pc) {
10515 SafepointTable table(this);
10516 return table.FindEntry(pc);
// Returns the n-th embedded heap object whose map equals |match_map|, or
// (per the callers below) NULL when fewer than n such objects exist.
10520 Object* Code::FindNthObject(int n, Map* match_map) {
10521 ASSERT(is_inline_cache_stub());
10522 DisallowHeapAllocation no_allocation;
10523 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10524 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10525 RelocInfo* info = it.rinfo();
10526 Object* object = info->target_object();
10527 if (object->IsHeapObject()) {
10528 if (HeapObject::cast(object)->map() == match_map) {
10529 if (--n == 0) return object;
// Returns the first AllocationSite embedded in this code, or NULL.
10537 AllocationSite* Code::FindFirstAllocationSite() {
10538 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10539 return (result != NULL) ? AllocationSite::cast(result) : NULL;
// Returns the first Map embedded in this code (objects whose map is the
// meta map are Maps), or NULL.
10543 Map* Code::FindFirstMap() {
10544 Object* result = FindNthObject(1, GetHeap()->meta_map());
10545 return (result != NULL) ? Map::cast(result) : NULL;
// Walks embedded objects and replaces them according to |pattern|: each time
// the current find_ map matches, the object is swapped for the corresponding
// replace_ handle and the pattern index advances. Stops after pattern.count_
// replacements. Patterns must therefore occur in code order.
10549 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
10550 ASSERT(is_inline_cache_stub() || is_handler());
10551 DisallowHeapAllocation no_allocation;
10552 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10553 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
10554 int current_pattern = 0;
10555 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10556 RelocInfo* info = it.rinfo();
10557 Object* object = info->target_object();
10558 if (object->IsHeapObject()) {
10559 Map* map = HeapObject::cast(object)->map();
10560 if (map == *pattern.find_[current_pattern]) {
10561 info->set_target_object(*pattern.replace_[current_pattern]);
10562 if (++current_pattern == pattern.count_) return;
// Collects every Map embedded in this IC stub into |maps|.
10570 void Code::FindAllMaps(MapHandleList* maps) {
10571 ASSERT(is_inline_cache_stub());
10572 DisallowHeapAllocation no_allocation;
10573 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10574 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10575 RelocInfo* info = it.rinfo();
10576 Object* object = info->target_object();
10577 if (object->IsMap()) maps->Add(handle(Map::cast(object)));
// Collects the HeapType corresponding to every Map embedded in this IC stub
// into |types|.
10582 void Code::FindAllTypes(TypeHandleList* types) {
10583 ASSERT(is_inline_cache_stub());
10584 DisallowHeapAllocation no_allocation;
10585 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10586 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10587 RelocInfo* info = it.rinfo();
10588 Object* object = info->target_object();
10589 if (object->IsMap()) {
10590 Handle<Map> map(Map::cast(object));
10591 types->Add(IC::MapToType<HeapType>(map, map->GetIsolate()));
// Returns the first HANDLER-kind code target referenced by this IC stub.
// NOTE(review): the fall-through return for the not-found case (after 10604)
// is missing from this listing; confirm upstream.
10597 Code* Code::FindFirstHandler() {
10598 ASSERT(is_inline_cache_stub());
10599 DisallowHeapAllocation no_allocation;
10600 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10601 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10602 RelocInfo* info = it.rinfo();
10603 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10604 if (code->kind() == Code::HANDLER) return code;
// Appends up to |length| HANDLER code targets to |code_list|; returns true
// iff exactly |length| handlers were found. Relies on the invariant that
// handler targets precede any non-handler target in an IC stub.
// NOTE(review): the declaration/increment of the counter |i| (around 10614
// and 10623) appears to be missing from this listing; confirm upstream.
10610 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
10611 ASSERT(is_inline_cache_stub());
10612 DisallowHeapAllocation no_allocation;
10613 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10615 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10616 if (i == length) return true;
10617 RelocInfo* info = it.rinfo();
10618 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10619 // IC stubs with handlers never contain non-handler code objects before
10620 // handler targets.
10621 if (code->kind() != Code::HANDLER) break;
10622 code_list->Add(Handle<Code>(code));
10625 return i == length;
// Returns the first Name embedded in this IC stub (NULL fall-through is not
// visible in this listing but is implied by the early-return shape).
10629 Name* Code::FindFirstName() {
10630 ASSERT(is_inline_cache_stub());
10631 DisallowHeapAllocation no_allocation;
10632 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10633 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10634 RelocInfo* info = it.rinfo();
10635 Object* object = info->target_object();
10636 if (object->IsName()) return Name::cast(object);
// Clears all inline caches in this code, regardless of IC kind.
10642 void Code::ClearInlineCaches() {
10643 ClearInlineCaches(NULL);
// Clears only the inline caches of the given |kind|.
10647 void Code::ClearInlineCaches(Code::Kind kind) {
10648 ClearInlineCaches(&kind);
// Shared implementation: walks call-site reloc entries and clears each IC
// target, optionally filtered by |kind| (NULL clears every IC kind).
10652 void Code::ClearInlineCaches(Code::Kind* kind) {
10653 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10654 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10655 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
10656 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10657 RelocInfo* info = it.rinfo();
10658 Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10659 if (target->is_inline_cache_stub()) {
10660 if (kind == NULL || *kind == target->kind()) {
10661 IC::Clear(this->GetIsolate(), info->pc(),
10662 info->host()->constant_pool());
// Resets a FUNCTION's type feedback vector to the uninitialized sentinel,
// preserving AllocationSite entries (they must survive feedback clearing).
10669 void Code::ClearTypeFeedbackInfo(Heap* heap) {
10670 if (kind() != FUNCTION) return;
10671 Object* raw_info = type_feedback_info();
10672 if (raw_info->IsTypeFeedbackInfo()) {
10673 FixedArray* feedback_vector =
10674 TypeFeedbackInfo::cast(raw_info)->feedback_vector();
10675 for (int i = 0; i < feedback_vector->length(); i++) {
10676 Object* obj = feedback_vector->get(i);
10677 if (!obj->IsAllocationSite()) {
10678 // TODO(mvstanton): Can't I avoid a write barrier for this sentinel?
10679 feedback_vector->set(i,
10680 TypeFeedbackInfo::RawUninitializedSentinel(heap));
// Maps a back-edge pc offset to its AST id via the back edge table; returns
// BailoutId::None() when the offset has no entry.
10687 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
10688 DisallowHeapAllocation no_gc;
10689 ASSERT(kind() == FUNCTION);
10690 BackEdgeTable back_edges(this, &no_gc);
10691 for (uint32_t i = 0; i < back_edges.length(); i++) {
10692 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10694 return BailoutId::None();
// Inverse of TranslatePcOffsetToAstId: maps an AST id to its back-edge pc
// offset. Unlike the forward direction, a missing entry is a bug.
10698 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10699 DisallowHeapAllocation no_gc;
10700 ASSERT(kind() == FUNCTION);
10701 BackEdgeTable back_edges(this, &no_gc);
10702 for (uint32_t i = 0; i < back_edges.length(); i++) {
10703 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10705 UNREACHABLE(); // We expect to find the back edge.
// Patches a code-age sequence back to the "no age" (young) state.
10710 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10711 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
// Patches a code-age sequence to record that the code has now executed once.
10715 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
10716 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
10717 NO_MARKING_PARITY);
// Normalizes the two sentinel ages: never-executed code is treated as old
// and executed-once code as pre-aged; all other ages pass through.
// NOTE(review): the trailing "return age;" (around 10728) is missing from
// this listing; confirm upstream.
10721 static Code::Age EffectiveAge(Code::Age age) {
10722 if (age == Code::kNotExecutedCodeAge) {
10723 // Treat that's never been executed as old immediately.
10724 age = Code::kIsOldCodeAge;
10725 } else if (age == Code::kExecutedOnceCodeAge) {
10726 // Pre-age code that has only been executed once.
10727 age = Code::kPreAgedCodeAge;
// Advances this code's age by one step when the GC's marking parity differs
// from the parity recorded in the sequence (i.e. once per GC cycle), up to
// kLastCodeAge. No-op for code without an age sequence.
// NOTE(review): the declaration "Age age;" (around 10736) and the
// |sequence| argument line of PatchPlatformCodeAge (around 10742) are
// missing from this listing; confirm upstream.
10733 void Code::MakeOlder(MarkingParity current_parity) {
10734 byte* sequence = FindCodeAgeSequence();
10735 if (sequence != NULL) {
10737 MarkingParity code_parity;
10738 GetCodeAgeAndParity(sequence, &age, &code_parity);
10739 age = EffectiveAge(age);
10740 if (age != kLastCodeAge && code_parity != current_parity) {
10741 PatchPlatformCodeAge(GetIsolate(),
10743 static_cast<Age>(age + 1),
// True when this code's (effective) age has reached the "old" threshold.
10750 bool Code::IsOld() {
10751 return GetAge() >= kIsOldCodeAge;
// Returns the address of this code's age-patching sequence (at the prologue
// offset), or — per the conditional-expression shape — a null result when
// aging is disabled, no prologue offset was recorded, or the code kind does
// not support aging (FUNCTION with debug break slots, or other kinds).
// NOTE(review): the ": NULL;" arm of the conditional (around 10761) is
// missing from this listing; confirm upstream.
10755 byte* Code::FindCodeAgeSequence() {
10756 return FLAG_age_code &&
10757 prologue_offset() != Code::kPrologueOffsetNotSet &&
10758 (kind() == OPTIMIZED_FUNCTION ||
10759 (kind() == FUNCTION && !has_debug_break_slots()))
10760 ? instruction_start() + prologue_offset()
// The raw age with the execution-count sentinels normalized away.
10765 Code::Age Code::GetAge() {
10766 return EffectiveAge(GetRawAge());
// Decodes the raw age recorded in the code-age sequence; kNoAgeCodeAge when
// there is no sequence.
// NOTE(review): the "Age age;" declaration (around 10775) and the trailing
// "return age;" are missing from this listing; confirm upstream.
10770 Code::Age Code::GetRawAge() {
10771 byte* sequence = FindCodeAgeSequence();
10772 if (sequence == NULL) {
10773 return kNoAgeCodeAge;
10776 MarkingParity parity;
10777 GetCodeAgeAndParity(sequence, &age, &parity);
// Recovers (age, parity) from the age stub |code| by comparing it against
// every generated Make*CodeYoungAgain builtin (via the CODE_AGE_LIST
// X-macro) plus the two executed-once/twice sentinel builtins.
// NOTE(review): the "Code* stub;" declaration (around 10786) and the early
// returns inside each HANDLE_CODE_AGE expansion appear to be missing from
// this listing; confirm upstream.
10782 void Code::GetCodeAgeAndParity(Code* code, Age* age,
10783 MarkingParity* parity) {
10784 Isolate* isolate = code->GetIsolate();
10785 Builtins* builtins = isolate->builtins();
10787 #define HANDLE_CODE_AGE(AGE) \
10788 stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
10789 if (code == stub) { \
10790 *age = k##AGE##CodeAge; \
10791 *parity = EVEN_MARKING_PARITY; \
10794 stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10795 if (code == stub) { \
10796 *age = k##AGE##CodeAge; \
10797 *parity = ODD_MARKING_PARITY; \
10800 CODE_AGE_LIST(HANDLE_CODE_AGE)
10801 #undef HANDLE_CODE_AGE
10802 stub = *builtins->MarkCodeAsExecutedOnce();
10803 if (code == stub) {
10804 *age = kNotExecutedCodeAge;
10805 *parity = NO_MARKING_PARITY;
10808 stub = *builtins->MarkCodeAsExecutedTwice();
10809 if (code == stub) {
10810 *age = kExecutedOnceCodeAge;
10811 *parity = NO_MARKING_PARITY;
// Inverse of GetCodeAgeAndParity: returns the builtin age stub for a given
// (age, parity) pair, switching over the CODE_AGE_LIST-generated cases plus
// the two sentinel ages.
// NOTE(review): the "switch (age) {" opener (around 10820) and the per-case
// "return stub;" lines are missing from this listing; confirm upstream.
10818 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
10819 Builtins* builtins = isolate->builtins();
10821 #define HANDLE_CODE_AGE(AGE) \
10822 case k##AGE##CodeAge: { \
10823 Code* stub = parity == EVEN_MARKING_PARITY \
10824 ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
10825 : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10828 CODE_AGE_LIST(HANDLE_CODE_AGE)
10829 #undef HANDLE_CODE_AGE
10830 case kNotExecutedCodeAge: {
10831 ASSERT(parity == NO_MARKING_PARITY);
10832 return *builtins->MarkCodeAsExecutedOnce();
10834 case kExecutedOnceCodeAge: {
10835 ASSERT(parity == NO_MARKING_PARITY);
10836 return *builtins->MarkCodeAsExecutedTwice();
// Prints the assembler comment preceding the deopt runtime entry whose
// (eager or soft) deoptimization id matches |bailout_id|. Comments and
// runtime entries are interleaved in reloc info, so the most recent comment
// is remembered as the scan advances.
10846 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
10847 const char* last_comment = NULL;
10848 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
10849 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
10850 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10851 RelocInfo* info = it.rinfo();
10852 if (info->rmode() == RelocInfo::COMMENT) {
10853 last_comment = reinterpret_cast<const char*>(info->data());
10854 } else if (last_comment != NULL) {
10855 if ((bailout_id == Deoptimizer::GetDeoptimizationId(
10856 GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
10857 (bailout_id == Deoptimizer::GetDeoptimizationId(
10858 GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
10859 CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
10860 PrintF(out, " %s\n", last_comment);
// True iff |pc| coincides with one of this code's recorded deopt points
// (entries with pc == -1 are skipped as "no pc recorded").
10868 bool Code::CanDeoptAt(Address pc) {
10869 DeoptimizationInputData* deopt_data =
10870 DeoptimizationInputData::cast(deoptimization_data());
10871 Address code_start_address = instruction_start();
10872 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
10873 if (deopt_data->Pc(i)->value() == -1) continue;
10874 Address address = code_start_address + deopt_data->Pc(i)->value();
10875 if (address == pc) return true;
10881 // Identify kind of code.
// Stringifies a Code::Kind via the CODE_KIND_LIST X-macro.
// NOTE(review): the "switch (kind) {" opener and the trailing
// UNREACHABLE()/return lines are missing from this listing; confirm upstream.
10882 const char* Code::Kind2String(Kind kind) {
10884 #define CASE(name) case name: return #name;
10885 CODE_KIND_LIST(CASE)
10887 case NUMBER_OF_KINDS: break;
10894 #ifdef ENABLE_DISASSEMBLER
// Debug printer (ENABLE_DISASSEMBLER only): dumps every deopt point and, in
// verbose mode, decodes each frame-translation byte stream opcode by opcode.
// NOTE(review): the embedded numbering has many gaps in this function (e.g.
// the #undef CASE/switch plumbing and the per-case "break;" lines are not
// visible); treat this listing as incomplete and confirm upstream.
10896 void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
10897 disasm::NameConverter converter;
10898 int deopt_count = DeoptCount();
10899 PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
10900 if (0 == deopt_count) return;
10902 PrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc",
10903 FLAG_print_code_verbose ? "commands" : "");
10904 for (int i = 0; i < deopt_count; i++) {
10905 PrintF(out, "%6d %6d %6d %6d",
10908 ArgumentsStackHeight(i)->value(),
10911 if (!FLAG_print_code_verbose) {
10915 // Print details of the frame translation.
10916 int translation_index = TranslationIndex(i)->value();
10917 TranslationIterator iterator(TranslationByteArray(), translation_index);
10918 Translation::Opcode opcode =
10919 static_cast<Translation::Opcode>(iterator.Next());
10920 ASSERT(Translation::BEGIN == opcode);
10921 int frame_count = iterator.Next();
10922 int jsframe_count = iterator.Next();
10923 PrintF(out, " %s {frame count=%d, js frame count=%d}\n",
10924 Translation::StringFor(opcode),
// Decode the remaining opcodes of this translation until the next BEGIN
// (which starts the next deopt point's translation).
10928 while (iterator.HasNext() &&
10929 Translation::BEGIN !=
10930 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
10931 PrintF(out, "%24s %s ", "", Translation::StringFor(opcode));
10934 case Translation::BEGIN:
10938 case Translation::JS_FRAME: {
10939 int ast_id = iterator.Next();
10940 int function_id = iterator.Next();
10941 unsigned height = iterator.Next();
10942 PrintF(out, "{ast_id=%d, function=", ast_id);
10943 if (function_id != Translation::kSelfLiteralId) {
10944 Object* function = LiteralArray()->get(function_id);
10945 JSFunction::cast(function)->PrintName(out);
10947 PrintF(out, "<self>");
10949 PrintF(out, ", height=%u}", height);
10953 case Translation::COMPILED_STUB_FRAME: {
10954 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
10955 PrintF(out, "{kind=%d}", stub_kind);
10959 case Translation::ARGUMENTS_ADAPTOR_FRAME:
10960 case Translation::CONSTRUCT_STUB_FRAME: {
10961 int function_id = iterator.Next();
10962 JSFunction* function =
10963 JSFunction::cast(LiteralArray()->get(function_id));
10964 unsigned height = iterator.Next();
10965 PrintF(out, "{function=");
10966 function->PrintName(out);
10967 PrintF(out, ", height=%u}", height);
10971 case Translation::GETTER_STUB_FRAME:
10972 case Translation::SETTER_STUB_FRAME: {
10973 int function_id = iterator.Next();
10974 JSFunction* function =
10975 JSFunction::cast(LiteralArray()->get(function_id));
10976 PrintF(out, "{function=");
10977 function->PrintName(out);
10982 case Translation::REGISTER: {
10983 int reg_code = iterator.Next();
10984 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
10988 case Translation::INT32_REGISTER: {
10989 int reg_code = iterator.Next();
10990 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
10994 case Translation::UINT32_REGISTER: {
10995 int reg_code = iterator.Next();
10996 PrintF(out, "{input=%s (unsigned)}",
10997 converter.NameOfCPURegister(reg_code));
11001 case Translation::DOUBLE_REGISTER: {
11002 int reg_code = iterator.Next();
11003 PrintF(out, "{input=%s}",
11004 DoubleRegister::AllocationIndexToString(reg_code));
11008 case Translation::FLOAT32x4_REGISTER: {
11009 int reg_code = iterator.Next();
11010 PrintF(out, "{input=%s}",
11011 SIMD128Register::AllocationIndexToString(reg_code));
11015 case Translation::INT32x4_REGISTER: {
11016 int reg_code = iterator.Next();
11017 PrintF(out, "{input=%s}",
11018 SIMD128Register::AllocationIndexToString(reg_code));
11022 case Translation::STACK_SLOT: {
11023 int input_slot_index = iterator.Next();
11024 PrintF(out, "{input=%d}", input_slot_index);
11028 case Translation::INT32_STACK_SLOT: {
11029 int input_slot_index = iterator.Next();
11030 PrintF(out, "{input=%d}", input_slot_index);
11034 case Translation::UINT32_STACK_SLOT: {
11035 int input_slot_index = iterator.Next();
11036 PrintF(out, "{input=%d (unsigned)}", input_slot_index);
11040 case Translation::DOUBLE_STACK_SLOT: {
11041 int input_slot_index = iterator.Next();
11042 PrintF(out, "{input=%d}", input_slot_index);
11046 case Translation::FLOAT32x4_STACK_SLOT: {
11047 int input_slot_index = iterator.Next();
11048 PrintF(out, "{input=%d}", input_slot_index);
11052 case Translation::INT32x4_STACK_SLOT: {
11053 int input_slot_index = iterator.Next();
11054 PrintF(out, "{input=%d}", input_slot_index);
11058 case Translation::LITERAL: {
11059 unsigned literal_index = iterator.Next();
11060 PrintF(out, "{literal_id=%u}", literal_index);
11064 case Translation::DUPLICATED_OBJECT: {
11065 int object_index = iterator.Next();
11066 PrintF(out, "{object_index=%d}", object_index);
11070 case Translation::ARGUMENTS_OBJECT:
11071 case Translation::CAPTURED_OBJECT: {
11072 int args_length = iterator.Next();
11073 PrintF(out, "{length=%d}", args_length);
// Debug printer: dumps each deopt point's AST id, pc, and full-codegen
// state decoded from the packed pc-and-state word.
11083 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
11084 PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
11085 this->DeoptPoints());
11086 if (this->DeoptPoints() == 0) return;
11088 PrintF(out, "%6s %8s %s\n", "ast id", "pc", "state");
11089 for (int i = 0; i < this->DeoptPoints(); i++) {
11090 int pc_and_state = this->PcAndState(i)->value();
11091 PrintF(out, "%6d %8d %s\n",
11092 this->AstId(i).ToInt(),
11093 FullCodeGenerator::PcField::decode(pc_and_state),
11094 FullCodeGenerator::State2String(
11095 FullCodeGenerator::StateField::decode(pc_and_state)));
// Stringifies an InlineCacheState for debug output.
// NOTE(review): the "switch (state) {" opener (around 11101) and the
// trailing UNREACHABLE()/return are missing from this listing.
11100 const char* Code::ICState2String(InlineCacheState state) {
11102 case UNINITIALIZED: return "UNINITIALIZED";
11103 case PREMONOMORPHIC: return "PREMONOMORPHIC";
11104 case MONOMORPHIC: return "MONOMORPHIC";
11105 case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE";
11106 case POLYMORPHIC: return "POLYMORPHIC";
11107 case MEGAMORPHIC: return "MEGAMORPHIC";
11108 case GENERIC: return "GENERIC";
11109 case DEBUG_STUB: return "DEBUG_STUB";
// Stringifies a StubType for debug output.
// NOTE(review): the "switch (type) {" opener (around 11117) and the final
// return after UNREACHABLE() are missing from this listing.
11116 const char* Code::StubType2String(StubType type) {
11118 case NORMAL: return "NORMAL";
11119 case FAST: return "FAST";
11121 UNREACHABLE(); // keep the compiler happy
// Prints a symbolic name for the extra IC state where one is known (STRICT
// for store ICs), otherwise the raw numeric value.
// NOTE(review): the "switch (kind) {" opener and the STORE_IC case label
// preceding KEYED_STORE_IC (around 11129-11130) are missing from this
// listing; confirm upstream.
11126 void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
11127 PrintF(out, "extra_ic_state = ");
11128 const char* name = NULL;
11131 case KEYED_STORE_IC:
11132 if (extra == STRICT) name = "STRICT";
11137 if (name != NULL) {
11138 PrintF(out, "%s\n", name);
11140 PrintF(out, "%d\n", extra);
// Full debug disassembly (ENABLE_DISASSEMBLER only): prints kind/IC metadata,
// the decoded instruction stream, deoptimization data, safepoints or the
// back-edge table depending on code kind, optional type feedback, and
// finally the raw relocation info.
11145 void Code::Disassemble(const char* name, FILE* out) {
11146 PrintF(out, "kind = %s\n", Kind2String(kind()));
11147 if (has_major_key()) {
11148 PrintF(out, "major_key = %s\n",
11149 CodeStub::MajorName(CodeStub::GetMajorKey(this), true));
11151 if (is_inline_cache_stub()) {
11152 PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
11153 PrintExtraICState(out, kind(), extra_ic_state());
11154 if (ic_state() == MONOMORPHIC) {
11155 PrintF(out, "type = %s\n", StubType2String(type()));
11157 if (is_compare_ic_stub()) {
11158 ASSERT(major_key() == CodeStub::CompareIC);
11159 CompareIC::State left_state, right_state, handler_state;
// NOTE(review): the "Token::Value op;" declaration (around 11160) is
// missing from this listing; confirm upstream.
11161 ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state,
11162 &handler_state, &op);
11163 PrintF(out, "compare_state = %s*%s -> %s\n",
11164 CompareIC::GetStateName(left_state),
11165 CompareIC::GetStateName(right_state),
11166 CompareIC::GetStateName(handler_state));
11167 PrintF(out, "compare_operation = %s\n", Token::Name(op));
11170 if ((name != NULL) && (name[0] != '\0')) {
11171 PrintF(out, "name = %s\n", name);
11173 if (kind() == OPTIMIZED_FUNCTION) {
11174 PrintF(out, "stack_slots = %d\n", stack_slots());
11177 PrintF(out, "Instructions (size = %d)\n", instruction_size());
11178 Disassembler::Decode(out, this);
11181 if (kind() == FUNCTION) {
11182 DeoptimizationOutputData* data =
11183 DeoptimizationOutputData::cast(this->deoptimization_data());
11184 data->DeoptimizationOutputDataPrint(out);
11185 } else if (kind() == OPTIMIZED_FUNCTION) {
11186 DeoptimizationInputData* data =
11187 DeoptimizationInputData::cast(this->deoptimization_data());
11188 data->DeoptimizationInputDataPrint(out);
11192 if (is_crankshafted()) {
11193 SafepointTable table(this);
11194 PrintF(out, "Safepoints (size = %u)\n", table.size());
11195 for (unsigned i = 0; i < table.length(); i++) {
11196 unsigned pc_offset = table.GetPcOffset(i);
11197 PrintF(out, "%p %4d ", (instruction_start() + pc_offset), pc_offset);
11198 table.PrintEntry(i, out);
11199 PrintF(out, " (sp -> fp)");
11200 SafepointEntry entry = table.GetEntry(i);
11201 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
11202 PrintF(out, " %6d", entry.deoptimization_index());
11204 PrintF(out, " <none>");
11206 if (entry.argument_count() > 0) {
11207 PrintF(out, " argc: %d", entry.argument_count());
11212 } else if (kind() == FUNCTION) {
11213 unsigned offset = back_edge_table_offset();
11214 // If there is no back edge table, the "table start" will be at or after
11215 // (due to alignment) the end of the instruction stream.
11216 if (static_cast<int>(offset) < instruction_size()) {
11217 DisallowHeapAllocation no_gc;
11218 BackEdgeTable back_edges(this, &no_gc);
11220 PrintF(out, "Back edges (size = %u)\n", back_edges.length());
11221 PrintF(out, "ast_id pc_offset loop_depth\n");
11223 for (uint32_t i = 0; i < back_edges.length(); i++) {
11224 PrintF(out, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
11225 back_edges.pc_offset(i),
11226 back_edges.loop_depth(i));
11231 #ifdef OBJECT_PRINT
11232 if (!type_feedback_info()->IsUndefined()) {
11233 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
11239 PrintF(out, "RelocInfo (size = %d)\n", relocation_size());
11240 for (RelocIterator it(this); !it.done(); it.next()) {
11241 it.rinfo()->Print(GetIsolate(), out);
11245 #endif // ENABLE_DISASSEMBLER
// Grows (or shrinks) |object|'s fast elements backing store to |capacity|,
// choosing the new ElementsKind (SMI vs object, holey vs packed) from
// |smi_mode| and the current kind, copying the old elements across, and
// installing the new map + elements. For sloppy-arguments elements only the
// inner backing store (parameter_map slot 1) is replaced. Returns the new
// backing store.
// NOTE(review): the parameter lines for capacity/length (around 11250-11251)
// are missing from this listing; confirm upstream.
11248 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
11249 Handle<JSObject> object,
11252 SetFastElementsCapacitySmiMode smi_mode) {
11253 // We should never end in here with a pixel or external array.
11254 ASSERT(!object->HasExternalArrayElements());
11256 // Allocate a new fast elements backing store.
11257 Handle<FixedArray> new_elements =
11258 object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
11260 ElementsKind elements_kind = object->GetElementsKind();
11261 ElementsKind new_elements_kind;
11262 // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
11263 // or if it's allowed and the old elements array contained only SMIs.
11264 bool has_fast_smi_elements =
11265 (smi_mode == kForceSmiElements) ||
11266 ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
11267 if (has_fast_smi_elements) {
11268 if (IsHoleyElementsKind(elements_kind)) {
11269 new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
11271 new_elements_kind = FAST_SMI_ELEMENTS;
11274 if (IsHoleyElementsKind(elements_kind)) {
11275 new_elements_kind = FAST_HOLEY_ELEMENTS;
11277 new_elements_kind = FAST_ELEMENTS;
11280 Handle<FixedArrayBase> old_elements(object->elements());
11281 ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
11282 accessor->CopyElements(object, new_elements, elements_kind);
11284 if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
// Only transition the map when the kind actually changed; otherwise keep
// the current map and just swap in the new backing store.
11285 Handle<Map> new_map = (new_elements_kind != elements_kind)
11286 ? GetElementsTransitionMap(object, new_elements_kind)
11287 : handle(object->map());
11288 object->ValidateElements();
11289 object->set_map_and_elements(*new_map, *new_elements);
11291 // Transition through the allocation site as well if present.
11292 JSObject::UpdateAllocationSite(object, new_elements_kind);
// Sloppy-arguments case: the parameter map stays; slot 1 holds the actual
// backing store ("arguments" array) that gets replaced.
11294 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11295 parameter_map->set(1, *new_elements);
11298 if (FLAG_trace_elements_transitions) {
11299 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11300 object->GetElementsKind(), new_elements);
11303 if (object->IsJSArray()) {
11304 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11306 return new_elements;
// Replaces the object's backing store with a FixedDoubleArray of |capacity|,
// transitioning to FAST_(HOLEY_)DOUBLE_ELEMENTS and, for JSArrays, updating
// the length. Counterpart of SetFastElementsCapacityAndLength for doubles.
11310 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11313 // We should never end in here with a pixel or external array.
11314 ASSERT(!object->HasExternalArrayElements());
11316 Handle<FixedArrayBase> elems =
11317 object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11319 ElementsKind elements_kind = object->GetElementsKind();
// Sloppy-arguments objects must not be converted to double elements.
11320 CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11321 ElementsKind new_elements_kind = elements_kind;
11322 if (IsHoleyElementsKind(elements_kind)) {
11323 new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11325 new_elements_kind = FAST_DOUBLE_ELEMENTS;
11328 Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11330 Handle<FixedArrayBase> old_elements(object->elements());
// Copy using the double-elements accessor so values are unboxed on the way.
11331 ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11332 accessor->CopyElements(object, elems, elements_kind);
11334 object->ValidateElements();
11335 object->set_map_and_elements(*new_map, *elems);
11337 if (FLAG_trace_elements_transitions) {
11338 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11339 object->GetElementsKind(), elems);
11342 if (object->IsJSArray()) {
11343 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
// Allocates initial element storage for |array| with the given capacity and
// length; new slots are filled with the hole.
11349 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
11350 ASSERT(capacity >= 0);
11351 array->GetIsolate()->factory()->NewJSArrayStorage(
11352 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
// Grows the array so that both its capacity and its length are at least
// |required_size|, delegating to the kind-specific elements accessor.
11356 void JSArray::Expand(Handle<JSArray> array, int required_size) {
11357 ElementsAccessor* accessor = array->GetElementsAccessor();
11358 accessor->SetCapacityAndLength(array, required_size, required_size);
11362 // Returns false if the passed-in index is marked non-configurable,
11363 // which will cause the ES5 truncation operation to halt, and thus
11364 // no further old values need be collected.
// Helper for observed-array length changes: records the element's previous
// value and its index into |old_values| / |indices| so change records can be
// emitted later.
11365 static bool GetOldValue(Isolate* isolate,
11366 Handle<JSObject> object,
11368 List<Handle<Object> >* old_values,
11369 List<uint32_t>* indices) {
11370 PropertyAttributes attributes =
11371 JSReceiver::GetLocalElementAttribute(object, index);
11372 ASSERT(attributes != ABSENT);
11373 if (attributes == DONT_DELETE) return false;
11374 Handle<Object> value;
11375 if (object->GetLocalElementAccessorPair(index) != NULL) {
// Accessor-backed elements have no data value; record the hole instead of
// invoking the getter.
11376 value = Handle<Object>::cast(isolate->factory()->the_hole_value());
11378 value = Object::GetElementNoExceptionThrown(isolate, object, index);
11380 old_values->Add(value);
11381 indices->Add(index);
// Enqueues an Object.observe "splice" change record for |object|, describing
// a mutation at |index| that removed the elements captured in |deleted| and
// added |add_count| elements. Dispatches to the JS-side enqueue function.
11385 static void EnqueueSpliceRecord(Handle<JSArray> object,
11387 Handle<JSArray> deleted,
11388 uint32_t add_count) {
11389 Isolate* isolate = object->GetIsolate();
11390 HandleScope scope(isolate);
11391 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
11392 Handle<Object> add_count_object =
11393 isolate->factory()->NewNumberFromUint(add_count);
11395 Handle<Object> args[] =
11396 { object, index_object, deleted, add_count_object };
11399 Execution::Call(isolate,
11400 Handle<JSFunction>(isolate->observers_enqueue_splice()),
11401 isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
// Notifies the observation machinery that a compound splice operation on
// |object| is starting, so intermediate change records can be coalesced.
11407 static void BeginPerformSplice(Handle<JSArray> object) {
11408 Isolate* isolate = object->GetIsolate();
11409 HandleScope scope(isolate);
11410 Handle<Object> args[] = { object };
11413 Execution::Call(isolate,
11414 Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11415 isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
// Counterpart of BeginPerformSplice: signals that the compound splice
// operation on |object| has finished.
11421 static void EndPerformSplice(Handle<JSArray> object) {
11422 Isolate* isolate = object->GetIsolate();
11423 HandleScope scope(isolate);
11424 Handle<Object> args[] = { object };
11427 Execution::Call(isolate,
11428 Handle<JSFunction>(isolate->observers_end_perform_splice()),
11429 isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
// Sets the array's length. For unobserved arrays this delegates directly to
// the elements accessor. For observed arrays it additionally collects the old
// values of every element about to be truncated (stopping at the first
// non-configurable one), performs the length change, and then emits "delete",
// "update"(length) and "splice" change records.
11435 Handle<Object> JSArray::SetElementsLength(Handle<JSArray> array,
11436 Handle<Object> new_length_handle) {
11437 // We should never end in here with a pixel or external array.
11438 ASSERT(array->AllowsSetElementsLength());
11439 if (!array->map()->is_observed()) {
// Fast path: no observers, just let the accessor resize.
11440 return array->GetElementsAccessor()->SetLength(array, new_length_handle);
11443 Isolate* isolate = array->GetIsolate();
11444 List<uint32_t> indices;
11445 List<Handle<Object> > old_values;
11446 Handle<Object> old_length_handle(array->length(), isolate);
11447 uint32_t old_length = 0;
11448 CHECK(old_length_handle->ToArrayIndex(&old_length));
11449 uint32_t new_length = 0;
11450 CHECK(new_length_handle->ToArrayIndex(&new_length));
11452 static const PropertyAttributes kNoAttrFilter = NONE;
11453 int num_elements = array->NumberOfLocalElements(kNoAttrFilter);
11454 if (num_elements > 0) {
11455 if (old_length == static_cast<uint32_t>(num_elements)) {
11456 // Simple case for arrays without holes.
// Walk downward from the last element; i + 1 > new_length avoids unsigned
// underflow when new_length is 0.
11457 for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
11458 if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
11461 // For sparse arrays, only iterate over existing elements.
11462 // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
11463 // the to-be-removed indices twice.
11464 Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
11465 array->GetLocalElementKeys(*keys, kNoAttrFilter);
11466 while (num_elements-- > 0) {
11467 uint32_t index = NumberToUint32(keys->get(num_elements));
11468 if (index < new_length) break;
11469 if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
// Perform the actual length change before emitting any records.
11474 Handle<Object> hresult =
11475 array->GetElementsAccessor()->SetLength(array, new_length_handle);
11476 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, hresult, hresult);
// Re-read the length: the accessor may have clamped it (e.g. at a
// non-configurable element).
11478 CHECK(array->length()->ToArrayIndex(&new_length));
11479 if (old_length == new_length) return hresult;
11481 BeginPerformSplice(array);
11483 for (int i = 0; i < indices.length(); ++i) {
11484 JSObject::EnqueueChangeRecord(
11485 array, "delete", isolate->factory()->Uint32ToString(indices[i]),
11488 JSObject::EnqueueChangeRecord(
11489 array, "update", isolate->factory()->length_string(),
11490 old_length_handle);
11492 EndPerformSplice(array);
// Build the "deleted" array for the splice record from the collected old
// values, rebased so index 0 is the splice point.
11494 uint32_t index = Min(old_length, new_length);
11495 uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
11496 uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
11497 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
11498 if (delete_count > 0) {
11499 for (int i = indices.length() - 1; i >= 0; i--) {
11500 JSObject::SetElement(deleted, indices[i] - index, old_values[i], NONE,
11504 SetProperty(deleted, isolate->factory()->length_string(),
11505 isolate->factory()->NewNumberFromUint(delete_count),
11509 EnqueueSpliceRecord(array, index, deleted, add_count);
// Looks up a cached prototype transition of |map| to |prototype|. The cache
// is a flat FixedArray of (prototype, map) entries after a small header.
// Returns a null handle on a cache miss.
11515 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11516 Handle<Object> prototype) {
11517 FixedArray* cache = map->GetPrototypeTransitions();
11518 int number_of_transitions = map->NumberOfProtoTransitions();
11519 const int proto_offset =
11520 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11521 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11522 const int step = kProtoTransitionElementsPerEntry;
// Linear scan; identity comparison against the prototype object.
11523 for (int i = 0; i < number_of_transitions; i++) {
11524 if (cache->get(proto_offset + i * step) == *prototype) {
11525 Object* result = cache->get(map_offset + i * step);
11526 return Handle<Map>(Map::cast(result));
11529 return Handle<Map>();
// Records the transition map -> target_map keyed by |prototype| in the map's
// prototype-transition cache, growing the cache (2x headroom) if needed.
// Shared maps and a disabled flag skip caching; a full cache also bails out.
11533 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
11534 Handle<Object> prototype,
11535 Handle<Map> target_map) {
11536 ASSERT(target_map->IsMap());
11537 ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
11538 // Don't cache prototype transition if this map is shared.
11539 if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;
11541 const int step = kProtoTransitionElementsPerEntry;
11542 const int header = kProtoTransitionHeaderSize;
11544 Handle<FixedArray> cache(map->GetPrototypeTransitions());
11545 int capacity = (cache->length() - header) / step;
11546 int transitions = map->NumberOfProtoTransitions() + 1;
11548 if (transitions > capacity) {
// Cap the cache size; beyond this, new transitions are simply not cached.
11549 if (capacity > kMaxCachedPrototypeTransitions) return map;
11551 // Grow array by factor 2 over and above what we need.
11552 Factory* factory = map->GetIsolate()->factory();
11553 cache = factory->CopySizeFixedArray(cache, transitions * 2 * step + header);
11555 CALL_AND_RETRY_OR_DIE(map->GetIsolate(),
11556 map->SetPrototypeTransitions(*cache),
11558 return Handle<Map>());
11561 // Reload number of transitions as GC might shrink them.
11562 int last = map->NumberOfProtoTransitions();
11563 int entry = header + last * step;
11565 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
11566 cache->set(entry + kProtoTransitionMapOffset, *target_map);
11567 map->SetNumberOfProtoTransitions(last + 1);
// Overwrites every slot of the transition array with the hole so the heap
// verifier does not see stale pointers after the transitions are dropped.
11573 void Map::ZapTransitions() {
11574 TransitionArray* transition_array = transitions();
11575 // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11576 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11577 Object** data = transition_array->data_start();
11578 Object* the_hole = GetHeap()->the_hole_value();
11579 int length = transition_array->length();
11580 for (int i = 0; i < length; i++) {
11581 data[i] = the_hole;
// Clears the prototype-transition cache by filling it with the hole value.
11586 void Map::ZapPrototypeTransitions() {
11587 FixedArray* proto_transitions = GetPrototypeTransitions();
11588 MemsetPointer(proto_transitions->data_start(),
11589 GetHeap()->the_hole_value(),
11590 proto_transitions->length());
// Registers an in-flight compilation (via its object wrapper) as dependent on
// this map for the given |group|, and records the reverse dependency in the
// CompilationInfo so it can be finalized or aborted later.
11594 void Map::AddDependentCompilationInfo(DependentCode::DependencyGroup group,
11595 CompilationInfo* info) {
11596 Handle<DependentCode> dep(dependent_code());
11597 Handle<DependentCode> codes =
11598 DependentCode::Insert(dep, group, info->object_wrapper());
// Insert may have reallocated the array; only write back when it changed.
11599 if (*codes != dependent_code()) set_dependent_code(*codes);
11600 info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
// Registers a finished Code object as dependent on this map for |group|.
11604 void Map::AddDependentCode(DependentCode::DependencyGroup group,
11605 Handle<Code> code) {
11606 Handle<DependentCode> codes = DependentCode::Insert(
11607 Handle<DependentCode>(dependent_code()), group, code);
11608 if (*codes != dependent_code()) set_dependent_code(*codes);
// Computes the per-group start offsets for |entries| on construction.
11612 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
11613 Recompute(entries);
// Rebuilds start_indexes_ as a prefix sum of the per-group entry counts, so
// group g occupies [start_indexes_[g], start_indexes_[g + 1]).
11617 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11618 start_indexes_[0] = 0;
11619 for (int g = 1; g <= kGroupCount; g++) {
11620 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11621 start_indexes_[g] = start_indexes_[g - 1] + count;
// Returns the DependentCode list attached to |object|, which may be a
// PropertyCell, an AllocationSite or a Map depending on the dependency group.
11626 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11627 DependencyGroup group) {
11628 AllowDeferredHandleDereference dependencies_are_safe;
11629 if (group == DependentCode::kPropertyCellChangedGroup) {
11630 return Handle<PropertyCell>::cast(object)->dependent_code();
11631 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
11632 group == DependentCode::kAllocationSiteTransitionChangedGroup) {
11633 return Handle<AllocationSite>::cast(object)->dependent_code();
11635 return Handle<Map>::cast(object)->dependent_code();
// Inserts |object| (a Code or a CompilationInfo wrapper) into the given
// dependency |group| of |entries|, deduplicating and growing the underlying
// array with ~5/4 headroom when full. Returns the (possibly new) array.
11639 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
11640 DependencyGroup group,
11641 Handle<Object> object) {
11642 GroupStartIndexes starts(*entries);
11643 int start = starts.at(group);
11644 int end = starts.at(group + 1);
11645 int number_of_entries = starts.number_of_entries();
11646 // Check for existing entry to avoid duplicates.
11647 for (int i = start; i < end; i++) {
11648 if (entries->object_at(i) == *object) return entries;
11650 if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
11651 Factory* factory = entries->GetIsolate()->factory();
11652 int capacity = kCodesStartIndex + number_of_entries + 1;
11653 if (capacity > 5) capacity = capacity * 5 / 4;
11654 Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
11655 factory->CopySizeFixedArray(entries, capacity, TENURED));
11656 // The number of codes can change after GC.
// Recompute offsets against the *old* array, then clear its slots so the
// stale copy holds no strong references.
11657 starts.Recompute(*entries);
11658 start = starts.at(group);
11659 end = starts.at(group + 1);
11660 number_of_entries = starts.number_of_entries();
11661 for (int i = 0; i < number_of_entries; i++) {
11662 entries->clear_at(i);
11664 // If the old fixed array was empty, we need to reset counters of the
11666 if (number_of_entries == 0) {
11667 for (int g = 0; g < kGroupCount; g++) {
11668 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
11671 entries = new_entries;
// Make room at the end of the group and append the new entry.
11673 entries->ExtendGroup(group);
11674 entries->set_object_at(end, *object);
11675 entries->set_number_of_entries(group, end + 1 - start);
// Replaces the CompilationInfo wrapper entry for |info| in |group| with the
// now-finished |code| object. The trailing loop asserts no wrapper for this
// info remains.
11680 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11681 CompilationInfo* info,
11683 DisallowHeapAllocation no_gc;
11684 AllowDeferredHandleDereference get_object_wrapper;
11685 Foreign* info_wrapper = *info->object_wrapper();
11686 GroupStartIndexes starts(this);
11687 int start = starts.at(group);
11688 int end = starts.at(group + 1);
11689 for (int i = start; i < end; i++) {
11690 if (object_at(i) == info_wrapper) {
11691 set_object_at(i, code);
// Debug-only sanity check: every remaining entry is either real code or a
// wrapper for some other compilation.
11697 for (int i = start; i < end; i++) {
11698 ASSERT(is_code_at(i) || compilation_info_at(i) != info);
// Removes the entry for |info| from |group|, keeping all groups contiguous by
// cascading each subsequent group's last element into the freed slot.
11704 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
11705 CompilationInfo* info) {
11706 DisallowHeapAllocation no_allocation;
11707 AllowDeferredHandleDereference get_object_wrapper;
11708 Foreign* info_wrapper = *info->object_wrapper();
11709 GroupStartIndexes starts(this);
11710 int start = starts.at(group);
11711 int end = starts.at(group + 1);
11712 // Find compilation info wrapper.
11714 for (int i = start; i < end; i++) {
11715 if (object_at(i) == info_wrapper) {
11720 if (info_pos == -1) return; // Not found.
11721 int gap = info_pos;
11722 // Use the last of each group to fill the gap in the previous group.
11723 for (int i = group; i < kGroupCount; i++) {
11724 int last_of_group = starts.at(i + 1) - 1;
11725 ASSERT(last_of_group >= gap);
11726 if (last_of_group == gap) continue;
11727 copy(last_of_group, gap);
11728 gap = last_of_group;
11730 ASSERT(gap == starts.number_of_entries() - 1);
11731 clear_at(gap); // Clear last gap.
11732 set_number_of_entries(group, end - start - 1);
// Debug-only: verify no wrapper for |info| survives in this group.
11735 for (int i = start; i < end - 1; i++) {
11736 ASSERT(is_code_at(i) || compilation_info_at(i) != info);
// Returns true if |code| is registered in dependency |group|.
11742 bool DependentCode::Contains(DependencyGroup group, Code* code) {
11743 GroupStartIndexes starts(this);
11744 int start = starts.at(group);
11745 int end = starts.at(group + 1);
11746 for (int i = start; i < end; i++) {
11747 if (object_at(i) == code) return true;
// Marks every code object in |group| for deoptimization (aborting any still
// in-flight compilations), then compacts the array and zaps the freed tail.
// Returns true if at least one code object was newly marked.
11753 bool DependentCode::MarkCodeForDeoptimization(
11755 DependentCode::DependencyGroup group) {
11756 DisallowHeapAllocation no_allocation_scope;
11757 DependentCode::GroupStartIndexes starts(this);
11758 int start = starts.at(group);
11759 int end = starts.at(group + 1);
11760 int code_entries = starts.number_of_entries();
11761 if (start == end) return false;
11763 // Mark all the code that needs to be deoptimized.
11764 bool marked = false;
11765 for (int i = start; i < end; i++) {
11766 if (is_code_at(i)) {
11767 Code* code = code_at(i);
11768 if (!code->marked_for_deoptimization()) {
11769 code->set_marked_for_deoptimization(true);
// Entry is a CompilationInfo wrapper: cancel the compilation instead.
11773 CompilationInfo* info = compilation_info_at(i);
11774 info->AbortDueToDependencyChange();
11777 // Compact the array by moving all subsequent groups to fill in the new holes.
11778 for (int src = end, dst = start; src < code_entries; src++, dst++) {
11781 // Now the holes are at the end of the array, zap them for heap-verifier.
11782 int removed = end - start;
11783 for (int i = code_entries - removed; i < code_entries; i++) {
11786 set_number_of_entries(group, 0);
// Marks all code in |group| for deoptimization and, if anything was marked,
// triggers deoptimization of the marked code.
11791 void DependentCode::DeoptimizeDependentCodeGroup(
11793 DependentCode::DependencyGroup group) {
11794 ASSERT(AllowCodeDependencyChange::IsAllowed());
11795 DisallowHeapAllocation no_allocation_scope;
11796 bool marked = MarkCodeForDeoptimization(isolate, group);
11798 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
// Sets |value| as the [[Prototype]] of |object|, enforcing the ES5 rules:
// non-receiver/non-null values are silently ignored, non-extensible objects
// and prototype cycles throw. Optionally skips hidden prototypes so the new
// prototype is installed on the first visible object in the chain. Uses the
// prototype-transition cache to avoid creating duplicate maps.
11802 Handle<Object> JSObject::SetPrototype(Handle<JSObject> object,
11803 Handle<Object> value,
11804 bool skip_hidden_prototypes) {
// Debug aid: the object's size must not change across this operation.
11806 int size = object->Size();
11809 Isolate* isolate = object->GetIsolate();
11810 Heap* heap = isolate->heap();
11811 // Silently ignore the change if value is not a JSObject or null.
11812 // SpiderMonkey behaves this way.
11813 if (!value->IsJSReceiver() && !value->IsNull()) return value;
11815 // From 8.6.2 Object Internal Methods
11817 // In addition, if [[Extensible]] is false the value of the [[Class]] and
11818 // [[Prototype]] internal properties of the object may not be modified.
11820 // Implementation specific extensions that modify [[Class]], [[Prototype]]
11821 // or [[Extensible]] must not violate the invariants defined in the preceding
11823 if (!object->map()->is_extensible()) {
11824 Handle<Object> args[] = { object };
11825 Handle<Object> error = isolate->factory()->NewTypeError(
11826 "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
11827 isolate->Throw(*error);
11828 return Handle<Object>();
11831 // Before we can set the prototype we need to be sure
11832 // prototype cycles are prevented.
11833 // It is sufficient to validate that the receiver is not in the new prototype
11835 for (Object* pt = *value;
11836 pt != heap->null_value();
11837 pt = pt->GetPrototype(isolate)) {
11838 if (JSReceiver::cast(pt) == *object) {
11840 Handle<Object> error = isolate->factory()->NewError(
11841 "cyclic_proto", HandleVector<Object>(NULL, 0));
11842 isolate->Throw(*error);
11843 return Handle<Object>();
// Remember whether the old chain already had dictionary-only elements, to
// decide below whether KeyedStoreICs must be flushed.
11847 bool dictionary_elements_in_chain =
11848 object->map()->DictionaryElementsInPrototypeChainOnly();
11849 Handle<JSObject> real_receiver = object;
11851 if (skip_hidden_prototypes) {
11852 // Find the first object in the chain whose prototype object is not
11853 // hidden and set the new prototype on that object.
11854 Object* current_proto = real_receiver->GetPrototype();
11855 while (current_proto->IsJSObject() &&
11856 JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
11857 real_receiver = handle(JSObject::cast(current_proto), isolate);
11858 current_proto = current_proto->GetPrototype(isolate);
11862 // Set the new prototype of the object.
11863 Handle<Map> map(real_receiver->map());
11865 // Nothing to do if prototype is already set.
11866 if (map->prototype() == *value) return value;
11868 if (value->IsJSObject()) {
11869 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
// Reuse a cached transition map if one exists; otherwise copy the map,
// cache the transition, and point the copy at the new prototype.
11872 Handle<Map> new_map = Map::GetPrototypeTransition(map, value);
11873 if (new_map.is_null()) {
11874 new_map = Map::Copy(map);
11875 Map::PutPrototypeTransition(map, value, new_map);
11876 new_map->set_prototype(*value);
11878 ASSERT(new_map->prototype() == *value);
11879 JSObject::MigrateToMap(real_receiver, new_map);
11881 if (!dictionary_elements_in_chain &&
11882 new_map->DictionaryElementsInPrototypeChainOnly()) {
11883 // If the prototype chain didn't previously have element callbacks, then
11884 // KeyedStoreICs need to be cleared to ensure any that involve this
11886 object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Cached instanceof results may reference the old chain; invalidate them.
11889 heap->ClearInstanceofCache();
11890 ASSERT(size == object->Size());
// Adapter for the Arguments-based variant: translates the backwards,
// stack-ordered argument slice into a forward pointer range and forwards to
// the pointer-based EnsureCanContainElements overload.
11895 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
11897 uint32_t first_arg,
11898 uint32_t arg_count,
11899 EnsureElementsMode mode) {
11900 // Elements in |Arguments| are ordered backwards (because they're on the
11901 // stack), but the method that's called here iterates over them in forward
11903 return EnsureCanContainElements(
11904 object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
// Returns the AccessorPair for the own property |name|, or NULL when the
// property is absent or not accessor-backed. Array-index names are routed to
// the element variant.
11908 AccessorPair* JSObject::GetLocalPropertyAccessorPair(Name* name) {
11909 uint32_t index = 0;
11910 if (name->AsArrayIndex(&index)) {
11911 return GetLocalElementAccessorPair(index);
11914 LookupResult lookup(GetIsolate());
11915 LocalLookupRealNamedProperty(name, &lookup);
11917 if (lookup.IsPropertyCallbacks() &&
11918 lookup.GetCallbackObject()->IsAccessorPair()) {
11919 return AccessorPair::cast(lookup.GetCallbackObject());
// Returns the AccessorPair for the own element at |index|, or NULL. Global
// proxies delegate to the global object; indexed interceptors mask accessors.
11925 AccessorPair* JSObject::GetLocalElementAccessorPair(uint32_t index) {
11926 if (IsJSGlobalProxy()) {
11927 Object* proto = GetPrototype();
11928 if (proto->IsNull()) return NULL;
11929 ASSERT(proto->IsJSGlobalObject());
11930 return JSObject::cast(proto)->GetLocalElementAccessorPair(index);
11933 // Check for lookup interceptor.
11934 if (HasIndexedInterceptor()) return NULL;
11936 return GetElementsAccessor()->GetAccessorPair(this, this, index);
// Stores an element through the object's indexed interceptor. If the
// interceptor's setter callback handles the store (returns a non-empty
// result), the value is returned; otherwise the store falls through to
// SetElementWithoutInterceptor.
11940 Handle<Object> JSObject::SetElementWithInterceptor(
11941 Handle<JSObject> object,
11943 Handle<Object> value,
11944 PropertyAttributes attributes,
11945 StrictMode strict_mode,
11946 bool check_prototype,
11947 SetPropertyMode set_mode) {
11948 Isolate* isolate = object->GetIsolate();
11950 // Make sure that the top context does not change when doing
11951 // callbacks or interceptor calls.
11952 AssertNoContextChange ncc(isolate);
11954 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
11955 if (!interceptor->setter()->IsUndefined()) {
11956 v8::IndexedPropertySetterCallback setter =
11957 v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
11959 ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
11960 PropertyCallbackArguments args(isolate, interceptor->data(), *object,
11962 v8::Handle<v8::Value> result =
11963 args.Call(setter, index, v8::Utils::ToLocal(value));
11964 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Non-empty result means the interceptor intercepted the store.
11965 if (!result.IsEmpty()) return value;
11968 return SetElementWithoutInterceptor(object, index, value, attributes,
// Reads an element whose slot holds a callback |structure|: either an API
// ExecutableAccessorInfo (invokes the C++ getter), an AccessorPair (invokes
// the JS getter, or yields undefined if the getter is not callable), or a
// DeclaredAccessorInfo.
11975 MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
11979 Isolate* isolate = GetIsolate();
11980 ASSERT(!structure->IsForeign());
11982 // api style callbacks.
11983 if (structure->IsExecutableAccessorInfo()) {
11984 Handle<ExecutableAccessorInfo> data(
11985 ExecutableAccessorInfo::cast(structure));
11986 Object* fun_obj = data->getter();
11987 v8::AccessorGetterCallback call_fun =
11988 v8::ToCData<v8::AccessorGetterCallback>(fun_obj);
// A NULL getter behaves like an absent value.
11989 if (call_fun == NULL) return isolate->heap()->undefined_value();
11990 HandleScope scope(isolate);
11991 Handle<JSObject> self(JSObject::cast(receiver));
11992 Handle<JSObject> holder_handle(JSObject::cast(holder));
// API accessors are keyed by name, so the index is stringified first.
11993 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11994 Handle<String> key = isolate->factory()->NumberToString(number);
11995 LOG(isolate, ApiNamedPropertyAccess("load", *self, *key));
11996 PropertyCallbackArguments
11997 args(isolate, data->data(), *self, *holder_handle);
11998 v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
11999 RETURN_IF_SCHEDULED_EXCEPTION(isolate);
12000 if (result.IsEmpty()) return isolate->heap()->undefined_value();
12001 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
12002 result_internal->VerifyApiCallResultType();
12003 return *result_internal;
12006 // __defineGetter__ callback
12007 if (structure->IsAccessorPair()) {
12008 Object* getter = AccessorPair::cast(structure)->getter();
12009 if (getter->IsSpecFunction()) {
12010 // TODO(rossberg): nicer would be to cast to some JSCallable here...
12011 return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
12013 // Getter is not a function.
12014 return isolate->heap()->undefined_value();
12017 if (structure->IsDeclaredAccessorInfo()) {
12018 return GetDeclaredAccessorProperty(receiver,
12019 DeclaredAccessorInfo::cast(structure),
// Stores an element whose slot holds a callback |structure|: invokes the API
// setter for ExecutableAccessorInfo, the JS setter of an AccessorPair, or —
// when the pair has no callable setter — throws in strict mode and silently
// succeeds in sloppy mode.
12028 Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
12029 Handle<Object> structure,
12031 Handle<Object> value,
12032 Handle<JSObject> holder,
12033 StrictMode strict_mode) {
12034 Isolate* isolate = object->GetIsolate();
12036 // We should never get here to initialize a const with the hole
12037 // value since a const declaration would conflict with the setter.
12038 ASSERT(!value->IsTheHole());
12040 // To accommodate both the old and the new api we switch on the
12041 // data structure used to store the callbacks. Eventually foreign
12042 // callbacks should be phased out.
12043 ASSERT(!structure->IsForeign());
12045 if (structure->IsExecutableAccessorInfo()) {
12046 // api style callbacks
12047 Handle<ExecutableAccessorInfo> data =
12048 Handle<ExecutableAccessorInfo>::cast(structure);
12049 Object* call_obj = data->setter();
12050 v8::AccessorSetterCallback call_fun =
12051 v8::ToCData<v8::AccessorSetterCallback>(call_obj);
// No setter callback: the store is a silent no-op.
12052 if (call_fun == NULL) return value;
// API accessors are keyed by name, so the index is stringified first.
12053 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12054 Handle<String> key(isolate->factory()->NumberToString(number));
12055 LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
12056 PropertyCallbackArguments
12057 args(isolate, data->data(), *object, *holder);
12058 args.Call(call_fun,
12059 v8::Utils::ToLocal(key),
12060 v8::Utils::ToLocal(value));
12061 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
12065 if (structure->IsAccessorPair()) {
12066 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
12067 if (setter->IsSpecFunction()) {
12068 // TODO(rossberg): nicer would be to cast to some JSCallable here...
12069 return SetPropertyWithDefinedSetter(
12070 object, Handle<JSReceiver>::cast(setter), value);
// No callable setter: sloppy mode ignores the store, strict mode throws.
12072 if (strict_mode == SLOPPY) return value;
12073 Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
12074 Handle<Object> args[2] = { key, holder };
12075 Handle<Object> error = isolate->factory()->NewTypeError(
12076 "no_setter_in_callback", HandleVector(args, 2));
12077 isolate->Throw(*error);
12078 return Handle<Object>();
12082 // TODO(dcarney): Handle correctly.
12083 if (structure->IsDeclaredAccessorInfo()) return value;
12086 return Handle<Object>();
// True if this object has sloppy-arguments elements whose backing store
// (slot 1 of the parameter map) is a fast (non-dictionary) array.
12090 bool JSObject::HasFastArgumentsElements() {
12091 Heap* heap = GetHeap();
12092 if (!elements()->IsFixedArray()) return false;
12093 FixedArray* elements = FixedArray::cast(this->elements());
12094 if (elements->map() != heap->sloppy_arguments_elements_map()) {
12097 FixedArray* arguments = FixedArray::cast(elements->get(1));
12098 return !arguments->IsDictionary();
// True if this object has sloppy-arguments elements whose backing store is a
// dictionary. Mirror image of HasFastArgumentsElements.
12102 bool JSObject::HasDictionaryArgumentsElements() {
12103 Heap* heap = GetHeap();
12104 if (!elements()->IsFixedArray()) return false;
12105 FixedArray* elements = FixedArray::cast(this->elements());
12106 if (elements->map() != heap->sloppy_arguments_elements_map()) {
12109 FixedArray* arguments = FixedArray::cast(elements->get(1));
12110 return arguments->IsDictionary();
12114 // Adding n elements in fast case is O(n*n).
12115 // Note: revisit design to have dual undefined values to capture absent
// Stores |value| at |index| in an object with fast (smi/object/arguments)
// elements. Handles prototype setter lookup, growth of the backing store,
// elements-kind transitions (smi -> double / object, packed -> holey), and
// falls back to dictionary elements when the store would be too sparse.
12117 Handle<Object> JSObject::SetFastElement(Handle<JSObject> object,
12119 Handle<Object> value,
12120 StrictMode strict_mode,
12121 bool check_prototype) {
12122 ASSERT(object->HasFastSmiOrObjectElements() ||
12123 object->HasFastArgumentsElements());
12125 Isolate* isolate = object->GetIsolate();
12127 // Array optimizations rely on the prototype lookups of Array objects always
12128 // returning undefined. If there is a store to the initial prototype object,
12129 // make sure all of these optimizations are invalidated.
12130 if (isolate->is_initial_object_prototype(*object) ||
12131 isolate->is_initial_array_prototype(*object)) {
12132 object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
12133 DependentCode::kElementsCantBeAddedGroup);
12136 Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
12137 if (backing_store->map() ==
12138 isolate->heap()->sloppy_arguments_elements_map()) {
// Sloppy arguments: the real backing store is at slot 1 of the map.
12139 backing_store = handle(FixedArray::cast(backing_store->get(1)));
12141 backing_store = EnsureWritableFastElements(object);
12143 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
// A store to a hole or past capacity may be intercepted by a setter on the
// prototype chain.
12145 if (check_prototype &&
12146 (index >= capacity || backing_store->get(index)->IsTheHole())) {
12148 Handle<Object> result = SetElementWithCallbackSetterInPrototypes(
12149 object, index, value, &found, strict_mode);
12150 if (found) return result;
12153 uint32_t new_capacity = capacity;
12154 // Check if the length property of this object needs to be updated.
12155 uint32_t array_length = 0;
12156 bool must_update_array_length = false;
12157 bool introduces_holes = true;
12158 if (object->IsJSArray()) {
12159 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12160 introduces_holes = index > array_length;
12161 if (index >= array_length) {
12162 must_update_array_length = true;
12163 array_length = index + 1;
12166 introduces_holes = index >= capacity;
12169 // If the array is growing, and it's not growth by a single element at the
12170 // end, make sure that the ElementsKind is HOLEY.
12171 ElementsKind elements_kind = object->GetElementsKind();
12172 if (introduces_holes &&
12173 IsFastElementsKind(elements_kind) &&
12174 !IsFastHoleyElementsKind(elements_kind)) {
12175 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12176 TransitionElementsKind(object, transitioned_kind);
12179 // Check if the capacity of the backing store needs to be increased, or if
12180 // a transition to slow elements is necessary.
12181 if (index >= capacity) {
12182 bool convert_to_slow = true;
12183 if ((index - capacity) < kMaxGap) {
12184 new_capacity = NewElementsCapacity(index + 1);
12185 ASSERT(new_capacity > index);
12186 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12187 convert_to_slow = false;
12190 if (convert_to_slow) {
// Too sparse for fast elements: normalize and store as a dictionary.
12191 NormalizeElements(object);
12192 return SetDictionaryElement(object, index, value, NONE, strict_mode,
12196 // Convert to fast double elements if appropriate.
12197 if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12198 // Consider fixing the boilerplate as well if we have one.
12199 ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12200 ? FAST_HOLEY_DOUBLE_ELEMENTS
12201 : FAST_DOUBLE_ELEMENTS;
12203 UpdateAllocationSite(object, to_kind);
12205 SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12206 FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12207 object->ValidateElements();
12210 // Change elements kind from Smi-only to generic FAST if necessary.
12211 if (object->HasFastSmiElements() && !value->IsSmi()) {
12212 ElementsKind kind = object->HasFastHoleyElements()
12213 ? FAST_HOLEY_ELEMENTS
12216 UpdateAllocationSite(object, kind);
12217 Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12218 JSObject::MigrateToMap(object, new_map);
12219 ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
12221 // Increase backing store capacity if that's been decided previously.
12222 if (new_capacity != capacity) {
12223 SetFastElementsCapacitySmiMode smi_mode =
12224 value->IsSmi() && object->HasFastSmiElements()
12225 ? kAllowSmiElements
12226 : kDontAllowSmiElements;
12227 Handle<FixedArray> new_elements =
12228 SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12230 new_elements->set(index, *value);
12231 object->ValidateElements();
12235 // Finally, set the new element and length.
12236 ASSERT(object->elements()->IsFixedArray());
12237 backing_store->set(index, *value);
12238 if (must_update_array_length) {
12239 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
// Stores (or defines, per |set_mode|) element |index| on an object whose
// elements are in dictionary (slow) mode, including the dictionary part of
// sloppy-arguments objects. Handles accessor callbacks, read-only checks,
// aliased arguments entries, non-extensible objects, and may convert the
// object back to fast elements afterwards.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), so several closing braces/statements are not shown here.
12245 Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object,
12247 Handle<Object> value,
12248 PropertyAttributes attributes,
12249 StrictMode strict_mode,
12250 bool check_prototype,
12251 SetPropertyMode set_mode) {
12252 ASSERT(object->HasDictionaryElements() ||
12253 object->HasDictionaryArgumentsElements());
12254 Isolate* isolate = object->GetIsolate();
12256 // Insert element in the dictionary.
// For sloppy-arguments objects the number dictionary lives at slot 1 of the
// parameter map; otherwise the elements array *is* the dictionary.
12257 Handle<FixedArray> elements(FixedArray::cast(object->elements()));
12258 bool is_arguments =
12259 (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
12260 Handle<SeededNumberDictionary> dictionary(is_arguments
12261 ? SeededNumberDictionary::cast(elements->get(1))
12262 : SeededNumberDictionary::cast(*elements));
12264 int entry = dictionary->FindEntry(index);
12265 if (entry != SeededNumberDictionary::kNotFound) {
// Existing entry: route plain stores through accessor callbacks if present.
12266 Handle<Object> element(dictionary->ValueAt(entry), isolate);
12267 PropertyDetails details = dictionary->DetailsAt(entry);
12268 if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
12269 return SetElementWithCallback(object, element, index, value, object,
12272 dictionary->UpdateMaxNumberKey(index);
12273 // If a value has not been initialized we allow writing to it even if it
12274 // is read-only (a declared const that has not been initialized). If a
12275 // value is being defined we skip attribute checks completely.
12276 if (set_mode == DEFINE_PROPERTY) {
12277 details = PropertyDetails(
12278 attributes, NORMAL, details.dictionary_index());
12279 dictionary->DetailsAtPut(entry, details);
12280 } else if (details.IsReadOnly() && !element->IsTheHole()) {
// Read-only violation: silently ignore in sloppy mode, throw in strict mode.
12281 if (strict_mode == SLOPPY) {
12282 return isolate->factory()->undefined_value();
12284 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12285 Handle<Object> args[2] = { number, object };
12286 Handle<Object> error =
12287 isolate->factory()->NewTypeError("strict_read_only_property",
12288 HandleVector(args, 2));
12289 isolate->Throw(*error);
12290 return Handle<Object>();
12293 // Elements of the arguments object in slow mode might be slow aliases.
12294 if (is_arguments && element->IsAliasedArgumentsEntry()) {
// Write through the alias into the context slot that backs the parameter.
12295 Handle<AliasedArgumentsEntry> entry =
12296 Handle<AliasedArgumentsEntry>::cast(element);
12297 Handle<Context> context(Context::cast(elements->get(0)));
12298 int context_index = entry->aliased_context_slot();
12299 ASSERT(!context->get(context_index)->IsTheHole());
12300 context->set(context_index, *value);
12301 // For elements that are still writable we keep slow aliasing.
12302 if (!details.IsReadOnly()) value = element;
12304 dictionary->ValueAtPut(entry, *value);
12307 // Index not already used. Look for an accessor in the prototype chain.
12309 if (check_prototype) {
12311 Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object,
12312 index, value, &found, strict_mode);
12313 if (found) return result;
12316 // When we set the is_extensible flag to false we always force the
12317 // element into dictionary mode (and force them to stay there).
12318 if (!object->map()->is_extensible()) {
12319 if (strict_mode == SLOPPY) {
12320 return isolate->factory()->undefined_value();
12322 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12323 Handle<String> name = isolate->factory()->NumberToString(number);
12324 Handle<Object> args[1] = { name };
12325 Handle<Object> error =
12326 isolate->factory()->NewTypeError("object_not_extensible",
12327 HandleVector(args, 1));
12328 isolate->Throw(*error);
12329 return Handle<Object>();
// New entry: add it, and re-point the holder at the (possibly reallocated)
// dictionary.
12333 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
12334 Handle<SeededNumberDictionary> new_dictionary =
12335 SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
12337 if (*dictionary != *new_dictionary) {
12338 if (is_arguments) {
12339 elements->set(1, *new_dictionary);
12341 object->set_elements(*new_dictionary);
12343 dictionary = new_dictionary;
12347 // Update the array length if this JSObject is an array.
12348 if (object->IsJSArray()) {
12349 JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
12353 // Attempt to put this object back in fast case.
12354 if (object->ShouldConvertToFastElements()) {
12355 uint32_t new_length = 0;
12356 if (object->IsJSArray()) {
12357 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
12359 new_length = dictionary->max_number_key() + 1;
// Choose smi-mode for the fast backing store: force smis when the
// dictionary holds only smis, otherwise defer to the flag.
12361 SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
12362 ? kAllowSmiElements
12363 : kDontAllowSmiElements;
12364 bool has_smi_only_elements = false;
12365 bool should_convert_to_fast_double_elements =
12366 object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
12367 if (has_smi_only_elements) {
12368 smi_mode = kForceSmiElements;
12371 if (should_convert_to_fast_double_elements) {
12372 SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
12374 SetFastElementsCapacityAndLength(object, new_length, new_length,
12377 object->ValidateElements();
12379 if (FLAG_trace_normalization) {
12380 PrintF("Object elements are fast case again:\n");
// Stores element |index| on an object with FAST_DOUBLE_ELEMENTS /
// FAST_HOLEY_DOUBLE_ELEMENTS. Non-number values force a transition to fast
// object elements; stores that introduce holes force a HOLEY kind; growth
// beyond kMaxGap (or a capacity heuristic) normalizes to dictionary mode.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), so several closing braces/statements are not shown here.
12388 Handle<Object> JSObject::SetFastDoubleElement(
12389 Handle<JSObject> object,
12391 Handle<Object> value,
12392 StrictMode strict_mode,
12393 bool check_prototype) {
12394 ASSERT(object->HasFastDoubleElements());
12396 Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
12397 uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
12399 // If storing to an element that isn't in the array, pass the store request
12400 // up the prototype chain before storing in the receiver's elements.
12401 if (check_prototype &&
12402 (index >= elms_length ||
12403 Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
12405 Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object,
12406 index, value, &found, strict_mode);
12407 if (found) return result;
12410 // If the value object is not a heap number, switch to fast elements and try
12412 bool value_is_smi = value->IsSmi();
// A store is hole-introducing when it lands past the current length
// (JSArray) or past the backing-store length (non-array).
12413 bool introduces_holes = true;
12414 uint32_t length = elms_length;
12415 if (object->IsJSArray()) {
12416 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
12417 introduces_holes = index > length;
12419 introduces_holes = index >= elms_length;
12422 if (!value->IsNumber()) {
// Non-number value: leave double representation entirely and retry the
// store through the generic fast-element path.
12423 SetFastElementsCapacityAndLength(object, elms_length, length,
12424 kDontAllowSmiElements);
12425 Handle<Object> result = SetFastElement(object, index, value, strict_mode,
12427 RETURN_IF_EMPTY_HANDLE_VALUE(object->GetIsolate(), result,
12429 object->ValidateElements();
12433 double double_value = value_is_smi
12434 ? static_cast<double>(Handle<Smi>::cast(value)->value())
12435 : Handle<HeapNumber>::cast(value)->value();
12437 // If the array is growing, and it's not growth by a single element at the
12438 // end, make sure that the ElementsKind is HOLEY.
12439 ElementsKind elements_kind = object->GetElementsKind();
12440 if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
12441 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12442 TransitionElementsKind(object, transitioned_kind);
12445 // Check whether there is extra space in the fixed array.
12446 if (index < elms_length) {
// In-bounds store: write directly and bump JSArray length if needed.
12447 Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
12448 elms->set(index, double_value);
12449 if (object->IsJSArray()) {
12450 // Update the length of the array if needed.
12451 uint32_t array_length = 0;
12453 Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12454 if (index >= array_length) {
12455 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
12461 // Allow gap in fast case.
12462 if ((index - elms_length) < kMaxGap) {
12463 // Try allocating extra space.
12464 int new_capacity = NewElementsCapacity(index+1);
12465 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12466 ASSERT(static_cast<uint32_t>(new_capacity) > index);
12467 SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
12468 FixedDoubleArray::cast(object->elements())->set(index, double_value);
12469 object->ValidateElements();
12474 // Otherwise default to slow case.
12475 ASSERT(object->HasFastDoubleElements());
12476 ASSERT(object->map()->has_fast_double_elements());
12477 ASSERT(object->elements()->IsFixedDoubleArray() ||
12478 object->elements()->length() == 0);
12480 NormalizeElements(object);
12481 ASSERT(object->HasDictionaryElements());
12482 return SetElement(object, index, value, NONE, strict_mode, check_prototype);
// Receiver-level element store: dispatches to the proxy handler trap for
// JSProxy receivers, otherwise to the JSObject implementation.
// NOTE(review): a line is elided between the proxy and object branches in
// this listing (the embedded line numbers jump 12493 -> 12495).
12486 Handle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
12488 Handle<Object> value,
12489 PropertyAttributes attributes,
12490 StrictMode strict_mode) {
12491 if (object->IsJSProxy()) {
12492 return JSProxy::SetElementWithHandler(
12493 Handle<JSProxy>::cast(object), object, index, value, strict_mode);
12495 return JSObject::SetElement(
12496 Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
// Sets an element on the object itself: delegates to SetElement with
// check_prototype == false, so prototype-chain setters are never consulted.
12500 Handle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
12502 Handle<Object> value,
12503 StrictMode strict_mode) {
12504 ASSERT(!object->HasExternalArrayElements());
12505 return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
// Main entry point for storing element |index| on a JSObject. Performs
// typed-array value coercion, access checks, global-proxy forwarding,
// normalization for attributed properties, and — for observed objects —
// records "add"/"update"/"reconfigure"/splice change records around the
// actual store.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), so several closing braces/statements are not shown here.
12509 Handle<Object> JSObject::SetElement(Handle<JSObject> object,
12511 Handle<Object> value,
12512 PropertyAttributes attributes,
12513 StrictMode strict_mode,
12514 bool check_prototype,
12515 SetPropertyMode set_mode) {
12516 Isolate* isolate = object->GetIsolate();
12518 if (object->HasExternalArrayElements() ||
12519 object->HasFixedTypedArrayElements()) {
12520 // TODO(ningxin): Throw an error if setting a Float32x4Array element
12521 // while the value is not Float32x4Object.
// Coerce non-number, non-SIMD, non-undefined values via ToNumber before
// storing into a typed-array backing store.
12522 if (!value->IsNumber() && !value->IsFloat32x4() &&
12523 !value->IsInt32x4() && !value->IsUndefined()) {
12524 bool has_exception;
12525 Handle<Object> number =
12526 Execution::ToNumber(isolate, value, &has_exception);
12527 if (has_exception) return Handle<Object>();
12532 // Check access rights if needed.
12533 if (object->IsAccessCheckNeeded()) {
12534 if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_SET)) {
12535 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET);
12536 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Global proxies forward the store to their (global object) prototype.
12541 if (object->IsJSGlobalProxy()) {
12542 Handle<Object> proto(object->GetPrototype(), isolate);
12543 if (proto->IsNull()) return value;
12544 ASSERT(proto->IsJSGlobalObject());
12545 return SetElement(Handle<JSObject>::cast(proto), index, value, attributes,
12551 // Don't allow element properties to be redefined for external arrays.
12552 if ((object->HasExternalArrayElements() ||
12553 object->HasFixedTypedArrayElements()) &&
12554 set_mode == DEFINE_PROPERTY) {
12555 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12556 Handle<Object> args[] = { object, number };
12557 Handle<Object> error = isolate->factory()->NewTypeError(
12558 "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
12559 isolate->Throw(*error);
12560 return Handle<Object>();
12563 // Normalize the elements to enable attributes on the property.
12564 if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
12565 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
12566 // Make sure that we never go back to fast case.
12567 dictionary->set_requires_slow_elements();
// Fast path: unobserved objects need no change records.
12570 if (!object->map()->is_observed()) {
12571 return object->HasIndexedInterceptor()
12572 ? SetElementWithInterceptor(object, index, value, attributes, strict_mode,
12575 : SetElementWithoutInterceptor(object, index, value, attributes,
// Observed path: capture the old value / old array length so the store can
// be reported to Object.observe listeners afterwards.
12581 PropertyAttributes old_attributes =
12582 JSReceiver::GetLocalElementAttribute(object, index);
12583 Handle<Object> old_value = isolate->factory()->the_hole_value();
12584 Handle<Object> old_length_handle;
12585 Handle<Object> new_length_handle;
12587 if (old_attributes != ABSENT) {
12588 if (object->GetLocalElementAccessorPair(index) == NULL) {
12589 old_value = Object::GetElementNoExceptionThrown(isolate, object, index);
12591 } else if (object->IsJSArray()) {
12592 // Store old array length in case adding an element grows the array.
12593 old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12597 // Check for lookup interceptor
12598 Handle<Object> result = object->HasIndexedInterceptor()
12599 ? SetElementWithInterceptor(object, index, value, attributes, strict_mode,
12602 : SetElementWithoutInterceptor(object, index, value, attributes,
12606 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
12608 Handle<String> name = isolate->factory()->Uint32ToString(index);
12609 PropertyAttributes new_attributes = GetLocalElementAttribute(object, index);
12610 if (old_attributes == ABSENT) {
// New element. If the array length grew, emit the records inside a splice
// so observers see one coherent splice event.
12611 if (object->IsJSArray() &&
12612 !old_length_handle->SameValue(
12613 Handle<JSArray>::cast(object)->length())) {
12614 new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12616 uint32_t old_length = 0;
12617 uint32_t new_length = 0;
12618 CHECK(old_length_handle->ToArrayIndex(&old_length));
12619 CHECK(new_length_handle->ToArrayIndex(&new_length));
12621 BeginPerformSplice(Handle<JSArray>::cast(object));
12622 EnqueueChangeRecord(object, "add", name, old_value);
12623 EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
12624 old_length_handle);
12625 EndPerformSplice(Handle<JSArray>::cast(object));
12626 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
12627 EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
12628 new_length - old_length);
12630 EnqueueChangeRecord(object, "add", name, old_value);
12632 } else if (old_value->IsTheHole()) {
12633 EnqueueChangeRecord(object, "reconfigure", name, old_value);
// Existing element: distinguish attribute reconfiguration from a plain
// value update.
12635 Handle<Object> new_value =
12636 Object::GetElementNoExceptionThrown(isolate, object, index);
12637 bool value_changed = !old_value->SameValue(*new_value);
12638 if (old_attributes != new_attributes) {
12639 if (!value_changed) old_value = isolate->factory()->the_hole_value();
12640 EnqueueChangeRecord(object, "reconfigure", name, old_value);
12641 } else if (value_changed) {
12642 EnqueueChangeRecord(object, "update", name, old_value);
// Dispatches an element store on the object's ElementsKind: fast
// smi/object/double kinds, external/fixed typed arrays (via macro-generated
// cases), dictionary elements, and sloppy-arguments objects (which may alias
// context slots through the parameter map).
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), so several closing braces/statements are not shown here.
12650 Handle<Object> JSObject::SetElementWithoutInterceptor(
12651 Handle<JSObject> object,
12653 Handle<Object> value,
12654 PropertyAttributes attributes,
12655 StrictMode strict_mode,
12656 bool check_prototype,
12657 SetPropertyMode set_mode) {
12658 ASSERT(object->HasDictionaryElements() ||
12659 object->HasDictionaryArgumentsElements() ||
12660 (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
12661 Isolate* isolate = object->GetIsolate();
// Optional abuse tracing for out-of-bounds-style writes.
12662 if (FLAG_trace_external_array_abuse &&
12663 IsExternalArrayElementsKind(object->GetElementsKind())) {
12664 CheckArrayAbuse(*object, "external elements write", index);
12666 if (FLAG_trace_js_array_abuse &&
12667 !IsExternalArrayElementsKind(object->GetElementsKind())) {
12668 if (object->IsJSArray()) {
12669 CheckArrayAbuse(*object, "elements write", index, true);
12672 switch (object->GetElementsKind()) {
12673 case FAST_SMI_ELEMENTS:
12674 case FAST_ELEMENTS:
12675 case FAST_HOLEY_SMI_ELEMENTS:
12676 case FAST_HOLEY_ELEMENTS:
12677 return SetFastElement(object, index, value, strict_mode, check_prototype);
12678 case FAST_DOUBLE_ELEMENTS:
12679 case FAST_HOLEY_DOUBLE_ELEMENTS:
12680 return SetFastDoubleElement(object, index, value, strict_mode,
// Generates one case per typed-array type for both the external and the
// fixed (on-heap) representations.
12683 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
12684 case EXTERNAL_##TYPE##_ELEMENTS: { \
12685 Handle<External##Type##Array> array( \
12686 External##Type##Array::cast(object->elements())); \
12687 return External##Type##Array::SetValue(array, index, value); \
12689 case TYPE##_ELEMENTS: { \
12690 Handle<Fixed##Type##Array> array( \
12691 Fixed##Type##Array::cast(object->elements())); \
12692 return Fixed##Type##Array::SetValue(array, index, value); \
12695 TYPED_ARRAYS(TYPED_ARRAY_CASE)
12697 #undef TYPED_ARRAY_CASE
12699 case DICTIONARY_ELEMENTS:
12700 return SetDictionaryElement(object, index, value, attributes, strict_mode,
12703 case SLOPPY_ARGUMENTS_ELEMENTS: {
// parameter_map layout: slot 0 = context, slot 1 = backing store, slots
// 2..n = per-parameter context-slot indices (hole if unmapped).
12704 Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
12705 uint32_t length = parameter_map->length();
12706 Handle<Object> probe = index < length - 2 ?
12707 Handle<Object>(parameter_map->get(index + 2), isolate) :
12709 if (!probe.is_null() && !probe->IsTheHole()) {
12710 Handle<Context> context(Context::cast(parameter_map->get(0)));
12711 int context_index = Handle<Smi>::cast(probe)->value();
12712 ASSERT(!context->get(context_index)->IsTheHole());
12713 context->set(context_index, *value);
12714 // Redefining attributes of an aliased element destroys fast aliasing.
12715 if (set_mode == SET_PROPERTY || attributes == NONE) return value;
12716 parameter_map->set_the_hole(index + 2);
12717 // For elements that are still writable we re-establish slow aliasing.
12718 if ((attributes & READ_ONLY) == 0) {
12719 value = Handle<Object>::cast(
12720 isolate->factory()->NewAliasedArgumentsEntry(context_index));
12723 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
12724 if (arguments->IsDictionary()) {
12725 return SetDictionaryElement(object, index, value, attributes,
12730 return SetFastElement(object, index, value, strict_mode,
12735 // All possible cases have been handled above. Add a return to avoid the
12736 // complaints from the compiler.
12738 return isolate->factory()->null_value();
// Threshold ratio used by the allocation-site pretenuring heuristics
// (consumers are not visible in this chunk — presumably compared against
// memento found/create counts; confirm against the full file).
12742 const double AllocationSite::kPretenureRatio = 0.85;
// Resets this site's pretenuring state: decision back to kUndecided and both
// memento counters to zero.
12745 void AllocationSite::ResetPretenureDecision() {
12746 set_pretenure_decision(kUndecided);
12747 set_memento_found_count(0);
12748 set_memento_create_count(0);
// Maps the site's pretenure decision to an allocation flag: only an explicit
// kTenure decision yields TENURED; everything else (including zombie sites,
// per the comment below) is NOT_TENURED.
12752 PretenureFlag AllocationSite::GetPretenureMode() {
12753 PretenureDecision mode = pretenure_decision();
12754 // Zombie objects "decide" to be untenured.
12755 return mode == kTenure ? TENURED : NOT_TENURED;
// Walks the heap's global allocation-site list and reports whether this site
// is recorded as the nested_site() of any site on it. Debug/tracing helper
// (guarded by FLAG_trace_track_allocation_sites).
// NOTE(review): the return statements are among the lines elided from this
// listing (embedded numbers jump 12764 -> 12767 and 12767 -> 12773).
12759 bool AllocationSite::IsNestedSite() {
12760 ASSERT(FLAG_trace_track_allocation_sites);
12761 Object* current = GetHeap()->allocation_sites_list();
12762 while (current->IsAllocationSite()) {
12763 AllocationSite* current_site = AllocationSite::cast(current);
12764 if (current_site->nested_site() == this) {
12767 current = current_site->weak_next();
// Feeds an observed elements-kind transition back into an allocation site.
// If the site points at a JSArray literal boilerplate, the boilerplate
// itself is transitioned (size-capped); otherwise only the site's recorded
// ElementsKind is updated. Either way, code that depends on the site's
// transition info is deoptimized.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), so several closing braces/statements are not shown here.
12773 void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
12774 ElementsKind to_kind) {
12775 Isolate* isolate = site->GetIsolate();
12777 if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
12778 Handle<JSArray> transition_info =
12779 handle(JSArray::cast(site->transition_info()));
12780 ElementsKind kind = transition_info->GetElementsKind();
12781 // if kind is holey ensure that to_kind is as well.
12782 if (IsHoleyElementsKind(kind)) {
12783 to_kind = GetHoleyElementsKind(to_kind);
12785 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
12786 // If the array is huge, it's not likely to be defined in a local
12787 // function, so we shouldn't make new instances of it very often.
12788 uint32_t length = 0;
12789 CHECK(transition_info->length()->ToArrayIndex(&length));
12790 if (length <= kMaximumArrayBytesToPretransition) {
12791 if (FLAG_trace_track_allocation_sites) {
12792 bool is_nested = site->IsNestedSite();
12794 "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
12795 reinterpret_cast<void*>(*site),
12796 is_nested ? "(nested)" : "",
12797 ElementsKindToString(kind),
12798 ElementsKindToString(to_kind));
12800 JSObject::TransitionElementsKind(transition_info, to_kind);
12801 site->dependent_code()->DeoptimizeDependentCodeGroup(
12802 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Non-literal site: record the more general kind directly on the site.
12806 ElementsKind kind = site->GetElementsKind();
12807 // if kind is holey ensure that to_kind is as well.
12808 if (IsHoleyElementsKind(kind)) {
12809 to_kind = GetHoleyElementsKind(to_kind);
12811 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
12812 if (FLAG_trace_track_allocation_sites) {
12813 PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
12814 reinterpret_cast<void*>(*site),
12815 ElementsKindToString(kind),
12816 ElementsKindToString(to_kind));
12818 site->SetElementsKind(to_kind);
12819 site->dependent_code()->DeoptimizeDependentCodeGroup(
12820 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Registers |info|'s code as dependent on this allocation site for the given
// dependency group, updating the site's dependent-code list if Insert
// reallocated it, and recording the reverse dependency in the compilation
// info.
12827 void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
12829 CompilationInfo* info) {
12830 DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
12831 Handle<DependentCode> dep(site->dependent_code());
12832 Handle<DependentCode> codes =
12833 DependentCode::Insert(dep, group, info->object_wrapper());
12834 if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
12835 info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
// If |object| is a new-space JSArray with a valid AllocationMemento placed
// immediately after it, forwards the elements-kind transition feedback to
// the memento's AllocationSite. Bails out for non-arrays, old-space objects,
// and when no memento can be (safely) read behind the object.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), including early 'return' lines and closing braces.
12839 void JSObject::UpdateAllocationSite(Handle<JSObject> object,
12840 ElementsKind to_kind) {
12841 if (!object->IsJSArray()) return;
12843 Heap* heap = object->GetHeap();
12844 if (!heap->InNewSpace(*object)) return;
12846 Handle<AllocationSite> site;
12848 DisallowHeapAllocation no_allocation;
12849 // Check if there is potentially a memento behind the object. If
12850 // the last word of the momento is on another page we return
12852 Address object_address = object->address();
12853 Address memento_address = object_address + JSArray::kSize;
12854 Address last_memento_word_address = memento_address + kPointerSize;
12855 if (!NewSpacePage::OnSamePage(object_address,
12856 last_memento_word_address)) {
12860 // Either object is the last object in the new space, or there is another
12861 // object of at least word size (the header map word) following it, so
12862 // suffices to compare ptr and top here.
12863 Address top = heap->NewSpaceTop();
12864 ASSERT(memento_address == top ||
12865 memento_address + HeapObject::kHeaderSize <= top);
12866 if (memento_address == top) return;
12868 HeapObject* candidate = HeapObject::FromAddress(memento_address);
12869 if (candidate->map() != heap->allocation_memento_map()) return;
12871 AllocationMemento* memento = AllocationMemento::cast(candidate);
12872 if (!memento->IsValid()) return;
12874 // Walk through to the Allocation Site
12875 site = handle(memento->GetAllocationSite());
12877 AllocationSite::DigestTransitionFeedback(site, to_kind);
// Transitions |object|'s elements kind to |to_kind|. Map-only transitions
// (smi/object families, packed-double -> holey-double, or an empty backing
// store) just swap maps; smi->double and double->object transitions
// reallocate the backing store via the capacity/length helpers.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), including several 'return's and closing braces.
12881 void JSObject::TransitionElementsKind(Handle<JSObject> object,
12882 ElementsKind to_kind) {
12883 ElementsKind from_kind = object->map()->elements_kind();
// A holey source kind can only transition to another holey kind.
12885 if (IsFastHoleyElementsKind(from_kind)) {
12886 to_kind = GetHoleyElementsKind(to_kind);
12889 if (from_kind == to_kind) return;
12890 // Don't update the site if to_kind isn't fast
12891 if (IsFastElementsKind(to_kind)) {
12892 UpdateAllocationSite(object, to_kind);
12895 Isolate* isolate = object->GetIsolate();
12896 if (object->elements() == isolate->heap()->empty_fixed_array() ||
12897 (IsFastSmiOrObjectElementsKind(from_kind) &&
12898 IsFastSmiOrObjectElementsKind(to_kind)) ||
12899 (from_kind == FAST_DOUBLE_ELEMENTS &&
12900 to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
12901 ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
12902 // No change is needed to the elements() buffer, the transition
12903 // only requires a map change.
12904 Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
12905 MigrateToMap(object, new_map);
12906 if (FLAG_trace_elements_transitions) {
12907 Handle<FixedArrayBase> elms(object->elements());
12908 PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
12913 Handle<FixedArrayBase> elms(object->elements());
12914 uint32_t capacity = static_cast<uint32_t>(elms->length());
12915 uint32_t length = capacity;
12917 if (object->IsJSArray()) {
12918 Object* raw_length = Handle<JSArray>::cast(object)->length();
12919 if (raw_length->IsUndefined()) {
12920 // If length is undefined, then JSArray is being initialized and has no
12921 // elements, assume a length of zero.
12924 CHECK(raw_length->ToArrayIndex(&length));
// Backing-store-changing transitions: smi -> double and double -> object.
12928 if (IsFastSmiElementsKind(from_kind) &&
12929 IsFastDoubleElementsKind(to_kind)) {
12930 SetFastDoubleElementsCapacityAndLength(object, capacity, length);
12931 object->ValidateElements();
12935 if (IsFastDoubleElementsKind(from_kind) &&
12936 IsFastObjectElementsKind(to_kind)) {
12937 SetFastElementsCapacityAndLength(object, capacity, length,
12938 kDontAllowSmiElements);
12939 object->ValidateElements();
12943 // This method should never be called for any other case than the ones
// True when |from_kind| -> |to_kind| is a legal elements-kind transition:
// it must go toward a more general kind, and a holey kind may only
// transition to another holey kind (never back to packed).
// NOTE(review): the 'return false;' body of the first check is elided from
// this listing (embedded numbers jump 12953 -> 12957).
12950 bool Map::IsValidElementsTransition(ElementsKind from_kind,
12951 ElementsKind to_kind) {
12952 // Transitions can't go backwards.
12953 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
12957 // Transitions from HOLEY -> PACKED are not allowed.
12958 return !IsFastHoleyElementsKind(from_kind) ||
12959 IsFastHoleyElementsKind(to_kind);
// Handle-based wrapper around the MaybeObject* version below, run inside
// CALL_HEAP_FUNCTION_VOID so GC retries are handled.
12963 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
12965 Handle<Object> value) {
12966 CALL_HEAP_FUNCTION_VOID(array->GetIsolate(),
12967 array->JSArrayUpdateLengthFromIndex(index, *value));
// Grows the array's length to index + 1 when a store at |index| extends the
// array. 0xffffffff is excluded because index + 1 would exceed the maximum
// 32-bit array length. The new length is allocated as a heap number when it
// does not fit a Smi, which is why this can fail with a retry-after-GC.
// NOTE(review): the lines that actually store the new length are elided
// from this listing (embedded numbers jump 12981 -> 12989).
12971 MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
12973 uint32_t old_len = 0;
12974 CHECK(length()->ToArrayIndex(&old_len));
12975 // Check to see if we need to update the length. For now, we make
12976 // sure that the length stays within 32-bits (unsigned).
12977 if (index >= old_len && index != 0xffffffff) {
12979 { MaybeObject* maybe_len =
12980 GetHeap()->NumberFromDouble(static_cast<double>(index) + 1);
12981 if (!maybe_len->ToObject(&len)) return maybe_len;
// Reads element |index| through the object's indexed interceptor: the
// interceptor getter is tried first; on a miss the object's own elements
// are consulted; finally the lookup continues up the prototype chain.
// NOTE(review): some lines are elided from this listing (embedded line
// numbers jump), including closing braces.
12989 Handle<Object> JSObject::GetElementWithInterceptor(Handle<JSObject> object,
12990 Handle<Object> receiver,
12992 Isolate* isolate = object->GetIsolate();
12994 // Make sure that the top context does not change when doing
12995 // callbacks or interceptor calls.
12996 AssertNoContextChange ncc(isolate);
12998 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
12999 if (!interceptor->getter()->IsUndefined()) {
13000 v8::IndexedPropertyGetterCallback getter =
13001 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
13003 ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
13004 PropertyCallbackArguments
13005 args(isolate, interceptor->data(), *receiver, *object);
13006 v8::Handle<v8::Value> result = args.Call(getter, index);
13007 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
13008 if (!result.IsEmpty()) {
13009 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13010 result_internal->VerifyApiCallResultType();
13011 // Rebox handle before return.
13012 return Handle<Object>(*result_internal, isolate);
// Interceptor declined: fall back to the object's own elements accessor.
13016 ElementsAccessor* handler = object->GetElementsAccessor();
13017 Handle<Object> result = handler->Get(receiver, object, index);
13018 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
13019 if (!result->IsTheHole()) return result;
// Not found locally: continue the lookup on the prototype chain.
13021 Handle<Object> proto(object->GetPrototype(), isolate);
13022 if (proto->IsNull()) return isolate->factory()->undefined_value();
13023 return Object::GetElementWithReceiver(isolate, proto, receiver, index);
// True when the elements backing store is empty or more than half used —
// i.e. dense enough that a fast (array-based) representation pays off.
13027 bool JSObject::HasDenseElements() {
13030 GetElementsCapacityAndUsage(&capacity, &used);
13031 return (capacity == 0) || (used > (capacity / 2));
// Computes, per elements kind, the backing store's capacity and how many
// slots are actually occupied: JSArray length for packed kinds, a hole scan
// for holey kinds, dictionary element counts for slow kinds, and full usage
// for external/typed arrays.
// NOTE(review): this listing elides some original lines (the embedded line
// numbers jump), including 'break's and closing braces between cases.
13035 void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
13039 FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
13040 FixedArray* backing_store = NULL;
13041 switch (GetElementsKind()) {
13042 case SLOPPY_ARGUMENTS_ELEMENTS:
// The real backing store sits at slot 1 of the parameter map and may be
// either a dictionary or a plain FixedArray.
13043 backing_store_base =
13044 FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
13045 backing_store = FixedArray::cast(backing_store_base);
13046 if (backing_store->IsDictionary()) {
13047 SeededNumberDictionary* dictionary =
13048 SeededNumberDictionary::cast(backing_store);
13049 *capacity = dictionary->Capacity();
13050 *used = dictionary->NumberOfElements();
13054 case FAST_SMI_ELEMENTS:
13055 case FAST_ELEMENTS:
// Packed kinds on a JSArray: usage is exactly the array length.
13057 *capacity = backing_store_base->length();
13058 *used = Smi::cast(JSArray::cast(this)->length())->value();
13061 // Fall through if packing is not guaranteed.
13062 case FAST_HOLEY_SMI_ELEMENTS:
13063 case FAST_HOLEY_ELEMENTS:
// Holey kinds: count non-hole slots one by one.
13064 backing_store = FixedArray::cast(backing_store_base);
13065 *capacity = backing_store->length();
13066 for (int i = 0; i < *capacity; ++i) {
13067 if (!backing_store->get(i)->IsTheHole()) ++(*used);
13070 case DICTIONARY_ELEMENTS: {
13071 SeededNumberDictionary* dictionary = element_dictionary();
13072 *capacity = dictionary->Capacity();
13073 *used = dictionary->NumberOfElements();
13076 case FAST_DOUBLE_ELEMENTS:
13078 *capacity = backing_store_base->length();
13079 *used = Smi::cast(JSArray::cast(this)->length())->value();
13082 // Fall through if packing is not guaranteed.
13083 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13084 *capacity = elements()->length();
13085 if (*capacity == 0) break;
13086 FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
13087 for (int i = 0; i < *capacity; i++) {
13088 if (!elms->is_the_hole(i)) ++(*used);
// Generates the case labels for every external/fixed typed-array kind;
// all of them share the "100% used" handling below.
13093 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13094 case EXTERNAL_##TYPE##_ELEMENTS: \
13095 case TYPE##_ELEMENTS: \
13097 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13098 #undef TYPED_ARRAY_CASE
13100 // External arrays are considered 100% used.
13101 FixedArrayBase* external_array = FixedArrayBase::cast(elements());
13102 *capacity = external_array->length();
13103 *used = external_array->length();
// Predicts whether storing at |key| on a fast-elements object would force a
// conversion to dictionary (slow) elements: true when the index is more than
// kMaxGap past the current capacity, or when the grown capacity trips
// ShouldConvertToSlowElements.
// NOTE(review): the final 'return false' path is elided from this listing
// (embedded numbers jump 13118 -> 13125).
13110 bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
13112 if (HasFastElements() && key->ToArrayIndex(&index)) {
13113 Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
13114 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
13115 if (index >= capacity) {
13116 if ((index - capacity) >= kMaxGap) return true;
13117 uint32_t new_capacity = NewElementsCapacity(index + 1);
13118 return ShouldConvertToSlowElements(new_capacity);
// Decides whether growing a fast backing store to |new_capacity| should
// instead convert to dictionary elements. Small capacities (with a larger
// allowance in new space) stay fast unconditionally; otherwise convert when
// the fast store would use at least three times the space of an equivalent
// dictionary.
13125 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
13126 STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
13127 kMaxUncheckedFastElementsLength);
13128 if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
13129 (new_capacity <= kMaxUncheckedFastElementsLength &&
13130 GetHeap()->InNewSpace(this))) {
13133 // If the fast-case backing storage takes up roughly three times as
13134 // much space (in machine words) as a dictionary backing storage
13135 // would, the object should have slow elements.
13136 int old_capacity = 0;
13137 int used_elements = 0;
13138 GetElementsCapacityAndUsage(&old_capacity, &used_elements);
13139 int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
13140 SeededNumberDictionary::kEntrySize;
13141 return 3 * dictionary_size <= new_capacity;
// Decides whether a dictionary-elements object should convert back to fast
// elements: requires dense elements, no access checks, no observation, no
// requires_slow_elements flag, and that the dictionary occupies at least
// half the space an equivalent fast backing store would need.
13145 bool JSObject::ShouldConvertToFastElements() {
13146 ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
13147 // If the elements are sparse, we should not go back to fast case.
13148 if (!HasDenseElements()) return false;
13149 // An object requiring access checks is never allowed to have fast
13150 // elements. If it had fast elements we would skip security checks.
13151 if (IsAccessCheckNeeded()) return false;
13152 // Observed objects may not go to fast mode because they rely on map checks,
13153 // and for fast element accesses we sometimes check element kinds only.
13154 if (map()->is_observed()) return false;
// Locate the dictionary: slot 1 of the parameter map for sloppy-arguments
// objects, the elements array itself otherwise.
13156 FixedArray* elements = FixedArray::cast(this->elements());
13157 SeededNumberDictionary* dictionary = NULL;
13158 if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
13159 dictionary = SeededNumberDictionary::cast(elements->get(1));
13161 dictionary = SeededNumberDictionary::cast(elements);
13163 // If an element has been added at a very high index in the elements
13164 // dictionary, we cannot go back to fast case.
13165 if (dictionary->requires_slow_elements()) return false;
13166 // If the dictionary backing storage takes up roughly half as much
13167 // space (in machine words) as a fast-case backing storage would,
13168 // the object should have fast elements.
13169 uint32_t array_size = 0;
13171 CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
13173 array_size = dictionary->max_number_key();
13175 uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
13176 SeededNumberDictionary::kEntrySize;
13177 return 2 * dictionary_size >= array_size;
// Returns true when every dictionary value is a number and at least one is
// a non-Smi double (so FAST_DOUBLE_ELEMENTS is the right target kind).
// On return, *has_smi_only_elements tells the caller whether all numeric
// values were Smis instead.
// NOTE(review): closing braces around 13196-13198 and the fall-through for
// !FLAG_unbox_double_arrays are in numbering gaps of this dump.
13181 bool JSObject::ShouldConvertToFastDoubleElements(
13182 bool* has_smi_only_elements) {
13183 *has_smi_only_elements = false;
13184 if (HasSloppyArgumentsElements()) return false;
13185 if (FLAG_unbox_double_arrays) {
13186 ASSERT(HasDictionaryElements());
13187 SeededNumberDictionary* dictionary = element_dictionary();
13188 bool found_double = false;
13189 for (int i = 0; i < dictionary->Capacity(); i++) {
13190 Object* key = dictionary->KeyAt(i);
13191 if (key->IsNumber()) {
13192 Object* value = dictionary->ValueAt(i);
// Any non-numeric value rules out a double-only backing store.
13193 if (!value->IsNumber()) return false;
13194 if (!value->IsSmi()) {
13195 found_double = true;
13199 *has_smi_only_elements = !found_double;
13200 return found_double;
13207 // Certain compilers request function template instantiation when they
13208 // see the definition of the other template functions in the
13209 // class. This requires us to have the template functions put
13210 // together, so even though this function belongs in objects-debug.cc,
13211 // we keep it here instead to satisfy certain compilers.
13212 #ifdef OBJECT_PRINT
// Debug helper: dumps every live key/value pair of the dictionary to |out|.
// NOTE(review): the value-separator line (~13225), closing braces, and the
// matching #endif are outside the visible numbering of this dump.
13213 template<typename Shape, typename Key>
13214 void Dictionary<Shape, Key>::Print(FILE* out) {
13215 int capacity = HashTable<Shape, Key>::Capacity();
13216 for (int i = 0; i < capacity; i++) {
13217 Object* k = HashTable<Shape, Key>::KeyAt(i);
13218 if (HashTable<Shape, Key>::IsKey(k)) {
13220 if (k->IsString()) {
13221 String::cast(k)->StringPrint(out);
13223 k->ShortPrint(out);
13226 ValueAt(i)->ShortPrint(out);
// Copies all values of live dictionary entries into |elements|, densely
// packed from index 0; asserts the destination is filled exactly.
// NOTE(review): the declaration/initialization of |pos| (line 13236) falls
// in a numbering gap of this dump.
13234 template<typename Shape, typename Key>
13235 void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) {
13237 int capacity = HashTable<Shape, Key>::Capacity();
// No allocation may happen while the raw write-barrier mode is cached.
13238 DisallowHeapAllocation no_gc;
13239 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13240 for (int i = 0; i < capacity; i++) {
13241 Object* k = Dictionary<Shape, Key>::KeyAt(i);
13242 if (Dictionary<Shape, Key>::IsKey(k)) {
13243 elements->set(pos++, ValueAt(i), mode);
13246 ASSERT(pos == elements->length());
// Fetches the named-property interceptor recorded on this object's
// constructor's API function data. Only valid when the map says a named
// interceptor exists.
// NOTE(review): the assignment target line (13254, presumably
// "Object* result =") is in a numbering gap of this dump.
13250 InterceptorInfo* JSObject::GetNamedInterceptor() {
13251 ASSERT(map()->has_named_interceptor());
13252 JSFunction* constructor = JSFunction::cast(map()->constructor());
13253 ASSERT(constructor->shared()->IsApiFunction());
13255 constructor->shared()->get_api_func_data()->named_property_handler();
13256 return InterceptorInfo::cast(result);
// Same as above, but for the indexed-property interceptor (line 13264 with
// the assignment target is likewise missing from the dump).
13260 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13261 ASSERT(map()->has_indexed_interceptor());
13262 JSFunction* constructor = JSFunction::cast(map()->constructor());
13263 ASSERT(constructor->shared()->IsApiFunction());
13265 constructor->shared()->get_api_func_data()->indexed_property_handler();
13266 return InterceptorInfo::cast(result);
// Looks up |name| on |object| ignoring any interceptor: first a real local
// property, then the prototype chain. *attributes reports the result
// (ABSENT when the chain ends at null).
// NOTE(review): the Handle<Name> name parameter line (13273) and the else/
// closing lines after 13281 fall in numbering gaps of this dump.
13270 Handle<Object> JSObject::GetPropertyPostInterceptor(
13271 Handle<JSObject> object,
13272 Handle<Object> receiver,
13274 PropertyAttributes* attributes) {
13275 // Check local property in holder, ignore interceptor.
13276 Isolate* isolate = object->GetIsolate();
13277 LookupResult lookup(isolate);
13278 object->LocalLookupRealNamedProperty(*name, &lookup);
13279 Handle<Object> result;
13280 if (lookup.IsFound()) {
13281 result = GetProperty(object, receiver, &lookup, name, attributes);
13283 // Continue searching via the prototype chain.
13284 Handle<Object> prototype(object->GetPrototype(), isolate);
13285 *attributes = ABSENT;
13286 if (prototype->IsNull()) return isolate->factory()->undefined_value();
13287 result = GetPropertyWithReceiver(prototype, receiver, name, attributes);
// Raw (MaybeObject) variant: looks up a real local property while skipping
// the interceptor; returns undefined when nothing is found. Does not walk
// the prototype chain.
// NOTE(review): the receiver/name parameter lines (13294-13295) are in a
// numbering gap of this dump.
13293 MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
13296 PropertyAttributes* attributes) {
13297 // Check local property in holder, ignore interceptor.
13298 LookupResult result(GetIsolate());
13299 LocalLookupRealNamedProperty(name, &result);
13300 if (result.IsFound()) {
13301 return GetProperty(receiver, &result, name, attributes);
13303 return GetHeap()->undefined_value();
// Property read through the embedder-installed named interceptor. If the
// interceptor's getter yields a value, it is reboxed into the current
// HandleScope and returned with attributes NONE; otherwise falls through
// to GetPropertyPostInterceptor.
// NOTE(review): the Handle<Name> name parameter line (13310) and closing
// braces near 13336-13338 are in numbering gaps of this dump.
13307 Handle<Object> JSObject::GetPropertyWithInterceptor(
13308 Handle<JSObject> object,
13309 Handle<Object> receiver,
13311 PropertyAttributes* attributes) {
13312 Isolate* isolate = object->GetIsolate();
13314 // TODO(rossberg): Support symbols in the API.
13315 if (name->IsSymbol()) return isolate->factory()->undefined_value();
13317 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor(), isolate);
13318 Handle<String> name_string = Handle<String>::cast(name);
13320 if (!interceptor->getter()->IsUndefined()) {
13321 v8::NamedPropertyGetterCallback getter =
13322 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
13324 ApiNamedPropertyAccess("interceptor-named-get", *object, *name));
13325 PropertyCallbackArguments
13326 args(isolate, interceptor->data(), *receiver, *object);
13327 v8::Handle<v8::Value> result =
13328 args.Call(getter, v8::Utils::ToLocal(name_string));
// The callback may have scheduled an exception; propagate it before using
// the result.
13329 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
13330 if (!result.IsEmpty()) {
13331 *attributes = NONE;
13332 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13333 result_internal->VerifyApiCallResultType();
13334 // Rebox handle to escape this scope.
13335 return handle(*result_internal, isolate);
13339 return GetPropertyPostInterceptor(object, receiver, name, attributes);
// True when |object| has a real (non-interceptor) local property |key|.
// Performs the embedder access check first; on failure the failed-access
// path is reported (the return in that branch, ~13351, is in a numbering
// gap of this dump).
13343 bool JSObject::HasRealNamedProperty(Handle<JSObject> object,
13344 Handle<Name> key) {
13345 Isolate* isolate = object->GetIsolate();
// SealHandleScope: this path must not allocate new handles.
13346 SealHandleScope shs(isolate);
13347 // Check access rights if needed.
13348 if (object->IsAccessCheckNeeded()) {
13349 if (!isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS)) {
13350 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
13355 LookupResult result(isolate);
13356 object->LocalLookupRealNamedProperty(*key, &result);
13357 return result.IsFound() && !result.IsInterceptor();
// True when |object| has a real element at |index|. Performs the access
// check, unwraps a global proxy to its prototype, then tests element
// attributes without consulting interceptors.
// NOTE(review): the return in the failed-access branch (~13368) and some
// closing braces fall in numbering gaps of this dump.
13361 bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) {
13362 Isolate* isolate = object->GetIsolate();
13363 HandleScope scope(isolate);
13364 // Check access rights if needed.
13365 if (object->IsAccessCheckNeeded()) {
13366 if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS)) {
13367 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
13372 if (object->IsJSGlobalProxy()) {
13373 HandleScope scope(isolate);
13374 Handle<Object> proto(object->GetPrototype(), isolate);
13375 if (proto->IsNull()) return false;
13376 ASSERT(proto->IsJSGlobalObject());
// Recurse on the hidden global object behind the proxy.
13377 return HasRealElementProperty(Handle<JSObject>::cast(proto), index);
13380 return GetElementAttributeWithoutInterceptor(
13381 object, object, index, false) != ABSENT;
// True when the real local lookup of |key| resolves to a callbacks-style
// property (accessor/API callback). Mirrors HasRealNamedProperty but tests
// IsPropertyCallbacks instead.
// NOTE(review): the return in the failed-access branch (~13393) is in a
// numbering gap of this dump.
13385 bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
13386 Handle<Name> key) {
13387 Isolate* isolate = object->GetIsolate();
13388 SealHandleScope shs(isolate);
13389 // Check access rights if needed.
13390 if (object->IsAccessCheckNeeded()) {
13391 if (!isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS)) {
13392 isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
13397 LookupResult result(isolate);
13398 object->LocalLookupRealNamedProperty(*key, &result);
13399 return result.IsPropertyCallbacks();
// Counts local properties matching |filter|. Fast-properties objects use
// the map's descriptor counts (with the enum-cache shortcut for DONT_ENUM
// filtering); dictionary-mode objects defer to the property dictionary.
13403 int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
13404 if (HasFastProperties()) {
13405 Map* map = this->map();
13406 if (filter == NONE) return map->NumberOfOwnDescriptors();
13407 if (filter & DONT_ENUM) {
13408 int result = map->EnumLength();
// A valid cached enum length answers the DONT_ENUM query directly.
13409 if (result != kInvalidEnumCacheSentinel) return result;
13411 return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
13413 return property_dictionary()->NumberOfElementsFilterAttributes(filter);
// Swaps entries i and j in this array and, when |numbers| is a distinct
// array, the corresponding Smi entries in |numbers| as well, keeping the
// (content, number) pairs aligned.
// NOTE(review): lines 13419-13420 (the swap of this array's own slots) are
// in a numbering gap of this dump.
13417 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
13418 Object* temp = get(i);
13421 if (this != numbers) {
13422 temp = numbers->get(i);
13423 numbers->set(i, Smi::cast(numbers->get(j)));
13424 numbers->set(j, Smi::cast(temp));
// Insertion sort of the first |len| (content, number) pairs, ordered by
// the uint32 value of the numbers. Used for small inputs by SortPairs.
// (Loop header pieces at 13431/13433-13434 are missing from the dump.)
13429 static void InsertionSortPairs(FixedArray* content,
13430 FixedArray* numbers,
13432 for (int i = 1; i < len; i++) {
13435 (NumberToUint32(numbers->get(j - 1)) >
13436 NumberToUint32(numbers->get(j)))) {
13437 content->SwapPairs(numbers, j - 1, j);
// In-place heap sort of (content, number) pairs by the uint32 value of the
// numbers: bottom-up max-heap construction followed by repeated extraction
// with sift-down. Used by FixedArray::SortPairs for larger inputs.
// NOTE(review): some loop headers / else / closing lines (13457-13459,
// 13470, 13480, 13484+) are in numbering gaps of this dump.
13444 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
13445 // In-place heap sort.
13446 ASSERT(content->length() == numbers->length());
13448 // Bottom-up max-heap construction.
13449 for (int i = 1; i < len; ++i) {
13450 int child_index = i;
13451 while (child_index > 0) {
13452 int parent_index = ((child_index + 1) >> 1) - 1;
13453 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13454 uint32_t child_value = NumberToUint32(numbers->get(child_index));
13455 if (parent_value < child_value) {
13456 content->SwapPairs(numbers, parent_index, child_index);
13460 child_index = parent_index;
13464 // Extract elements and create sorted array.
13465 for (int i = len - 1; i > 0; --i) {
13466 // Put max element at the back of the array.
13467 content->SwapPairs(numbers, 0, i);
13468 // Sift down the new top element.
13469 int parent_index = 0;
13471 int child_index = ((parent_index + 1) << 1) - 1;
13472 if (child_index >= i) break;
13473 uint32_t child1_value = NumberToUint32(numbers->get(child_index));
13474 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
13475 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
// Sift toward the larger child; stop when the heap property holds.
13476 if (child_index + 1 >= i || child1_value > child2_value) {
13477 if (parent_value > child1_value) break;
13478 content->SwapPairs(numbers, parent_index, child_index);
13479 parent_index = child_index;
13481 if (parent_value > child2_value) break;
13482 content->SwapPairs(numbers, parent_index, child_index + 1);
13483 parent_index = child_index + 1;
13490 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
// Strategy: insertion sort for small arrays; a linear-time in-place cycle
// sort when the numbers form a contiguous range; heap sort otherwise.
// NOTE(review): the small-size threshold test (~13494), the |i|/|p|
// declarations, and several closing braces are in numbering gaps of this
// dump.
13491 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13492 ASSERT(this->length() == numbers->length());
13493 // For small arrays, simply use insertion sort.
13495 InsertionSortPairs(this, numbers, len);
13498 // Check the range of indices.
13499 uint32_t min_index = NumberToUint32(numbers->get(0));
13500 uint32_t max_index = min_index;
13502 for (i = 1; i < len; i++) {
13503 if (NumberToUint32(numbers->get(i)) < min_index) {
13504 min_index = NumberToUint32(numbers->get(i));
13505 } else if (NumberToUint32(numbers->get(i)) > max_index) {
13506 max_index = NumberToUint32(numbers->get(i));
13509 if (max_index - min_index + 1 == len) {
13510 // Indices form a contiguous range, unless there are duplicates.
13511 // Do an in-place linear time sort assuming distinct numbers, but
13512 // avoid hanging in case they are not.
13513 for (i = 0; i < len; i++) {
13516 // While the current element at i is not at its correct position p,
13517 // swap the elements at these two positions.
13518 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13520 SwapPairs(numbers, i, p);
13524 HeapSortPairs(this, numbers, len);
13530 // Fill in the names of local properties into the supplied storage. The main
13531 // purpose of this function is to provide reflection information for the object
// (the remainder of this header comment, line 13532, is in a numbering gap).
13533 void JSObject::GetLocalPropertyNames(
13534 FixedArray* storage, int index, PropertyAttributes filter) {
13535 ASSERT(storage->length() >= (NumberOfLocalProperties(filter) - index));
13536 if (HasFastProperties()) {
13537 int real_size = map()->NumberOfOwnDescriptors();
13538 DescriptorArray* descs = map()->instance_descriptors();
13539 for (int i = 0; i < real_size; i++) {
// Skip attributes excluded by |filter| and keys rejected by FilterKey.
13540 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
13541 !FilterKey(descs->GetKey(i), filter)) {
13542 storage->set(index++, descs->GetKey(i));
// Dictionary-mode fallback (the CopyKeysTo argument lines 13547-13548 are
// in numbering gaps of this dump).
13546 property_dictionary()->CopyKeysTo(storage,
13549 NameDictionary::UNSORTED);
// Counts local elements matching |filter| (NULL storage means count only).
13554 int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
13555 return GetLocalElementKeys(NULL, filter);
// Counts enumerable elements; fast path reads the length directly for
// plain fast-object-elements receivers, otherwise filters on DONT_ENUM.
// NOTE(review): the hole-free fast-return around 13567 is in a numbering
// gap of this dump.
13559 int JSObject::NumberOfEnumElements() {
13560 // Fast case for objects with no elements.
13561 if (!IsJSValue() && HasFastObjectElements()) {
13562 uint32_t length = IsJSArray() ?
13563 static_cast<uint32_t>(
13564 Smi::cast(JSArray::cast(this)->length())->value()) :
13565 static_cast<uint32_t>(FixedArray::cast(elements())->length());
13566 if (length == 0) return 0;
13568 // Compute the number of enumerable elements.
13569 return NumberOfLocalElements(static_cast<PropertyAttributes>(DONT_ENUM));
// Collects the element keys of this object into |storage| (as Smis) and
// returns their count; when |storage| is NULL only counts. Handles every
// elements kind plus the string characters of a String-wrapping JSValue.
// NOTE(review): counter init (~13575), several ++counter/break/closing
// lines, and some argument lines fall in numbering gaps of this dump.
13573 int JSObject::GetLocalElementKeys(FixedArray* storage,
13574 PropertyAttributes filter) {
13576 switch (GetElementsKind()) {
13577 case FAST_SMI_ELEMENTS:
13578 case FAST_ELEMENTS:
13579 case FAST_HOLEY_SMI_ELEMENTS:
13580 case FAST_HOLEY_ELEMENTS: {
13581 int length = IsJSArray() ?
13582 Smi::cast(JSArray::cast(this)->length())->value() :
13583 FixedArray::cast(elements())->length();
13584 for (int i = 0; i < length; i++) {
// Holes are skipped; only materialized indices are reported.
13585 if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
13586 if (storage != NULL) {
13587 storage->set(counter, Smi::FromInt(i));
13592 ASSERT(!storage || storage->length() >= counter);
13595 case FAST_DOUBLE_ELEMENTS:
13596 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13597 int length = IsJSArray() ?
13598 Smi::cast(JSArray::cast(this)->length())->value() :
13599 FixedDoubleArray::cast(elements())->length();
13600 for (int i = 0; i < length; i++) {
13601 if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
13602 if (storage != NULL) {
13603 storage->set(counter, Smi::FromInt(i));
13608 ASSERT(!storage || storage->length() >= counter);
// Typed arrays / external arrays: every index 0..length-1 is present.
13612 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13613 case EXTERNAL_##TYPE##_ELEMENTS: \
13614 case TYPE##_ELEMENTS: \
13616 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13617 #undef TYPED_ARRAY_CASE
13619 int length = FixedArrayBase::cast(elements())->length();
13620 while (counter < length) {
13621 if (storage != NULL) {
13622 storage->set(counter, Smi::FromInt(counter));
13626 ASSERT(!storage || storage->length() >= counter);
13630 case DICTIONARY_ELEMENTS: {
13631 if (storage != NULL) {
13632 element_dictionary()->CopyKeysTo(storage,
13634 SeededNumberDictionary::SORTED);
13636 counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
13639 case SLOPPY_ARGUMENTS_ELEMENTS: {
13640 FixedArray* parameter_map = FixedArray::cast(elements());
// parameter_map layout: [context, backing store, mapped entries...].
13641 int mapped_length = parameter_map->length() - 2;
13642 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
13643 if (arguments->IsDictionary()) {
13644 // Copy the keys from arguments first, because Dictionary::CopyKeysTo
13645 // will insert in storage starting at index 0.
13646 SeededNumberDictionary* dictionary =
13647 SeededNumberDictionary::cast(arguments);
13648 if (storage != NULL) {
13649 dictionary->CopyKeysTo(
13650 storage, filter, SeededNumberDictionary::UNSORTED);
13652 counter += dictionary->NumberOfElementsFilterAttributes(filter);
13653 for (int i = 0; i < mapped_length; ++i) {
13654 if (!parameter_map->get(i + 2)->IsTheHole()) {
13655 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// Keys were appended unsorted; restore ascending order in place.
13659 if (storage != NULL) storage->SortPairs(storage, counter);
13662 int backing_length = arguments->length();
13664 for (; i < mapped_length; ++i) {
13665 if (!parameter_map->get(i + 2)->IsTheHole()) {
13666 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13668 } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
13669 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13673 for (; i < backing_length; ++i) {
13674 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// A String-wrapping JSValue also exposes one element per character.
13682 if (this->IsJSValue()) {
13683 Object* val = JSValue::cast(this)->value();
13684 if (val->IsString()) {
13685 String* str = String::cast(val);
13687 for (int i = 0; i < str->length(); i++) {
13688 storage->set(counter + i, Smi::FromInt(i));
13691 counter += str->length();
13694 ASSERT(!storage || storage->length() == counter);
// Convenience wrapper: collects only enumerable element keys.
13699 int JSObject::GetEnumElementKeys(FixedArray* storage) {
13700 return GetLocalElementKeys(storage,
13701 static_cast<PropertyAttributes>(DONT_ENUM));
13705 // StringKey simply carries a string object as key.
// The hash is computed once at construction so IsMatch can reject
// non-matching entries cheaply before doing a full string comparison.
// NOTE(review): the string_ member initializer (~13709), private section
// and closing brace are in numbering gaps of this dump.
13706 class StringKey : public HashTableKey {
13708 explicit StringKey(String* string) :
13710 hash_(HashForObject(string)) { }
13712 bool IsMatch(Object* string) {
13713 // We know that all entries in a hash table had their hash keys created.
13714 // Use that knowledge to have fast failure.
13715 if (hash_ != HashForObject(string)) {
13718 return string_->Equals(String::cast(string));
13721 uint32_t Hash() { return hash_; }
13723 uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }
// The key string itself is stored in the table; no allocation needed.
13725 Object* AsObject(Heap* heap) { return string_; }
13732 // StringSharedKeys are used as keys in the eval cache.
// A cache entry is a 4-slot FixedArray: [shared function info, source
// string, strict mode (Smi), scope position (Smi)]. The hash mixes the
// source hash with the script source hash, strict-mode bit, and scope
// position so entries can survive GC (no raw pointers in the hash).
// NOTE(review): member initializers (~13739-13740), Hash()'s signature
// (~13777-13778), and a few closing lines are in numbering gaps.
13733 class StringSharedKey : public HashTableKey {
13735 StringSharedKey(String* source,
13736 SharedFunctionInfo* shared,
13737 StrictMode strict_mode,
13738 int scope_position)
13741 strict_mode_(strict_mode),
13742 scope_position_(scope_position) { }
13744 bool IsMatch(Object* other) {
13745 if (!other->IsFixedArray()) return false;
13746 FixedArray* other_array = FixedArray::cast(other);
13747 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13748 if (shared != shared_) return false;
13749 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13750 ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13751 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13752 if (strict_mode != strict_mode_) return false;
13753 int scope_position = Smi::cast(other_array->get(3))->value();
13754 if (scope_position != scope_position_) return false;
13755 String* source = String::cast(other_array->get(1));
13756 return source->Equals(source_);
13759 static uint32_t StringSharedHashHelper(String* source,
13760 SharedFunctionInfo* shared,
13761 StrictMode strict_mode,
13762 int scope_position) {
13763 uint32_t hash = source->Hash();
13764 if (shared->HasSourceCode()) {
13765 // Instead of using the SharedFunctionInfo pointer in the hash
13766 // code computation, we use a combination of the hash of the
13767 // script source code and the start position of the calling scope.
13768 // We do this to ensure that the cache entries can survive garbage
13770 Script* script = Script::cast(shared->script());
13771 hash ^= String::cast(script->source())->Hash();
13772 if (strict_mode == STRICT) hash ^= 0x8000;
13773 hash += scope_position;
13779 return StringSharedHashHelper(
13780 source_, shared_, strict_mode_, scope_position_);
13783 uint32_t HashForObject(Object* obj) {
13784 FixedArray* other_array = FixedArray::cast(obj);
13785 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13786 String* source = String::cast(other_array->get(1));
13787 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13788 ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13789 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13790 int scope_position = Smi::cast(other_array->get(3))->value();
13791 return StringSharedHashHelper(
13792 source, shared, strict_mode, scope_position);
// Materializes the 4-slot FixedArray entry for insertion into the table.
13795 MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
13797 { MaybeObject* maybe_obj = heap->AllocateFixedArray(4);
13798 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
13800 FixedArray* other_array = FixedArray::cast(obj);
13801 other_array->set(0, shared_);
13802 other_array->set(1, source_);
13803 other_array->set(2, Smi::FromInt(strict_mode_));
13804 other_array->set(3, Smi::FromInt(scope_position_));
13805 return other_array;
13810 SharedFunctionInfo* shared_;
13811 StrictMode strict_mode_;
13812 int scope_position_;
13816 // RegExpKey carries the source and flags of a regular expression as key.
// Stored entries are FixedArrays holding at least the regexp source and a
// Smi-encoded flags word; the hash is source hash plus flags value.
// NOTE(review): member initializer for string_ (~13820), the UNREACHABLE
// body of AsObject, private members, and closing braces are in numbering
// gaps of this dump.
13817 class RegExpKey : public HashTableKey {
13819 RegExpKey(String* string, JSRegExp::Flags flags)
13821 flags_(Smi::FromInt(flags.value())) { }
13823 // Rather than storing the key in the hash table, a pointer to the
13824 // stored value is stored where the key should be. IsMatch then
13825 // compares the search key to the found object, rather than comparing
13827 bool IsMatch(Object* obj) {
13828 FixedArray* val = FixedArray::cast(obj);
13829 return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
13830 && (flags_ == val->get(JSRegExp::kFlagsIndex));
13833 uint32_t Hash() { return RegExpHash(string_, flags_); }
13835 Object* AsObject(Heap* heap) {
13836 // Plain hash maps, which is where regexp keys are used, don't
13837 // use this function.
13842 uint32_t HashForObject(Object* obj) {
13843 FixedArray* val = FixedArray::cast(obj);
13844 return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
13845 Smi::cast(val->get(JSRegExp::kFlagsIndex)));
13848 static uint32_t RegExpHash(String* string, Smi* flags) {
13849 return string->Hash() + flags->value();
// Internalized-string allocation for a whole one-byte key string; Hash()
// populates hash_field_ lazily on first use.
13857 MaybeObject* OneByteStringKey::AsObject(Heap* heap) {
13858 if (hash_field_ == 0) Hash();
13859 return heap->AllocateOneByteInternalizedString(string_, hash_field_);
// Two-byte counterpart of the above.
13863 MaybeObject* TwoByteStringKey::AsObject(Heap* heap) {
13864 if (hash_field_ == 0) Hash();
13865 return heap->AllocateTwoByteInternalizedString(string_, hash_field_);
// SubStringKey specializations: expose the raw character pointer of the
// underlying (sequential or external) string for each width.
// NOTE(review): the template<> specialization headers (~13869 etc.) and
// closing braces fall in numbering gaps of this dump.
13870 const uint8_t* SubStringKey<uint8_t>::GetChars() {
13871 return string_->IsSeqOneByteString()
13872 ? SeqOneByteString::cast(*string_)->GetChars()
13873 : ExternalAsciiString::cast(*string_)->GetChars();
13878 const uint16_t* SubStringKey<uint16_t>::GetChars() {
13879 return string_->IsSeqTwoByteString()
13880 ? SeqTwoByteString::cast(*string_)->GetChars()
13881 : ExternalTwoByteString::cast(*string_)->GetChars();
// Allocates an internalized copy of the [from_, from_ + length_) slice.
13886 MaybeObject* SubStringKey<uint8_t>::AsObject(Heap* heap) {
13887 if (hash_field_ == 0) Hash();
13888 Vector<const uint8_t> chars(GetChars() + from_, length_);
13889 return heap->AllocateOneByteInternalizedString(chars, hash_field_);
13894 MaybeObject* SubStringKey<uint16_t>::AsObject(
13896 if (hash_field_ == 0) Hash();
13897 Vector<const uint16_t> chars(GetChars() + from_, length_);
13898 return heap->AllocateTwoByteInternalizedString(chars, hash_field_);
// Equality against a stored string compares the slice character-wise.
13903 bool SubStringKey<uint8_t>::IsMatch(Object* string) {
13904 Vector<const uint8_t> chars(GetChars() + from_, length_);
13905 return String::cast(string)->IsOneByteEqualTo(chars);
13910 bool SubStringKey<uint16_t>::IsMatch(Object* string) {
13911 Vector<const uint16_t> chars(GetChars() + from_, length_);
13912 return String::cast(string)->IsTwoByteEqualTo(chars);
// Explicit instantiations so the out-of-line definitions above link.
13916 template class SubStringKey<uint8_t>;
13917 template class SubStringKey<uint16_t>;
13920 // InternalizedStringKey carries a string/internalized-string object as key.
// AsObject tries to internalize in place: flatten, then retag the string
// with its internalized map when one exists; otherwise allocate a fresh
// internalized copy.
// NOTE(review): the null-map check / return around 13942 and 13945-13946,
// the string_ member declaration and closing brace are in numbering gaps.
13921 class InternalizedStringKey : public HashTableKey {
13923 explicit InternalizedStringKey(String* string)
13924 : string_(string) { }
13926 bool IsMatch(Object* string) {
13927 return String::cast(string)->Equals(string_);
13930 uint32_t Hash() { return string_->Hash(); }
13932 uint32_t HashForObject(Object* other) {
13933 return String::cast(other)->Hash();
13936 MaybeObject* AsObject(Heap* heap) {
13937 // Attempt to flatten the string, so that internalized strings will most
13938 // often be flat strings.
13939 string_ = string_->TryFlattenGetString();
13940 // Internalize the string if possible.
13941 Map* map = heap->InternalizedStringMapForString(string_);
13943 string_->set_map_no_write_barrier(map);
13944 ASSERT(string_->IsInternalizedString());
13947 // Otherwise allocate a new internalized string.
13948 return heap->AllocateInternalizedStringImpl(
13949 string_, string_->length(), string_->hash_field());
13952 static uint32_t StringHash(Object* obj) {
13953 return String::cast(obj)->Hash();
// GC visitation: the prefix is everything before the first entry slot.
13960 template<typename Shape, typename Key>
13961 void HashTable<Shape, Key>::IteratePrefix(ObjectVisitor* v) {
13962 IteratePointers(v, 0, kElementsStartOffset);
// GC visitation of the entry slots themselves, from the elements start to
// the end of the array payload.
13966 template<typename Shape, typename Key>
13967 void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) {
13969 kElementsStartOffset,
13970 kHeaderSize + length() * kPointerSize);
// Allocates a hash table with room for |at_least_space_for| elements (or
// exactly that many entries when USE_CUSTOM_MINIMUM_CAPACITY, which must
// then be a power of two). Aborts the process on an oversized request.
// NOTE(review): the early bail-out after FatalProcessOutOfMemory (~13985)
// and the final return are in numbering gaps of this dump.
13974 template<typename Shape, typename Key>
13975 MaybeObject* HashTable<Shape, Key>::Allocate(Heap* heap,
13976 int at_least_space_for,
13977 MinimumCapacity capacity_option,
13978 PretenureFlag pretenure) {
13979 ASSERT(!capacity_option || IS_POWER_OF_TWO(at_least_space_for));
13980 int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
13981 ? at_least_space_for
13982 : ComputeCapacity(at_least_space_for);
13983 if (capacity > HashTable::kMaxCapacity) {
13984 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
13988 { MaybeObject* maybe_obj =
13989 heap-> AllocateHashTable(EntryToIndex(capacity), pretenure);
13990 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
// Fresh table bookkeeping: empty, no deleted entries, given capacity.
13992 HashTable::cast(obj)->SetNumberOfElements(0);
13993 HashTable::cast(obj)->SetNumberOfDeletedElements(0);
13994 HashTable::cast(obj)->SetCapacity(capacity);
13999 // Find entry for key otherwise return kNotFound.
// Specialized probe loop for unique names (see the numbered comments
// below); non-unique keys fall back to the generic HashTable lookup.
// NOTE(review): the probe-loop header (~14018-14019), the key-replacement
// statements after 14028, and the kNotFound return are in numbering gaps.
14000 int NameDictionary::FindEntry(Name* key) {
14001 if (!key->IsUniqueName()) {
14002 return HashTable<NameDictionaryShape, Name*>::FindEntry(key);
14005 // Optimized for unique names. Knowledge of the key type allows:
14006 // 1. Move the check if the key is unique out of the loop.
14007 // 2. Avoid comparing hash codes in unique-to-unique comparison.
14008 // 3. Detect a case when a dictionary key is not unique but the key is.
14009 // In case of positive result the dictionary key may be replaced by the
14010 // internalized string with minimal performance penalty. It gives a chance
14011 // to perform further lookups in code stubs (and significant performance
14012 // boost a certain style of code).
14014 // EnsureCapacity will guarantee the hash table is never full.
14015 uint32_t capacity = Capacity();
14016 uint32_t entry = FirstProbe(key->Hash(), capacity);
14017 uint32_t count = 1;
14020 int index = EntryToIndex(entry);
14021 Object* element = get(index);
14022 if (element->IsUndefined()) break; // Empty entry.
// Pointer equality suffices for unique-to-unique comparison.
14023 if (key == element) return entry;
14024 if (!element->IsUniqueName() &&
14025 !element->IsTheHole() &&
14026 Name::cast(element)->Equals(key)) {
14027 // Replace a key that is a non-internalized string by the equivalent
14028 // internalized string for faster further lookups.
14032 ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(key));
14033 entry = NextProbe(entry, count++, capacity);
// Copies all live entries of this table into |new_table|, re-probing each
// entry under the new capacity; prefix slots are copied verbatim. Returns
// the populated new table (final return ~14069 is in a numbering gap).
14039 template<typename Shape, typename Key>
14040 MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
14041 ASSERT(NumberOfElements() < new_table->Capacity());
// Cache the write-barrier mode while allocation is forbidden.
14043 DisallowHeapAllocation no_gc;
14044 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
14046 // Copy prefix to new array.
14047 for (int i = kPrefixStartIndex;
14048 i < kPrefixStartIndex + Shape::kPrefixSize;
14050 new_table->set(i, get(i), mode);
14053 // Rehash the elements.
14054 int capacity = Capacity();
14055 for (int i = 0; i < capacity; i++) {
14056 uint32_t from_index = EntryToIndex(i);
14057 Object* k = get(from_index);
14059 uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
14060 uint32_t insertion_index =
14061 EntryToIndex(new_table->FindInsertionEntry(hash));
14062 for (int j = 0; j < Shape::kEntrySize; j++) {
14063 new_table->set(insertion_index + j, get(from_index + j), mode);
// Deleted-element count resets: only live entries were copied.
14067 new_table->SetNumberOfElements(NumberOfElements());
14068 new_table->SetNumberOfDeletedElements(0);
// Returns the entry where element |k| should live considering only the
// first |probe| probes; |expected| short-circuits when already correct.
// NOTE(review): parameter lines 14075-14076 (Object* k, int probe) and the
// final return are in numbering gaps of this dump.
14073 template<typename Shape, typename Key>
14074 uint32_t HashTable<Shape, Key>::EntryForProbe(Key key,
14077 uint32_t expected) {
14078 uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
14079 uint32_t capacity = Capacity();
14080 uint32_t entry = FirstProbe(hash, capacity);
14081 for (int i = 1; i < probe; i++) {
14082 if (entry == expected) return expected;
14083 entry = NextProbe(entry, i, capacity);
// Swaps two whole entries (all kEntrySize slots each) via a small
// stack-allocated temporary, using the caller-supplied write-barrier mode.
14089 template<typename Shape, typename Key>
14090 void HashTable<Shape, Key>::Swap(uint32_t entry1,
14092 WriteBarrierMode mode) {
14093 int index1 = EntryToIndex(entry1);
14094 int index2 = EntryToIndex(entry2);
14095 Object* temp[Shape::kEntrySize];
14096 for (int j = 0; j < Shape::kEntrySize; j++) {
14097 temp[j] = get(index1 + j);
14099 for (int j = 0; j < Shape::kEntrySize; j++) {
14100 set(index1 + j, get(index2 + j), mode);
14102 for (int j = 0; j < Shape::kEntrySize; j++) {
14103 set(index2 + j, temp[j], mode);
// In-place rehash: iteratively moves entries toward their ideal probe
// positions, one probe depth per outer pass, until no moves remain.
// NOTE(review): the |done| flag setup (~14113), its update inside the
// loop, and several closing braces are in numbering gaps of this dump.
14108 template<typename Shape, typename Key>
14109 void HashTable<Shape, Key>::Rehash(Key key) {
14110 DisallowHeapAllocation no_gc;
14111 WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
14112 uint32_t capacity = Capacity();
14114 for (int probe = 1; !done; probe++) {
14115 // All elements at entries given by one of the first _probe_ probes
14116 // are placed correctly. Other elements might need to be moved.
14118 for (uint32_t current = 0; current < capacity; current++) {
14119 Object* current_key = get(EntryToIndex(current));
14120 if (IsKey(current_key)) {
14121 uint32_t target = EntryForProbe(key, current_key, probe, current);
14122 if (current == target) continue;
14123 Object* target_key = get(EntryToIndex(target));
14124 if (!IsKey(target_key) ||
14125 EntryForProbe(key, target_key, probe, target) != target) {
14126 // Put the current element into the correct position.
14127 Swap(current, target, mode);
14128 // The other element will be processed on the next iteration.
14131 // The place for the current element is occupied. Leave the element
14132 // for the next probe.
// Guarantees room for |n| more elements. Returns |this| unchanged when at
// least 50% stays free after the additions (and deleted entries are few);
// otherwise allocates a larger table (pretenured for big/old tables) and
// rehashes into it.
// NOTE(review): the Key parameter line (~14143), the Object* obj
// declaration, and the capacity argument of Allocate (~14162) are in
// numbering gaps of this dump.
14141 template<typename Shape, typename Key>
14142 MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n,
14144 PretenureFlag pretenure) {
14145 int capacity = Capacity();
14146 int nof = NumberOfElements() + n;
14147 int nod = NumberOfDeletedElements();
14149 // 50% is still free after adding n elements and
14150 // at most 50% of the free elements are deleted elements.
14151 if (nod <= (capacity - nof) >> 1) {
14152 int needed_free = nof >> 1;
14153 if (nof + needed_free <= capacity) return this;
14156 const int kMinCapacityForPretenure = 256;
14157 bool should_pretenure = pretenure == TENURED ||
14158 ((capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this));
14160 { MaybeObject* maybe_obj =
14161 Allocate(GetHeap(),
14163 USE_DEFAULT_MINIMUM_CAPACITY,
14164 should_pretenure ? TENURED : NOT_TENURED);
14165 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14168 return Rehash(HashTable::cast(obj), key);
// Shrinks the table when it is at most a quarter full (and holds at least
// 16 elements), allocating a right-sized replacement and rehashing into
// it; otherwise returns |this| unchanged.
// NOTE(review): the pretenure flag declaration (~14188), the Object* obj
// declaration, and the capacity argument of Allocate (~14194) are in
// numbering gaps of this dump.
14172 template<typename Shape, typename Key>
14173 MaybeObject* HashTable<Shape, Key>::Shrink(Key key) {
14174 int capacity = Capacity();
14175 int nof = NumberOfElements();
14177 // Shrink to fit the number of elements if only a quarter of the
14178 // capacity is filled with elements.
14179 if (nof > (capacity >> 2)) return this;
14180 // Allocate a new dictionary with room for at least the current
14181 // number of elements. The allocation method will make sure that
14182 // there is extra room in the dictionary for additions. Don't go
14183 // lower than room for 16 elements.
14184 int at_least_room_for = nof;
14185 if (at_least_room_for < 16) return this;
14187 const int kMinCapacityForPretenure = 256;
14189 (at_least_room_for > kMinCapacityForPretenure) &&
14190 !GetHeap()->InNewSpace(this);
14192 { MaybeObject* maybe_obj =
14193 Allocate(GetHeap(),
14195 USE_DEFAULT_MINIMUM_CAPACITY,
14196 pretenure ? TENURED : NOT_TENURED);
14197 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14200 return Rehash(HashTable::cast(obj), key);
// Probes for a free slot (undefined or the-hole) for an element with the
// given hash.  Termination relies on EnsureCapacity keeping the table
// less than full, so a free slot always exists.
14204 template<typename Shape, typename Key>
14205 uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) {
14206   uint32_t capacity = Capacity();
14207   uint32_t entry = FirstProbe(hash, capacity);
14208   uint32_t count = 1;
14209   // EnsureCapacity will guarantee the hash table is never full.
14211     Object* element = KeyAt(entry);
14212     if (element->IsUndefined() || element->IsTheHole()) break;
14213     entry = NextProbe(entry, count++, capacity);
14219 // Force instantiation of template instances class.
14220 // Please note this list is compiler dependent.
// These explicit instantiations emit the out-of-line template definitions
// from this translation unit so other .cc files can link against them.
14222 template class HashTable<StringTableShape, HashTableKey*>;
14224 template class HashTable<CompilationCacheShape, HashTableKey*>;
14226 template class HashTable<MapCacheShape, HashTableKey*>;
14228 template class HashTable<ObjectHashTableShape<1>, Object*>;
14230 template class HashTable<ObjectHashTableShape<2>, Object*>;
14232 template class HashTable<WeakHashTableShape<2>, Object*>;
14234 template class Dictionary<NameDictionaryShape, Name*>;
14236 template class Dictionary<SeededNumberDictionaryShape, uint32_t>;
14238 template class Dictionary<UnseededNumberDictionaryShape, uint32_t>;
14240 template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
14241     Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);
14243 template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
14244     Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);
14246 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
14247     Allocate(Heap* heap, int n, PretenureFlag pretenure);
14249 template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::AtPut(
14250     uint32_t, Object*);
14252 template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
14253     AtPut(uint32_t, Object*);
14255 template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
14256     SlowReverseLookup(Object* value);
14258 template Object* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
14259     SlowReverseLookup(Object* value);
14261 template Object* Dictionary<NameDictionaryShape, Name*>::SlowReverseLookup(
14264 template void Dictionary<SeededNumberDictionaryShape, uint32_t>::CopyKeysTo(
14266     PropertyAttributes,
14267     Dictionary<SeededNumberDictionaryShape, uint32_t>::SortMode);
14269 template Object* Dictionary<NameDictionaryShape, Name*>::DeleteProperty(
14270     int, JSObject::DeleteMode);
14272 template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
14273     DeleteProperty(int, JSObject::DeleteMode);
14275 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Shrink(Name* n);
14277 template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Shrink(
14280 template void Dictionary<NameDictionaryShape, Name*>::CopyKeysTo(
14283     PropertyAttributes,
14284     Dictionary<NameDictionaryShape, Name*>::SortMode);
14287 Dictionary<NameDictionaryShape, Name*>::NumberOfElementsFilterAttributes(
14288     PropertyAttributes);
14290 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Add(
14291     Name*, Object*, PropertyDetails);
14293 template MaybeObject*
14294 Dictionary<NameDictionaryShape, Name*>::GenerateNewEnumerationIndices();
14297 Dictionary<SeededNumberDictionaryShape, uint32_t>::
14298     NumberOfElementsFilterAttributes(PropertyAttributes);
14300 template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Add(
14301     uint32_t, Object*, PropertyDetails);
14303 template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::Add(
14304     uint32_t, Object*, PropertyDetails);
14306 template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
14307     EnsureCapacity(int, uint32_t);
14309 template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
14310     EnsureCapacity(int, uint32_t);
14312 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
14313     EnsureCapacity(int, Name*);
14315 template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
14316     AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);
14318 template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
14319     AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);
14321 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::AddEntry(
14322     Name*, Object*, PropertyDetails, uint32_t);
14325 int Dictionary<SeededNumberDictionaryShape, uint32_t>::NumberOfEnumElements();
14328 int Dictionary<NameDictionaryShape, Name*>::NumberOfEnumElements();
14331 int HashTable<SeededNumberDictionaryShape, uint32_t>::FindEntry(uint32_t);
// Handle-based wrapper: delegates to the raw MaybeObject* version below via
// CALL_HEAP_FUNCTION, which retries on allocation failure.
14334 Handle<Object> JSObject::PrepareSlowElementsForSort(
14335     Handle<JSObject> object, uint32_t limit) {
14336   CALL_HEAP_FUNCTION(object->GetIsolate(),
14337                      object->PrepareSlowElementsForSort(limit),
14342 // Collates undefined and unexisting elements below limit from position
14343 // zero of the elements. The object stays in Dictionary mode.
// Rebuilds the element dictionary so defined values occupy keys [0, pos)
// and undefined values follow them.  Bails out with Smi::FromInt(-1) when
// sorting must be done in JS (accessors/read-only elements) or when a key
// would leave Smi range (adding it could allocate).  Returns the number of
// defined elements as a Smi, or as a preallocated HeapNumber when |limit|
// exceeds Smi range.
14344 MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
14345   ASSERT(HasDictionaryElements());
14346   // Must stay in dictionary mode, either because of requires_slow_elements,
14347   // or because we are not going to sort (and therefore compact) all of the
14349   SeededNumberDictionary* dict = element_dictionary();
14350   HeapNumber* result_double = NULL;
14351   if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
14352     // Allocate space for result before we start mutating the object.
14353     Object* new_double;
14354     { MaybeObject* maybe_new_double = GetHeap()->AllocateHeapNumber(0.0);
14355       if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
14357     result_double = HeapNumber::cast(new_double);
14361   { MaybeObject* maybe_obj =
14362         SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements());
14363     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14365   SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj);
14367   DisallowHeapAllocation no_alloc;
14370   uint32_t undefs = 0;
14371   int capacity = dict->Capacity();
14372   for (int i = 0; i < capacity; i++) {
14373     Object* k = dict->KeyAt(i);
14374     if (dict->IsKey(k)) {
14375       ASSERT(k->IsNumber());
14376       ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
14377       ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
14378       ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
14379       Object* value = dict->ValueAt(i);
14380       PropertyDetails details = dict->DetailsAt(i);
14381       if (details.type() == CALLBACKS || details.IsReadOnly()) {
14382         // Bail out and do the sorting of undefineds and array holes in JS.
14383         // Also bail out if the element is not supposed to be moved.
14384         return Smi::FromInt(-1);
14386       uint32_t key = NumberToUint32(k);
14387       // In the following we assert that adding the entry to the new dictionary
14388       // does not cause GC. This is the case because we made sure to allocate
14389       // the dictionary big enough above, so it need not grow.
14391       if (value->IsUndefined()) {
14394         if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14395           // Adding an entry with the key beyond smi-range requires
14396           // allocation. Bailout.
14397           return Smi::FromInt(-1);
14399         new_dict->AddNumberEntry(pos, value, details)->ToObjectUnchecked();
14403         if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
14404           // Adding an entry with the key beyond smi-range requires
14405           // allocation. Bailout.
14406           return Smi::FromInt(-1);
14408         new_dict->AddNumberEntry(key, value, details)->ToObjectUnchecked();
  // Append the undefined values collected above right after the defined ones.
14413   uint32_t result = pos;
14414   PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
14415   Heap* heap = GetHeap();
14416   while (undefs > 0) {
14417     if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14418       // Adding an entry with the key beyond smi-range requires
14419       // allocation. Bailout.
14420       return Smi::FromInt(-1);
14422     new_dict->AddNumberEntry(pos, heap->undefined_value(), no_details)->
14423         ToObjectUnchecked();
14428   set_elements(new_dict);
14430   if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
14431     return Smi::FromInt(static_cast<int>(result));
14434   ASSERT_NE(NULL, result_double);
14435   result_double->set_value(static_cast<double>(result));
14436   return result_double;
14440 // Collects all defined (non-hole) and non-undefined (array) elements at
14441 // the start of the elements array.
14442 // If the object is in dictionary mode, it is converted to fast elements
// Returns the number of non-hole, non-undefined values below |limit| (as a
// Smi/number), or -1 when the caller must fall back to sorting in JS
// (sloppy-arguments elements or observed objects).
14444 Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
14446   Isolate* isolate = object->GetIsolate();
14447   if (object->HasSloppyArgumentsElements() ||
14448       object->map()->is_observed()) {
14449     return handle(Smi::FromInt(-1), isolate);
14452   if (object->HasDictionaryElements()) {
14453     // Convert to fast elements containing only the existing properties.
14454     // Ordering is irrelevant, since we are going to sort anyway.
14455     Handle<SeededNumberDictionary> dict(object->element_dictionary());
14456     if (object->IsJSArray() || dict->requires_slow_elements() ||
14457         dict->max_number_key() >= limit) {
14458       return JSObject::PrepareSlowElementsForSort(object, limit);
14460     // Convert to fast elements.
14462     Handle<Map> new_map =
14463         JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);
14465     PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
14466         NOT_TENURED: TENURED;
14467     Handle<FixedArray> fast_elements =
14468         isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
14469     dict->CopyValuesTo(*fast_elements);
14470     object->ValidateElements();
14472     object->set_map_and_elements(*new_map, *fast_elements);
14473   } else if (object->HasExternalArrayElements() ||
14474              object->HasFixedTypedArrayElements()) {
14475     // Typed arrays cannot have holes or undefined elements.
14476     return handle(Smi::FromInt(
14477         FixedArrayBase::cast(object->elements())->length()), isolate);
14478   } else if (!object->HasFastDoubleElements()) {
14479     EnsureWritableFastElements(object);
14481   ASSERT(object->HasFastSmiOrObjectElements() ||
14482          object->HasFastDoubleElements());
14484   // Collect holes at the end, undefined before that and the rest at the
14485   // start, and return the number of non-hole, non-undefined values.
14487   Handle<FixedArrayBase> elements_base(object->elements());
14488   uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
14489   if (limit > elements_length) {
14490     limit = elements_length ;
14493     return handle(Smi::FromInt(0), isolate);
14496   uint32_t result = 0;
14497   if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
  // Fast-double path: two-pointer compaction of defined values before holes.
14498     FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
14499     // Split elements into defined and the_hole, in that order.
14500     unsigned int holes = limit;
14501     // Assume most arrays contain no holes and undefined values, so minimize the
14502     // number of stores of non-undefined, non-the-hole values.
14503     for (unsigned int i = 0; i < holes; i++) {
14504       if (elements->is_the_hole(i)) {
14509         // Position i needs to be filled.
14510         while (holes > i) {
14511           if (elements->is_the_hole(holes)) {
14514             elements->set(i, elements->get_scalar(holes));
14520     while (holes < limit) {
14521       elements->set_the_hole(holes);
  // Fast smi/object path: partition into defined, undefined, hole.
14525     FixedArray* elements = FixedArray::cast(*elements_base);
14526     DisallowHeapAllocation no_gc;
14528     // Split elements into defined, undefined and the_hole, in that order. Only
14529     // count locations for undefined and the hole, and fill them afterwards.
14530     WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
14531     unsigned int undefs = limit;
14532     unsigned int holes = limit;
14533     // Assume most arrays contain no holes and undefined values, so minimize the
14534     // number of stores of non-undefined, non-the-hole values.
14535     for (unsigned int i = 0; i < undefs; i++) {
14536       Object* current = elements->get(i);
14537       if (current->IsTheHole()) {
14540       } else if (current->IsUndefined()) {
14545         // Position i needs to be filled.
14546         while (undefs > i) {
14547           current = elements->get(undefs);
14548           if (current->IsTheHole()) {
14551           } else if (current->IsUndefined()) {
14554             elements->set(i, current, write_barrier);
14560     while (undefs < holes) {
14561       elements->set_undefined(undefs);
14564     while (holes < limit) {
14565       elements->set_the_hole(holes);
14570   return isolate->factory()->NewNumberFromUint(result);
// Maps the backing store's instance type (external or fixed typed array)
// to the corresponding ExternalArrayType; -1 for unexpected types.
14574 ExternalArrayType JSTypedArray::type() {
14575   switch (elements()->map()->instance_type()) {
14576 #define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size)            \
14577     case EXTERNAL_##TYPE##_ARRAY_TYPE:                                       \
14578     case FIXED_##TYPE##_ARRAY_TYPE:                                          \
14579       return kExternal##Type##Array;
14581     TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
14582 #undef INSTANCE_TYPE_TO_ARRAY_TYPE
14586       return static_cast<ExternalArrayType>(-1);
// Returns the per-element byte size for this typed array, keyed off the
// backing store's instance type via the TYPED_ARRAYS macro list.
14591 size_t JSTypedArray::element_size() {
14592   switch (elements()->map()->instance_type()) {
14593 #define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size)          \
14594     case EXTERNAL_##TYPE##_ARRAY_TYPE:                                       \
14597     TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
14598 #undef INSTANCE_TYPE_TO_ELEMENT_SIZE
// Stores |value| at |index|, clamping to [0, 255]: Smis are clamped at the
// range ends, heap numbers additionally round to nearest via lrint, and
// undefined stores 0.  Out-of-bounds writes are silently ignored.  Returns
// the clamped value as a Smi.
14607 Object* ExternalUint8ClampedArray::SetValue(uint32_t index, Object* value) {
14608   uint8_t clamped_value = 0;
14609   if (index < static_cast<uint32_t>(length())) {
14610     if (value->IsSmi()) {
14611       int int_value = Smi::cast(value)->value();
14612       if (int_value < 0) {
14614       } else if (int_value > 255) {
14615         clamped_value = 255;
14617         clamped_value = static_cast<uint8_t>(int_value);
14619     } else if (value->IsHeapNumber()) {
14620       double double_value = HeapNumber::cast(value)->value();
14621       if (!(double_value > 0)) {
14622         // NaN and less than zero clamp to zero.
14624       } else if (double_value > 255) {
14625         // Greater than 255 clamp to 255.
14626         clamped_value = 255;
14628         // Other doubles are rounded to the nearest integer.
14629         clamped_value = static_cast<uint8_t>(lrint(double_value));
14632       // Clamp undefined to zero (default). All other types have been
14633       // converted to a number type further up in the call chain.
14634       ASSERT(value->IsUndefined());
14636     set(index, clamped_value);
14638   return Smi::FromInt(clamped_value);
// Handle-based wrapper around the raw SetValue above; the raw version
// cannot fail an allocation, so the result is wrapped directly.
14642 Handle<Object> ExternalUint8ClampedArray::SetValue(
14643     Handle<ExternalUint8ClampedArray> array,
14645     Handle<Object> value) {
14646   return Handle<Object>(array->SetValue(index, *value), array->GetIsolate());
// Shared setter for the integer-typed external arrays: converts a Smi or
// HeapNumber to the element type (doubles via DoubleToInt32), treats
// undefined as 0, ignores out-of-bounds writes, and returns the stored
// value boxed as a number.
14650 template<typename ExternalArrayClass, typename ValueType>
14651 static MaybeObject* ExternalArrayIntSetter(Heap* heap,
14652                                            ExternalArrayClass* receiver,
14655   ValueType cast_value = 0;
14656   if (index < static_cast<uint32_t>(receiver->length())) {
14657     if (value->IsSmi()) {
14658       int int_value = Smi::cast(value)->value();
14659       cast_value = static_cast<ValueType>(int_value);
14660     } else if (value->IsHeapNumber()) {
14661       double double_value = HeapNumber::cast(value)->value();
14662       cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
14664       // Clamp undefined to zero (default). All other types have been
14665       // converted to a number type further up in the call chain.
14666       ASSERT(value->IsUndefined());
14668     receiver->set(index, cast_value);
14670   return heap->NumberFromInt32(cast_value);
// Int8 element store: handle-based wrapper plus the raw version, which
// delegates to the shared ExternalArrayIntSetter template.
14674 Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
14676                                            Handle<Object> value) {
14677   CALL_HEAP_FUNCTION(array->GetIsolate(),
14678                      array->SetValue(index, *value),
14683 MaybeObject* ExternalInt8Array::SetValue(uint32_t index, Object* value) {
14684   return ExternalArrayIntSetter<ExternalInt8Array, int8_t>
14685       (GetHeap(), this, index, value);
// Uint8 element store: handle-based wrapper plus the raw version, which
// delegates to the shared ExternalArrayIntSetter template.
14689 Handle<Object> ExternalUint8Array::SetValue(
14690     Handle<ExternalUint8Array> array,
14692     Handle<Object> value) {
14693   CALL_HEAP_FUNCTION(array->GetIsolate(),
14694                      array->SetValue(index, *value),
14699 MaybeObject* ExternalUint8Array::SetValue(uint32_t index,
14701   return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>
14702       (GetHeap(), this, index, value);
// Int16 element store: handle-based wrapper plus the raw version, which
// delegates to the shared ExternalArrayIntSetter template.
14706 Handle<Object> ExternalInt16Array::SetValue(
14707     Handle<ExternalInt16Array> array,
14709     Handle<Object> value) {
14710   CALL_HEAP_FUNCTION(array->GetIsolate(),
14711                      array->SetValue(index, *value),
14716 MaybeObject* ExternalInt16Array::SetValue(uint32_t index,
14718   return ExternalArrayIntSetter<ExternalInt16Array, int16_t>
14719       (GetHeap(), this, index, value);
// Uint16 element store: handle-based wrapper plus the raw version, which
// delegates to the shared ExternalArrayIntSetter template.
14723 Handle<Object> ExternalUint16Array::SetValue(
14724     Handle<ExternalUint16Array> array,
14726     Handle<Object> value) {
14727   CALL_HEAP_FUNCTION(array->GetIsolate(),
14728                      array->SetValue(index, *value),
14733 MaybeObject* ExternalUint16Array::SetValue(uint32_t index,
14735   return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>
14736       (GetHeap(), this, index, value);
// Int32 element store: handle-based wrapper plus the raw version, which
// delegates to the shared ExternalArrayIntSetter template.
14740 Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
14742                                             Handle<Object> value) {
14743   CALL_HEAP_FUNCTION(array->GetIsolate(),
14744                      array->SetValue(index, *value),
14749 MaybeObject* ExternalInt32Array::SetValue(uint32_t index, Object* value) {
14750   return ExternalArrayIntSetter<ExternalInt32Array, int32_t>
14751       (GetHeap(), this, index, value);
// Uint32 element store.  Unlike the smaller integer types this cannot use
// ExternalArrayIntSetter (the result may not fit an int32), so the raw
// version converts via DoubleToUint32 and boxes with NumberFromUint32.
14755 Handle<Object> ExternalUint32Array::SetValue(
14756     Handle<ExternalUint32Array> array,
14758     Handle<Object> value) {
14759   CALL_HEAP_FUNCTION(array->GetIsolate(),
14760                      array->SetValue(index, *value),
14765 MaybeObject* ExternalUint32Array::SetValue(uint32_t index, Object* value) {
14766   uint32_t cast_value = 0;
14767   Heap* heap = GetHeap();
14768   if (index < static_cast<uint32_t>(length())) {
14769     if (value->IsSmi()) {
14770       int int_value = Smi::cast(value)->value();
14771       cast_value = static_cast<uint32_t>(int_value);
14772     } else if (value->IsHeapNumber()) {
14773       double double_value = HeapNumber::cast(value)->value();
14774       cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
14776       // Clamp undefined to zero (default). All other types have been
14777       // converted to a number type further up in the call chain.
14778       ASSERT(value->IsUndefined());
14780     set(index, cast_value);
14782   return heap->NumberFromUint32(cast_value);
// Float32 element store: Smis and HeapNumbers are narrowed to float;
// undefined stores NaN.  Out-of-bounds writes are ignored.  The raw
// version returns a freshly allocated HeapNumber with the stored value.
14786 Handle<Object> ExternalFloat32Array::SetValue(
14787     Handle<ExternalFloat32Array> array,
14789     Handle<Object> value) {
14790   CALL_HEAP_FUNCTION(array->GetIsolate(),
14791                      array->SetValue(index, *value),
14796 MaybeObject* ExternalFloat32Array::SetValue(uint32_t index, Object* value) {
14797   float cast_value = static_cast<float>(OS::nan_value());
14798   Heap* heap = GetHeap();
14799   if (index < static_cast<uint32_t>(length())) {
14800     if (value->IsSmi()) {
14801       int int_value = Smi::cast(value)->value();
14802       cast_value = static_cast<float>(int_value);
14803     } else if (value->IsHeapNumber()) {
14804       double double_value = HeapNumber::cast(value)->value();
14805       cast_value = static_cast<float>(double_value);
14807       // Clamp undefined to NaN (default). All other types have been
14808       // converted to a number type further up in the call chain.
14809       ASSERT(value->IsUndefined());
14811     set(index, cast_value);
14813   return heap->AllocateHeapNumber(cast_value);
// Float64 element store: Smis widen to double, HeapNumbers store as-is,
// undefined stores NaN.  Out-of-bounds writes are ignored.  The raw
// version returns a freshly allocated HeapNumber with the stored value.
14817 Handle<Object> ExternalFloat64Array::SetValue(
14818     Handle<ExternalFloat64Array> array,
14820     Handle<Object> value) {
14821   CALL_HEAP_FUNCTION(array->GetIsolate(),
14822                      array->SetValue(index, *value),
14827 MaybeObject* ExternalFloat64Array::SetValue(uint32_t index, Object* value) {
14828   double double_value = OS::nan_value();
14829   Heap* heap = GetHeap();
14830   if (index < static_cast<uint32_t>(length())) {
14831     if (value->IsSmi()) {
14832       int int_value = Smi::cast(value)->value();
14833       double_value = static_cast<double>(int_value);
14834     } else if (value->IsHeapNumber()) {
14835       double_value = HeapNumber::cast(value)->value();
14837       // Clamp undefined to NaN (default). All other types have been
14838       // converted to a number type further up in the call chain.
14839       ASSERT(value->IsUndefined());
14841     set(index, double_value);
14843   return heap->AllocateHeapNumber(double_value);
// Float32x4 (SIMD) element store: only Float32x4 values are stored;
// undefined stores a NaN-filled lane vector.  Out-of-bounds writes are
// ignored.  The raw version returns a freshly allocated Float32x4 box.
14847 Handle<Object> ExternalFloat32x4Array::SetValue(
14848     Handle<ExternalFloat32x4Array> array,
14850     Handle<Object> value) {
14851   CALL_HEAP_FUNCTION(array->GetIsolate(),
14852                      array->SetValue(index, *value),
14857 MaybeObject* ExternalFloat32x4Array::SetValue(uint32_t index, Object* value) {
14858   float32x4_value_t cast_value;
14859   cast_value.storage[0] = static_cast<float>(OS::nan_value());
14860   cast_value.storage[1] = static_cast<float>(OS::nan_value());
14861   cast_value.storage[2] = static_cast<float>(OS::nan_value());
14862   cast_value.storage[3] = static_cast<float>(OS::nan_value());
14863   Heap* heap = GetHeap();
14864   if (index < static_cast<uint32_t>(length())) {
14865     if (value->IsFloat32x4()) {
14866       cast_value = Float32x4::cast(value)->value();
14868       // Clamp undefined to NaN (default). All other types have been
14869       // converted to a number type further up in the call chain.
14870       ASSERT(value->IsUndefined());
14872     set(index, cast_value);
14874   return heap->AllocateFloat32x4(cast_value);
// Int32x4 (SIMD) element store: only Int32x4 values are stored; undefined
// stores a zero-filled lane vector.  Out-of-bounds writes are ignored.
// The raw version returns a freshly allocated Int32x4 box.
14878 Handle<Object> ExternalInt32x4Array::SetValue(
14879     Handle<ExternalInt32x4Array> array, uint32_t index, Handle<Object> value) {
14880   CALL_HEAP_FUNCTION(array->GetIsolate(),
14881                      array->SetValue(index, *value),
14886 MaybeObject* ExternalInt32x4Array::SetValue(uint32_t index, Object* value) {
14887   int32x4_value_t cast_value;
14888   cast_value.storage[0] = 0;
14889   cast_value.storage[1] = 0;
14890   cast_value.storage[2] = 0;
14891   cast_value.storage[3] = 0;
14892   Heap* heap = GetHeap();
14893   if (index < static_cast<uint32_t>(length())) {
14894     if (value->IsInt32x4()) {
14895       cast_value = Int32x4::cast(value)->value();
14897       // Clamp undefined to zero (default). All other types have been
14898       // converted to a number type further up in the call chain.
14899       ASSERT(value->IsUndefined());
14901     set(index, cast_value);
14903   return heap->AllocateInt32x4(cast_value);
// Returns the PropertyCell stored as the value of a slow-mode global
// property; only valid when the object is in dictionary-properties mode.
14907 PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
14908   ASSERT(!HasFastProperties());
14909   Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
14910   return PropertyCell::cast(value);
// Returns the PropertyCell for |name| on the global object, creating one
// when absent.  A new cell starts holding the-hole and its details are
// marked deleted, so it behaves as a nonexistent property until written.
14914 Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
14915     Handle<JSGlobalObject> global,
14916     Handle<Name> name) {
14917   ASSERT(!global->HasFastProperties());
14918   int entry = global->property_dictionary()->FindEntry(*name);
14919   if (entry == NameDictionary::kNotFound) {
14920     Isolate* isolate = global->GetIsolate();
14921     Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
14922         isolate->factory()->the_hole_value());
14923     PropertyDetails details(NONE, NORMAL, 0);
14924     details = details.AsDeleted();
14925     Handle<NameDictionary> dictionary = NameDictionaryAdd(
14926         handle(global->property_dictionary()), name, cell, details);
14927     global->set_properties(*dictionary);
  // Existing entry: its value slot is guaranteed to hold a PropertyCell.
14930   Object* value = global->property_dictionary()->ValueAt(entry);
14931   ASSERT(value->IsPropertyCell());
14932   return handle(PropertyCell::cast(value));
// Interns |string|: wraps it in an InternalizedStringKey and delegates to
// LookupKey, which returns the canonical table entry through |s|.
14937 MaybeObject* StringTable::LookupString(String* string, Object** s) {
14938   InternalizedStringKey key(string);
14939   return LookupKey(&key, s);
14943 // This class is used for looking up two character strings in the string table.
14944 // If we don't have a hit we don't want to waste much time so we unroll the
14945 // string hash calculation loop here for speed. Doesn't work if the two
14946 // characters form a decimal integer, since such strings have a different hash
14948 class TwoCharHashTableKey : public HashTableKey {
  // Computes the two-character hash inline (unrolled StringHasher steps,
  // seeded add/shift/xor mixing).  NOTE(review): several mixing steps are
  // elided in this excerpt; the debug check below re-derives the hash via
  // StringHasher::HashSequentialString to validate the unrolled version.
14950   TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
14951     : c1_(c1), c2_(c2) {
14953     uint32_t hash = seed;
14955     hash += hash << 10;
14959     hash += hash << 10;
14963     hash ^= hash >> 11;
14964     hash += hash << 15;
14965     if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
14968     // If this assert fails then we failed to reproduce the two-character
14969     // version of the string hashing algorithm above. One reason could be
14970     // that we were passed two digits as characters, since the hash
14971     // algorithm is different in that case.
14972     uint16_t chars[2] = {c1, c2};
14973     uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
14974     hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
14975     ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
  // A candidate matches only if it is a string of exactly our two characters.
14979   bool IsMatch(Object* o) {
14980     if (!o->IsString()) return false;
14981     String* other = String::cast(o);
14982     if (other->length() != 2) return false;
14983     if (other->Get(0) != c1_) return false;
14984     return other->Get(1) == c2_;
14987   uint32_t Hash() { return hash_; }
14988   uint32_t HashForObject(Object* key) {
14989     if (!key->IsString()) return 0;
14990     return String::cast(key)->Hash();
14993   Object* AsObject(Heap* heap) {
14994     // The TwoCharHashTableKey is only used for looking in the string
14995     // table, not for adding to it.
// Looks up |string| without inserting it; on a hit stores the canonical
// internalized string in |result| and returns true.
15007 bool StringTable::LookupStringIfExists(String* string, String** result) {
15008   InternalizedStringKey key(string);
15009   int entry = FindEntry(&key);
15010   if (entry == kNotFound) {
15013     *result = String::cast(KeyAt(entry));
15014     ASSERT(StringShape(*result).IsInternalized());
// Fast-path lookup for a two-character string via TwoCharHashTableKey;
// never inserts.  On a hit stores the internalized string in |result|.
15020 bool StringTable::LookupTwoCharsStringIfExists(uint16_t c1,
15023   TwoCharHashTableKey key(c1, c2, GetHeap()->HashSeed());
15024   int entry = FindEntry(&key);
15025   if (entry == kNotFound) {
15028     *result = String::cast(KeyAt(entry));
15029     ASSERT(StringShape(*result).IsInternalized());
// Finds or inserts the string described by |key|.  On a miss, grows the
// table if needed, materializes the string via key->AsObject, and adds it.
// The canonical string is returned through |s|.
15035 MaybeObject* StringTable::LookupKey(HashTableKey* key, Object** s) {
15036   int entry = FindEntry(key);
15038   // String already in table.
15039   if (entry != kNotFound) {
15044   // Adding new string. Grow table if needed.
15046   { MaybeObject* maybe_obj = EnsureCapacity(1, key);
15047     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15050   // Create string object.
15052   { MaybeObject* maybe_string = key->AsObject(GetHeap());
15053     if (!maybe_string->ToObject(&string)) return maybe_string;
15056   // If the string table grew as part of EnsureCapacity, obj is not
15057   // the current string table and therefore we cannot use
15058   // StringTable::cast here.
15059   StringTable* table = reinterpret_cast<StringTable*>(obj);
15061   // Add the new string and return it along with the string table.
15062   entry = table->FindInsertionEntry(key->Hash());
15063   table->set(EntryToIndex(entry), string);
15064   table->ElementAdded();
// Looks up a cached script compilation for |src| in |context|; the cached
// value sits in the slot after the key entry.  Returns undefined on miss.
15070 Object* CompilationCacheTable::Lookup(String* src, Context* context) {
15071   SharedFunctionInfo* shared = context->closure()->shared();
15072   StringSharedKey key(src,
15074                       FLAG_use_strict ? STRICT : SLOPPY,
15075                       RelocInfo::kNoPosition);
15076   int entry = FindEntry(&key);
15077   if (entry == kNotFound) return GetHeap()->undefined_value();
15078   return get(EntryToIndex(entry) + 1);
// Looks up a cached eval compilation, keyed on source, calling context's
// closure, strict mode and scope position.  Returns undefined on miss.
15082 Object* CompilationCacheTable::LookupEval(String* src,
15084                                           StrictMode strict_mode,
15085                                           int scope_position) {
15086   StringSharedKey key(src,
15087                       context->closure()->shared(),
15090   int entry = FindEntry(&key);
15091   if (entry == kNotFound) return GetHeap()->undefined_value();
15092   return get(EntryToIndex(entry) + 1);
// Looks up a cached compiled RegExp for the given source and flags.
// Returns undefined on miss.
15096 Object* CompilationCacheTable::LookupRegExp(String* src,
15097                                             JSRegExp::Flags flags) {
15098   RegExpKey key(src, flags);
15099   int entry = FindEntry(&key);
15100   if (entry == kNotFound) return GetHeap()->undefined_value();
15101   return get(EntryToIndex(entry) + 1);
// Caches a script compilation result: grows the table if needed, stores
// the key object in the key slot and |value| in the following slot.
15105 MaybeObject* CompilationCacheTable::Put(String* src,
15108   SharedFunctionInfo* shared = context->closure()->shared();
15109   StringSharedKey key(src,
15111                       FLAG_use_strict ? STRICT : SLOPPY,
15112                       RelocInfo::kNoPosition);
15113   CompilationCacheTable* cache;
15114   MaybeObject* maybe_cache = EnsureCapacity(1, &key);
15115   if (!maybe_cache->To(&cache)) return maybe_cache;
15118   MaybeObject* maybe_k = key.AsObject(GetHeap());
15119   if (!maybe_k->To(&k)) return maybe_k;
15121   int entry = cache->FindInsertionEntry(key.Hash());
15122   cache->set(EntryToIndex(entry), k);
15123   cache->set(EntryToIndex(entry) + 1, value);
15124   cache->ElementAdded();
// Caches an eval compilation result under (source, closure, strict mode,
// scope position).  EnsureCapacity may return a new table, hence the
// reinterpret_cast (the object may not yet pass CompilationCacheTable::cast).
15129 MaybeObject* CompilationCacheTable::PutEval(String* src,
15131                                             SharedFunctionInfo* value,
15132                                             int scope_position) {
15133   StringSharedKey key(src,
15134                       context->closure()->shared(),
15135                       value->strict_mode(),
15138   { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
15139     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15142   CompilationCacheTable* cache =
15143       reinterpret_cast<CompilationCacheTable*>(obj);
15144   int entry = cache->FindInsertionEntry(key.Hash());
15147   { MaybeObject* maybe_k = key.AsObject(GetHeap());
15148     if (!maybe_k->ToObject(&k)) return maybe_k;
15151   cache->set(EntryToIndex(entry), k);
15152   cache->set(EntryToIndex(entry) + 1, value);
15153   cache->ElementAdded();
// Caches a compiled RegExp under (source, flags).  Note both the key and
// value slots receive |value|; RegExpKey's IsMatch compares against the
// stored value directly.
15158 MaybeObject* CompilationCacheTable::PutRegExp(String* src,
15159                                               JSRegExp::Flags flags,
15160                                               FixedArray* value) {
15161   RegExpKey key(src, flags);
15163   { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
15164     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15167   CompilationCacheTable* cache =
15168       reinterpret_cast<CompilationCacheTable*>(obj);
15169   int entry = cache->FindInsertionEntry(key.Hash());
15170   // We store the value in the key slot, and compare the search key
15171   // to the stored value with a custom IsMatch function during lookups.
15172   cache->set(EntryToIndex(entry), value);
15173   cache->set(EntryToIndex(entry) + 1, value);
15174   cache->ElementAdded();
// Evicts every entry whose cached value is |value| by holing out both the
// key and value slots.  Uses NoWriteBarrierSet: the-hole is immortal and
// immovable, so no write barrier is required.
15179 void CompilationCacheTable::Remove(Object* value) {
15180   Object* the_hole_value = GetHeap()->the_hole_value();
15181   for (int entry = 0, size = Capacity(); entry < size; entry++) {
15182     int entry_index = EntryToIndex(entry);
15183     int value_index = entry_index + 1;
15184     if (get(value_index) == value) {
15185       NoWriteBarrierSet(this, entry_index, the_hole_value);
15186       NoWriteBarrierSet(this, value_index, the_hole_value);
15194 // StringsKey used for HashTable where key is array of internalized strings.
15195 class StringsKey : public HashTableKey {
15197   explicit StringsKey(FixedArray* strings) : strings_(strings) { }
  // Two keys match when the arrays have equal length and identical
  // elements (pointer equality suffices for internalized strings).
15199   bool IsMatch(Object* strings) {
15200     FixedArray* o = FixedArray::cast(strings);
15201     int len = strings_->length();
15202     if (o->length() != len) return false;
15203     for (int i = 0; i < len; i++) {
15204       if (o->get(i) != strings_->get(i)) return false;
15209   uint32_t Hash() { return HashForObject(strings_); }
  // Hash is the XOR of the element strings' hashes.
15211   uint32_t HashForObject(Object* obj) {
15212     FixedArray* strings = FixedArray::cast(obj);
15213     int len = strings->length();
15215     for (int i = 0; i < len; i++) {
15216       hash ^= String::cast(strings->get(i))->Hash();
15221   Object* AsObject(Heap* heap) { return strings_; }
15224   FixedArray* strings_;
// Looks up the cached Map for an array of internalized property names;
// returns undefined on miss.
15228 Object* MapCache::Lookup(FixedArray* array) {
15229   StringsKey key(array);
15230   int entry = FindEntry(&key);
15231   if (entry == kNotFound) return GetHeap()->undefined_value();
15232   return get(EntryToIndex(entry) + 1);
// Caches |value| (a Map) under the name array |array|.  EnsureCapacity may
// return a new table, hence the reinterpret_cast before the insert.
15236 MaybeObject* MapCache::Put(FixedArray* array, Map* value) {
15237   StringsKey key(array);
15239   { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
15240     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15243   MapCache* cache = reinterpret_cast<MapCache*>(obj);
15244   int entry = cache->FindInsertionEntry(key.Hash());
15245   cache->set(EntryToIndex(entry), array);
15246   cache->set(EntryToIndex(entry) + 1, value);
15247   cache->ElementAdded();
// Allocates a dictionary via the HashTable allocator, then seeds its
// next-enumeration-index counter with PropertyDetails::kInitialIndex.
15252 template<typename Shape, typename Key>
15253 MaybeObject* Dictionary<Shape, Key>::Allocate(Heap* heap,
15254                                               int at_least_space_for,
15255                                               PretenureFlag pretenure) {
15257   { MaybeObject* maybe_obj =
15258       HashTable<Shape, Key>::Allocate(
15260           at_least_space_for,
15261           USE_DEFAULT_MINIMUM_CAPACITY,
15263     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15265   // Initialize the next enumeration index.
15266   Dictionary<Shape, Key>::cast(obj)->
15267       SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
// Handle-based wrapper: runs GenerateNewEnumerationIndices under
// CALL_HEAP_FUNCTION_VOID so allocation failures trigger GC and retry.
15272 void NameDictionary::DoGenerateNewEnumerationIndices(
15273 Handle<NameDictionary> dictionary) {
15274 CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(),
15275 dictionary->GenerateNewEnumerationIndices());
// Compacts the enumeration indices of all live entries to the dense range
// [kInitialIndex, kInitialIndex + length), preserving the existing relative
// enumeration order. Needed when indices grow toward the representable limit.
15278 template<typename Shape, typename Key>
15279 MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
15280 Heap* heap = Dictionary<Shape, Key>::GetHeap();
15281 int length = HashTable<Shape, Key>::NumberOfElements();
15283 // Allocate and initialize iteration order array.
15285 { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
15286 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15288 FixedArray* iteration_order = FixedArray::cast(obj);
// iteration_order starts as the identity permutation 0..length-1.
15289 for (int i = 0; i < length; i++) {
15290 iteration_order->set(i, Smi::FromInt(i));
15293 // Allocate array with enumeration order.
15294 { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
15295 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15297 FixedArray* enumeration_order = FixedArray::cast(obj);
15299 // Fill the enumeration order array with property details.
15300 int capacity = HashTable<Shape, Key>::Capacity();
15302 for (int i = 0; i < capacity; i++) {
15303 if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
15304 int index = DetailsAt(i).dictionary_index();
15305 enumeration_order->set(pos++, Smi::FromInt(index));
15309 // Sort the arrays wrt. enumeration order.
15310 iteration_order->SortPairs(enumeration_order, enumeration_order->length());
15312 // Overwrite the enumeration_order with the enumeration indices.
15313 for (int i = 0; i < length; i++) {
15314 int index = Smi::cast(iteration_order->get(i))->value();
15315 int enum_index = PropertyDetails::kInitialIndex + i;
15316 enumeration_order->set(index, Smi::FromInt(enum_index));
15319 // Update the dictionary with new indices.
15320 capacity = HashTable<Shape, Key>::Capacity();
// Second pass over the table: rewrite each live entry's PropertyDetails
// with its freshly assigned compact enumeration index.
15322 for (int i = 0; i < capacity; i++) {
15323 if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
15324 int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
15325 PropertyDetails details = DetailsAt(i);
15326 PropertyDetails new_details = PropertyDetails(
15327 details.attributes(), details.type(), enum_index);
15328 DetailsAtPut(i, new_details);
15332 // Set the next enumeration index.
15333 SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
// Ensures room for n additional elements. For enumerable shapes, first
// checks that n more enumeration indices remain representable; if not,
// regenerates compact indices before delegating to the HashTable grow path.
15337 template<typename Shape, typename Key>
15338 MaybeObject* Dictionary<Shape, Key>::EnsureCapacity(int n, Key key) {
15339 // Check whether there are enough enumeration indices to add n elements.
15340 if (Shape::kIsEnumerable &&
15341 !PropertyDetails::IsValidIndex(NextEnumerationIndex() + n)) {
15342 // If not, we generate new indices for the properties.
15344 { MaybeObject* maybe_result = GenerateNewEnumerationIndices();
15345 if (!maybe_result->ToObject(&result)) return maybe_result;
15348 return HashTable<Shape, Key>::EnsureCapacity(n, key);
// Deletes the property at the given entry. Returns false_value if the
// property is non-configurable and deletion is not forced; otherwise holes
// out the entry, bumps the removed-element count, and returns true_value.
15352 template<typename Shape, typename Key>
15353 Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
15354 JSReceiver::DeleteMode mode) {
15355 Heap* heap = Dictionary<Shape, Key>::GetHeap();
15356 PropertyDetails details = DetailsAt(entry);
15357 // Ignore attributes if forcing a deletion.
15358 if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
15359 return heap->false_value();
15361 SetEntry(entry, heap->the_hole_value(), heap->the_hole_value());
15362 HashTable<Shape, Key>::ElementRemoved();
15363 return heap->true_value();
// Thin forwarder to the HashTable shrink implementation.
15367 template<typename Shape, typename Key>
15368 MaybeObject* Dictionary<Shape, Key>::Shrink(Key key) {
15369 return HashTable<Shape, Key>::Shrink(key);
// Inserts or overwrites key -> value. Existing entries are updated in place;
// new entries get default details (NONE, NORMAL, index 0) and go through the
// EnsureCapacity/AddEntry path, which may return an allocation failure.
15373 template<typename Shape, typename Key>
15374 MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) {
15375 int entry = this->FindEntry(key);
15377 // If the entry is present set the value;
15378 if (entry != Dictionary<Shape, Key>::kNotFound) {
15379 ValueAtPut(entry, value);
15383 // Check whether the dictionary should be extended.
15385 { MaybeObject* maybe_obj = EnsureCapacity(1, key);
15386 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15390 { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
15391 if (!maybe_k->ToObject(&k)) return maybe_k;
15393 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
15395 return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
15396 Dictionary<Shape, Key>::Hash(key));
// Adds a new key -> value pair with the given details. The key must not
// already be present (checked in slow-assert builds only).
15400 template<typename Shape, typename Key>
15401 MaybeObject* Dictionary<Shape, Key>::Add(Key key,
15403 PropertyDetails details) {
15404 // Validate key is absent.
15405 SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound));
15406 // Check whether the dictionary should be extended.
15408 { MaybeObject* maybe_obj = EnsureCapacity(1, key);
15409 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
// obj may be a new, larger table returned by EnsureCapacity.
15412 return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
15413 Dictionary<Shape, Key>::Hash(key));
15417 // Add a key, value pair to the dictionary.
// Internal helper for Add/AtPut: assumes capacity has already been ensured.
// For enumerable shapes, an entry arriving with dictionary_index 0 is
// assigned the next enumeration index (which is then advanced).
15418 template<typename Shape, typename Key>
15419 MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key,
15421 PropertyDetails details,
15423 // Compute the key object.
15425 { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
15426 if (!maybe_k->ToObject(&k)) return maybe_k;
15429 uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash);
15430 // Insert element at empty or deleted entry
15431 if (!details.IsDeleted() &&
15432 details.dictionary_index() == 0 &&
15433 Shape::kIsEnumerable) {
15434 // Assign an enumeration index to the property and update
15435 // SetNextEnumerationIndex.
15436 int index = NextEnumerationIndex();
15437 details = PropertyDetails(details.attributes(), details.type(), index);
15438 SetNextEnumerationIndex(index + 1);
15440 SetEntry(entry, k, value, details);
15441 ASSERT((Dictionary<Shape, Key>::KeyAt(entry)->IsNumber() ||
15442 Dictionary<Shape, Key>::KeyAt(entry)->IsName()));
15443 HashTable<Shape, Key>::ElementAdded();
// Bookkeeping for element dictionaries: records the largest numeric key seen,
// or flags the dictionary as requiring slow elements once a key exceeds
// kRequiresSlowElementsLimit (after which the max is no longer tracked).
15448 void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
15449 // If the dictionary requires slow elements an element has already
15450 // been added at a high index.
15451 if (requires_slow_elements()) return;
15452 // Check if this index is high enough that we should require slow
15454 if (key > kRequiresSlowElementsLimit) {
15455 set_requires_slow_elements();
15458 // Update max key value.
15459 Object* max_index_object = get(kMaxNumberKeyIndex)
15460 if (!max_index_object->IsSmi() || max_number_key() < key) {
// The max key is stored shifted left so the low tag bits stay free for the
// requires-slow-elements flag.
15461 FixedArray::set(kMaxNumberKeyIndex,
15462 Smi::FromInt(key << kRequiresSlowElementsTagSize));
// Handle-based wrapper: retries the raw AddNumberEntry through
// CALL_HEAP_FUNCTION so allocation failures trigger GC instead of failing.
15466 Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
15467 Handle<SeededNumberDictionary> dictionary,
15469 Handle<Object> value,
15470 PropertyDetails details) {
15471 CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
15472 dictionary->AddNumberEntry(key, *value, details),
15473 SeededNumberDictionary);
// Adds a new numeric key, updating the max-key / slow-elements bookkeeping
// first. The key must not already be present.
15476 MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
15478 PropertyDetails details) {
15479 UpdateMaxNumberKey(key);
15480 SLOW_ASSERT(this->FindEntry(key) == kNotFound);
15481 return Add(key, value, details);
// Unseeded variant: no max-key tracking; entries always get default details.
15485 MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key,
15487 SLOW_ASSERT(this->FindEntry(key) == kNotFound);
15488 return Add(key, value, PropertyDetails(NONE, NORMAL, 0));
// Insert-or-overwrite for a numeric key, keeping max-key bookkeeping current.
15492 MaybeObject* SeededNumberDictionary::AtNumberPut(uint32_t key, Object* value) {
15493 UpdateMaxNumberKey(key);
15494 return AtPut(key, value);
// Insert-or-overwrite for a numeric key; no bookkeeping in the unseeded case.
15498 MaybeObject* UnseededNumberDictionary::AtNumberPut(uint32_t key,
15500 return AtPut(key, value);
// Handle-based wrapper around the raw Set, with GC-retry semantics.
15504 Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
15505 Handle<SeededNumberDictionary> dictionary,
15507 Handle<Object> value,
15508 PropertyDetails details) {
15509 CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
15510 dictionary->Set(index, *value, details),
15511 SeededNumberDictionary);
// Handle-based wrapper around the raw Set, with GC-retry semantics.
15515 Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
15516 Handle<UnseededNumberDictionary> dictionary,
15518 Handle<Object> value) {
15519 CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
15520 dictionary->Set(index, *value),
15521 UnseededNumberDictionary);
// Sets key -> value. A missing key is added as a fresh entry; an existing
// entry is overwritten while keeping its original enumeration index so
// iteration order is preserved.
15525 MaybeObject* SeededNumberDictionary::Set(uint32_t key,
15527 PropertyDetails details) {
15528 int entry = FindEntry(key);
15529 if (entry == kNotFound) return AddNumberEntry(key, value, details);
15530 // Preserve enumeration index.
15531 details = PropertyDetails(details.attributes(),
15533 DetailsAt(entry).dictionary_index());
15534 MaybeObject* maybe_object_key =
15535 SeededNumberDictionaryShape::AsObject(GetHeap(), key);
15536 Object* object_key;
15537 if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
15538 SetEntry(entry, object_key, value, details);
// Sets key -> value: adds a fresh entry when absent, otherwise overwrites
// the existing entry in place (no details to preserve in the unseeded case).
15543 MaybeObject* UnseededNumberDictionary::Set(uint32_t key,
15545 int entry = FindEntry(key);
15546 if (entry == kNotFound) return AddNumberEntry(key, value);
15547 MaybeObject* maybe_object_key =
15548 UnseededNumberDictionaryShape::AsObject(GetHeap(), key);
15549 Object* object_key;
15550 if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
15551 SetEntry(entry, object_key, value);
// Counts live, non-deleted entries whose attributes contain none of the
// bits in `filter` (entries rejected by FilterKey are also skipped).
15557 template<typename Shape, typename Key>
15558 int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes(
15559 PropertyAttributes filter) {
15560 int capacity = HashTable<Shape, Key>::Capacity();
15562 for (int i = 0; i < capacity; i++) {
15563 Object* k = HashTable<Shape, Key>::KeyAt(i);
15564 if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) {
15565 PropertyDetails details = DetailsAt(i);
15566 if (details.IsDeleted()) continue;
15567 PropertyAttributes attr = details.attributes();
15568 if ((attr & filter) == 0) result++;
// Counts entries visible to for-in style enumeration: everything except
// DONT_ENUM properties and symbolic keys.
15575 template<typename Shape, typename Key>
15576 int Dictionary<Shape, Key>::NumberOfEnumElements() {
15577 return NumberOfElementsFilterAttributes(
15578 static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
// Copies all keys passing the attribute filter into `storage`, starting at
// slot 0, optionally sorting them afterwards. The caller must provide a
// storage array at least as large as the filtered element count.
15582 template<typename Shape, typename Key>
15583 void Dictionary<Shape, Key>::CopyKeysTo(
15584 FixedArray* storage,
15585 PropertyAttributes filter,
15586 typename Dictionary<Shape, Key>::SortMode sort_mode) {
15587 ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
15588 int capacity = HashTable<Shape, Key>::Capacity();
15590 for (int i = 0; i < capacity; i++) {
15591 Object* k = HashTable<Shape, Key>::KeyAt(i);
15592 if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) {
15593 PropertyDetails details = DetailsAt(i);
15594 if (details.IsDeleted()) continue;
15595 PropertyAttributes attr = details.attributes();
15596 if ((attr & filter) == 0) storage->set(index++, k);
15599 if (sort_mode == Dictionary<Shape, Key>::SORTED) {
15600 storage->SortPairs(storage, index);
15602 ASSERT(storage->length() >= index);
// Comparator for std::sort over Smi-encoded dictionary entry indices:
// orders entries by their stored enumeration (insertion) index.
15606 struct EnumIndexComparator {
15607 explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
15608 bool operator() (Smi* a, Smi* b) {
15609 PropertyDetails da(dict->DetailsAt(a->value()));
15610 PropertyDetails db(dict->DetailsAt(b->value()));
15611 return da.dictionary_index() < db.dictionary_index();
15613 NameDictionary* dict;
// Fills `storage` with the enumerable, non-symbol keys of this dictionary in
// enumeration order. First collects raw entry indices (as Smis), sorts them
// by enumeration index, then replaces each index with the actual key.
// NOTE(review): std::sort runs here; the GC-safety of raw-pointer sorting
// relies on no allocation occurring during the sort.
15617 void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
15618 int length = storage->length();
15619 int capacity = Capacity();
15620 int properties = 0;
15621 for (int i = 0; i < capacity; i++) {
15622 Object* k = KeyAt(i);
15623 if (IsKey(k) && !k->IsSymbol()) {
15624 PropertyDetails details = DetailsAt(i);
15625 if (details.IsDeleted() || details.IsDontEnum()) continue;
15626 storage->set(properties, Smi::FromInt(i));
15628 if (properties == length) break;
15631 EnumIndexComparator cmp(this);
15632 Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
15633 std::sort(start, start + length, cmp);
15634 for (int i = 0; i < length; i++) {
15635 int index = Smi::cast(storage->get(i))->value();
15636 storage->set(i, KeyAt(index));
// Variant of CopyKeysTo that appends filtered keys starting at a caller-
// supplied index (parameter not visible in this sampled view), optionally
// sorting the filled prefix afterwards.
15641 template<typename Shape, typename Key>
15642 void Dictionary<Shape, Key>::CopyKeysTo(
15643 FixedArray* storage,
15645 PropertyAttributes filter,
15646 typename Dictionary<Shape, Key>::SortMode sort_mode) {
15647 ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
15648 int capacity = HashTable<Shape, Key>::Capacity();
15649 for (int i = 0; i < capacity; i++) {
15650 Object* k = HashTable<Shape, Key>::KeyAt(i);
15651 if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) {
15652 PropertyDetails details = DetailsAt(i);
15653 if (details.IsDeleted()) continue;
15654 PropertyAttributes attr = details.attributes();
15655 if ((attr & filter) == 0) storage->set(index++, k);
15658 if (sort_mode == Dictionary<Shape, Key>::SORTED) {
15659 storage->SortPairs(storage, index);
15661 ASSERT(storage->length() >= index);
15665 // Backwards lookup (slow).
// Linear scan over the whole table: returns the first key whose value equals
// `value` (unwrapping PropertyCells), or undefined when no entry matches.
15666 template<typename Shape, typename Key>
15667 Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) {
15668 int capacity = HashTable<Shape, Key>::Capacity();
15669 for (int i = 0; i < capacity; i++) {
15670 Object* k = HashTable<Shape, Key>::KeyAt(i);
15671 if (Dictionary<Shape, Key>::IsKey(k)) {
15672 Object* e = ValueAt(i);
15673 if (e->IsPropertyCell()) {
15674 e = PropertyCell::cast(e)->value();
15676 if (e == value) return k;
15679 Heap* heap = Dictionary<Shape, Key>::GetHeap();
15680 return heap->undefined_value();
// Converts a slow-mode (dictionary) object back to fast properties:
// builds a DescriptorArray plus a backing-store FixedArray for out-of-object
// fields, then installs a new non-dictionary map. Bails out (returning the
// object unchanged) when there are too many properties to describe.
// Any allocation failure along the way is returned to the caller.
15684 MaybeObject* NameDictionary::TransformPropertiesToFastFor(
15685 JSObject* obj, int unused_property_fields) {
15686 // Make sure we preserve dictionary representation if there are too many
15688 int number_of_elements = NumberOfElements();
15689 if (number_of_elements > kMaxNumberOfDescriptors) return obj;
// Renumber enumeration indices first if they are not already dense, so
// each descriptor slot below (enumeration_index - 1) is in range.
15691 if (number_of_elements != NextEnumerationIndex()) {
15692 MaybeObject* maybe_result = GenerateNewEnumerationIndices();
15693 if (maybe_result->IsFailure()) return maybe_result;
15696 int instance_descriptor_length = 0;
15697 int number_of_fields = 0;
15699 Heap* heap = GetHeap();
15701 // Compute the length of the instance descriptor.
15702 int capacity = Capacity();
15703 for (int i = 0; i < capacity; i++) {
15704 Object* k = KeyAt(i);
15706 Object* value = ValueAt(i);
15707 PropertyType type = DetailsAt(i).type();
15708 ASSERT(type != FIELD);
15709 instance_descriptor_length++;
// Only plain (non-function) NORMAL properties become real fields;
// functions become constant descriptors below.
15710 if (type == NORMAL && !value->IsJSFunction()) {
15711 number_of_fields += 1;
15716 int inobject_props = obj->map()->inobject_properties();
15718 // Allocate new map.
15720 MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
15721 if (!maybe_new_map->To(&new_map)) return maybe_new_map;
15722 new_map->set_dictionary_map(false);
// Fast path: no properties at all — install the new map with an empty
// property backing store.
15724 if (instance_descriptor_length == 0) {
15725 ASSERT_LE(unused_property_fields, inobject_props);
15726 // Transform the object.
15727 new_map->set_unused_property_fields(inobject_props);
15728 obj->set_map(new_map);
15729 obj->set_properties(heap->empty_fixed_array());
15730 // Check that it really works.
15731 ASSERT(obj->HasFastProperties());
15735 // Allocate the instance descriptor.
15736 DescriptorArray* descriptors;
15737 MaybeObject* maybe_descriptors =
15738 DescriptorArray::Allocate(GetIsolate(), instance_descriptor_length);
15739 if (!maybe_descriptors->To(&descriptors)) {
15740 return maybe_descriptors;
15743 DescriptorArray::WhitenessWitness witness(descriptors);
// Fields that do not fit in-object go into the `fields` array below.
15745 int number_of_allocated_fields =
15746 number_of_fields + unused_property_fields - inobject_props;
15747 if (number_of_allocated_fields < 0) {
15748 // There is enough inobject space for all fields (including unused).
15749 number_of_allocated_fields = 0;
15750 unused_property_fields = inobject_props - number_of_fields;
15753 // Allocate the fixed array for the fields.
15754 FixedArray* fields;
15755 MaybeObject* maybe_fields =
15756 heap->AllocateFixedArray(number_of_allocated_fields);
15757 if (!maybe_fields->To(&fields)) return maybe_fields;
15759 // Fill in the instance descriptor and the fields.
15760 int current_offset = 0;
15761 for (int i = 0; i < capacity; i++) {
15762 Object* k = KeyAt(i);
15764 Object* value = ValueAt(i);
15766 if (k->IsSymbol()) {
15767 key = Symbol::cast(k);
15769 // Ensure the key is a unique name before writing into the
15770 // instance descriptor.
15771 MaybeObject* maybe_key = heap->InternalizeString(String::cast(k));
15772 if (!maybe_key->To(&key)) return maybe_key;
15775 PropertyDetails details = DetailsAt(i);
15776 int enumeration_index = details.dictionary_index();
15777 PropertyType type = details.type();
15779 if (value->IsJSFunction()) {
15780 ConstantDescriptor d(key, value, details.attributes());
15781 descriptors->Set(enumeration_index - 1, &d, witness);
15782 } else if (type == NORMAL) {
// Store the value in-object while slots remain, else in `fields`.
15783 if (current_offset < inobject_props) {
15784 obj->InObjectPropertyAtPut(current_offset,
15786 UPDATE_WRITE_BARRIER);
15788 int offset = current_offset - inobject_props;
15789 fields->set(offset, value);
15791 FieldDescriptor d(key,
15793 details.attributes(),
15794 // TODO(verwaest): value->OptimalRepresentation();
15795 Representation::Tagged());
15796 descriptors->Set(enumeration_index - 1, &d, witness);
15797 } else if (type == CALLBACKS) {
15798 CallbacksDescriptor d(key,
15800 details.attributes());
15801 descriptors->Set(enumeration_index - 1, &d, witness);
15807 ASSERT(current_offset == number_of_fields);
15809 descriptors->Sort();
15811 new_map->InitializeDescriptors(descriptors);
15812 new_map->set_unused_property_fields(unused_property_fields);
15814 // Transform the object.
15815 obj->set_map(new_map);
15817 obj->set_properties(fields);
15818 ASSERT(obj->IsJSObject());
15820 // Check that it really works.
15821 ASSERT(obj->HasFastProperties());
// Handle-based wrapper over the base-class EnsureCapacity, with GC-retry via
// CALL_HEAP_FUNCTION. The upcast to the HashTable base selects the inherited
// raw implementation.
15827 Handle<ObjectHashSet> ObjectHashSet::EnsureCapacity(
15828 Handle<ObjectHashSet> table,
15830 Handle<Object> key,
15831 PretenureFlag pretenure) {
15832 Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
15833 CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15834 table_base->EnsureCapacity(n, *key, pretenure),
// Handle-based wrapper over the base-class Shrink, with GC-retry semantics.
15839 Handle<ObjectHashSet> ObjectHashSet::Shrink(Handle<ObjectHashSet> table,
15840 Handle<Object> key) {
15841 Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
15842 CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15843 table_base->Shrink(*key),
// Membership test. An object without an identity hash can never have been
// inserted, so that case short-circuits to false without a table probe.
15848 bool ObjectHashSet::Contains(Object* key) {
15849 ASSERT(IsKey(key));
15851 // If the object does not have an identity hash, it was never used as a key.
15852 Object* hash = key->GetHash();
15853 if (hash->IsUndefined()) return false;
15855 return (FindEntry(key) != kNotFound);
// Adds `key` to the set, first forcing creation of its identity hash.
// Returns the (possibly reallocated) table; a no-op when already present.
15859 Handle<ObjectHashSet> ObjectHashSet::Add(Handle<ObjectHashSet> table,
15860 Handle<Object> key) {
15861 ASSERT(table->IsKey(*key));
15863 // Make sure the key object has an identity hash code.
15864 Handle<Object> object_hash = Object::GetOrCreateHash(key,
15865 table->GetIsolate());
15867 int entry = table->FindEntry(*key);
15869 // Check whether key is already present.
15870 if (entry != kNotFound) return table;
15872 // Check whether the hash set should be extended and add entry.
15873 Handle<ObjectHashSet> new_table =
15874 ObjectHashSet::EnsureCapacity(table, 1, key);
15875 entry = new_table->FindInsertionEntry(Smi::cast(*object_hash)->value());
15876 new_table->set(EntryToIndex(entry), *key);
15877 new_table->ElementAdded();
// Removes `key` from the set if present, then attempts to shrink the table.
// Keys without an identity hash were never inserted, so return unchanged.
15882 Handle<ObjectHashSet> ObjectHashSet::Remove(Handle<ObjectHashSet> table,
15883 Handle<Object> key) {
15884 ASSERT(table->IsKey(*key));
15886 // If the object does not have an identity hash, it was never used as a key.
15887 if (key->GetHash()->IsUndefined()) return table;
15889 int entry = table->FindEntry(*key);
15891 // Check whether key is actually present.
15892 if (entry == kNotFound) return table;
15894 // Remove entry and try to shrink this hash set.
15895 table->set_the_hole(EntryToIndex(entry));
15896 table->ElementRemoved();
15898 return ObjectHashSet::Shrink(table, key);
// Handle-based wrapper over the base-class EnsureCapacity with GC retry;
// mirrors the ObjectHashSet version but for the 2-slot (key+value) shape.
15902 Handle<ObjectHashTable> ObjectHashTable::EnsureCapacity(
15903 Handle<ObjectHashTable> table,
15905 Handle<Object> key,
15906 PretenureFlag pretenure) {
15907 Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
15908 CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15909 table_base->EnsureCapacity(n, *key, pretenure),
// Handle-based wrapper over the base-class Shrink, with GC-retry semantics.
15914 Handle<ObjectHashTable> ObjectHashTable::Shrink(
15915 Handle<ObjectHashTable> table, Handle<Object> key) {
15916 Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
15917 CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15918 table_base->Shrink(*key),
// Returns the value mapped to `key`, or the_hole when absent. A key with no
// identity hash was never inserted and also yields the_hole.
15923 Object* ObjectHashTable::Lookup(Object* key) {
15924 ASSERT(IsKey(key));
15926 // If the object does not have an identity hash, it was never used as a key.
15927 Object* hash = key->GetHash();
15928 if (hash->IsUndefined()) {
15929 return GetHeap()->the_hole_value();
15931 int entry = FindEntry(key);
15932 if (entry == kNotFound) return GetHeap()->the_hole_value();
15933 return get(EntryToIndex(entry) + 1);
// Maps key -> value, returning the (possibly reallocated) table.
// Storing the_hole acts as removal (followed by a shrink attempt);
// an existing key is overwritten in place; otherwise a new entry is added
// after ensuring capacity.
15937 Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
15938 Handle<Object> key,
15939 Handle<Object> value) {
15940 ASSERT(table->IsKey(*key));
15942 Isolate* isolate = table->GetIsolate();
15944 // Make sure the key object has an identity hash code.
15945 Handle<Object> hash = Object::GetOrCreateHash(key, isolate);
15947 int entry = table->FindEntry(*key);
15949 // Check whether to perform removal operation.
15950 if (value->IsTheHole()) {
15951 if (entry == kNotFound) return table;
15952 table->RemoveEntry(entry);
15953 return Shrink(table, key);
15956 // Key is already in table, just overwrite value.
15957 if (entry != kNotFound) {
15958 table->set(EntryToIndex(entry) + 1, *value);
15962 // Check whether the hash table should be extended.
15963 table = EnsureCapacity(table, 1, key);
15964 table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()),
// Raw write of key and value into the two consecutive slots of `entry`;
// callers must have found a free entry and ensured capacity beforehand.
15971 void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
15972 set(EntryToIndex(entry), key);
15973 set(EntryToIndex(entry) + 1, value);
// Holes out both slots (key and value) of the given entry.
15978 void ObjectHashTable::RemoveEntry(int entry) {
15979 set_the_hole(EntryToIndex(entry));
15980 set_the_hole(EntryToIndex(entry) + 1);
// Returns the value mapped to `key`, or the_hole when absent.
15985 Object* WeakHashTable::Lookup(Object* key) {
15986 ASSERT(IsKey(key));
15987 int entry = FindEntry(key);
15988 if (entry == kNotFound) return GetHeap()->the_hole_value();
15989 return get(EntryToValueIndex(entry));
// Maps key -> value: overwrites in place when the key exists, otherwise
// grows the table (tenured allocation) and inserts a new entry. Allocation
// failures from EnsureCapacity are propagated.
15993 MaybeObject* WeakHashTable::Put(Object* key, Object* value) {
15994 ASSERT(IsKey(key));
15995 int entry = FindEntry(key);
15996 // Key is already in table, just overwrite value.
15997 if (entry != kNotFound) {
15998 set(EntryToValueIndex(entry), value);
16002 // Check whether the hash table should be extended.
16004 { MaybeObject* maybe_obj = EnsureCapacity(1, key, TENURED);
16005 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
16007 WeakHashTable* table = WeakHashTable::cast(obj);
16008 table->AddEntry(table->FindInsertionEntry(Hash(key)), key, value);
// Raw write of key and value for a pre-located free entry.
16013 void WeakHashTable::AddEntry(int entry, Object* key, Object* value) {
16014 set(EntryToIndex(entry), key);
16015 set(EntryToValueIndex(entry), value);
// Iterator over the serialized data blob of a DeclaredAccessorDescriptor:
// caches the raw byte pointer and total length at construction.
16020 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
16021 DeclaredAccessorDescriptor* descriptor)
16022 : array_(descriptor->serialized_data()->GetDataStartAddress()),
16023 length_(descriptor->serialized_data()->length()),
// Returns a pointer to the next fixed-size DeclaredAccessorDescriptorData
// record in the serialized byte array and advances the cursor. The alignment
// assert relies on the records being written at aligned offsets.
16028 const DeclaredAccessorDescriptorData*
16029 DeclaredAccessorDescriptorIterator::Next() {
16030 ASSERT(offset_ < length_);
16031 uint8_t* ptr = &array_[offset_];
16032 ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
16033 const DeclaredAccessorDescriptorData* data =
16034 reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
16035 offset_ += sizeof(*data);
16036 ASSERT(offset_ <= length_);
// Builds a new descriptor whose serialized blob is the previous descriptor's
// bytes (if any) followed by one more fixed-size data record — i.e. the
// chain of records is stored flattened into a single ByteArray.
16041 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
16043 const DeclaredAccessorDescriptorData& descriptor,
16044 Handle<DeclaredAccessorDescriptor> previous) {
16045 int previous_length =
16046 previous.is_null() ? 0 : previous->serialized_data()->length();
16047 int length = sizeof(descriptor) + previous_length;
16048 Handle<ByteArray> serialized_descriptor =
16049 isolate->factory()->NewByteArray(length);
16050 Handle<DeclaredAccessorDescriptor> value =
16051 isolate->factory()->NewDeclaredAccessorDescriptor();
16052 value->set_serialized_data(*serialized_descriptor);
16053 // Copy in the data.
// Raw pointers into the ByteArray are used below, so allocation must be
// disallowed for the rest of the function.
16055 DisallowHeapAllocation no_allocation;
16056 uint8_t* array = serialized_descriptor->GetDataStartAddress();
16057 if (previous_length != 0) {
16058 uint8_t* previous_array =
16059 previous->serialized_data()->GetDataStartAddress();
16060 OS::MemCopy(array, previous_array, previous_length);
16061 array += previous_length;
16063 ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
16064 DeclaredAccessorDescriptorData* data =
16065 reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
16066 *data = descriptor;
16072 #ifdef ENABLE_DEBUGGER_SUPPORT
16073 // Check if there is a break point at this code position.
// True iff a BreakPointInfo exists for the position and it holds at least
// one break point object.
16074 bool DebugInfo::HasBreakPoint(int code_position) {
16075 // Get the break point info object for this code position.
16076 Object* break_point_info = GetBreakPointInfo(code_position);
16078 // If there is no break point info object or no break points in the break
16079 // point info object there is no break point at this code position.
16080 if (break_point_info->IsUndefined()) return false;
16081 return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
16085 // Get the break point info object for this code position.
// Returns undefined when no BreakPointInfo is recorded for the position.
16086 Object* DebugInfo::GetBreakPointInfo(int code_position) {
16087 // Find the index of the break point info object for this code position.
16088 int index = GetBreakPointInfoIndex(code_position);
16090 // Return the break point info object if any.
16091 if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
16092 return BreakPointInfo::cast(break_points()->get(index));
16096 // Clear a break point at the specified code position.
// No-op when the position has no BreakPointInfo; otherwise delegates the
// actual removal to BreakPointInfo::ClearBreakPoint.
16097 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
16099 Handle<Object> break_point_object) {
16100 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16101 debug_info->GetIsolate());
16102 if (break_point_info->IsUndefined()) return;
16103 BreakPointInfo::ClearBreakPoint(
16104 Handle<BreakPointInfo>::cast(break_point_info),
16105 break_point_object);
// Sets a break point at the given code position. Reuses the existing
// BreakPointInfo for the position when there is one; otherwise finds (or
// grows the break_points array to create) a free slot and allocates a new
// BreakPointInfo holding the code/source/statement positions.
16109 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
16111 int source_position,
16112 int statement_position,
16113 Handle<Object> break_point_object) {
16114 Isolate* isolate = debug_info->GetIsolate();
16115 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16117 if (!break_point_info->IsUndefined()) {
16118 BreakPointInfo::SetBreakPoint(
16119 Handle<BreakPointInfo>::cast(break_point_info),
16120 break_point_object);
16124 // Adding a new break point for a code position which did not have any
16125 // break points before. Try to find a free slot.
16126 int index = kNoBreakPointInfo;
16127 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16128 if (debug_info->break_points()->get(i)->IsUndefined()) {
16133 if (index == kNoBreakPointInfo) {
16134 // No free slot - extend break point info array.
16135 Handle<FixedArray> old_break_points =
16136 Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
16137 Handle<FixedArray> new_break_points =
16138 isolate->factory()->NewFixedArray(
16139 old_break_points->length() +
16140 Debug::kEstimatedNofBreakPointsInFunction);
16142 debug_info->set_break_points(*new_break_points);
16143 for (int i = 0; i < old_break_points->length(); i++) {
16144 new_break_points->set(i, old_break_points->get(i));
// First slot past the copied entries is guaranteed free.
16146 index = old_break_points->length();
16148 ASSERT(index != kNoBreakPointInfo);
16150 // Allocate new BreakPointInfo object and set the break point.
16151 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
16152 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
16153 new_break_point_info->set_code_position(Smi::FromInt(code_position));
16154 new_break_point_info->set_source_position(Smi::FromInt(source_position));
16155 new_break_point_info->
16156 set_statement_position(Smi::FromInt(statement_position));
16157 new_break_point_info->set_break_point_objects(
16158 isolate->heap()->undefined_value());
16159 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
16160 debug_info->break_points()->set(index, *new_break_point_info);
16164 // Get the break point objects for a code position.
// Returns undefined when the position has no BreakPointInfo.
16165 Object* DebugInfo::GetBreakPointObjects(int code_position) {
16166 Object* break_point_info = GetBreakPointInfo(code_position);
16167 if (break_point_info->IsUndefined()) {
16168 return GetHeap()->undefined_value();
16170 return BreakPointInfo::cast(break_point_info)->break_point_objects();
16174 // Get the total number of break points.
// Sums the break point counts of every defined BreakPointInfo slot.
16175 int DebugInfo::GetBreakPointCount() {
16176 if (break_points()->IsUndefined()) return 0;
16178 for (int i = 0; i < break_points()->length(); i++) {
16179 if (!break_points()->get(i)->IsUndefined()) {
16180 BreakPointInfo* break_point_info =
16181 BreakPointInfo::cast(break_points()->get(i));
16182 count += break_point_info->GetBreakPointCount();
// Finds the BreakPointInfo that references the given break point object,
// scanning all defined slots; returns undefined when none matches.
16189 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
16190 Handle<Object> break_point_object) {
16191 Heap* heap = debug_info->GetHeap();
16192 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
16193 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16194 if (!debug_info->break_points()->get(i)->IsUndefined()) {
16195 Handle<BreakPointInfo> break_point_info =
16196 Handle<BreakPointInfo>(BreakPointInfo::cast(
16197 debug_info->break_points()->get(i)));
16198 if (BreakPointInfo::HasBreakPointObject(break_point_info,
16199 break_point_object)) {
16200 return *break_point_info;
16204 return heap->undefined_value();
16208 // Find the index of the break point info object for the specified code
// position; returns kNoBreakPointInfo when no slot matches.
16210 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
16211 if (break_points()->IsUndefined()) return kNoBreakPointInfo;
16212 for (int i = 0; i < break_points()->length(); i++) {
16213 if (!break_points()->get(i)->IsUndefined()) {
16214 BreakPointInfo* break_point_info =
16215 BreakPointInfo::cast(break_points()->get(i));
16216 if (break_point_info->code_position()->value() == code_position) {
16221 return kNoBreakPointInfo;
16225 // Remove the specified break point object.
// The break_point_objects field is either undefined (none), a single object,
// or a FixedArray of objects; each representation is handled separately.
16226 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
16227 Handle<Object> break_point_object) {
16228 Isolate* isolate = break_point_info->GetIsolate();
16229 // If there are no break points just ignore.
16230 if (break_point_info->break_point_objects()->IsUndefined()) return;
16231 // If there is a single break point clear it if it is the same.
16232 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16233 if (break_point_info->break_point_objects() == *break_point_object) {
16234 break_point_info->set_break_point_objects(
16235 isolate->heap()->undefined_value());
16239 // If there are multiple break points shrink the array
16240 ASSERT(break_point_info->break_point_objects()->IsFixedArray());
16241 Handle<FixedArray> old_array =
16242 Handle<FixedArray>(
16243 FixedArray::cast(break_point_info->break_point_objects()));
16244 Handle<FixedArray> new_array =
16245 isolate->factory()->NewFixedArray(old_array->length() - 1);
16246 int found_count = 0;
// Copy every element except the one being removed; the target is expected
// to occur at most once.
16247 for (int i = 0; i < old_array->length(); i++) {
16248 if (old_array->get(i) == *break_point_object) {
16249 ASSERT(found_count == 0);
16252 new_array->set(i - found_count, old_array->get(i));
16255 // If the break point was found in the list change it.
16256 if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
16260 // Add the specified break point object.
16261 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
16262 Handle<Object> break_point_object) {
16263 Isolate* isolate = break_point_info->GetIsolate();
16265 // If there was no break point objects before just set it.
16266 if (break_point_info->break_point_objects()->IsUndefined()) {
16267 break_point_info->set_break_point_objects(*break_point_object);
16270 // If the break point object is the same as before just ignore.
16271 if (break_point_info->break_point_objects() == *break_point_object) return;
16272 // If there was one break point object before replace with array.
16273 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16274 Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
16275 array->set(0, break_point_info->break_point_objects());
16276 array->set(1, *break_point_object);
16277 break_point_info->set_break_point_objects(*array);
16280 // If there was more than one break point before extend array.
16281 Handle<FixedArray> old_array =
16282 Handle<FixedArray>(
16283 FixedArray::cast(break_point_info->break_point_objects()));
16284 Handle<FixedArray> new_array =
16285 isolate->factory()->NewFixedArray(old_array->length() + 1);
16286 for (int i = 0; i < old_array->length(); i++) {
16287 // If the break point was there before just ignore.
16288 if (old_array->get(i) == *break_point_object) return;
16289 new_array->set(i, old_array->get(i));
16291 // Add the new break point.
16292 new_array->set(old_array->length(), *break_point_object);
16293 break_point_info->set_break_point_objects(*new_array);
16297 bool BreakPointInfo::HasBreakPointObject(
16298 Handle<BreakPointInfo> break_point_info,
16299 Handle<Object> break_point_object) {
16301 if (break_point_info->break_point_objects()->IsUndefined()) return false;
16302 // Single break point.
16303 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16304 return break_point_info->break_point_objects() == *break_point_object;
16306 // Multiple break points.
16307 FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
16308 for (int i = 0; i < array->length(); i++) {
16309 if (array->get(i) == *break_point_object) {
16317 // Get the number of break points.
16318 int BreakPointInfo::GetBreakPointCount() {
16320 if (break_point_objects()->IsUndefined()) return 0;
16321 // Single break point.
16322 if (!break_point_objects()->IsFixedArray()) return 1;
16323 // Multiple break points.
16324 return FixedArray::cast(break_point_objects())->length();
16326 #endif // ENABLE_DEBUGGER_SUPPORT
16329 Object* JSDate::GetField(Object* object, Smi* index) {
16330 return JSDate::cast(object)->DoGetField(
16331 static_cast<FieldIndex>(index->value()));
16335 Object* JSDate::DoGetField(FieldIndex index) {
16336 ASSERT(index != kDateValue);
16338 DateCache* date_cache = GetIsolate()->date_cache();
16340 if (index < kFirstUncachedField) {
16341 Object* stamp = cache_stamp();
16342 if (stamp != date_cache->stamp() && stamp->IsSmi()) {
16343 // Since the stamp is not NaN, the value is also not NaN.
16344 int64_t local_time_ms =
16345 date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
16346 SetLocalFields(local_time_ms, date_cache);
16349 case kYear: return year();
16350 case kMonth: return month();
16351 case kDay: return day();
16352 case kWeekday: return weekday();
16353 case kHour: return hour();
16354 case kMinute: return min();
16355 case kSecond: return sec();
16356 default: UNREACHABLE();
16360 if (index >= kFirstUTCField) {
16361 return GetUTCField(index, value()->Number(), date_cache);
16364 double time = value()->Number();
16365 if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
16367 int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
16368 int days = DateCache::DaysFromTime(local_time_ms);
16370 if (index == kDays) return Smi::FromInt(days);
16372 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16373 if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
16374 ASSERT(index == kTimeInDay);
16375 return Smi::FromInt(time_in_day_ms);
16379 Object* JSDate::GetUTCField(FieldIndex index,
16381 DateCache* date_cache) {
16382 ASSERT(index >= kFirstUTCField);
16384 if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
16386 int64_t time_ms = static_cast<int64_t>(value);
16388 if (index == kTimezoneOffset) {
16389 return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
16392 int days = DateCache::DaysFromTime(time_ms);
16394 if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
16396 if (index <= kDayUTC) {
16397 int year, month, day;
16398 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16399 if (index == kYearUTC) return Smi::FromInt(year);
16400 if (index == kMonthUTC) return Smi::FromInt(month);
16401 ASSERT(index == kDayUTC);
16402 return Smi::FromInt(day);
16405 int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
16407 case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
16408 case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
16409 case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
16410 case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
16411 case kDaysUTC: return Smi::FromInt(days);
16412 case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
16413 default: UNREACHABLE();
16421 void JSDate::SetValue(Object* value, bool is_value_nan) {
16423 if (is_value_nan) {
16424 HeapNumber* nan = GetIsolate()->heap()->nan_value();
16425 set_cache_stamp(nan, SKIP_WRITE_BARRIER);
16426 set_year(nan, SKIP_WRITE_BARRIER);
16427 set_month(nan, SKIP_WRITE_BARRIER);
16428 set_day(nan, SKIP_WRITE_BARRIER);
16429 set_hour(nan, SKIP_WRITE_BARRIER);
16430 set_min(nan, SKIP_WRITE_BARRIER);
16431 set_sec(nan, SKIP_WRITE_BARRIER);
16432 set_weekday(nan, SKIP_WRITE_BARRIER);
16434 set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
16439 void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
16440 int days = DateCache::DaysFromTime(local_time_ms);
16441 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16442 int year, month, day;
16443 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16444 int weekday = date_cache->Weekday(days);
16445 int hour = time_in_day_ms / (60 * 60 * 1000);
16446 int min = (time_in_day_ms / (60 * 1000)) % 60;
16447 int sec = (time_in_day_ms / 1000) % 60;
16448 set_cache_stamp(date_cache->stamp());
16449 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
16450 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
16451 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
16452 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
16453 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
16454 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
16455 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
16459 void JSArrayBuffer::Neuter() {
16460 ASSERT(is_external());
16461 set_backing_store(NULL);
16462 set_byte_length(Smi::FromInt(0));
16466 void JSArrayBufferView::NeuterView() {
16467 set_byte_offset(Smi::FromInt(0));
16468 set_byte_length(Smi::FromInt(0));
16472 void JSDataView::Neuter() {
16477 void JSTypedArray::Neuter() {
16479 set_length(Smi::FromInt(0));
16480 set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
16484 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
16485 switch (elements_kind) {
16486 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
16487 case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
16489 TYPED_ARRAYS(TYPED_ARRAY_CASE)
16490 #undef TYPED_ARRAY_CASE
16494 return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
16499 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
16500 Handle<JSTypedArray> typed_array) {
16502 Handle<Map> map(typed_array->map());
16503 Isolate* isolate = typed_array->GetIsolate();
16505 ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind()));
16507 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
16508 Handle<FixedTypedArrayBase> fixed_typed_array(
16509 FixedTypedArrayBase::cast(typed_array->elements()));
16510 Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
16511 fixed_typed_array->DataSize(), false);
16512 memcpy(buffer->backing_store(),
16513 fixed_typed_array->DataPtr(),
16514 fixed_typed_array->DataSize());
16515 Handle<ExternalArray> new_elements =
16516 isolate->factory()->NewExternalArray(
16517 fixed_typed_array->length(), typed_array->type(),
16518 static_cast<uint8_t*>(buffer->backing_store()));
16519 Handle<Map> new_map = JSObject::GetElementsTransitionMap(
16521 FixedToExternalElementsKind(map->elements_kind()));
16523 buffer->set_weak_first_view(*typed_array);
16524 ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value());
16525 typed_array->set_buffer(*buffer);
16526 typed_array->set_map_and_elements(*new_map, *new_elements);
16532 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
16533 Handle<Object> result(buffer(), GetIsolate());
16534 if (*result != Smi::FromInt(0)) {
16535 ASSERT(IsExternalArrayElementsKind(map()->elements_kind()));
16536 return Handle<JSArrayBuffer>::cast(result);
16538 Handle<JSTypedArray> self(this);
16539 return MaterializeArrayBuffer(self);
16543 HeapType* PropertyCell::type() {
16544 return static_cast<HeapType*>(type_raw());
16548 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
16549 ASSERT(IsPropertyCell());
16550 set_type_raw(type, ignored);
16554 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
16555 Handle<Object> value) {
16556 Isolate* isolate = cell->GetIsolate();
16557 Handle<HeapType> old_type(cell->type(), isolate);
16558 // TODO(2803): Do not track ConsString as constant because they cannot be
16559 // embedded into code.
16560 Handle<HeapType> new_type = value->IsConsString() || value->IsTheHole()
16561 ? HeapType::Any(isolate) : HeapType::Constant(value, isolate);
16563 if (new_type->Is(old_type)) {
16567 cell->dependent_code()->DeoptimizeDependentCodeGroup(
16568 isolate, DependentCode::kPropertyCellChangedGroup);
16570 if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
16574 return HeapType::Any(isolate);
16578 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
16579 Handle<Object> value) {
16580 cell->set_value(*value);
16581 if (!HeapType::Any()->Is(cell->type())) {
16582 Handle<HeapType> new_type = UpdatedType(cell, value);
16583 cell->set_type(*new_type);
16588 void PropertyCell::AddDependentCompilationInfo(CompilationInfo* info) {
16589 Handle<DependentCode> dep(dependent_code());
16590 Handle<DependentCode> codes =
16591 DependentCode::Insert(dep, DependentCode::kPropertyCellChangedGroup,
16592 info->object_wrapper());
16593 if (*codes != dependent_code()) set_dependent_code(*codes);
16594 info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
16595 Handle<HeapObject>(this), info->zone());
16599 const char* GetBailoutReason(BailoutReason reason) {
16600 ASSERT(reason < kLastErrorMessage);
16601 #define ERROR_MESSAGES_TEXTS(C, T) T,
16602 static const char* error_messages_[] = {
16603 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
16605 #undef ERROR_MESSAGES_TEXTS
16606 return error_messages_[reason];
16610 } } // namespace v8::internal