1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/accessors.h"
8 #include "src/allocation-site-scopes.h"
10 #include "src/arguments.h"
11 #include "src/bootstrapper.h"
12 #include "src/codegen.h"
13 #include "src/code-stubs.h"
14 #include "src/cpu-profiler.h"
15 #include "src/debug.h"
16 #include "src/deoptimizer.h"
18 #include "src/elements.h"
19 #include "src/execution.h"
20 #include "src/field-index.h"
21 #include "src/field-index-inl.h"
22 #include "src/full-codegen.h"
23 #include "src/hydrogen.h"
24 #include "src/isolate-inl.h"
26 #include "src/lookup.h"
27 #include "src/objects-inl.h"
28 #include "src/objects-visiting-inl.h"
29 #include "src/macro-assembler.h"
30 #include "src/mark-compact.h"
31 #include "src/safepoint-table.h"
32 #include "src/string-search.h"
33 #include "src/string-stream.h"
34 #include "src/utils.h"
36 #ifdef ENABLE_DISASSEMBLER
37 #include "src/disasm.h"
38 #include "src/disassembler.h"
// Returns the most precise HeapType usable to describe |this| as a field
// value under |representation|: None for the none-representation, a Class
// type when field-type tracking is on and the object has a stable
// non-callable spec-object map, otherwise Any.
44 Handle<HeapType> Object::OptimalType(Isolate* isolate,
45 Representation representation) {
46 if (representation.IsNone()) return HeapType::None(isolate);
47 if (FLAG_track_field_types) {
48 if (representation.IsHeapObject() && IsHeapObject()) {
49 // We can track only JavaScript objects with stable maps.
50 Handle<Map> map(HeapObject::cast(this)->map(), isolate);
51 if (map->is_stable() &&
52 map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
53 map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
54 return HeapType::Class(map, isolate);
// Fallback: no more precise type can be guaranteed.
58 return HeapType::Any(isolate);
// ES ToObject: receivers pass through unchanged; number/boolean/string/
// symbol primitives are boxed in a JSValue built from the corresponding
// wrapper constructor of |native_context|. Other inputs yield an empty
// MaybeHandle (conversion failure).
62 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
63 Handle<Object> object,
64 Handle<Context> native_context) {
65 if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
66 Handle<JSFunction> constructor;
67 if (object->IsNumber()) {
68 constructor = handle(native_context->number_function(), isolate);
69 } else if (object->IsBoolean()) {
70 constructor = handle(native_context->boolean_function(), isolate);
71 } else if (object->IsString()) {
72 constructor = handle(native_context->string_function(), isolate);
73 } else if (object->IsSymbol()) {
74 constructor = handle(native_context->symbol_function(), isolate);
// No wrapper constructor available (presumably undefined/null) -- fail.
76 return MaybeHandle<JSReceiver>();
78 Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
79 Handle<JSValue>::cast(result)->set_value(*object);
// ES ToBoolean: booleans return themselves; zero Smi, undefined, null,
// undetectable objects and the empty string are false; heap numbers defer
// to HeapNumberBooleanValue.
84 bool Object::BooleanValue() {
85 if (IsBoolean()) return IsTrue();
86 if (IsSmi()) return Smi::cast(this)->value() != 0;
87 if (IsUndefined() || IsNull()) return false;
88 if (IsUndetectableObject()) return false; // Undetectable object is false.
89 if (IsString()) return String::cast(this)->length() != 0;
90 if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
// True when the object can be invoked: function proxies are unwrapped
// through their call traps, then we accept a JSFunction or any heap object
// whose map carries an instance call handler.
95 bool Object::IsCallable() {
// NOTE(review): |fun|'s declaration is on a line elided from this excerpt
// (presumably initialized from |this|) -- confirm against full source.
97 while (fun->IsJSFunctionProxy()) {
98 fun = JSFunctionProxy::cast(fun)->call_trap();
100 return fun->IsJSFunction() ||
101 (fun->IsHeapObject() &&
102 HeapObject::cast(fun)->map()->has_instance_call_handler());
// Looks up |name| starting from this value. Non-receiver primitives are
// redirected to the instance prototype of their wrapper constructor; the
// final else branch deliberately crashes with marker values for
// diagnosable minidumps. Null/undefined are not handled (see ASSERT).
106 void Object::Lookup(Handle<Name> name, LookupResult* result) {
107 DisallowHeapAllocation no_gc;
108 Object* holder = NULL;
109 if (IsJSReceiver()) {
112 Context* native_context = result->isolate()->context()->native_context();
114 holder = native_context->number_function()->instance_prototype();
115 } else if (IsString()) {
116 holder = native_context->string_function()->instance_prototype();
117 } else if (IsSymbol()) {
118 holder = native_context->symbol_function()->instance_prototype();
119 } else if (IsBoolean()) {
120 holder = native_context->boolean_function()->instance_prototype();
// Unexpected value kind: dump stack with recognizable 0xDEAD markers.
122 result->isolate()->PushStackTraceAndDie(
123 0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
126 ASSERT(holder != NULL); // Cannot handle null or undefined.
127 JSReceiver::cast(holder)->Lookup(name, result);
// Drives a LookupIterator to completion for a property load, dispatching
// on the iterator state: proxies go through their handler, interceptors
// may produce a result or an exception, access checks can divert to the
// failed-access-check path, and real properties are read via accessor or
// as plain data. Falls out to undefined when nothing is found.
131 MaybeHandle<Object> Object::GetProperty(LookupIterator* it) {
132 for (; it->IsFound(); it->Next()) {
133 switch (it->state()) {
134 case LookupIterator::NOT_FOUND:
136 case LookupIterator::JSPROXY:
137 return JSProxy::GetPropertyWithHandler(
138 it->GetJSProxy(), it->GetReceiver(), it->name());
139 case LookupIterator::INTERCEPTOR: {
140 MaybeHandle<Object> maybe_result = JSObject::GetPropertyWithInterceptor(
141 it->GetHolder(), it->GetReceiver(), it->name());
142 if (!maybe_result.is_null()) return maybe_result;
// Empty result with a pending exception: propagate the exception.
// Otherwise the interceptor declined and lookup continues.
143 if (it->isolate()->has_pending_exception()) return maybe_result;
146 case LookupIterator::ACCESS_CHECK:
147 if (it->HasAccess(v8::ACCESS_GET)) break;
148 return JSObject::GetPropertyWithFailedAccessCheck(it);
149 case LookupIterator::PROPERTY:
150 if (it->HasProperty()) {
151 switch (it->property_kind()) {
152 case LookupIterator::ACCESSOR:
153 return GetPropertyWithAccessor(
154 it->GetReceiver(), it->name(),
155 it->GetHolder(), it->GetAccessors());
156 case LookupIterator::DATA:
157 return it->GetDataValue();
// Property not found anywhere on the chain.
163 return it->factory()->undefined_value();
// Lossless conversion to int32: Smis convert directly; a heap number is
// accepted only when the double -> int -> double round trip is exact.
167 bool Object::ToInt32(int32_t* value) {
169 *value = Smi::cast(this)->value();
172 if (IsHeapNumber()) {
173 double num = HeapNumber::cast(this)->value();
174 if (FastI2D(FastD2I(num)) == num) {
175 *value = FastD2I(num);
// Lossless conversion to uint32: Smis are widened, heap numbers are
// accepted only when non-negative and exactly round-trippable.
183 bool Object::ToUint32(uint32_t* value) {
185 int num = Smi::cast(this)->value();
187 *value = static_cast<uint32_t>(num);
191 if (IsHeapNumber()) {
192 double num = HeapNumber::cast(this)->value();
193 if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
194 *value = FastD2UI(num);
// Convenience overload: non-heap objects can never match; otherwise
// delegate to the Map-based check.
202 bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
203 if (!object->IsHeapObject()) return false;
204 return IsTemplateFor(HeapObject::cast(object)->map());
// True when an object with |map| was created (directly or via template
// inheritance) from this function template: walks the constructor's
// parent_template chain looking for |this|.
208 bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
209 // There is a constraint on the object; check.
210 if (!map->IsJSObjectMap()) return false;
211 // Fetch the constructor function of the object.
212 Object* cons_obj = map->constructor();
213 if (!cons_obj->IsJSFunction()) return false;
214 JSFunction* fun = JSFunction::cast(cons_obj);
215 // Iterate through the chain of inheriting function templates to
216 // see if the required one occurs.
217 for (Object* type = fun->shared()->function_data();
218 type->IsFunctionTemplateInfo();
219 type = FunctionTemplateInfo::cast(type)->parent_template()) {
220 if (type == this) return true;
222 // Didn't find the required type in the inheritance chain.
// Casts a raw void* to To*, asserting that the address is a multiple of
// sizeof(To) (used as an alignment proxy -- assumes sizeof == alignof for
// the types involved).
227 template<typename To>
228 static inline To* CheckedCast(void *from) {
229 uintptr_t temp = reinterpret_cast<uintptr_t>(from);
230 ASSERT(temp % sizeof(To) == 0);
231 return reinterpret_cast<To*>(temp);
// Declared-accessor helper: loads an 8/16/32-bit value at |ptr| (per
// descriptor.size), masks both it and the compare value with the bitmask,
// and returns the boolean equality result. Unsupported sizes yield
// undefined.
235 static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
238 uint32_t bitmask = descriptor.bitmask;
239 uint32_t compare_value = descriptor.compare_value;
241 switch (descriptor.size) {
243 value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
244 compare_value &= 0xff;
248 value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
249 compare_value &= 0xffff;
253 value = *CheckedCast<uint32_t>(ptr);
257 return isolate->factory()->undefined_value();
259 return isolate->factory()->ToBoolean(
260 (bitmask & value) == (bitmask & compare_value));
// Declared-accessor helper: compares the pointer-sized word at |ptr|
// against the descriptor's expected pointer and returns a boolean.
264 static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
267 uintptr_t compare_value =
268 reinterpret_cast<uintptr_t>(descriptor.compare_value);
269 uintptr_t value = *CheckedCast<uintptr_t>(ptr);
270 return isolate->factory()->ToBoolean(compare_value == value);
// Declared-accessor helper: reads a primitive of the descriptor's data
// type from raw memory at |ptr| and boxes it. Small integer types funnel
// into |int32_value| and the shared NewNumberFromInt tail; uint32, float,
// double and bool return directly (allocation explicitly re-enabled where
// a number object must be created).
274 static Handle<Object> GetPrimitiveValue(
275 const PrimitiveValueDescriptor& descriptor,
278 int32_t int32_value = 0;
279 switch (descriptor.data_type) {
280 case kDescriptorInt8Type:
281 int32_value = *CheckedCast<int8_t>(ptr);
283 case kDescriptorUint8Type:
284 int32_value = *CheckedCast<uint8_t>(ptr);
286 case kDescriptorInt16Type:
287 int32_value = *CheckedCast<int16_t>(ptr);
289 case kDescriptorUint16Type:
290 int32_value = *CheckedCast<uint16_t>(ptr);
292 case kDescriptorInt32Type:
293 int32_value = *CheckedCast<int32_t>(ptr);
295 case kDescriptorUint32Type: {
296 uint32_t value = *CheckedCast<uint32_t>(ptr);
297 AllowHeapAllocation allow_gc;
298 return isolate->factory()->NewNumberFromUint(value);
300 case kDescriptorBoolType: {
// Extract a single bit at bool_offset from the byte.
301 uint8_t byte = *CheckedCast<uint8_t>(ptr);
302 return isolate->factory()->ToBoolean(
303 byte & (0x1 << descriptor.bool_offset));
305 case kDescriptorFloatType: {
306 float value = *CheckedCast<float>(ptr);
307 AllowHeapAllocation allow_gc;
308 return isolate->factory()->NewNumber(value);
310 case kDescriptorDoubleType: {
311 double value = *CheckedCast<double>(ptr);
312 AllowHeapAllocation allow_gc;
313 return isolate->factory()->NewNumber(value);
// Shared tail for the int8..int32 cases accumulated above.
316 AllowHeapAllocation allow_gc;
317 return isolate->factory()->NewNumberFromInt(int32_value);
// Interprets a DeclaredAccessorInfo descriptor program against raw memory
// starting at |receiver|: |current| is a byte cursor advanced by
// dereference/shift/internal-field steps until a terminal op (return
// object, bitmask/pointer compare, or primitive read) produces a value.
// Runs under DisallowHeapAllocation since it walks raw object innards.
321 static Handle<Object> GetDeclaredAccessorProperty(
322 Handle<Object> receiver,
323 Handle<DeclaredAccessorInfo> info,
325 DisallowHeapAllocation no_gc;
326 char* current = reinterpret_cast<char*>(*receiver);
327 DeclaredAccessorDescriptorIterator iterator(info->descriptor());
329 const DeclaredAccessorDescriptorData* data = iterator.Next();
330 switch (data->type) {
331 case kDescriptorReturnObject: {
332 ASSERT(iterator.Complete());
333 current = *CheckedCast<char*>(current);
334 return handle(*CheckedCast<Object*>(current), isolate);
336 case kDescriptorPointerDereference:
337 ASSERT(!iterator.Complete());
338 current = *reinterpret_cast<char**>(current);
340 case kDescriptorPointerShift:
341 ASSERT(!iterator.Complete());
342 current += data->pointer_shift_descriptor.byte_offset;
344 case kDescriptorObjectDereference: {
345 ASSERT(!iterator.Complete());
346 Object* object = CheckedCast<Object>(current);
347 int field = data->object_dereference_descriptor.internal_field;
348 Object* smi = JSObject::cast(object)->GetInternalField(field);
349 ASSERT(smi->IsSmi());
350 current = reinterpret_cast<char*>(smi);
353 case kDescriptorBitmaskCompare:
354 ASSERT(iterator.Complete());
355 return PerformCompare(data->bitmask_compare_descriptor,
358 case kDescriptorPointerCompare:
359 ASSERT(iterator.Complete());
360 return PerformCompare(data->pointer_compare_descriptor,
363 case kDescriptorPrimitiveValue:
364 ASSERT(iterator.Complete());
365 return GetPrimitiveValue(data->primitive_value_descriptor,
// Unreachable in a well-formed descriptor; placate the compiler.
371 return isolate->factory()->undefined_value();
// Ensures the object's fast elements backing store is writable: a
// copy-on-write FixedArray is replaced by a private copy with the regular
// fixed-array map; an already-writable store is returned unchanged.
375 Handle<FixedArray> JSObject::EnsureWritableFastElements(
376 Handle<JSObject> object) {
377 ASSERT(object->HasFastSmiOrObjectElements());
378 Isolate* isolate = object->GetIsolate();
379 Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
// Not COW: already safe to mutate in place.
380 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
381 Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
382 elems, isolate->factory()->fixed_array_map());
383 object->set_elements(*writable_elems);
384 isolate->counters()->cow_arrays_converted()->Increment();
385 return writable_elems;
// Loads a named property through a harmony proxy's "get" trap. Symbols
// short-circuit to undefined (no symbol story for proxies yet, see TODO).
389 MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
390 Handle<Object> receiver,
392 Isolate* isolate = proxy->GetIsolate();
394 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
395 if (name->IsSymbol()) return isolate->factory()->undefined_value();
397 Handle<Object> args[] = { receiver, name };
399 proxy, "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
// Reads a property through an accessor |structure|, which is either API
// AccessorInfo (declared or executable callback) or a JS AccessorPair
// (__defineGetter__ style). Incompatible receivers throw a TypeError;
// missing getters and API symbol lookups yield undefined.
403 MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver,
405 Handle<JSObject> holder,
406 Handle<Object> structure) {
407 Isolate* isolate = name->GetIsolate();
408 ASSERT(!structure->IsForeign());
409 // api style callbacks.
410 if (structure->IsAccessorInfo()) {
411 Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure);
412 if (!accessor_info->IsCompatibleReceiver(*receiver)) {
413 Handle<Object> args[2] = { name, receiver };
414 Handle<Object> error =
415 isolate->factory()->NewTypeError("incompatible_method_receiver",
418 return isolate->Throw<Object>(error);
420 // TODO(rossberg): Handling symbols in the API requires changing the API,
421 // so we do not support it for now.
422 if (name->IsSymbol()) return isolate->factory()->undefined_value();
423 if (structure->IsDeclaredAccessorInfo()) {
// Declared accessor: interpret its descriptor against raw memory.
424 return GetDeclaredAccessorProperty(
426 Handle<DeclaredAccessorInfo>::cast(structure),
// Executable accessor: invoke the C++ getter callback.
430 Handle<ExecutableAccessorInfo> data =
431 Handle<ExecutableAccessorInfo>::cast(structure);
432 v8::AccessorGetterCallback call_fun =
433 v8::ToCData<v8::AccessorGetterCallback>(data->getter());
434 if (call_fun == NULL) return isolate->factory()->undefined_value();
436 Handle<String> key = Handle<String>::cast(name);
437 LOG(isolate, ApiNamedPropertyAccess("load", *holder, *name));
438 PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
439 v8::Handle<v8::Value> result =
440 args.Call(call_fun, v8::Utils::ToLocal(key));
441 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
442 if (result.IsEmpty()) {
443 return isolate->factory()->undefined_value();
445 Handle<Object> return_value = v8::Utils::OpenHandle(*result);
446 return_value->VerifyApiCallResultType();
447 // Rebox handle before return.
448 return handle(*return_value, isolate);
451 // __defineGetter__ callback
452 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
454 if (getter->IsSpecFunction()) {
455 // TODO(rossberg): nicer would be to cast to some JSCallable here...
456 return Object::GetPropertyWithDefinedGetter(
457 receiver, Handle<JSReceiver>::cast(getter));
459 // Getter is not a function.
460 return isolate->factory()->undefined_value();
// Writes a property through an accessor |structure|: API-style
// ExecutableAccessorInfo invokes the C++ setter callback, an AccessorPair
// calls its JS setter (or throws no_setter_in_callback in strict mode;
// sloppy mode silently returns the value). DeclaredAccessorInfo writes are
// not yet handled (see TODO).
464 MaybeHandle<Object> Object::SetPropertyWithCallback(Handle<Object> receiver,
466 Handle<Object> value,
467 Handle<JSObject> holder,
468 Handle<Object> structure,
469 StrictMode strict_mode) {
470 Isolate* isolate = name->GetIsolate();
472 // We should never get here to initialize a const with the hole
473 // value since a const declaration would conflict with the setter.
474 ASSERT(!value->IsTheHole());
475 ASSERT(!structure->IsForeign());
476 if (structure->IsExecutableAccessorInfo()) {
477 // api style callbacks
478 ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure);
479 if (!data->IsCompatibleReceiver(*receiver)) {
480 Handle<Object> args[2] = { name, receiver };
481 Handle<Object> error =
482 isolate->factory()->NewTypeError("incompatible_method_receiver",
485 return isolate->Throw<Object>(error);
487 // TODO(rossberg): Support symbols in the API.
488 if (name->IsSymbol()) return value;
489 Object* call_obj = data->setter();
490 v8::AccessorSetterCallback call_fun =
491 v8::ToCData<v8::AccessorSetterCallback>(call_obj);
// Missing setter callback: treat the store as a successful no-op.
492 if (call_fun == NULL) return value;
493 Handle<String> key = Handle<String>::cast(name);
494 LOG(isolate, ApiNamedPropertyAccess("store", *holder, *name));
495 PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
497 v8::Utils::ToLocal(key),
498 v8::Utils::ToLocal(value));
499 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
503 if (structure->IsAccessorPair()) {
504 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
505 if (setter->IsSpecFunction()) {
506 // TODO(rossberg): nicer would be to cast to some JSCallable here...
507 return SetPropertyWithDefinedSetter(
508 receiver, Handle<JSReceiver>::cast(setter), value);
510 if (strict_mode == SLOPPY) return value;
511 Handle<Object> args[2] = { name, holder };
512 Handle<Object> error =
513 isolate->factory()->NewTypeError("no_setter_in_callback",
514 HandleVector(args, 2));
515 return isolate->Throw<Object>(error);
519 // TODO(dcarney): Handle correctly.
520 if (structure->IsDeclaredAccessorInfo()) {
525 return MaybeHandle<Object>();
// Invokes a JS-defined getter on |receiver|, first notifying the debugger
// so "step into" can enter the getter.
529 MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
530 Handle<Object> receiver,
531 Handle<JSReceiver> getter) {
532 Isolate* isolate = getter->GetIsolate();
533 Debug* debug = isolate->debug();
534 // Handle stepping into a getter if step into is active.
535 // TODO(rossberg): should this apply to getters that are function proxies?
536 if (debug->StepInActive() && getter->IsJSFunction()) {
538 Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
541 return Execution::Call(isolate, getter, receiver, 0, NULL, true);
// Invokes a JS-defined setter with |value| on |receiver|, with the same
// debugger step-into hook as the getter path.
545 MaybeHandle<Object> Object::SetPropertyWithDefinedSetter(
546 Handle<Object> receiver,
547 Handle<JSReceiver> setter,
548 Handle<Object> value) {
549 Isolate* isolate = setter->GetIsolate();
551 Debug* debug = isolate->debug();
552 // Handle stepping into a setter if step into is active.
553 // TODO(rossberg): should this apply to getters that are function proxies?
554 if (debug->StepInActive() && setter->IsJSFunction()) {
556 Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
559 Handle<Object> argv[] = { value };
562 Execution::Call(isolate, setter, receiver, ARRAY_SIZE(argv), argv),
// Advances |it| (skipping interceptors and access checks) looking for an
// accessor property whose AccessorInfo/AccessorPair is flagged
// all_can_read; true means the caller may read despite a failed access
// check.
568 static bool FindAllCanReadHolder(LookupIterator* it) {
569 it->skip_interceptor();
570 it->skip_access_check();
571 for (; it->IsFound(); it->Next()) {
572 if (it->state() == LookupIterator::PROPERTY &&
574 it->property_kind() == LookupIterator::ACCESSOR) {
575 Handle<Object> accessors = it->GetAccessors();
576 if (accessors->IsAccessorInfo()) {
577 if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
578 } else if (accessors->IsAccessorPair()) {
579 if (AccessorPair::cast(*accessors)->all_can_read()) return true;
// Fallback load after a failed access check: an all_can_read accessor is
// honored; otherwise the embedder's failed-access callback is reported and
// undefined is returned (unless a scheduled exception intervenes).
587 MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
588 LookupIterator* it) {
589 Handle<JSObject> checked = Handle<JSObject>::cast(it->GetHolder());
590 if (FindAllCanReadHolder(it)) {
591 return GetPropertyWithAccessor(
592 it->GetReceiver(), it->name(), it->GetHolder(), it->GetAccessors());
594 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_GET);
595 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
596 return it->factory()->undefined_value();
// Attribute query counterpart of the failed-access-check load: only an
// all_can_read accessor exposes its attributes; otherwise the failure is
// reported to the embedder.
600 PropertyAttributes JSObject::GetPropertyAttributesWithFailedAccessCheck(
601 LookupIterator* it) {
602 Handle<JSObject> checked = Handle<JSObject>::cast(it->GetHolder());
603 if (FindAllCanReadHolder(it)) return it->property_details().attributes();
604 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_HAS);
605 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// Write-side analogue of FindAllCanReadHolder, using the older
// LookupResult API: walks real named properties (optionally up the
// prototype chain) looking for a CALLBACKS property whose accessor is
// flagged all_can_write.
610 static bool FindAllCanWriteHolder(LookupResult* result,
612 bool check_prototype) {
613 if (result->IsInterceptor()) {
614 result->holder()->LookupOwnRealNamedProperty(name, result);
617 while (result->IsProperty()) {
618 if (result->type() == CALLBACKS) {
619 Object* callback_obj = result->GetCallbackObject();
620 if (callback_obj->IsAccessorInfo()) {
621 if (AccessorInfo::cast(callback_obj)->all_can_write()) return true;
622 } else if (callback_obj->IsAccessorPair()) {
623 if (AccessorPair::cast(callback_obj)->all_can_write()) return true;
626 if (!check_prototype) break;
627 result->holder()->LookupRealNamedPropertyInPrototypes(name, result);
// Fallback store after a failed access check: an all_can_write callback
// property is honored via SetPropertyWithCallback; otherwise the failure
// is reported to the embedder.
633 MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
634 Handle<JSObject> object,
635 LookupResult* result,
637 Handle<Object> value,
638 bool check_prototype,
639 StrictMode strict_mode) {
640 if (check_prototype && !result->IsProperty()) {
641 object->LookupRealNamedPropertyInPrototypes(name, result);
644 if (FindAllCanWriteHolder(result, name, check_prototype)) {
645 Handle<JSObject> holder(result->holder());
646 Handle<Object> callbacks(result->GetCallbackObject(), result->isolate());
647 return SetPropertyWithCallback(
648 object, name, value, holder, callbacks, strict_mode);
651 Isolate* isolate = object->GetIsolate();
652 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
653 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Reads a dictionary-mode property value; on global objects the stored
// value is a PropertyCell, so unwrap it to the cell's payload.
658 Object* JSObject::GetNormalizedProperty(const LookupResult* result) {
659 ASSERT(!HasFastProperties());
660 Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
661 if (IsGlobalObject()) {
662 value = PropertyCell::cast(value)->value();
664 ASSERT(!value->IsPropertyCell() && !value->IsCell());
// Handlified version of GetNormalizedProperty: same dictionary read with
// PropertyCell unwrapping for global objects.
669 Handle<Object> JSObject::GetNormalizedProperty(Handle<JSObject> object,
670 const LookupResult* result) {
671 ASSERT(!object->HasFastProperties());
672 Isolate* isolate = object->GetIsolate();
673 Handle<Object> value(object->property_dictionary()->ValueAt(
674 result->GetDictionaryEntry()), isolate);
675 if (object->IsGlobalObject()) {
676 value = Handle<Object>(Handle<PropertyCell>::cast(value)->value(), isolate);
678 ASSERT(!value->IsPropertyCell() && !value->IsCell());
// Overwrites an existing dictionary-mode property found by |result|:
// globals write through the PropertyCell, everything else stores directly
// into the dictionary slot.
683 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
684 const LookupResult* result,
685 Handle<Object> value) {
686 ASSERT(!object->HasFastProperties());
687 NameDictionary* property_dictionary = object->property_dictionary();
688 if (object->IsGlobalObject()) {
689 Handle<PropertyCell> cell(PropertyCell::cast(
690 property_dictionary->ValueAt(result->GetDictionaryEntry())));
691 PropertyCell::SetValueInferType(cell, value);
693 property_dictionary->ValueAtPut(result->GetDictionaryEntry(), *value);
// Adds or updates a dictionary-mode property by name: internalizes the
// key, adds a fresh entry (cell-wrapped on globals) when absent, otherwise
// rewrites the existing entry while preserving its enumeration index
// (unless the slot was deleted, in which case a new index is allocated).
698 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
700 Handle<Object> value,
701 PropertyDetails details) {
702 ASSERT(!object->HasFastProperties());
703 Handle<NameDictionary> property_dictionary(object->property_dictionary());
705 if (!name->IsUniqueName()) {
706 name = object->GetIsolate()->factory()->InternalizeString(
707 Handle<String>::cast(name));
710 int entry = property_dictionary->FindEntry(name);
711 if (entry == NameDictionary::kNotFound) {
712 Handle<Object> store_value = value;
713 if (object->IsGlobalObject()) {
714 store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
// Adding may reallocate the dictionary; re-install it on the object.
717 property_dictionary = NameDictionary::Add(
718 property_dictionary, name, store_value, details);
719 object->set_properties(*property_dictionary);
723 PropertyDetails original_details = property_dictionary->DetailsAt(entry);
724 int enumeration_index;
725 // Preserve the enumeration index unless the property was deleted.
726 if (original_details.IsDeleted()) {
727 enumeration_index = property_dictionary->NextEnumerationIndex();
728 property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
730 enumeration_index = original_details.dictionary_index();
731 ASSERT(enumeration_index > 0);
734 details = PropertyDetails(
735 details.attributes(), details.type(), enumeration_index);
737 if (object->IsGlobalObject()) {
738 Handle<PropertyCell> cell(
739 PropertyCell::cast(property_dictionary->ValueAt(entry)));
740 PropertyCell::SetValueInferType(cell, value);
741 // Please note we have to update the property details.
742 property_dictionary->DetailsAtPut(entry, details);
744 property_dictionary->SetEntry(entry, name, value, details);
// Deletes a dictionary-mode property. Globals keep their PropertyCell but
// set it to the hole and mark the entry deleted (DontDelete entries only
// succumb to FORCE_DELETION, which also swaps the map to invalidate ICs);
// non-globals remove the entry and shrink the dictionary when possible.
749 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
752 ASSERT(!object->HasFastProperties());
753 Isolate* isolate = object->GetIsolate();
754 Handle<NameDictionary> dictionary(object->property_dictionary());
755 int entry = dictionary->FindEntry(name);
756 if (entry != NameDictionary::kNotFound) {
757 // If we have a global object set the cell to the hole.
758 if (object->IsGlobalObject()) {
759 PropertyDetails details = dictionary->DetailsAt(entry);
760 if (details.IsDontDelete()) {
761 if (mode != FORCE_DELETION) return isolate->factory()->false_value();
762 // When forced to delete global properties, we have to make a
763 // map change to invalidate any ICs that think they can load
764 // from the DontDelete cell without checking if it contains
766 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
767 ASSERT(new_map->is_dictionary_map());
768 object->set_map(*new_map);
770 Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
771 Handle<Object> value = isolate->factory()->the_hole_value();
772 PropertyCell::SetValueInferType(cell, value);
773 dictionary->DetailsAtPut(entry, details.AsDeleted());
775 Handle<Object> deleted(
776 NameDictionary::DeleteProperty(dictionary, entry, mode));
777 if (*deleted == isolate->heap()->true_value()) {
// Reclaim space after a successful deletion.
778 Handle<NameDictionary> new_properties =
779 NameDictionary::Shrink(dictionary, name);
780 object->set_properties(*new_properties);
785 return isolate->factory()->true_value();
// True when an API-constructed object may have been modified since
// creation: a changed map, non-fast elements, or non-fast properties all
// count as "dirty". Non-API objects fall through the early checks.
789 bool JSObject::IsDirty() {
790 Object* cons_obj = map()->constructor();
791 if (!cons_obj->IsJSFunction())
793 JSFunction* fun = JSFunction::cast(cons_obj);
794 if (!fun->shared()->IsApiFunction())
796 // If the object is fully fast case and has the same map it was
797 // created with then no changes can have been made to it.
798 return map() != fun->initial_map()
799 || !HasFastObjectElements()
800 || !HasFastProperties();
// Loads element |index| by walking the prototype chain from |object|:
// primitives are redirected to their wrapper prototypes, proxies go
// through their handler, and JSObjects are checked for access rights,
// indexed interceptors and actual elements in turn. Undefined when the
// chain is exhausted.
804 MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
805 Handle<Object> object,
806 Handle<Object> receiver,
808 Handle<Object> holder;
810 // Iterate up the prototype chain until an element is found or the null
811 // prototype is encountered.
812 for (holder = object;
814 holder = Handle<Object>(holder->GetPrototype(isolate), isolate)) {
815 if (!holder->IsJSObject()) {
816 Context* native_context = isolate->context()->native_context();
817 if (holder->IsNumber()) {
818 holder = Handle<Object>(
819 native_context->number_function()->instance_prototype(), isolate);
820 } else if (holder->IsString()) {
821 holder = Handle<Object>(
822 native_context->string_function()->instance_prototype(), isolate);
823 } else if (holder->IsSymbol()) {
824 holder = Handle<Object>(
825 native_context->symbol_function()->instance_prototype(), isolate);
826 } else if (holder->IsBoolean()) {
827 holder = Handle<Object>(
828 native_context->boolean_function()->instance_prototype(), isolate);
829 } else if (holder->IsJSProxy()) {
830 return JSProxy::GetElementWithHandler(
831 Handle<JSProxy>::cast(holder), receiver, index);
833 // Undefined and null have no indexed properties.
834 ASSERT(holder->IsUndefined() || holder->IsNull());
835 return isolate->factory()->undefined_value();
839 // Inline the case for JSObjects. Doing so significantly improves the
840 // performance of fetching elements where checking the prototype chain is
842 Handle<JSObject> js_object = Handle<JSObject>::cast(holder);
844 // Check access rights if needed.
845 if (js_object->IsAccessCheckNeeded()) {
846 if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
847 isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
848 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
849 return isolate->factory()->undefined_value();
853 if (js_object->HasIndexedInterceptor()) {
854 return JSObject::GetElementWithInterceptor(js_object, receiver, index);
857 if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
858 Handle<Object> result;
859 ASSIGN_RETURN_ON_EXCEPTION(
861 js_object->GetElementsAccessor()->Get(receiver, js_object, index),
// The hole means "absent here"; keep walking the prototype chain.
863 if (!result->IsTheHole()) return result;
867 return isolate->factory()->undefined_value();
// Returns the [[Prototype]] of this value without allocating: receivers
// use their map's prototype, primitives map to their wrapper function's
// instance prototype, and anything else (undefined/null) yields null.
871 Object* Object::GetPrototype(Isolate* isolate) {
872 DisallowHeapAllocation no_alloc;
874 Context* context = isolate->context()->native_context();
875 return context->number_function()->instance_prototype();
878 HeapObject* heap_object = HeapObject::cast(this);
880 // The object is either a number, a string, a boolean,
881 // a real JS object, or a Harmony proxy.
882 if (heap_object->IsJSReceiver()) {
883 return heap_object->map()->prototype();
885 Context* context = isolate->context()->native_context();
887 if (heap_object->IsHeapNumber()) {
888 return context->number_function()->instance_prototype();
890 if (heap_object->IsString()) {
891 return context->string_function()->instance_prototype();
893 if (heap_object->IsSymbol()) {
894 return context->symbol_function()->instance_prototype();
896 if (heap_object->IsBoolean()) {
897 return context->boolean_function()->instance_prototype();
899 return isolate->heap()->null_value();
// Handlified wrapper around the raw GetPrototype above.
904 Handle<Object> Object::GetPrototype(Isolate* isolate,
905 Handle<Object> object) {
906 return handle(object->GetPrototype(isolate), isolate);
// Returns a Smi hash for this value: numbers hash their double bits, names
// use their name hash, oddballs hash their string form, and receivers use
// their identity hash.
910 Object* Object::GetHash() {
911 // The object is either a number, a name, an odd-ball,
912 // a real JS object, or a Harmony proxy.
914 uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
915 return Smi::FromInt(hash & Smi::kMaxValue);
918 uint32_t hash = Name::cast(this)->Hash();
919 return Smi::FromInt(hash);
922 uint32_t hash = Oddball::cast(this)->to_string()->Hash();
923 return Smi::FromInt(hash);
926 ASSERT(IsJSReceiver());
927 return JSReceiver::cast(this)->GetIdentityHash();
// Like GetHash, but lazily creates an identity hash for receivers that do
// not yet have one.
931 Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) {
932 Handle<Object> hash(object->GetHash(), isolate);
933 if (hash->IsSmi()) return Handle<Smi>::cast(hash);
935 ASSERT(object->IsJSReceiver());
936 return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
// ES SameValue: identity, numeric equality with NaN==NaN true and
// +0!=-0 (distinguished via the sign of 1/x), and string content equality.
940 bool Object::SameValue(Object* other) {
941 if (other == this) return true;
943 // The object is either a number, a name, an odd-ball,
944 // a real JS object, or a Harmony proxy.
945 if (IsNumber() && other->IsNumber()) {
946 double this_value = Number();
947 double other_value = other->Number();
948 bool equal = this_value == other_value;
949 // SameValue(NaN, NaN) is true.
950 if (!equal) return std::isnan(this_value) && std::isnan(other_value);
951 // SameValue(0.0, -0.0) is false.
952 return (this_value != 0) || ((1 / this_value) == (1 / other_value));
954 if (IsString() && other->IsString()) {
955 return String::cast(this)->Equals(String::cast(other));
// ES SameValueZero: like SameValue, but +0 and -0 compare equal (no 1/x
// sign check); NaN still equals NaN.
961 bool Object::SameValueZero(Object* other) {
962 if (other == this) return true;
964 // The object is either a number, a name, an odd-ball,
965 // a real JS object, or a Harmony proxy.
966 if (IsNumber() && other->IsNumber()) {
967 double this_value = Number();
968 double other_value = other->Number();
970 return this_value == other_value
971 || (std::isnan(this_value) && std::isnan(other_value));
973 if (IsString() && other->IsString()) {
974 return String::cast(this)->Equals(String::cast(other));
// Prints a short description of the object to |out| by accumulating into a
// heap-allocated StringStream.
980 void Object::ShortPrint(FILE* out) {
981 HeapStringAllocator allocator;
982 StringStream accumulator(&allocator);
983 ShortPrint(&accumulator);
984 accumulator.OutputToFile(out);
// StringStream variant: Smis print their value, heap objects delegate to
// HeapObjectShortPrint.
988 void Object::ShortPrint(StringStream* accumulator) {
990 Smi::cast(this)->SmiPrint(accumulator);
992 HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
// Prints the Smi's integer value to |out|.
997 void Smi::SmiPrint(FILE* out) {
998 PrintF(out, "%d", value());
// Appends the Smi's integer value to the accumulator.
1002 void Smi::SmiPrint(StringStream* accumulator) {
1003 accumulator->Add("%d", value());
1007 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
1008 // English? Returns false for non-ASCII or words that don't start with
1009 // a capital letter. The a/an rule follows pronunciation in English.
1010 // We don't use the BBC's overcorrect "an historic occasion" though if
1011 // you speak a dialect you may well say "an 'istoric occasion".
1012 static bool AnWord(String* str) {
1013 if (str->length() == 0) return false; // A nothing.
1014 int c0 = str->Get(0);
1015 int c1 = str->length() > 1 ? str->Get(1) : 0;
1018 return true; // An Umpire, but a UTF8String, a U.
1020 } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
1021 return true; // An Ape, an ABCBook.
// Letters pronounced with a leading vowel sound ("em", "ef", ...) take
// "an" when they start an initialism (next char is uppercase or absent).
1022 } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
1023 (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
1024 c0 == 'S' || c0 == 'X')) {
1025 return true; // An MP3File, an M.
// Flattens a non-trivial ConsString into a fresh sequential string (one-
// or two-byte to match the representation), then rewrites the cons to
// point at the flat result with an empty second part, so future reads are
// O(1). Pretenuring is honored only for strings already in new space.
1031 Handle<String> String::SlowFlatten(Handle<ConsString> cons,
1032 PretenureFlag pretenure) {
1033 ASSERT(AllowHeapAllocation::IsAllowed());
1034 ASSERT(cons->second()->length() != 0);
1035 Isolate* isolate = cons->GetIsolate();
1036 int length = cons->length();
1037 PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure
1039 Handle<SeqString> result;
1040 if (cons->IsOneByteRepresentation()) {
1041 Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
1042 length, tenure).ToHandleChecked();
1043 DisallowHeapAllocation no_gc;
1044 WriteToFlat(*cons, flat->GetChars(), 0, length);
1047 Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString(
1048 length, tenure).ToHandleChecked();
1049 DisallowHeapAllocation no_gc;
1050 WriteToFlat(*cons, flat->GetChars(), 0, length);
// Short-circuit the cons so subsequent accesses hit the flat string.
1053 cons->set_first(*result);
1054 cons->set_second(isolate->heap()->empty_string());
1055 ASSERT(result->IsFlat());
// Morph this (two-byte) string in place into an external string backed by
// |resource|, by swapping the map and reinitializing the fields.  Falls
// back to a "short" external map (no cached data pointer) when the object
// is too small for a regular external string or lives in old pointer
// space.  Returns true on success (return statements are elided from this
// listing — TODO confirm).
1061 bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
1062 // Externalizing twice leaks the external resource, so it's
1063 // prohibited by the API.
1064 ASSERT(!this->IsExternalString());
1065 #ifdef ENABLE_SLOW_ASSERTS
1066 if (FLAG_enable_slow_asserts) {
1067 // Assert that the resource and the string are equivalent.
1068 ASSERT(static_cast<size_t>(this->length()) == resource->length());
1069 ScopedVector<uc16> smart_chars(this->length());
1070 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1071 ASSERT(memcmp(smart_chars.start(),
1073 resource->length() * sizeof(smart_chars[0])) == 0);
1076 Heap* heap = GetHeap();
1077 int size = this->Size(); // Byte size of the original string.
// Strings smaller than even a short external string cannot be morphed.
1078 if (size < ExternalString::kShortSize) {
1081 bool is_ascii = this->IsOneByteRepresentation();
1082 bool is_internalized = this->IsInternalizedString();
1084 // Morph the string to an external string by replacing the map and
1085 // reinitializing the fields. This won't work if
1086 // - the space the existing string occupies is too small for a regular
1088 // - the existing string is in old pointer space and the backing store of
1089 // the external string is not aligned. The GC cannot deal with a field
1090 // containing a possibly unaligned address to outside of V8's heap.
1091 // In either case we resort to a short external string instead, omitting
1092 // the field caching the address of the backing store. When we encounter
1093 // short external strings in generated code, we need to bailout to runtime.
// Select the target map: short vs regular, internalized vs not, and
// (per the is_ascii flag — selector lines elided) one-byte-data vs not.
1095 if (size < ExternalString::kSize ||
1096 heap->old_pointer_space()->Contains(this)) {
1097 new_map = is_internalized
1100 short_external_internalized_string_with_one_byte_data_map()
1101 : heap->short_external_internalized_string_map())
1103 ? heap->short_external_string_with_one_byte_data_map()
1104 : heap->short_external_string_map());
1106 new_map = is_internalized
1108 ? heap->external_internalized_string_with_one_byte_data_map()
1109 : heap->external_internalized_string_map())
1111 ? heap->external_string_with_one_byte_data_map()
1112 : heap->external_string_map());
1115 // Byte size of the external String object.
1116 int new_size = this->SizeFromMap(new_map);
// Fill the tail of the old object so the heap stays iterable.
1117 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1119 // We are storing the new map using release store after creating a filler for
1120 // the left-over space to avoid races with the sweeper thread.
1121 this->synchronized_set_map(new_map);
1123 ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
1124 self->set_resource(resource);
1125 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
// Account for the size change (new_size - size is negative or zero).
1127 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// ASCII variant of MakeExternal: morph this string in place into an
// external ASCII string backed by |resource|.  Mirrors the two-byte
// overload above, with the additional slow-assert that a two-byte
// representation actually holds only one-byte data.
1132 bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
1133 #ifdef ENABLE_SLOW_ASSERTS
1134 if (FLAG_enable_slow_asserts) {
1135 // Assert that the resource and the string are equivalent.
1136 ASSERT(static_cast<size_t>(this->length()) == resource->length());
1137 if (this->IsTwoByteRepresentation()) {
1138 ScopedVector<uint16_t> smart_chars(this->length());
1139 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1140 ASSERT(String::IsOneByte(smart_chars.start(), this->length()));
1142 ScopedVector<char> smart_chars(this->length());
1143 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1144 ASSERT(memcmp(smart_chars.start(),
1146 resource->length() * sizeof(smart_chars[0])) == 0);
1149 Heap* heap = GetHeap();
1150 int size = this->Size(); // Byte size of the original string.
// Too small even for a short external string: cannot morph.
1151 if (size < ExternalString::kShortSize) {
1154 bool is_internalized = this->IsInternalizedString();
1156 // Morph the string to an external string by replacing the map and
1157 // reinitializing the fields. This won't work if
1158 // - the space the existing string occupies is too small for a regular
1160 // - the existing string is in old pointer space and the backing store of
1161 // the external string is not aligned. The GC cannot deal with a field
1162 // containing a possibly unaligned address to outside of V8's heap.
1163 // In either case we resort to a short external string instead, omitting
1164 // the field caching the address of the backing store. When we encounter
1165 // short external strings in generated code, we need to bailout to runtime.
1167 if (size < ExternalString::kSize ||
1168 heap->old_pointer_space()->Contains(this)) {
1169 new_map = is_internalized
1170 ? heap->short_external_ascii_internalized_string_map()
1171 : heap->short_external_ascii_string_map();
1173 new_map = is_internalized
1174 ? heap->external_ascii_internalized_string_map()
1175 : heap->external_ascii_string_map();
1178 // Byte size of the external String object.
1179 int new_size = this->SizeFromMap(new_map);
// Fill the now-unused tail so the heap remains iterable.
1180 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1182 // We are storing the new map using release store after creating a filler for
1183 // the left-over space to avoid races with the sweeper thread.
1184 this->synchronized_set_map(new_map);
1186 ExternalAsciiString* self = ExternalAsciiString::cast(this);
1187 self->set_resource(resource);
1188 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1190 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// Append a truncated, printable form of this string to |accumulator|:
// "<String[len]: ...>" for plain contents, "<String[len]\: ...>" with
// escapes when control characters are present, "<Very long string[len]>"
// for strings past kMaxShortPrintLength, and "<Invalid String>" when the
// string fails the LooksValid() sanity check.
1195 void String::StringShortPrint(StringStream* accumulator) {
1197 if (len > kMaxShortPrintLength) {
1198 accumulator->Add("<Very long string[%u]>", len);
1202 if (!LooksValid()) {
1203 accumulator->Add("<Invalid String>");
1207 ConsStringIteratorOp op;
1208 StringCharacterStream stream(this, &op);
1210 bool truncated = false;
1211 if (len > kMaxShortPrintLength) {
1212 len = kMaxShortPrintLength;
// First pass: scan for control/non-ASCII characters to decide whether
// the escaped form is needed (flag-setting lines elided in this listing).
1216 for (int i = 0; i < len; i++) {
1217 uint16_t c = stream.GetNext();
1219 if (c < 32 || c >= 127) {
// Plain path: characters can be emitted verbatim.
1225 accumulator->Add("<String[%u]: ", length());
1226 for (int i = 0; i < len; i++) {
1227 accumulator->Put(static_cast<char>(stream.GetNext()));
1229 accumulator->Put('>');
1231 // Backslash indicates that the string contains control
1232 // characters and that backslashes are therefore escaped.
1233 accumulator->Add("<String[%u]\\: ", length());
1234 for (int i = 0; i < len; i++) {
1235 uint16_t c = stream.GetNext();
1237 accumulator->Add("\\n");
1238 } else if (c == '\r') {
1239 accumulator->Add("\\r");
1240 } else if (c == '\\') {
1241 accumulator->Add("\\\\");
1242 } else if (c < 32 || c > 126) {
1243 accumulator->Add("\\x%02x", c);
1245 accumulator->Put(static_cast<char>(c));
// Ellipsis marks output truncated at kMaxShortPrintLength.
1249 accumulator->Put('.');
1250 accumulator->Put('.');
1251 accumulator->Put('.');
1253 accumulator->Put('>');
// Append a one-line description of this JSObject to |accumulator|,
// dispatching on instance type: arrays print their length, functions
// print their debug name and SharedFunctionInfo address, and generic
// objects print their constructor name and map.  Also flags pointers
// that fail heap-containment sanity checks.
1259 void JSObject::JSObjectShortPrint(StringStream* accumulator) {
1260 switch (map()->instance_type()) {
1261 case JS_ARRAY_TYPE: {
1262 double length = JSArray::cast(this)->length()->IsUndefined()
1264 : JSArray::cast(this)->length()->Number();
1265 accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
1268 case JS_WEAK_MAP_TYPE: {
1269 accumulator->Add("<JS WeakMap>");
1272 case JS_WEAK_SET_TYPE: {
1273 accumulator->Add("<JS WeakSet>");
1276 case JS_REGEXP_TYPE: {
1277 accumulator->Add("<JS RegExp>");
1280 case JS_FUNCTION_TYPE: {
1281 JSFunction* function = JSFunction::cast(this);
1282 Object* fun_name = function->shared()->DebugName();
1283 bool printed = false;
1284 if (fun_name->IsString()) {
1285 String* str = String::cast(fun_name);
1286 if (str->length() > 0) {
1287 accumulator->Add("<JS Function ");
1288 accumulator->Put(str);
// Anonymous function: no name to print.
1293 accumulator->Add("<JS Function");
// The SharedFunctionInfo address disambiguates same-named functions.
1295 accumulator->Add(" (SharedFunctionInfo %p)",
1296 reinterpret_cast<void*>(function->shared()));
1297 accumulator->Put('>');
1300 case JS_GENERATOR_OBJECT_TYPE: {
1301 accumulator->Add("<JS Generator>");
1304 case JS_MODULE_TYPE: {
1305 accumulator->Add("<JS Module>");
1308 // All other JSObjects are rather similar to each other (JSObject,
1309 // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
1311 Map* map_of_this = map();
1312 Heap* heap = GetHeap();
1313 Object* constructor = map_of_this->constructor();
1314 bool printed = false;
// Sanity-check the constructor pointer before dereferencing it.
1315 if (constructor->IsHeapObject() &&
1316 !heap->Contains(HeapObject::cast(constructor))) {
1317 accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
1319 bool global_object = IsJSGlobalProxy();
1320 if (constructor->IsJSFunction()) {
1321 if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
1322 accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
1324 Object* constructor_name =
1325 JSFunction::cast(constructor)->shared()->name();
1326 if (constructor_name->IsString()) {
1327 String* str = String::cast(constructor_name);
1328 if (str->length() > 0) {
// AnWord picks "a"/"an" so the output reads naturally.
1329 bool vowel = AnWord(str);
1330 accumulator->Add("<%sa%s ",
1331 global_object ? "Global Object: " : "",
1333 accumulator->Put(str);
1334 accumulator->Add(" with %smap %p",
1335 map_of_this->is_deprecated() ? "deprecated " : "",
// Fallback when no printable constructor name was found.
1343 accumulator->Add("<JS %sObject", global_object ? "Global " : "");
1347 accumulator->Add(" value = ");
1348 JSValue::cast(this)->value()->ShortPrint(accumulator);
1350 accumulator->Put('>');
1357 void JSObject::PrintElementsTransition(
1358 FILE* file, Handle<JSObject> object,
1359 ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
1360 ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
1361 if (from_kind != to_kind) {
1362 PrintF(file, "elements transition [");
1363 PrintElementsKind(file, from_kind);
1364 PrintF(file, " -> ");
1365 PrintElementsKind(file, to_kind);
1366 PrintF(file, "] in ");
1367 JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
1368 PrintF(file, " for ");
1369 object->ShortPrint(file);
1370 PrintF(file, " from ");
1371 from_elements->ShortPrint(file);
1372 PrintF(file, " to ");
1373 to_elements->ShortPrint(file);
// Trace helper: log a field generalization on this map — which descriptor
// changed, the old/new representations and field types, the reason, and
// the JS frame that triggered it.  NOTE(review): several parameter lines
// (modify_index, split, descriptors, reason) and interleaved punctuation
// PrintF calls are elided from this listing.
1379 void Map::PrintGeneralization(FILE* file,
1384 bool constant_to_field,
1385 Representation old_representation,
1386 Representation new_representation,
1387 HeapType* old_field_type,
1388 HeapType* new_field_type) {
1389 PrintF(file, "[generalizing ");
1390 constructor_name()->PrintOn(file);
1392 Name* name = instance_descriptors()->GetKey(modify_index);
1393 if (name->IsString()) {
1394 String::cast(name)->PrintOn(file);
// Symbols have no printable content; print the address instead.
1396 PrintF(file, "{symbol %p}", static_cast<void*>(name));
1399 if (constant_to_field) {
1402 PrintF(file, "%s", old_representation.Mnemonic());
1404 old_field_type->TypePrint(file, HeapType::SEMANTIC_DIM);
1407 PrintF(file, "->%s", new_representation.Mnemonic());
1409 new_field_type->TypePrint(file, HeapType::SEMANTIC_DIM);
1412 if (strlen(reason) > 0) {
1413 PrintF(file, "%s", reason);
// Without an explicit reason, report how many maps were invalidated.
1415 PrintF(file, "+%i maps", descriptors - split);
1417 PrintF(file, ") [");
1418 JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
1419 PrintF(file, "]\n");
// Trace helper: log an instance migration, printing each own property
// whose representation changed (old->new mnemonic) or that was turned
// from a CONSTANT into a FIELD.  NOTE(review): the original_map/new_map
// parameter lines and the trailing output lines are elided from this
// listing.
1423 void JSObject::PrintInstanceMigration(FILE* file,
1426 PrintF(file, "[migrating ");
1427 map()->constructor_name()->PrintOn(file);
1429 DescriptorArray* o = original_map->instance_descriptors();
1430 DescriptorArray* n = new_map->instance_descriptors();
1431 for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
1432 Representation o_r = o->GetDetails(i).representation();
1433 Representation n_r = n->GetDetails(i).representation();
1434 if (!o_r.Equals(n_r)) {
// Representation changed: print "name:old->new".
1435 String::cast(o->GetKey(i))->PrintOn(file);
1436 PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
1437 } else if (o->GetDetails(i).type() == CONSTANT &&
1438 n->GetDetails(i).type() == FIELD) {
1439 Name* name = o->GetKey(i);
1440 if (name->IsString()) {
1441 String::cast(name)->PrintOn(file);
// Symbols are printed by address since they have no string content.
1443 PrintF(file, "{symbol %p}", static_cast<void*>(name));
// Append a one-line description of any heap object to |accumulator|.
// Validates that the object and its map live inside the heap, delegates
// strings and JSObjects to their specialized printers, and otherwise
// dispatches on instance type.  NOTE(review): many case labels, break
// statements and closing braces are elided from this listing.
1452 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
1453 Heap* heap = GetHeap();
1454 if (!heap->Contains(this)) {
1455 accumulator->Add("!!!INVALID POINTER!!!");
1458 if (!heap->Contains(map())) {
1459 accumulator->Add("!!!INVALID MAP!!!");
// Every valid object is prefixed with its address.
1463 accumulator->Add("%p ", this);
1466 String::cast(this)->StringShortPrint(accumulator);
1470 JSObject::cast(this)->JSObjectShortPrint(accumulator);
1473 switch (map()->instance_type()) {
1475 accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind());
1477 case FIXED_ARRAY_TYPE:
1478 accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
1480 case FIXED_DOUBLE_ARRAY_TYPE:
1481 accumulator->Add("<FixedDoubleArray[%u]>",
1482 FixedDoubleArray::cast(this)->length());
1484 case BYTE_ARRAY_TYPE:
1485 accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
1487 case FREE_SPACE_TYPE:
1488 accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size());
// Generate cases for every external/fixed typed-array flavor.
1490 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \
1491 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1492 accumulator->Add("<External" #Type "Array[%u]>", \
1493 External##Type##Array::cast(this)->length()); \
1495 case FIXED_##TYPE##_ARRAY_TYPE: \
1496 accumulator->Add("<Fixed" #Type "Array[%u]>", \
1497 Fixed##Type##Array::cast(this)->length()); \
1500 TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
1501 #undef TYPED_ARRAY_SHORT_PRINT
1503 case SHARED_FUNCTION_INFO_TYPE: {
1504 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1505 SmartArrayPointer<char> debug_name =
1506 shared->DebugName()->ToCString();
1507 if (debug_name[0] != 0) {
1508 accumulator->Add("<SharedFunctionInfo %s>", debug_name.get());
1510 accumulator->Add("<SharedFunctionInfo>");
1514 case JS_MESSAGE_OBJECT_TYPE:
1515 accumulator->Add("<JSMessageObject>");
// Generate a "<StructName>" case for every struct type.
1517 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1519 accumulator->Put('<'); \
1520 accumulator->Add(#Name); \
1521 accumulator->Put('>'); \
1523 STRUCT_LIST(MAKE_STRUCT_CASE)
1524 #undef MAKE_STRUCT_CASE
1526 accumulator->Add("<Code>");
1528 case ODDBALL_TYPE: {
1530 accumulator->Add("<undefined>");
1531 else if (IsTheHole())
1532 accumulator->Add("<the hole>");
1534 accumulator->Add("<null>");
1536 accumulator->Add("<true>");
1538 accumulator->Add("<false>");
1540 accumulator->Add("<Odd Oddball>");
1544 Symbol* symbol = Symbol::cast(this);
1545 accumulator->Add("<Symbol: %d", symbol->Hash());
1546 if (!symbol->name()->IsUndefined()) {
1547 accumulator->Add(" ");
1548 String::cast(symbol->name())->StringShortPrint(accumulator);
1550 accumulator->Add(">");
1553 case HEAP_NUMBER_TYPE:
1554 accumulator->Add("<Number: ");
1555 HeapNumber::cast(this)->HeapNumberPrint(accumulator);
1556 accumulator->Put('>');
1559 accumulator->Add("<JSProxy>");
1561 case JS_FUNCTION_PROXY_TYPE:
1562 accumulator->Add("<JSFunctionProxy>");
1565 accumulator->Add("<Foreign>");
1568 accumulator->Add("Cell for ");
1569 Cell::cast(this)->value()->ShortPrint(accumulator);
1571 case PROPERTY_CELL_TYPE:
1572 accumulator->Add("PropertyCell for ");
1573 PropertyCell::cast(this)->value()->ShortPrint(accumulator);
// Default case: print the raw instance type number.
1576 accumulator->Add("<Other heap object (%d)>", map()->instance_type());
// Visit every pointer in this object: first the map slot in the header,
// then the body as determined by instance type and size.  NOTE(review):
// the "Map* m = map();" line appears to be elided from this listing.
1582 void HeapObject::Iterate(ObjectVisitor* v) {
1584 IteratePointer(v, kMapOffset);
1585 // Handle object body
1587 IterateBody(m->instance_type(), SizeFromMap(m), v);
// Visit the pointer fields of this object's body, selecting the right
// BodyDescriptor for |type|.  Called during GC, so it must not touch the
// (possibly encoded) map pointer — hence the reinterpret_casts instead of
// checked ::cast helpers.  NOTE(review): many case labels, break
// statements and closing braces are elided from this listing.
1591 void HeapObject::IterateBody(InstanceType type, int object_size,
1593 // Avoiding <Type>::cast(this) because it accesses the map pointer field.
1594 // During GC, the map pointer field is encoded.
1595 if (type < FIRST_NONSTRING_TYPE) {
// Strings: only cons/sliced/external strings contain pointers.
1596 switch (type & kStringRepresentationMask) {
1599 case kConsStringTag:
1600 ConsString::BodyDescriptor::IterateBody(this, v);
1602 case kSlicedStringTag:
1603 SlicedString::BodyDescriptor::IterateBody(this, v);
1605 case kExternalStringTag:
1606 if ((type & kStringEncodingMask) == kOneByteStringTag) {
1607 reinterpret_cast<ExternalAsciiString*>(this)->
1608 ExternalAsciiStringIterateBody(v);
1610 reinterpret_cast<ExternalTwoByteString*>(this)->
1611 ExternalTwoByteStringIterateBody(v);
1619 case FIXED_ARRAY_TYPE:
1620 FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
1622 case CONSTANT_POOL_ARRAY_TYPE:
1623 reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
// Double arrays contain no pointers: nothing to visit.
1625 case FIXED_DOUBLE_ARRAY_TYPE:
1627 case JS_OBJECT_TYPE:
1628 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1629 case JS_GENERATOR_OBJECT_TYPE:
1630 case JS_MODULE_TYPE:
1634 case JS_ARRAY_BUFFER_TYPE:
1635 case JS_TYPED_ARRAY_TYPE:
1636 case JS_DATA_VIEW_TYPE:
1639 case JS_SET_ITERATOR_TYPE:
1640 case JS_MAP_ITERATOR_TYPE:
1641 case JS_WEAK_MAP_TYPE:
1642 case JS_WEAK_SET_TYPE:
1643 case JS_REGEXP_TYPE:
1644 case JS_GLOBAL_PROXY_TYPE:
1645 case JS_GLOBAL_OBJECT_TYPE:
1646 case JS_BUILTINS_OBJECT_TYPE:
1647 case JS_MESSAGE_OBJECT_TYPE:
1648 JSObject::BodyDescriptor::IterateBody(this, object_size, v);
1650 case JS_FUNCTION_TYPE:
1651 reinterpret_cast<JSFunction*>(this)
1652 ->JSFunctionIterateBody(object_size, v);
1655 Oddball::BodyDescriptor::IterateBody(this, v);
1658 JSProxy::BodyDescriptor::IterateBody(this, v);
1660 case JS_FUNCTION_PROXY_TYPE:
1661 JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
1664 reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
1667 Map::BodyDescriptor::IterateBody(this, v);
1670 reinterpret_cast<Code*>(this)->CodeIterateBody(v);
1673 Cell::BodyDescriptor::IterateBody(this, v);
1675 case PROPERTY_CELL_TYPE:
1676 PropertyCell::BodyDescriptor::IterateBody(this, v);
1679 Symbol::BodyDescriptor::IterateBody(this, v);
// Pointer-free types: nothing to visit.
1682 case HEAP_NUMBER_TYPE:
1684 case BYTE_ARRAY_TYPE:
1685 case FREE_SPACE_TYPE:
1688 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
1689 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1690 case FIXED_##TYPE##_ARRAY_TYPE: \
1693 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1694 #undef TYPED_ARRAY_CASE
1696 case SHARED_FUNCTION_INFO_TYPE: {
1697 SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
1701 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1703 STRUCT_LIST(MAKE_STRUCT_CASE)
1704 #undef MAKE_STRUCT_CASE
// AllocationSite has weak fields and needs its own descriptor.
1705 if (type == ALLOCATION_SITE_TYPE) {
1706 AllocationSite::BodyDescriptor::IterateBody(this, v);
1708 StructBodyDescriptor::IterateBody(this, object_size, v);
1712 PrintF("Unknown type: %d\n", type);
// ToBoolean for heap numbers: NaN, +0 and -0 are falsy, everything else
// is truthy.  Inspects the IEEE-754 bit pattern directly via an
// endianness-specific union.  NOTE(review): the #endif, the assignment of
// the double value into the union, and the final "return true" appear to
// be elided from this listing.
1718 bool HeapNumber::HeapNumberBooleanValue() {
1719 // NaN, +0, and -0 should return the false object
1720 #if __BYTE_ORDER == __LITTLE_ENDIAN
1721 union IeeeDoubleLittleEndianArchType u;
1722 #elif __BYTE_ORDER == __BIG_ENDIAN
1723 union IeeeDoubleBigEndianArchType u;
// Exponent all ones: NaN if any mantissa bit is set (infinities, with a
// zero mantissa, remain truthy).
1726 if (u.bits.exp == 2047) {
1727 // Detect NaN for IEEE double precision floating point.
1728 if ((u.bits.man_low | u.bits.man_high) != 0) return false;
// Exponent all zeros with zero mantissa: +0 or -0, both falsy.
1730 if (u.bits.exp == 0) {
1731 // Detect +0, and -0 for IEEE double precision floating point.
1732 if ((u.bits.man_low | u.bits.man_high) == 0) return false;
1738 void HeapNumber::HeapNumberPrint(FILE* out) {
1739 PrintF(out, "%.16g", Number());
1743 void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
1744 // The Windows version of vsnprintf can allocate when printing a %g string
1745 // into a buffer that may not be big enough. We don't want random memory
1746 // allocation when producing post-crash stack traces, so we print into a
1747 // buffer that is plenty big enough for any floating point number, then
1748 // print that using vsnprintf (which may truncate but never allocate if
1749 // there is no more space in the buffer).
1750 EmbeddedVector<char, 100> buffer;
1751 SNPrintF(buffer, "%.16g", Number());
1752 accumulator->Add("%s", buffer.start());
1756 String* JSReceiver::class_name() {
1757 if (IsJSFunction() && IsJSFunctionProxy()) {
1758 return GetHeap()->function_class_string();
1760 if (map()->constructor()->IsJSFunction()) {
1761 JSFunction* constructor = JSFunction::cast(map()->constructor());
1762 return String::cast(constructor->shared()->instance_class_name());
1764 // If the constructor is not present, return "Object".
1765 return GetHeap()->Object_string();
// Best-effort constructor name for objects with this map: the
// constructor's explicit name, else its inferred name, else the
// constructor name of the prototype (recursively), else "Object".
1769 String* Map::constructor_name() {
1770 if (constructor()->IsJSFunction()) {
1771 JSFunction* constructor = JSFunction::cast(this->constructor());
1772 String* name = String::cast(constructor->shared()->name());
1773 if (name->length() > 0) return name;
// Fall back to the name the parser inferred from the source.
1774 String* inferred_name = constructor->shared()->inferred_name();
1775 if (inferred_name->length() > 0) return inferred_name;
// Still anonymous: try the prototype chain.
1776 Object* proto = prototype();
1777 if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
1779 // TODO(rossberg): what about proxies?
1780 // If the constructor is not present, return "Object".
1781 return GetHeap()->Object_string();
// Convenience wrapper: the constructor name is a property of the map.
1785 String* JSReceiver::constructor_name() {
1786 return map()->constructor_name();
// Copy |map| and add a new in-object/backing-store field descriptor.
// Returns an empty MaybeHandle when the descriptor array is already at
// its maximum size (caller must then normalize the object).
// NOTE(review): the Handle<Name> name parameter line and several closing
// braces / the final "return new_map" appear to be elided from this
// listing.
1790 MaybeHandle<Map> Map::CopyWithField(Handle<Map> map,
1792 Handle<HeapType> type,
1793 PropertyAttributes attributes,
1794 Representation representation,
1795 TransitionFlag flag) {
// The name must not already exist as an own descriptor.
1796 ASSERT(DescriptorArray::kNotFound ==
1797 map->instance_descriptors()->Search(
1798 *name, map->NumberOfOwnDescriptors()));
1800 // Ensure the descriptor array does not get too big.
1801 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1802 return MaybeHandle<Map>();
1805 Isolate* isolate = map->GetIsolate();
1807 // Compute the new index for new field.
1808 int index = map->NextFreePropertyIndex();
// Context extension objects do not track representations/field types.
1810 if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
1811 representation = Representation::Tagged();
1812 type = HeapType::Any(isolate);
1815 FieldDescriptor new_field_desc(name, index, type, attributes, representation);
1816 Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag);
// Consume one unused slot; if none remain, a new batch of fields will
// be added to the out-of-object backing store.
1817 int unused_property_fields = new_map->unused_property_fields() - 1;
1818 if (unused_property_fields < 0) {
1819 unused_property_fields += JSObject::kFieldsAdded;
1821 new_map->set_unused_property_fields(unused_property_fields);
// Copy |map| and add a constant-function descriptor for |constant|.
// Returns an empty MaybeHandle when the descriptor array is full.
// NOTE(review): the Handle<Name> name parameter line appears to be
// elided from this listing.
1826 MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map,
1828 Handle<Object> constant,
1829 PropertyAttributes attributes,
1830 TransitionFlag flag) {
1831 // Ensure the descriptor array does not get too big.
1832 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1833 return MaybeHandle<Map>();
1836 // Allocate new instance descriptors with (name, constant) added.
1837 ConstantDescriptor new_constant_desc(name, constant, attributes);
1838 return Map::CopyAddDescriptor(map, &new_constant_desc, flag);
// Add a property to a fast-mode object: functions become constant
// descriptors, other values become fields with an optimal representation.
// When no new map can be produced (descriptor limit, too many fast
// properties), the object is normalized to dictionary mode instead.
// NOTE(review): the Handle<Name> name parameter line and some closing
// braces/return are elided from this listing.
1842 void JSObject::AddFastProperty(Handle<JSObject> object,
1844 Handle<Object> value,
1845 PropertyAttributes attributes,
1846 StoreFromKeyed store_mode,
1847 ValueType value_type,
1848 TransitionFlag flag) {
1849 ASSERT(!object->IsJSGlobalProxy());
1851 MaybeHandle<Map> maybe_map;
1852 if (value->IsJSFunction()) {
// Functions are stored as CONSTANT descriptors on the map.
1853 maybe_map = Map::CopyWithConstant(
1854 handle(object->map()), name, value, attributes, flag);
1855 } else if (!object->TooManyFastProperties(store_mode)) {
1856 Isolate* isolate = object->GetIsolate();
1857 Representation representation = value->OptimalRepresentation(value_type);
1858 maybe_map = Map::CopyWithField(
1859 handle(object->map(), isolate), name,
1860 value->OptimalType(isolate, representation),
1861 attributes, representation, flag);
1864 Handle<Map> new_map;
// No fast map available: fall back to dictionary properties.
1865 if (!maybe_map.ToHandle(&new_map)) {
1866 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
1870 JSObject::MigrateToNewProperty(object, new_map, value);
// Add a property to a dictionary-mode object.  Global objects store
// values in PropertyCells (reusing an orphaned cell for the same name if
// one exists); ordinary objects insert directly into the dictionary.
// NOTE(review): the Handle<Name> name parameter line and several
// closing braces / early returns are elided from this listing.
1874 void JSObject::AddSlowProperty(Handle<JSObject> object,
1876 Handle<Object> value,
1877 PropertyAttributes attributes) {
1878 ASSERT(!object->HasFastProperties());
1879 Isolate* isolate = object->GetIsolate();
1880 Handle<NameDictionary> dict(object->property_dictionary());
1881 if (object->IsGlobalObject()) {
1882 // In case name is an orphaned property reuse the cell.
1883 int entry = dict->FindEntry(name);
1884 if (entry != NameDictionary::kNotFound) {
1885 Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
1886 PropertyCell::SetValueInferType(cell, value);
1887 // Assign an enumeration index to the property and update
1888 // SetNextEnumerationIndex.
1889 int index = dict->NextEnumerationIndex();
1890 PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
1891 dict->SetNextEnumerationIndex(index + 1);
1892 dict->SetEntry(entry, name, cell, details);
// No existing cell: allocate a fresh one holding the value.
1895 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
1896 PropertyCell::SetValueInferType(cell, value);
1899 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
1900 Handle<NameDictionary> result =
1901 NameDictionary::Add(dict, name, value, details);
// Add may have reallocated the dictionary; install the new backing store.
1902 if (*dict != *result) object->set_properties(*result);
// Add a brand-new property to |object| (the property must not already
// exist).  Internalizes the name, enforces extensibility (throwing a
// TypeError in strict mode), dispatches to the fast or slow path, and
// fires an Object.observe "add" record when the object is observed.
// NOTE(review): the Handle<Name> name parameter line, a
// StoreMode/second flag parameter line, closing braces and the final
// "return value" are elided from this listing.
1906 MaybeHandle<Object> JSObject::AddProperty(
1907 Handle<JSObject> object,
1909 Handle<Object> value,
1910 PropertyAttributes attributes,
1911 StrictMode strict_mode,
1912 JSReceiver::StoreFromKeyed store_mode,
1913 ExtensibilityCheck extensibility_check,
1914 ValueType value_type,
1916 TransitionFlag transition_flag) {
1917 ASSERT(!object->IsJSGlobalProxy());
1918 Isolate* isolate = object->GetIsolate();
// Property lookup requires unique (internalized) names.
1920 if (!name->IsUniqueName()) {
1921 name = isolate->factory()->InternalizeString(
1922 Handle<String>::cast(name));
1925 if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
1926 !object->map()->is_extensible()) {
// Sloppy mode silently ignores the store; strict mode throws.
1927 if (strict_mode == SLOPPY) {
1930 Handle<Object> args[1] = { name };
1931 Handle<Object> error = isolate->factory()->NewTypeError(
1932 "object_not_extensible", HandleVector(args, ARRAY_SIZE(args)));
1933 return isolate->Throw<Object>(error);
1937 if (object->HasFastProperties()) {
1938 AddFastProperty(object, name, value, attributes, store_mode,
1939 value_type, transition_flag);
1942 if (!object->HasFastProperties()) {
1943 AddSlowProperty(object, name, value, attributes);
// Notify Object.observe listeners (the hidden string never triggers).
1946 if (object->map()->is_observed() &&
1947 *name != isolate->heap()->hidden_string()) {
1948 Handle<Object> old_value = isolate->factory()->the_hole_value();
1949 EnqueueChangeRecord(object, "add", name, old_value);
// Returns the native context in which this object was created, derived
// from its constructor — or, for functions (whose map constructor is not
// a JSFunction), from the function's own context.
1956 Context* JSObject::GetCreationContext() {
1957 Object* constructor = this->map()->constructor();
1958 JSFunction* function;
1959 if (!constructor->IsJSFunction()) {
1960 // Functions have null as a constructor,
1961 // but any JSFunction knows its context immediately.
1962 function = JSFunction::cast(this);
1964 function = JSFunction::cast(constructor);
1967 return function->context()->native_context();
// Deliver an Object.observe change record of kind |type_str| for
// property |name| on |object| by invoking the observers' notify-change
// JS function.  The argument count is trimmed so trailing "absent"
// values (null name, hole old_value) are not passed.
// NOTE(review): the Handle<Name> name parameter line is elided from this
// listing.
1971 void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
1972 const char* type_str,
1974 Handle<Object> old_value) {
1975 ASSERT(!object->IsJSGlobalProxy());
1976 ASSERT(!object->IsJSGlobalObject());
1977 Isolate* isolate = object->GetIsolate();
1978 HandleScope scope(isolate);
1979 Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
1980 Handle<Object> args[] = { type, object, name, old_value };
// 2 args if no name, 3 if no old value, otherwise all 4.
1981 int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
1983 Execution::Call(isolate,
1984 Handle<JSFunction>(isolate->observers_notify_change()),
1985 isolate->factory()->undefined_value(),
1986 argc, args).Assert();
// Store |value| on |object| after an interceptor has already run: look up
// the own real (non-interceptor) property, fall back to a map transition,
// then delegate to SetPropertyForResult.
// NOTE(review): the Handle<Name> name parameter line and intermediate
// control-flow lines are elided from this listing.
1990 MaybeHandle<Object> JSObject::SetPropertyPostInterceptor(
1991 Handle<JSObject> object,
1993 Handle<Object> value,
1994 PropertyAttributes attributes,
1995 StrictMode strict_mode) {
1996 // Check own property, ignore interceptor.
1997 Isolate* isolate = object->GetIsolate();
1998 LookupResult result(isolate);
1999 object->LookupOwnRealNamedProperty(name, &result);
2000 if (!result.IsFound()) {
2001 object->map()->LookupTransition(*object, *name, &result);
2003 return SetPropertyForResult(object, &result, name, value, attributes,
2004 strict_mode, MAY_BE_STORE_FROM_KEYED);
// Overwrite an existing dictionary-mode property with a new value and
// attributes, preserving the property's enumeration index when it
// already exists (0 means "assign the next available index").
// NOTE(review): the Handle<Name> name parameter line is elided from this
// listing.
2008 static void ReplaceSlowProperty(Handle<JSObject> object,
2010 Handle<Object> value,
2011 PropertyAttributes attributes) {
2012 NameDictionary* dictionary = object->property_dictionary();
2013 int old_index = dictionary->FindEntry(name);
2014 int new_enumeration_index = 0; // 0 means "Use the next available index."
2015 if (old_index != -1) {
2016 // All calls to ReplaceSlowProperty have had all transitions removed.
2017 new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
2020 PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
2021 JSObject::SetNormalizedProperty(object, name, value, new_details);
// Single-letter mnemonic for each representation kind, used by the
// tracing/printing helpers above (e.g. "s->d" for smi-to-double).
// NOTE(review): the switch opener and default/UNREACHABLE lines are
// elided from this listing.
2025 const char* Representation::Mnemonic() const {
2027 case kNone: return "v";
2028 case kTagged: return "t";
2029 case kSmi: return "s";
2030 case kDouble: return "d";
2031 case kInteger32: return "i";
2032 case kHeapObject: return "h";
2033 case kExternal: return "x";
2041 static void ZapEndOfFixedArray(Address new_end, int to_trim) {
2042 // If we are doing a big trim in old space then we zap the space.
2043 Object** zap = reinterpret_cast<Object**>(new_end);
2044 zap++; // Header of filler must be at least one word so skip that.
2045 for (int i = 1; i < to_trim; i++) {
2046 *zap++ = Smi::FromInt(0);
// Shrink |elms| in place by |to_trim| elements from the right: zap the
// tail when required, install a filler object over the freed space, then
// release-store the new length so the concurrent sweeper never sees an
// inconsistent object.  Not applicable to COW arrays or large-object
// space.  NOTE(review): blank lines, a closing brace after the zap call
// and a trailing comment line are elided from this listing.
2051 template<Heap::InvocationMode mode>
2052 static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
2053 ASSERT(elms->map() != heap->fixed_cow_array_map());
2054 // For now this trick is only applied to fixed arrays in new and paged space.
2055 ASSERT(!heap->lo_space()->Contains(elms));
2057 const int len = elms->length();
2059 ASSERT(to_trim < len);
2061 Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
// Mutator-invoked trims (and GC trims with zapping enabled) must clear
// the freed words.
2063 if (mode != Heap::FROM_GC || Heap::ShouldZapGarbage()) {
2064 ZapEndOfFixedArray(new_end, to_trim);
2067 int size_delta = to_trim * kPointerSize;
2069 // Technically in new space this write might be omitted (except for
2070 // debug mode which iterates through the heap), but to play safer
2072 heap->CreateFillerObjectAt(new_end, size_delta);
2074 // We are storing the new length using release store after creating a filler
2075 // for the left-over space to avoid races with the sweeper thread.
2076 elms->synchronized_set_length(len - to_trim);
2078 heap->AdjustLiveBytes(elms->address(), -size_delta, mode);
2080 // The array may not be moved during GC,
2081 // and size has to be adjusted nevertheless.
2082 HeapProfiler* profiler = heap->isolate()->heap_profiler();
2083 if (profiler->is_tracking_allocations()) {
2084 profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
// Decide whether migrating instances from this map to |target| requires
// physically rewriting the object (copying fields), or whether simply
// installing the new map suffices.  Rewriting is needed when the field
// count changed, a smi field became a double field, or in-object slack
// tracking shrank the instance.  NOTE(review): several "return true"
// lines and closing braces are elided from this listing.
2089 bool Map::InstancesNeedRewriting(Map* target,
2090 int target_number_of_fields,
2091 int target_inobject,
2092 int target_unused) {
2093 // If fields were added (or removed), rewrite the instance.
2094 int number_of_fields = NumberOfFields();
2095 ASSERT(target_number_of_fields >= number_of_fields);
2096 if (target_number_of_fields != number_of_fields) return true;
2098 // If smi descriptors were replaced by double descriptors, rewrite.
2099 DescriptorArray* old_desc = instance_descriptors();
2100 DescriptorArray* new_desc = target->instance_descriptors();
2101 int limit = NumberOfOwnDescriptors();
2102 for (int i = 0; i < limit; i++) {
2103 if (new_desc->GetDetails(i).representation().IsDouble() &&
2104 !old_desc->GetDetails(i).representation().IsDouble()) {
2109 // If no fields were added, and no inobject properties were removed, setting
2110 // the map is sufficient.
2111 if (target_inobject == inobject_properties()) return false;
2112 // In-object slack tracking may have reduced the object size of the new map.
2113 // In that case, succeed if all existing fields were inobject, and they still
2114 // fit within the new inobject size.
2115 ASSERT(target_inobject < inobject_properties());
2116 if (target_number_of_fields <= target_inobject) {
2117 ASSERT(target_number_of_fields + target_unused == target_inobject);
2120 // Otherwise, properties will need to be moved to the backing store.
// Record |transitioned_map| as the elements-kind transition target of
// |map|, keyed by the elements_transition_symbol, and install the
// (possibly reallocated) transition array on the map.
// NOTE(review): intermediate argument lines of the CopyInsert call and
// the return statement are elided from this listing.
2125 Handle<TransitionArray> Map::SetElementsTransitionMap(
2126 Handle<Map> map, Handle<Map> transitioned_map) {
2127 Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
2129 map->GetIsolate()->factory()->elements_transition_symbol(),
2132 map->set_transitions(*transitions);
2137 // To migrate an instance to a map:
2138 // - First check whether the instance needs to be rewritten. If not, simply
2140 // - Otherwise, allocate a fixed array large enough to hold all fields, in
2141 // addition to unused space.
2142 // - Copy all existing properties in, in the following order: backing store
2143 // properties, unused fields, inobject properties.
2144 // - If all allocation succeeded, commit the state atomically:
2145 // * Copy inobject properties from the backing store back into the object.
2146 // * Trim the difference in instance size of the object. This also cleanly
2147 // frees inobject properties that moved to the backing store.
2148 // * If there are properties left in the backing store, trim off the space used
2149 // to temporarily store the inobject properties.
2150 // * If there are properties left in the backing store, install the backing
2152 void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
2153 Isolate* isolate = object->GetIsolate();
2154 Handle<Map> old_map(object->map());
2155 int number_of_fields = new_map->NumberOfFields();
2156 int inobject = new_map->inobject_properties();
2157 int unused = new_map->unused_property_fields();
2159 // Nothing to do if no functions were converted to fields and no smis were
2160 // converted to doubles.
2161 if (!old_map->InstancesNeedRewriting(
2162 *new_map, number_of_fields, inobject, unused)) {
2163 // Writing the new map here does not require synchronization since it does
2164 // not change the actual object size.
2165 object->synchronized_set_map(*new_map);
2169 int total_size = number_of_fields + unused;
2170 int external = total_size - inobject;
2171 Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
2173 Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
2174 Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
2175 int old_nof = old_map->NumberOfOwnDescriptors();
2176 int new_nof = new_map->NumberOfOwnDescriptors();
2178 // This method only supports generalizing instances to at least the same
2179 // number of properties.
2180 ASSERT(old_nof <= new_nof);
// Copy all pre-existing properties into the temporary |array|, boxing
// values whose representation changed to double via NewStorageFor.
2182 for (int i = 0; i < old_nof; i++) {
2183 PropertyDetails details = new_descriptors->GetDetails(i);
2184 if (details.type() != FIELD) continue;
2185 PropertyDetails old_details = old_descriptors->GetDetails(i);
2186 if (old_details.type() == CALLBACKS) {
2187 ASSERT(details.representation().IsTagged());
2190 ASSERT(old_details.type() == CONSTANT ||
2191 old_details.type() == FIELD);
2192 Object* raw_value = old_details.type() == CONSTANT
2193 ? old_descriptors->GetValue(i)
2194 : object->RawFastPropertyAt(FieldIndex::ForDescriptor(*old_map, i));
2195 Handle<Object> value(raw_value, isolate);
2196 if (!old_details.representation().IsDouble() &&
2197 details.representation().IsDouble()) {
2198 if (old_details.representation().IsNone()) {
2199 value = handle(Smi::FromInt(0), isolate);
2201 value = Object::NewStorageFor(isolate, value, details.representation());
2203 ASSERT(!(details.representation().IsDouble() && value->IsSmi()));
2204 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2205 if (target_index < 0) target_index += total_size;
2206 array->set(target_index, *value);
// Initialize the fields that |new_map| adds beyond the old descriptors
// with sentinel values (a fresh heap number for doubles).
2209 for (int i = old_nof; i < new_nof; i++) {
2210 PropertyDetails details = new_descriptors->GetDetails(i);
2211 if (details.type() != FIELD) continue;
2212 Handle<Object> value;
2213 if (details.representation().IsDouble()) {
2214 value = isolate->factory()->NewHeapNumber(0);
2216 value = isolate->factory()->uninitialized_value();
2218 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2219 if (target_index < 0) target_index += total_size;
2220 array->set(target_index, *value);
2223 // From here on we cannot fail and we shouldn't GC anymore.
2224 DisallowHeapAllocation no_allocation;
2226 // Copy (real) inobject properties. If necessary, stop at number_of_fields to
2227 // avoid overwriting |one_pointer_filler_map|.
2228 int limit = Min(inobject, number_of_fields);
2229 for (int i = 0; i < limit; i++) {
2230 FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
2231 object->FastPropertyAtPut(index, array->get(external + i));
2234 // Create filler object past the new instance size.
2235 int new_instance_size = new_map->instance_size();
2236 int instance_size_delta = old_map->instance_size() - new_instance_size;
2237 ASSERT(instance_size_delta >= 0);
2238 Address address = object->address() + new_instance_size;
2240 // The trimming is performed on a newly allocated object, which is on a
2241 // freshly allocated page or on an already swept page. Hence, the sweeper
2242 // thread can not get confused with the filler creation. No synchronization
2244 isolate->heap()->CreateFillerObjectAt(address, instance_size_delta);
2246 // If there are properties in the new backing store, trim it to the correct
2247 // size and install the backing store into the object.
2249 RightTrimFixedArray<Heap::FROM_MUTATOR>(isolate->heap(), *array, inobject);
2250 object->set_properties(*array);
2253 // The trimming is performed on a newly allocated object, which is on a
2254 // freshly allocated page or on an already swept page. Hence, the sweeper
2255 // thread can not get confused with the filler creation. No synchronization
2257 object->set_map(*new_map);
// Computes a map with a generalized representation/field type for the
// modified descriptor, then migrates |object| to that map when it differs
// from the current one.
2261 void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
2263 Representation new_representation,
2264 Handle<HeapType> new_field_type,
2265 StoreMode store_mode) {
2266 Handle<Map> new_map = Map::GeneralizeRepresentation(
2267 handle(object->map()), modify_index, new_representation,
2268 new_field_type, store_mode);
// Generalization may be satisfied by the existing map; avoid a no-op
// migration in that case.
2269 if (object->map() == *new_map) return;
2270 return MigrateToMap(object, new_map);
// Counts how many of this map's own descriptors are in-field (FIELD)
// properties.
2274 int Map::NumberOfFields() {
2275 DescriptorArray* descriptors = instance_descriptors();
2277 for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
2278 if (descriptors->GetDetails(i).type() == FIELD) result++;
// Bail-out path for generalization: copies |map| and gives every
// descriptor the Tagged representation and the Any field type. When
// |store_mode| is FORCE_FIELD, the descriptor at modify_index is also
// converted into a FIELD. |reason| is only used for tracing output.
2284 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2286 StoreMode store_mode,
2287 PropertyAttributes attributes,
2288 const char* reason) {
2289 Isolate* isolate = map->GetIsolate();
2290 Handle<Map> new_map = Copy(map);
2292 DescriptorArray* descriptors = new_map->instance_descriptors();
2293 int length = descriptors->number_of_descriptors();
2294 for (int i = 0; i < length; i++) {
2295 descriptors->SetRepresentation(i, Representation::Tagged());
2296 if (descriptors->GetDetails(i).type() == FIELD) {
2297 descriptors->SetValue(i, HeapType::Any());
2301 // Unless the instance is being migrated, ensure that modify_index is a field.
2302 PropertyDetails details = descriptors->GetDetails(modify_index);
2303 if (store_mode == FORCE_FIELD && details.type() != FIELD) {
2304 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
2305 new_map->NumberOfFields(),
2307 Representation::Tagged());
2308 descriptors->Replace(modify_index, &d);
// Converting a non-field descriptor to a field consumes one property
// slot; grow by kFieldsAdded when the slack runs out.
2309 int unused_property_fields = new_map->unused_property_fields() - 1;
2310 if (unused_property_fields < 0) {
2311 unused_property_fields += JSObject::kFieldsAdded;
2313 new_map->set_unused_property_fields(unused_property_fields);
2316 if (FLAG_trace_generalization) {
2317 HeapType* field_type = (details.type() == FIELD)
2318 ? map->instance_descriptors()->GetFieldType(modify_index)
2320 map->PrintGeneralization(stdout, reason, modify_index,
2321 new_map->NumberOfOwnDescriptors(),
2322 new_map->NumberOfOwnDescriptors(),
2323 details.type() == CONSTANT && store_mode == FORCE_FIELD,
2324 details.representation(), Representation::Tagged(),
2325 field_type, HeapType::Any());
// Convenience overload: reuses the attributes of the descriptor being
// modified instead of taking them as a parameter.
2332 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2334 StoreMode store_mode,
2335 const char* reason) {
2336 PropertyDetails details =
2337 map->instance_descriptors()->GetDetails(modify_index);
2338 return CopyGeneralizeAllRepresentations(map, modify_index, store_mode,
2339 details.attributes(), reason);
// Recursively marks this map's entire transition tree as deprecated and
// deoptimizes code that depends on its transitions.
2343 void Map::DeprecateTransitionTree() {
2344 if (is_deprecated()) return;
2345 if (HasTransitionArray()) {
2346 TransitionArray* transitions = this->transitions();
2347 for (int i = 0; i < transitions->number_of_transitions(); i++) {
2348 transitions->GetTarget(i)->DeprecateTransitionTree();
2352 dependent_code()->DeoptimizeDependentCodeGroup(
2353 GetIsolate(), DependentCode::kTransitionGroup);
2354 NotifyLeafMapLayoutChange();
2358 // Invalidates a transition target at |key|, and installs |new_descriptors| over
2359 // the current instance_descriptors to ensure proper sharing of descriptor
2361 void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
2362 if (HasTransitionArray()) {
2363 TransitionArray* transitions = this->transitions();
2364 int transition = transitions->Search(key);
2365 if (transition != TransitionArray::kNotFound) {
2366 transitions->GetTarget(transition)->DeprecateTransitionTree();
2370 // Don't overwrite the empty descriptor array.
2371 if (NumberOfOwnDescriptors() == 0) return;
2373 DescriptorArray* to_replace = instance_descriptors();
2374 Map* current = this;
2375 GetHeap()->incremental_marking()->RecordWrites(to_replace);
// Walk up the back-pointer chain and swap in |new_descriptors| on every
// map that still shares the old descriptor array, invalidating their
// enum caches along the way.
2376 while (current->instance_descriptors() == to_replace) {
2377 current->SetEnumLength(kInvalidEnumCacheSentinel);
2378 current->set_instance_descriptors(new_descriptors);
2379 Object* next = current->GetBackPointer();
2380 if (next->IsUndefined()) break;
2381 current = Map::cast(next);
2384 set_owns_descriptors(false);
// Follows back pointers up to the root of this map's transition tree.
2388 Map* Map::FindRootMap() {
2391 Object* back = result->GetBackPointer();
2392 if (back->IsUndefined()) return result;
2393 result = Map::cast(back);
// Starting from this root map, follows transitions keyed by the names in
// |descriptors| and returns the deepest map whose own descriptors still
// match |descriptors| entry-for-entry.
2398 Map* Map::FindLastMatchMap(int verbatim,
2400 DescriptorArray* descriptors) {
2401 DisallowHeapAllocation no_allocation;
2403 // This can only be called on roots of transition trees.
2404 ASSERT(GetBackPointer()->IsUndefined());
2406 Map* current = this;
2408 for (int i = verbatim; i < length; i++) {
2409 if (!current->HasTransitionArray()) break;
2410 Name* name = descriptors->GetKey(i);
2411 TransitionArray* transitions = current->transitions();
2412 int transition = transitions->Search(name);
2413 if (transition == TransitionArray::kNotFound) break;
2415 Map* next = transitions->GetTarget(transition);
2416 DescriptorArray* next_descriptors = next->instance_descriptors();
// Stop at the first descriptor that disagrees in type, attributes,
// representation, or (for fields) field type.
2418 PropertyDetails details = descriptors->GetDetails(i);
2419 PropertyDetails next_details = next_descriptors->GetDetails(i);
2420 if (details.type() != next_details.type()) break;
2421 if (details.attributes() != next_details.attributes()) break;
2422 if (!details.representation().Equals(next_details.representation())) break;
2423 if (next_details.type() == FIELD) {
2424 if (!descriptors->GetFieldType(i)->NowIs(
2425 next_descriptors->GetFieldType(i))) break;
2427 if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break;
// Walks back pointers to find the map that introduced the field at
// |descriptor| (the last ancestor that still has that descriptor among
// its own descriptors).
2436 Map* Map::FindFieldOwner(int descriptor) {
2437 DisallowHeapAllocation no_allocation;
2438 ASSERT_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type());
2441 Object* back = result->GetBackPointer();
2442 if (back->IsUndefined()) break;
2443 Map* parent = Map::cast(back);
2444 if (parent->NumberOfOwnDescriptors() <= descriptor) break;
2451 void Map::UpdateDescriptor(int descriptor_number, Descriptor* desc) {
2452 DisallowHeapAllocation no_allocation;
2453 if (HasTransitionArray()) {
2454 TransitionArray* transitions = this->transitions();
2455 for (int i = 0; i < transitions->number_of_transitions(); ++i) {
2456 transitions->GetTarget(i)->UpdateDescriptor(descriptor_number, desc);
2459 instance_descriptors()->Replace(descriptor_number, desc);;
// Computes the most general field type covering both |type1| and |type2|.
// Stable unions with at most kMaxClassesPerFieldType classes stay
// precise; everything else degrades to Any.
2464 Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1,
2465 Handle<HeapType> type2,
2467 static const int kMaxClassesPerFieldType = 5;
// Fast paths: one type already subsumes the other.
2468 if (type1->NowIs(type2)) return type2;
2469 if (type2->NowIs(type1)) return type1;
2470 if (type1->NowStable() && type2->NowStable()) {
2471 Handle<HeapType> type = HeapType::Union(type1, type2, isolate);
2472 if (type->NumClasses() <= kMaxClassesPerFieldType) {
2473 ASSERT(type->NowStable());
2474 ASSERT(type1->NowIs(type));
2475 ASSERT(type2->NowIs(type));
2479 return HeapType::Any(isolate);
// Generalizes the field type of the descriptor at modify_index in place
// on the map that owns that field, then deoptimizes code compiled against
// the old field type.
2484 void Map::GeneralizeFieldType(Handle<Map> map,
2486 Handle<HeapType> new_field_type) {
2487 Isolate* isolate = map->GetIsolate();
2489 // Check if we actually need to generalize the field type at all.
2490 Handle<HeapType> old_field_type(
2491 map->instance_descriptors()->GetFieldType(modify_index), isolate);
2492 if (new_field_type->NowIs(old_field_type)) {
2493 ASSERT(Map::GeneralizeFieldType(old_field_type,
2495 isolate)->NowIs(old_field_type));
2499 // Determine the field owner.
2500 Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
2501 Handle<DescriptorArray> descriptors(
2502 field_owner->instance_descriptors(), isolate);
2503 ASSERT_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
2505 // Determine the generalized new field type.
2506 new_field_type = Map::GeneralizeFieldType(
2507 old_field_type, new_field_type, isolate);
// Rebuild the field descriptor with the widened type and propagate it
// through the owner's transition subtree.
2509 PropertyDetails details = descriptors->GetDetails(modify_index);
2510 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
2511 descriptors->GetFieldIndex(modify_index),
2513 details.attributes(),
2514 details.representation());
2515 field_owner->UpdateDescriptor(modify_index, &d);
2516 field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
2517 isolate, DependentCode::kFieldTypeGroup);
2519 if (FLAG_trace_generalization) {
2520 map->PrintGeneralization(
2521 stdout, "field type generalization",
2522 modify_index, map->NumberOfOwnDescriptors(),
2523 map->NumberOfOwnDescriptors(), false,
2524 details.representation(), details.representation(),
2525 *old_field_type, *new_field_type);
2530 // Generalize the representation of the descriptor at |modify_index|.
2531 // This method rewrites the transition tree to reflect the new change. To avoid
2532 // high degrees over polymorphism, and to stabilize quickly, on every rewrite
2533 // the new type is deduced by merging the current type with any potential new
2534 // (partial) version of the type in the transition tree.
2535 // To do this, on each rewrite:
2536 // - Search the root of the transition tree using FindRootMap.
2537 // - Find |target_map|, the newest matching version of this map using the keys
2538 // in the |old_map|'s descriptor array to walk the transition tree.
2539 // - Merge/generalize the descriptor array of the |old_map| and |target_map|.
2540 // - Generalize the |modify_index| descriptor using |new_representation| and
2541 // |new_field_type|.
2542 // - Walk the tree again starting from the root towards |target_map|. Stop at
2543 // |split_map|, the first map whose descriptor array does not match the merged
2544 // descriptor array.
2545 // - If |target_map| == |split_map|, |target_map| is in the expected state.
2547 // - Otherwise, invalidate the outdated transition target from |target_map|, and
2548 // replace its transition tree with a new branch for the updated descriptors.
2549 Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
2551 Representation new_representation,
2552 Handle<HeapType> new_field_type,
2553 StoreMode store_mode) {
2554 Isolate* isolate = old_map->GetIsolate();
2556 Handle<DescriptorArray> old_descriptors(
2557 old_map->instance_descriptors(), isolate);
2558 int old_nof = old_map->NumberOfOwnDescriptors();
2559 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2560 Representation old_representation = old_details.representation();
2562 // It's fine to transition from None to anything but double without any
2563 // modification to the object, because the default uninitialized value for
2564 // representation None can be overwritten by both smi and tagged values.
2565 // Doubles, however, would require a box allocation.
2566 if (old_representation.IsNone() &&
2567 !new_representation.IsNone() &&
2568 !new_representation.IsDouble()) {
2569 ASSERT(old_details.type() == FIELD);
2570 ASSERT(old_descriptors->GetFieldType(modify_index)->NowIs(
2572 if (FLAG_trace_generalization) {
2573 old_map->PrintGeneralization(
2574 stdout, "uninitialized field",
2575 modify_index, old_map->NumberOfOwnDescriptors(),
2576 old_map->NumberOfOwnDescriptors(), false,
2577 old_representation, new_representation,
2578 old_descriptors->GetFieldType(modify_index), *new_field_type);
2580 old_descriptors->SetRepresentation(modify_index, new_representation);
2581 old_descriptors->SetValue(modify_index, *new_field_type);
2585 // Check the state of the root map.
2586 Handle<Map> root_map(old_map->FindRootMap(), isolate);
2587 if (!old_map->EquivalentToForTransition(*root_map)) {
2588 return CopyGeneralizeAllRepresentations(
2589 old_map, modify_index, store_mode, "not equivalent");
2591 int root_nof = root_map->NumberOfOwnDescriptors();
2592 if (modify_index < root_nof) {
2593 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2594 if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) ||
2595 (old_details.type() == FIELD &&
2596 (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) ||
2597 !new_representation.fits_into(old_details.representation())))) {
2598 return CopyGeneralizeAllRepresentations(
2599 old_map, modify_index, store_mode, "root modification");
// Walk the transition tree from the root, generalizing field types along
// the way, to locate the newest compatible |target_map|.
2603 Handle<Map> target_map = root_map;
2604 for (int i = root_nof; i < old_nof; ++i) {
2605 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2606 if (j == TransitionArray::kNotFound) break;
2607 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2608 Handle<DescriptorArray> tmp_descriptors = handle(
2609 tmp_map->instance_descriptors(), isolate);
2611 // Check if target map is incompatible.
2612 PropertyDetails old_details = old_descriptors->GetDetails(i);
2613 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2614 PropertyType old_type = old_details.type();
2615 PropertyType tmp_type = tmp_details.type();
2616 if (tmp_details.attributes() != old_details.attributes() ||
2617 ((tmp_type == CALLBACKS || old_type == CALLBACKS) &&
2618 (tmp_type != old_type ||
2619 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2620 return CopyGeneralizeAllRepresentations(
2621 old_map, modify_index, store_mode, "incompatible");
2623 Representation old_representation = old_details.representation();
2624 Representation tmp_representation = tmp_details.representation();
2625 if (!old_representation.fits_into(tmp_representation) ||
2626 (!new_representation.fits_into(tmp_representation) &&
2627 modify_index == i)) {
2630 if (tmp_type == FIELD) {
2631 // Generalize the field type as necessary.
2632 Handle<HeapType> old_field_type = (old_type == FIELD)
2633 ? handle(old_descriptors->GetFieldType(i), isolate)
2634 : old_descriptors->GetValue(i)->OptimalType(
2635 isolate, tmp_representation);
2636 if (modify_index == i) {
2637 old_field_type = GeneralizeFieldType(
2638 new_field_type, old_field_type, isolate);
2640 GeneralizeFieldType(tmp_map, i, old_field_type);
2641 } else if (tmp_type == CONSTANT) {
2642 if (old_type != CONSTANT ||
2643 old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) {
2647 ASSERT_EQ(tmp_type, old_type);
2648 ASSERT_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i));
2650 target_map = tmp_map;
2653 // Directly change the map if the target map is more general.
2654 Handle<DescriptorArray> target_descriptors(
2655 target_map->instance_descriptors(), isolate);
2656 int target_nof = target_map->NumberOfOwnDescriptors();
2657 if (target_nof == old_nof &&
2658 (store_mode != FORCE_FIELD ||
2659 target_descriptors->GetDetails(modify_index).type() == FIELD)) {
2660 ASSERT(modify_index < target_nof);
2661 ASSERT(new_representation.fits_into(
2662 target_descriptors->GetDetails(modify_index).representation()));
2663 ASSERT(target_descriptors->GetDetails(modify_index).type() != FIELD ||
2664 new_field_type->NowIs(
2665 target_descriptors->GetFieldType(modify_index)));
2669 // Find the last compatible target map in the transition tree.
2670 for (int i = target_nof; i < old_nof; ++i) {
2671 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2672 if (j == TransitionArray::kNotFound) break;
2673 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2674 Handle<DescriptorArray> tmp_descriptors(
2675 tmp_map->instance_descriptors(), isolate);
2677 // Check if target map is compatible.
2678 PropertyDetails old_details = old_descriptors->GetDetails(i);
2679 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2680 if (tmp_details.attributes() != old_details.attributes() ||
2681 ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) &&
2682 (tmp_details.type() != old_details.type() ||
2683 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2684 return CopyGeneralizeAllRepresentations(
2685 old_map, modify_index, store_mode, "incompatible");
2687 target_map = tmp_map;
2689 target_nof = target_map->NumberOfOwnDescriptors();
2690 target_descriptors = handle(target_map->instance_descriptors(), isolate);
2692 // Allocate a new descriptor array large enough to hold the required
2693 // descriptors, with minimally the exact same size as the old descriptor
2695 int new_slack = Max(
2696 old_nof, old_descriptors->number_of_descriptors()) - old_nof;
2697 Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
2698 isolate, old_nof, new_slack);
2699 ASSERT(new_descriptors->length() > target_descriptors->length() ||
2700 new_descriptors->NumberOfSlackDescriptors() > 0 ||
2701 new_descriptors->number_of_descriptors() ==
2702 old_descriptors->number_of_descriptors());
2703 ASSERT(new_descriptors->number_of_descriptors() == old_nof);
// 0 -> |root_nof|: copy the root map's own descriptors verbatim.
2706 int current_offset = 0;
2707 for (int i = 0; i < root_nof; ++i) {
2708 PropertyDetails old_details = old_descriptors->GetDetails(i);
2709 if (old_details.type() == FIELD) current_offset++;
2710 Descriptor d(handle(old_descriptors->GetKey(i), isolate),
2711 handle(old_descriptors->GetValue(i), isolate),
2713 new_descriptors->Set(i, &d);
2716 // |root_nof| -> |target_nof|
2717 for (int i = root_nof; i < target_nof; ++i) {
2718 Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
2719 PropertyDetails old_details = old_descriptors->GetDetails(i);
2720 PropertyDetails target_details = target_descriptors->GetDetails(i);
2721 target_details = target_details.CopyWithRepresentation(
2722 old_details.representation().generalize(
2723 target_details.representation()));
2724 if (modify_index == i) {
2725 target_details = target_details.CopyWithRepresentation(
2726 new_representation.generalize(target_details.representation()));
2728 ASSERT_EQ(old_details.attributes(), target_details.attributes());
2729 if (old_details.type() == FIELD ||
2730 target_details.type() == FIELD ||
2731 (modify_index == i && store_mode == FORCE_FIELD) ||
2732 (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
2733 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2734 ? handle(old_descriptors->GetFieldType(i), isolate)
2735 : old_descriptors->GetValue(i)->OptimalType(
2736 isolate, target_details.representation());
2737 Handle<HeapType> target_field_type = (target_details.type() == FIELD)
2738 ? handle(target_descriptors->GetFieldType(i), isolate)
2739 : target_descriptors->GetValue(i)->OptimalType(
2740 isolate, target_details.representation());
2741 target_field_type = GeneralizeFieldType(
2742 target_field_type, old_field_type, isolate);
2743 if (modify_index == i) {
2744 target_field_type = GeneralizeFieldType(
2745 target_field_type, new_field_type, isolate);
2747 FieldDescriptor d(target_key,
2750 target_details.attributes(),
2751 target_details.representation());
2752 new_descriptors->Set(i, &d);
2754 ASSERT_NE(FIELD, target_details.type());
2755 Descriptor d(target_key,
2756 handle(target_descriptors->GetValue(i), isolate),
2758 new_descriptors->Set(i, &d);
2762 // |target_nof| -> |old_nof|
2763 for (int i = target_nof; i < old_nof; ++i) {
2764 PropertyDetails old_details = old_descriptors->GetDetails(i);
2765 Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
2766 if (modify_index == i) {
2767 old_details = old_details.CopyWithRepresentation(
2768 new_representation.generalize(old_details.representation()));
2770 if (old_details.type() == FIELD) {
2771 Handle<HeapType> old_field_type(
2772 old_descriptors->GetFieldType(i), isolate);
2773 if (modify_index == i) {
2774 old_field_type = GeneralizeFieldType(
2775 old_field_type, new_field_type, isolate);
2777 FieldDescriptor d(old_key,
2780 old_details.attributes(),
2781 old_details.representation());
2782 new_descriptors->Set(i, &d);
2784 ASSERT(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
2785 if (modify_index == i && store_mode == FORCE_FIELD) {
2786 FieldDescriptor d(old_key,
2788 GeneralizeFieldType(
2789 old_descriptors->GetValue(i)->OptimalType(
2790 isolate, old_details.representation()),
2791 new_field_type, isolate),
2792 old_details.attributes(),
2793 old_details.representation());
2794 new_descriptors->Set(i, &d);
2796 ASSERT_NE(FIELD, old_details.type());
2797 Descriptor d(old_key,
2798 handle(old_descriptors->GetValue(i), isolate),
2800 new_descriptors->Set(i, &d);
2805 new_descriptors->Sort();
2807 ASSERT(store_mode != FORCE_FIELD ||
2808 new_descriptors->GetDetails(modify_index).type() == FIELD);
// Find |split_map|, the deepest map still matching the merged descriptor
// array, and deprecate the now-outdated branch hanging off it.
2810 Handle<Map> split_map(root_map->FindLastMatchMap(
2811 root_nof, old_nof, *new_descriptors), isolate);
2812 int split_nof = split_map->NumberOfOwnDescriptors();
2813 ASSERT_NE(old_nof, split_nof);
2815 split_map->DeprecateTarget(
2816 old_descriptors->GetKey(split_nof), *new_descriptors);
2818 if (FLAG_trace_generalization) {
2819 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2820 PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
2821 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2822 ? handle(old_descriptors->GetFieldType(modify_index), isolate)
2823 : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
2825 Handle<HeapType> new_field_type = (new_details.type() == FIELD)
2826 ? handle(new_descriptors->GetFieldType(modify_index), isolate)
2827 : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
2829 old_map->PrintGeneralization(
2830 stdout, "", modify_index, split_nof, old_nof,
2831 old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
2832 old_details.representation(), new_details.representation(),
2833 *old_field_type, *new_field_type);
2836 // Add missing transitions.
2837 Handle<Map> new_map = split_map;
2838 for (int i = split_nof; i < old_nof; ++i) {
2839 new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
2841 new_map->set_owns_descriptors(true);
2846 // Generalize the representation of all FIELD descriptors.
2847 Handle<Map> Map::GeneralizeAllFieldRepresentations(
2849 Handle<DescriptorArray> descriptors(map->instance_descriptors());
2850 for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
2851 if (descriptors->GetDetails(i).type() == FIELD) {
// Tagged/Any is the most general representation/field-type pair.
2852 map = GeneralizeRepresentation(map, i, Representation::Tagged(),
2853 HeapType::Any(map->GetIsolate()),
// Returns the up-to-date replacement for a deprecated |map|, first
// migrating any deprecated maps encountered along the prototype chain.
2862 MaybeHandle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
2863 Handle<Map> proto_map(map);
2864 while (proto_map->prototype()->IsJSObject()) {
2865 Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
2866 proto_map = Handle<Map>(holder->map());
// Re-read the holder's map after a successful migration.
2867 if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
2868 proto_map = Handle<Map>(holder->map());
2871 return CurrentMapForDeprecatedInternal(map);
// Allocation- and deoptimization-free lookup of the live replacement for
// a deprecated map: replays |old_map|'s descriptor keys through the root
// map's transition tree and returns an empty handle on any mismatch.
2876 MaybeHandle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> old_map) {
2877 DisallowHeapAllocation no_allocation;
2878 DisallowDeoptimization no_deoptimization(old_map->GetIsolate());
2880 if (!old_map->is_deprecated()) return old_map;
2882 // Check the state of the root map.
2883 Map* root_map = old_map->FindRootMap();
2884 if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
2885 int root_nof = root_map->NumberOfOwnDescriptors();
2887 int old_nof = old_map->NumberOfOwnDescriptors();
2888 DescriptorArray* old_descriptors = old_map->instance_descriptors();
2890 Map* new_map = root_map;
2891 for (int i = root_nof; i < old_nof; ++i) {
2892 int j = new_map->SearchTransition(old_descriptors->GetKey(i));
2893 if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
2894 new_map = new_map->GetTransition(j);
2895 DescriptorArray* new_descriptors = new_map->instance_descriptors();
2897 PropertyDetails new_details = new_descriptors->GetDetails(i);
2898 PropertyDetails old_details = old_descriptors->GetDetails(i);
2899 if (old_details.attributes() != new_details.attributes() ||
2900 !old_details.representation().fits_into(new_details.representation())) {
2901 return MaybeHandle<Map>();
2903 PropertyType new_type = new_details.type();
2904 PropertyType old_type = old_details.type();
2905 Object* new_value = new_descriptors->GetValue(i);
2906 Object* old_value = old_descriptors->GetValue(i);
// The replacement descriptor must be at least as general as the old one.
2909 if ((old_type == FIELD &&
2910 !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
2911 (old_type == CONSTANT &&
2912 !HeapType::cast(new_value)->NowContains(old_value)) ||
2913 (old_type == CALLBACKS &&
2914 !HeapType::Any()->Is(HeapType::cast(new_value)))) {
2915 return MaybeHandle<Map>();
2921 if (old_type != new_type || old_value != new_value) {
2922 return MaybeHandle<Map>();
2933 if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
2934 return handle(new_map);
// Invokes |object|'s named-property setter interceptor for the store of
// |name| = |value|; when the interceptor declines (empty result) or is
// absent, falls through to SetPropertyPostInterceptor.
2938 MaybeHandle<Object> JSObject::SetPropertyWithInterceptor(
2939 Handle<JSObject> object,
2941 Handle<Object> value,
2942 PropertyAttributes attributes,
2943 StrictMode strict_mode) {
2944 // TODO(rossberg): Support symbols in the API.
2945 if (name->IsSymbol()) return value;
2946 Isolate* isolate = object->GetIsolate();
2947 Handle<String> name_string = Handle<String>::cast(name);
2948 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
2949 if (!interceptor->setter()->IsUndefined()) {
2951 ApiNamedPropertyAccess("interceptor-named-set", *object, *name));
2952 PropertyCallbackArguments args(
2953 isolate, interceptor->data(), *object, *object);
2954 v8::NamedPropertySetterCallback setter =
2955 v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
// API callbacks must never observe the hole; substitute undefined.
2956 Handle<Object> value_unhole = value->IsTheHole()
2957 ? Handle<Object>(isolate->factory()->undefined_value()) : value;
2958 v8::Handle<v8::Value> result = args.Call(setter,
2959 v8::Utils::ToLocal(name_string),
2960 v8::Utils::ToLocal(value_unhole));
2961 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the store.
2962 if (!result.IsEmpty()) return value;
2964 return SetPropertyPostInterceptor(
2965 object, name, value, attributes, strict_mode);
// Looks up |name| on the receiver itself (falling back to the map's
// transition table when not found) and delegates to the
// LookupResult-based SetProperty.
2969 MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
2971 Handle<Object> value,
2972 PropertyAttributes attributes,
2973 StrictMode strict_mode,
2974 StoreFromKeyed store_mode) {
2975 LookupResult result(object->GetIsolate());
2976 object->LookupOwn(name, &result, true);
2977 if (!result.IsFound()) {
2978 object->map()->LookupTransition(JSObject::cast(*object), *name, &result);
2980 return SetProperty(object, &result, name, value, attributes, strict_mode,
// Searches |object|'s prototype chain for a proxy, or a dictionary
// element at |index| with a CALLBACKS entry, that must handle the store;
// returns the hole value when no prototype intercepts it.
2985 MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
2986 Handle<JSObject> object,
2988 Handle<Object> value,
2990 StrictMode strict_mode) {
2991 Isolate *isolate = object->GetIsolate();
2992 for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
2994 proto = handle(proto->GetPrototype(isolate), isolate)) {
2995 if (proto->IsJSProxy()) {
2996 return JSProxy::SetPropertyViaPrototypesWithHandler(
2997 Handle<JSProxy>::cast(proto),
2999 isolate->factory()->Uint32ToString(index), // name
3005 Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
// Only dictionary-mode elements can carry accessor (CALLBACKS) entries.
3006 if (!js_proto->HasDictionaryElements()) {
3009 Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
3010 int entry = dictionary->FindEntry(index);
3011 if (entry != SeededNumberDictionary::kNotFound) {
3012 PropertyDetails details = dictionary->DetailsAt(entry);
3013 if (details.type() == CALLBACKS) {
3015 Handle<Object> structure(dictionary->ValueAt(entry), isolate);
3016 return SetElementWithCallback(object, structure, index, value, js_proto,
3022 return isolate->factory()->the_hole_value();
// Called after an own-property lookup failed.  Consults the prototype chain
// for an accessor that wants the store, a read-only property that blocks it,
// or a proxy that handles it.  |*done| is set to true when the store was
// fully handled (or rejected); returning the hole with |*done| false tells
// the caller to proceed with a normal own-property store.
3026 MaybeHandle<Object> JSObject::SetPropertyViaPrototypes(
3027 Handle<JSObject> object,
3029 Handle<Object> value,
3030 PropertyAttributes attributes,
3031 StrictMode strict_mode,
3033 Isolate* isolate = object->GetIsolate();
3036 // We could not find an own property, so let's check whether there is an
3037 // accessor that wants to handle the property, or whether the property is
3038 // read-only on the prototype chain.
3039 LookupResult result(isolate);
3040 object->LookupRealNamedPropertyInPrototypes(name, &result);
3041 if (result.IsFound()) {
3042 switch (result.type()) {
// A read-only data property on the chain blocks the store.
3046 *done = result.IsReadOnly();
3049 LookupIterator it(object, name, handle(result.holder()));
3050 PropertyAttributes attr = GetPropertyAttributes(&it);
3051 *done = !!(attr & READ_ONLY);
3056 if (!result.IsReadOnly()) {
// A writable accessor on the chain performs the store via its setter.
3057 Handle<Object> callback_object(result.GetCallbackObject(), isolate);
3058 return SetPropertyWithCallback(object, name, value,
3059 handle(result.holder()),
3060 callback_object, strict_mode);
3065 Handle<JSProxy> proxy(result.proxy());
3066 return JSProxy::SetPropertyViaPrototypesWithHandler(
3067 proxy, object, name, value, attributes, strict_mode, done);
3075 // If we get here with *done true, we have encountered a read-only property.
// Sloppy mode silently ignores the write; strict mode throws.
3077 if (strict_mode == SLOPPY) return value;
3078 Handle<Object> args[] = { name, object };
3079 Handle<Object> error = isolate->factory()->NewTypeError(
3080 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3081 return isolate->Throw<Object>(error);
3083 return isolate->factory()->the_hole_value();
// Guarantees that |map|'s descriptor array has room for at least |slack|
// additional descriptors, copying the array if necessary and installing the
// copy in every map along the back-pointer chain that shares it.
3087 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3088 // Only supports adding slack to owned descriptors.
3089 ASSERT(map->owns_descriptors());
3091 Handle<DescriptorArray> descriptors(map->instance_descriptors());
3092 int old_size = map->NumberOfOwnDescriptors();
// Fast path: existing slack already suffices.
3093 if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3095 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
3096 descriptors, old_size, slack);
3098 if (old_size == 0) {
3099 map->set_instance_descriptors(*new_descriptors);
3103 // If the source descriptors had an enum cache we copy it. This ensures
3104 // that the maps to which we push the new descriptor array back can rely
3105 // on a cache always being available once it is set. If the map has more
3106 // enumerated descriptors than available in the original cache, the cache
3107 // will be lazily replaced by the extended cache when needed.
3108 if (descriptors->HasEnumCache()) {
3109 new_descriptors->CopyEnumCacheFrom(*descriptors);
3112 // Replace descriptors by new_descriptors in all maps that share it.
// Tell the incremental marker about the old array before it is unlinked.
3113 map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);
// Walk the back-pointer chain; stop at the first map that does not share
// the old descriptor array.
3116 for (Object* current = map->GetBackPointer();
3117 !current->IsUndefined();
3118 current = walk_map->GetBackPointer()) {
3119 walk_map = Map::cast(current);
3120 if (walk_map->instance_descriptors() != *descriptors) break;
3121 walk_map->set_instance_descriptors(*new_descriptors);
3124 map->set_instance_descriptors(*new_descriptors);
// Appends the AccessorInfo callbacks in |callbacks| to |array|, skipping
// entries whose name is already present.  |T| supplies the storage policy
// (Contains/Insert) — see DescriptorArrayAppender / FixedArrayAppender.
// Returns the new number of valid descriptors in |array|.
3129 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3130 Handle<typename T::Array> array,
3131 int valid_descriptors) {
3132 int nof_callbacks = callbacks->length();
3134 Isolate* isolate = array->GetIsolate();
3135 // Ensure the keys are unique names before writing them into the
3136 // instance descriptor. Since it may cause a GC, it has to be done before we
3137 // temporarily put the heap in an invalid state while appending descriptors.
3138 for (int i = 0; i < nof_callbacks; ++i) {
3139 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3140 if (entry->name()->IsUniqueName()) continue;
3141 Handle<String> key =
3142 isolate->factory()->InternalizeString(
3143 Handle<String>(String::cast(entry->name())));
3144 entry->set_name(*key);
3147 // Fill in new callback descriptors. Process the callbacks from
3148 // back to front so that the last callback with a given name takes
3149 // precedence over previously added callbacks with that name.
3150 for (int i = nof_callbacks - 1; i >= 0; i--) {
3151 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3152 Handle<Name> key(Name::cast(entry->name()));
3153 // Check if a descriptor with this name already exists before writing.
3154 if (!T::Contains(key, entry, valid_descriptors, array)) {
3155 T::Insert(key, entry, valid_descriptors, array);
3156 valid_descriptors++;
3160 return valid_descriptors;
// Storage policy for AppendUniqueCallbacks that appends CallbacksDescriptors
// to a map's DescriptorArray.
3163 struct DescriptorArrayAppender {
3164 typedef DescriptorArray Array;
// Returns true if a descriptor named |key| already exists among the first
// |valid_descriptors| entries.
3165 static bool Contains(Handle<Name> key,
3166 Handle<AccessorInfo> entry,
3167 int valid_descriptors,
3168 Handle<DescriptorArray> array) {
3169 DisallowHeapAllocation no_gc;
3170 return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
// Appends a new CALLBACKS descriptor for |entry| under |key|.
3172 static void Insert(Handle<Name> key,
3173 Handle<AccessorInfo> entry,
3174 int valid_descriptors,
3175 Handle<DescriptorArray> array) {
3176 DisallowHeapAllocation no_gc;
3177 CallbacksDescriptor desc(key, entry, entry->property_attributes());
3178 array->Append(&desc);
// Storage policy for AppendUniqueCallbacks that writes AccessorInfo objects
// into a plain FixedArray (used by AccessorInfo::AppendUnique).
3183 struct FixedArrayAppender {
3184 typedef FixedArray Array;
// Linear scan for |key| among the first |valid_descriptors| entries.
3185 static bool Contains(Handle<Name> key,
3186 Handle<AccessorInfo> entry,
3187 int valid_descriptors,
3188 Handle<FixedArray> array) {
3189 for (int i = 0; i < valid_descriptors; i++) {
3190 if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
// Stores |entry| at the next free slot; caller guarantees capacity.
3194 static void Insert(Handle<Name> key,
3195 Handle<AccessorInfo> entry,
3196 int valid_descriptors,
3197 Handle<FixedArray> array) {
3198 DisallowHeapAllocation no_gc;
3199 array->set(valid_descriptors, *entry);
// Appends the accessor callbacks in |descriptors| (a NeanderArray) to |map|'s
// instance descriptors and updates the map's own-descriptor count.  The
// descriptor array must already have enough slack (see EnsureDescriptorSlack).
3204 void Map::AppendCallbackDescriptors(Handle<Map> map,
3205 Handle<Object> descriptors) {
3206 int nof = map->NumberOfOwnDescriptors();
3207 Handle<DescriptorArray> array(map->instance_descriptors());
3208 NeanderArray callbacks(descriptors);
3209 ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length());
3210 nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3211 map->SetNumberOfOwnDescriptors(nof);
// Appends the callbacks in |descriptors| to the FixedArray |array|, skipping
// names already present.  Returns the updated count of valid entries.
3215 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3216 Handle<FixedArray> array,
3217 int valid_descriptors) {
3218 NeanderArray callbacks(descriptors);
3219 ASSERT(array->length() >= callbacks.length() + valid_descriptors);
3220 return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
// Returns true if |map| is present (by handle identity) in |maps|.
// Null entries in the list are skipped.
3226 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3227 ASSERT(!map.is_null());
3228 for (int i = 0; i < maps->length(); ++i) {
3229 if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
// Wraps a raw pointer in a Handle, mapping NULL to the null handle.
3236 static Handle<T> MaybeNull(T* p) {
3237 if (p == NULL) return Handle<T>::null();
3238 return Handle<T>(p);
// Searches |candidates| for the most general elements-kind transition of this
// map, following the chain of increasingly general fast elements kinds.
// A packed start only accepts packed targets until a holey candidate is
// taken.  Returns the null handle if no candidate matches.
3242 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3243 ElementsKind kind = elements_kind();
3244 Handle<Map> transitioned_map = Handle<Map>::null();
3245 Handle<Map> current_map(this);
3246 bool packed = IsFastPackedElementsKind(kind);
3247 if (IsTransitionableFastElementsKind(kind)) {
3248 while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3249 kind = GetNextMoreGeneralFastElementsKind(kind, false);
3250 Handle<Map> maybe_transitioned_map =
3251 MaybeNull(current_map->LookupElementsTransitionMap(kind));
// Stop at the first missing link in the transition chain.
3252 if (maybe_transitioned_map.is_null()) break;
3253 if (ContainsMap(candidates, maybe_transitioned_map) &&
3254 (packed || !IsFastPackedElementsKind(kind))) {
3255 transitioned_map = maybe_transitioned_map;
// Once a holey kind is accepted, packed candidates no longer qualify.
3256 if (!IsFastPackedElementsKind(kind)) packed = false;
3258 current_map = maybe_transitioned_map;
3261 return transitioned_map;
// Follows |map|'s elements-kind transition chain as far as possible towards
// |to_kind| and returns the closest map reached.  Handles the legacy
// external-array shortcut and the final dictionary-elements hop separately.
3265 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3266 Map* current_map = map;
3268 IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3270 : TERMINAL_FAST_ELEMENTS_KIND;
3272 // Support for legacy API: SetIndexedPropertiesTo{External,Pixel}Data
3273 // allows to change elements from arbitrary kind to any ExternalArray
3274 // elements kind. Satisfy its requirements, checking whether we already
3275 // have the cached transition.
3276 if (IsExternalArrayElementsKind(to_kind) &&
3277 !IsFixedTypedArrayElementsKind(map->elements_kind())) {
3278 if (map->HasElementsTransition()) {
3279 Map* next_map = map->elements_transition_map();
3280 if (next_map->elements_kind() == to_kind) return next_map;
// Walk the chain one kind at a time until the target kind is reached or
// the chain ends.
3285 ElementsKind kind = map->elements_kind();
3286 while (kind != target_kind) {
3287 kind = GetNextTransitionElementsKind(kind);
3288 if (!current_map->HasElementsTransition()) return current_map;
3289 current_map = current_map->elements_transition_map();
// The transition out of the fast-kinds system (to dictionary) is a single
// extra hop past the terminal fast kind.
3292 if (to_kind != kind && current_map->HasElementsTransition()) {
3293 ASSERT(to_kind == DICTIONARY_ELEMENTS);
3294 Map* next_map = current_map->elements_transition_map();
3295 if (next_map->elements_kind() == to_kind) return next_map;
3298 ASSERT(current_map->elements_kind() == target_kind);
// Returns the transition target map with exactly |to_kind| elements, or
// (per the visible logic) only when the closest transition matches exactly.
3303 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3304 Map* to_map = FindClosestElementsTransition(this, to_kind);
3305 if (to_map->elements_kind() == to_kind) return to_map;
// Returns whether this map is the map of the initial Array.prototype or the
// initial Object.prototype of its native context.
3310 bool Map::IsMapInArrayPrototypeChain() {
3311 Isolate* isolate = GetIsolate();
3312 if (isolate->initial_array_prototype()->map() == this) {
3316 if (isolate->initial_object_prototype()->map() == this) {
// Creates (with INSERT_TRANSITION) every missing elements-kind transition
// between |map|'s kind and |to_kind|, one hop per intermediate kind, and
// returns the final map with kind |to_kind|.
3324 static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
3325 ElementsKind to_kind) {
3326 ASSERT(IsTransitionElementsKind(map->elements_kind()));
3328 Handle<Map> current_map = map;
3330 ElementsKind kind = map->elements_kind();
3331 while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3332 kind = GetNextTransitionElementsKind(kind);
3333 current_map = Map::CopyAsElementsKind(
3334 current_map, kind, INSERT_TRANSITION);
3337 // In case we are exiting the fast elements kind system, just add the map in
3339 if (kind != to_kind) {
3340 current_map = Map::CopyAsElementsKind(
3341 current_map, to_kind, INSERT_TRANSITION);
3344 ASSERT(current_map->elements_kind() == to_kind);
// Returns a map equal to |map| but with elements kind |to_kind|.  Fast path:
// if |map| is the native context's canonical JSArray map for |from_kind|,
// the pre-built array map for |to_kind| is returned from the js_array_maps
// cache; otherwise falls through to the slow transition lookup.
3349 Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
3350 ElementsKind to_kind) {
3351 ElementsKind from_kind = map->elements_kind();
3352 if (from_kind == to_kind) return map;
3354 Isolate* isolate = map->GetIsolate();
3355 Context* native_context = isolate->context()->native_context();
3356 Object* maybe_array_maps = native_context->js_array_maps();
3357 if (maybe_array_maps->IsFixedArray()) {
3358 DisallowHeapAllocation no_gc;
3359 FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
3360 if (array_maps->get(from_kind) == *map) {
3361 Object* maybe_transitioned_map = array_maps->get(to_kind);
3362 if (maybe_transitioned_map->IsMap()) {
3363 return handle(Map::cast(maybe_transitioned_map));
3368 return TransitionElementsToSlow(map, to_kind);
// Slow path of TransitionElementsTo: decides whether the new map may be
// remembered as a transition (only for non-shared maps moving in ascending
// fast-kind generality) or must be a detached copy (OMIT_TRANSITION).
3372 Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
3373 ElementsKind to_kind) {
3374 ElementsKind from_kind = map->elements_kind();
3376 if (from_kind == to_kind) {
3380 bool allow_store_transition =
3381 // Only remember the map transition if there is not an already existing
3382 // non-matching element transition.
3383 !map->IsUndefined() && !map->is_shared() &&
3384 IsTransitionElementsKind(from_kind);
3386 // Only store fast element maps in ascending generality.
3387 if (IsFastElementsKind(to_kind)) {
3388 allow_store_transition &=
3389 IsTransitionableFastElementsKind(from_kind) &&
3390 IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3393 if (!allow_store_transition) {
// Produce an unlinked copy; no transition is recorded on |map|.
3394 return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
3397 return Map::AsElementsKind(map, to_kind);
// Returns the transition-tree map for |map| with elements kind |kind|,
// creating any missing intermediate transitions.
3402 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3403 Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));
3405 if (closest_map->elements_kind() == kind) {
3409 return AddMissingElementsTransitions(closest_map, kind);
// Convenience wrapper: returns |object|'s map transitioned to |to_kind|.
3413 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3414 ElementsKind to_kind) {
3415 Handle<Map> map(object->map());
3416 return Map::TransitionElementsTo(map, to_kind);
// Looks up |name| as a real (non-interceptor) own property of this object,
// storing the outcome in |result|.  Global proxies delegate to their hidden
// global object.  Uninitialized constants (the-hole values) disallow IC
// caching of the result.
3420 void JSObject::LookupOwnRealNamedProperty(Handle<Name> name,
3421 LookupResult* result) {
3422 DisallowHeapAllocation no_gc;
3423 if (IsJSGlobalProxy()) {
3424 Object* proto = GetPrototype();
3425 if (proto->IsNull()) return result->NotFound();
3426 ASSERT(proto->IsJSGlobalObject());
3427 return JSObject::cast(proto)->LookupOwnRealNamedProperty(name, result);
3430 if (HasFastProperties()) {
3431 map()->LookupDescriptor(this, *name, result);
3432 // A property or a map transition was found. We return all of these result
3433 // types because LookupOwnRealNamedProperty is used when setting
3434 // properties where map transitions are handled.
3435 ASSERT(!result->IsFound() ||
3436 (result->holder() == this && result->IsFastPropertyType()));
3437 // Disallow caching for uninitialized constants. These can only
3439 if (result->IsField() &&
3440 result->IsReadOnly() &&
3441 RawFastPropertyAt(result->GetFieldIndex())->IsTheHole()) {
3442 result->DisallowCaching();
// Slow (dictionary) properties path.
3447 int entry = property_dictionary()->FindEntry(name);
3448 if (entry != NameDictionary::kNotFound) {
3449 Object* value = property_dictionary()->ValueAt(entry);
3450 if (IsGlobalObject()) {
// Global objects store values boxed in PropertyCells.
3451 PropertyDetails d = property_dictionary()->DetailsAt(entry);
3452 if (d.IsDeleted()) {
3456 value = PropertyCell::cast(value)->value();
3458 // Make sure to disallow caching for uninitialized constants
3459 // found in the dictionary-mode objects.
3460 if (value->IsTheHole()) result->DisallowCaching();
3461 result->DictionaryResult(this, entry);
// Looks up |name| as a real property on this object, then (if not found)
// along the prototype chain.
3469 void JSObject::LookupRealNamedProperty(Handle<Name> name,
3470 LookupResult* result) {
3471 DisallowHeapAllocation no_gc;
3472 LookupOwnRealNamedProperty(name, result);
3473 if (result->IsFound()) return;
3475 LookupRealNamedPropertyInPrototypes(name, result);
// Walks this object's prototype chain looking for a real property named
// |name|.  A proxy on the chain yields a handler result; the first found
// property stops the walk.
3479 void JSObject::LookupRealNamedPropertyInPrototypes(Handle<Name> name,
3480 LookupResult* result) {
3481 DisallowHeapAllocation no_gc;
3482 Isolate* isolate = GetIsolate();
3483 Heap* heap = isolate->heap();
3484 for (Object* pt = GetPrototype();
3485 pt != heap->null_value();
3486 pt = pt->GetPrototype(isolate)) {
3487 if (pt->IsJSProxy()) {
3488 return result->HandlerResult(JSProxy::cast(pt));
3490 JSObject::cast(pt)->LookupOwnRealNamedProperty(name, result);
// "Real" lookups must never surface interceptor results.
3491 ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
3492 if (result->IsFound()) return;
// Dispatches a property store on a previously computed LookupResult:
// proxy handler results go through the proxy trap machinery, everything
// else through the JSObject store path.
3498 MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
3499 LookupResult* result,
3501 Handle<Object> value,
3502 PropertyAttributes attributes,
3503 StrictMode strict_mode,
3504 StoreFromKeyed store_mode) {
3505 if (result->IsHandler()) {
3506 return JSProxy::SetPropertyWithHandler(handle(result->proxy()),
3507 object, key, value, attributes, strict_mode);
3509 return JSObject::SetPropertyForResult(Handle<JSObject>::cast(object),
3510 result, key, value, attributes, strict_mode, store_mode);
// Implements [[HasProperty]] for proxies by invoking the "has" trap (via
// CallTrap, judging from the elided call) and coercing the result to a
// boolean.  Symbols are not supported on proxies and report false.
3515 bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) {
3516 Isolate* isolate = proxy->GetIsolate();
3518 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3519 if (name->IsSymbol()) return false;
3521 Handle<Object> args[] = { name };
3522 Handle<Object> result;
3523 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3527 isolate->derived_has_trap(),
3532 return result->BooleanValue();
// Implements [[Put]] for proxies: invokes the "set" trap with
// (receiver, name, value), falling back to the derived trap when the
// handler does not define one.  Symbols are ignored and the value is
// returned unchanged.
3536 MaybeHandle<Object> JSProxy::SetPropertyWithHandler(
3537 Handle<JSProxy> proxy,
3538 Handle<JSReceiver> receiver,
3540 Handle<Object> value,
3541 PropertyAttributes attributes,
3542 StrictMode strict_mode) {
3543 Isolate* isolate = proxy->GetIsolate();
3545 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3546 if (name->IsSymbol()) return value;
3548 Handle<Object> args[] = { receiver, name, value };
3549 RETURN_ON_EXCEPTION(
3553 isolate->derived_set_trap(),
// Handles a store that reached a proxy on the receiver's prototype chain.
// Consults the proxy's "getPropertyDescriptor" trap, completes the returned
// descriptor, and then emulates ordinary [[Put]] semantics against it:
// - no descriptor: store is not handled here (|*done| = false, hole returned);
// - non-configurable descriptor: TypeError;
// - data descriptor: writable -> fall through to receiver, else read-only
//   error in strict mode / silent ignore in sloppy mode;
// - accessor descriptor: call the setter, or throw/ignore when absent.
3562 MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3563 Handle<JSProxy> proxy,
3564 Handle<JSReceiver> receiver,
3566 Handle<Object> value,
3567 PropertyAttributes attributes,
3568 StrictMode strict_mode,
3570 Isolate* isolate = proxy->GetIsolate();
3571 Handle<Object> handler(proxy->handler(), isolate);  // Trap might morph proxy.
3573 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3574 if (name->IsSymbol()) {
3576 return isolate->factory()->the_hole_value();
3579 *done = true;  // except where redefined...
3580 Handle<Object> args[] = { name };
3581 Handle<Object> result;
3582 ASSIGN_RETURN_ON_EXCEPTION(
3585 "getPropertyDescriptor",
// An undefined descriptor means the proxy does not claim the property.
3591 if (result->IsUndefined()) {
3593 return isolate->factory()->the_hole_value();
3596 // Emulate [[GetProperty]] semantics for proxies.
3597 Handle<Object> argv[] = { result };
3598 Handle<Object> desc;
3599 ASSIGN_RETURN_ON_EXCEPTION(
3601 Execution::Call(isolate,
3602 isolate->to_complete_property_descriptor(),
3608 // [[GetProperty]] requires to check that all properties are configurable.
3609 Handle<String> configurable_name =
3610 isolate->factory()->InternalizeOneByteString(
3611 STATIC_ASCII_VECTOR("configurable_"));
3612 Handle<Object> configurable =
3613 Object::GetProperty(desc, configurable_name).ToHandleChecked();
3614 ASSERT(configurable->IsBoolean());
3615 if (configurable->IsFalse()) {
3616 Handle<String> trap =
3617 isolate->factory()->InternalizeOneByteString(
3618 STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3619 Handle<Object> args[] = { handler, trap, name };
3620 Handle<Object> error = isolate->factory()->NewTypeError(
3621 "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3622 return isolate->Throw<Object>(error);
3624 ASSERT(configurable->IsTrue());
3626 // Check for DataDescriptor.
3627 Handle<String> hasWritable_name =
3628 isolate->factory()->InternalizeOneByteString(
3629 STATIC_ASCII_VECTOR("hasWritable_"));
3630 Handle<Object> hasWritable =
3631 Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
3632 ASSERT(hasWritable->IsBoolean());
3633 if (hasWritable->IsTrue()) {
3634 Handle<String> writable_name =
3635 isolate->factory()->InternalizeOneByteString(
3636 STATIC_ASCII_VECTOR("writable_"));
3637 Handle<Object> writable =
3638 Object::GetProperty(desc, writable_name).ToHandleChecked();
3639 ASSERT(writable->IsBoolean());
// Writable data property: let the caller perform the actual store.
3640 *done = writable->IsFalse();
3641 if (!*done) return isolate->factory()->the_hole_value();
3642 if (strict_mode == SLOPPY) return value;
3643 Handle<Object> args[] = { name, receiver };
3644 Handle<Object> error = isolate->factory()->NewTypeError(
3645 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3646 return isolate->Throw<Object>(error);
3649 // We have an AccessorDescriptor.
3650 Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
3651 STATIC_ASCII_VECTOR("set_"));
3652 Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
3653 if (!setter->IsUndefined()) {
3654 // TODO(rossberg): nicer would be to cast to some JSCallable here...
3655 return SetPropertyWithDefinedSetter(
3656 receiver, Handle<JSReceiver>::cast(setter), value);
3659 if (strict_mode == SLOPPY) return value;
3660 Handle<Object> args2[] = { name, proxy };
3661 Handle<Object> error = isolate->factory()->NewTypeError(
3662 "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
3663 return isolate->Throw<Object>(error);
// Implements [[Delete]] for proxies via the "delete" trap.  A falsy trap
// result under STRICT_DELETION throws a TypeError; otherwise the boolean
// result is returned.  Symbols always report false.
3667 MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
3668 Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3669 Isolate* isolate = proxy->GetIsolate();
3671 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3672 if (name->IsSymbol()) return isolate->factory()->false_value();
3674 Handle<Object> args[] = { name };
3675 Handle<Object> result;
3676 ASSIGN_RETURN_ON_EXCEPTION(
3685 bool result_bool = result->BooleanValue();
3686 if (mode == STRICT_DELETION && !result_bool) {
3687 Handle<Object> handler(proxy->handler(), isolate);
3688 Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3689 STATIC_ASCII_VECTOR("delete"));
3690 Handle<Object> args[] = { handler, trap_name };
3691 Handle<Object> error = isolate->factory()->NewTypeError(
3692 "handler_failed", HandleVector(args, ARRAY_SIZE(args)));
3693 return isolate->Throw<Object>(error);
3695 return isolate->factory()->ToBoolean(result_bool);
// Element variant of DeletePropertyWithHandler: converts the index to a
// string name and delegates.
3699 MaybeHandle<Object> JSProxy::DeleteElementWithHandler(
3700 Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3701 Isolate* isolate = proxy->GetIsolate();
3702 Handle<String> name = isolate->factory()->Uint32ToString(index);
3703 return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
// Computes the PropertyAttributes of |name| on a proxy by asking the
// "getPropertyDescriptor" trap, completing the descriptor, and translating
// its enumerable/configurable/writable fields to attribute bits.  An
// accessor with a setter counts as writable.  Non-configurable descriptors
// from the trap are rejected with a TypeError.  Returns ABSENT for symbols
// or an undefined descriptor.
3707 PropertyAttributes JSProxy::GetPropertyAttributesWithHandler(
3708 Handle<JSProxy> proxy,
3709 Handle<Object> receiver,
3710 Handle<Name> name) {
3711 Isolate* isolate = proxy->GetIsolate();
3712 HandleScope scope(isolate);
3714 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3715 if (name->IsSymbol()) return ABSENT;
3717 Handle<Object> args[] = { name };
3718 Handle<Object> result;
3719 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3721 proxy->CallTrap(proxy,
3722 "getPropertyDescriptor",
3728 if (result->IsUndefined()) return ABSENT;
// Normalize the trap result into a complete property descriptor.
3730 Handle<Object> argv[] = { result };
3731 Handle<Object> desc;
3732 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3734 Execution::Call(isolate,
3735 isolate->to_complete_property_descriptor(),
3741 // Convert result to PropertyAttributes.
3742 Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
3743 STATIC_ASCII_VECTOR("enumerable_"));
3744 Handle<Object> enumerable;
3745 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3746 isolate, enumerable, Object::GetProperty(desc, enum_n), NONE);
3747 Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
3748 STATIC_ASCII_VECTOR("configurable_"));
3749 Handle<Object> configurable;
3750 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3751 isolate, configurable, Object::GetProperty(desc, conf_n), NONE);
3752 Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
3753 STATIC_ASCII_VECTOR("writable_"));
3754 Handle<Object> writable;
3755 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3756 isolate, writable, Object::GetProperty(desc, writ_n), NONE);
3757 if (!writable->BooleanValue()) {
// An accessor descriptor with a setter is treated as writable.
3758 Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
3759 STATIC_ASCII_VECTOR("set_"));
3760 Handle<Object> setter;
3761 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3762 isolate, setter, Object::GetProperty(desc, set_n), NONE);
3763 writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
3766 if (configurable->IsFalse()) {
3767 Handle<Object> handler(proxy->handler(), isolate);
3768 Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3769 STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3770 Handle<Object> args[] = { handler, trap, name };
3771 Handle<Object> error = isolate->factory()->NewTypeError(
3772 "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3773 isolate->Throw(*error);
3777 int attributes = NONE;
3778 if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
3779 if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
3780 if (!writable->BooleanValue()) attributes |= READ_ONLY;
3781 return static_cast<PropertyAttributes>(attributes);
// Element variant of GetPropertyAttributesWithHandler: converts the index
// to a string name and delegates.
3785 PropertyAttributes JSProxy::GetElementAttributeWithHandler(
3786 Handle<JSProxy> proxy,
3787 Handle<JSReceiver> receiver,
3789 Isolate* isolate = proxy->GetIsolate();
3790 Handle<String> name = isolate->factory()->Uint32ToString(index);
3791 return GetPropertyAttributesWithHandler(proxy, receiver, name);
// Irreversibly converts a proxy into a plain JSObject (or JSFunction for
// function proxies), preserving its identity hash across the in-place
// conversion so object identity-keyed maps still work.
3795 void JSProxy::Fix(Handle<JSProxy> proxy) {
3796 Isolate* isolate = proxy->GetIsolate();
3798 // Save identity hash.
3799 Handle<Object> hash(proxy->GetIdentityHash(), isolate);
3801 if (proxy->IsJSFunctionProxy()) {
3802 isolate->factory()->BecomeJSFunction(proxy);
3803 // Code will be set on the JavaScript side.
3805 isolate->factory()->BecomeJSObject(proxy);
3807 ASSERT(proxy->IsJSObject());
3809 // Inherit identity, if it was present.
3810 if (hash->IsSmi()) {
3811 JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
3812 Handle<Smi>::cast(hash));
// Looks up the trap function called |name| on the proxy's handler and
// invokes it with |argv|.  If the handler lacks the trap, falls back to
// |derived| (a default trap implementation); if that is null too, a
// "handler_trap_missing" TypeError is thrown.
3817 MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
3819 Handle<Object> derived,
3821 Handle<Object> argv[]) {
3822 Isolate* isolate = proxy->GetIsolate();
3823 Handle<Object> handler(proxy->handler(), isolate);
3825 Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
3826 Handle<Object> trap;
3827 ASSIGN_RETURN_ON_EXCEPTION(
3829 Object::GetPropertyOrElement(handler, trap_name),
3832 if (trap->IsUndefined()) {
3833 if (derived.is_null()) {
3834 Handle<Object> args[] = { handler, trap_name };
3835 Handle<Object> error = isolate->factory()->NewTypeError(
3836 "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
3837 return isolate->Throw<Object>(error);
3839 trap = Handle<Object>(derived);
// The trap is called with the handler as receiver.
3842 return Execution::Call(isolate, trap, handler, argc, argv);
// Prepares |object| to adopt |map|: reconciles their elements kinds first
// (normalizing to dictionary elements or transitioning the object / the map
// as appropriate), then migrates the object to the resulting map.
3846 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
3847 ASSERT(object->map()->inobject_properties() == map->inobject_properties());
3848 ElementsKind obj_kind = object->map()->elements_kind();
3849 ElementsKind map_kind = map->elements_kind();
3850 if (map_kind != obj_kind) {
3851 ElementsKind to_kind = map_kind;
3852 if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
3853 IsDictionaryElementsKind(obj_kind)) {
3856 if (IsDictionaryElementsKind(to_kind)) {
3857 NormalizeElements(object);
3859 TransitionElementsKind(object, to_kind);
3861 map = Map::AsElementsKind(map, to_kind);
3863 JSObject::MigrateToMap(object, map);
// Migrates a deprecated-map instance to the most general existing map by
// generalizing field 0 to the least specific representation/type, then marks
// the resulting map as a migration target.
3867 void JSObject::MigrateInstance(Handle<JSObject> object) {
3868 // Converting any field to the most specific type will cause the
3869 // GeneralizeFieldRepresentation algorithm to create the most general existing
3870 // transition that matches the object. This achieves what is needed.
3871 Handle<Map> original_map(object->map());
3872 GeneralizeFieldRepresentation(
3873 object, 0, Representation::None(),
3874 HeapType::None(object->GetIsolate()),
3876 object->map()->set_migration_target(true);
3877 if (FLAG_trace_migration) {
3878 object->PrintInstanceMigration(stdout, *original_map, object->map());
// Best-effort migration: migrates |object| only if a current (non-deprecated)
// version of its map already exists.  Deoptimization is disallowed for the
// duration because callers rely on the map change not invalidating code.
3884 bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
3885 Isolate* isolate = object->GetIsolate();
3886 DisallowDeoptimization no_deoptimization(isolate);
3887 Handle<Map> original_map(object->map(), isolate);
3888 Handle<Map> new_map;
3889 if (!Map::CurrentMapForDeprecatedInternal(original_map).ToHandle(&new_map)) {
3892 JSObject::MigrateToMap(object, new_map);
3893 if (FLAG_trace_migration) {
3894 object->PrintInstanceMigration(stdout, *original_map, object->map());
// Stores |value| by following the map transition recorded in |lookup|.
// Falls back to a plain AddProperty when the transition target holds a
// CALLBACKS descriptor or its attributes do not match; otherwise
// generalizes the target's field representation if |value| does not fit,
// migrates the object to the transition map, and writes the field.
3900 MaybeHandle<Object> JSObject::SetPropertyUsingTransition(
3901 Handle<JSObject> object,
3902 LookupResult* lookup,
3904 Handle<Object> value,
3905 PropertyAttributes attributes) {
3906 Handle<Map> transition_map(lookup->GetTransitionTarget());
3907 int descriptor = transition_map->LastAdded();
3909 Handle<DescriptorArray> descriptors(transition_map->instance_descriptors());
3910 PropertyDetails details = descriptors->GetDetails(descriptor);
3912 if (details.type() == CALLBACKS || attributes != details.attributes()) {
3913 // AddProperty will either normalize the object, or create a new fast copy
3914 // of the map. If we get a fast copy of the map, all field representations
3915 // will be tagged since the transition is omitted.
3916 return JSObject::AddProperty(
3917 object, name, value, attributes, SLOPPY,
3918 JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED,
3919 JSReceiver::OMIT_EXTENSIBILITY_CHECK,
3920 JSObject::FORCE_TAGGED, FORCE_FIELD, OMIT_TRANSITION);
3923 // Keep the target CONSTANT if the same value is stored.
3924 // TODO(verwaest): Also support keeping the placeholder
3925 // (value->IsUninitialized) as constant.
3926 if (!lookup->CanHoldValue(value)) {
// The descriptor cannot represent |value|: widen its representation/type.
3927 Representation field_representation = value->OptimalRepresentation();
3928 Handle<HeapType> field_type = value->OptimalType(
3929 lookup->isolate(), field_representation);
3930 transition_map = Map::GeneralizeRepresentation(
3931 transition_map, descriptor,
3932 field_representation, field_type, FORCE_FIELD);
3935 JSObject::MigrateToNewProperty(object, transition_map, value);
// Migrates |object| to |map| and, if the newly added descriptor is a FIELD,
// writes |value| into that field.
3940 void JSObject::MigrateToNewProperty(Handle<JSObject> object,
3942 Handle<Object> value) {
3943 JSObject::MigrateToMap(object, map);
3944 if (map->GetLastDescriptorDetails().type() != FIELD) return;
3945 object->WriteToField(map->LastAdded(), *value);
// Writes |value| into the FIELD at |descriptor|.  For double-representation
// fields the value is stored into the existing HeapNumber box in place
// (uninitialized values leave the box untouched); otherwise a normal
// tagged store is performed.
3949 void JSObject::WriteToField(int descriptor, Object* value) {
3950 DisallowHeapAllocation no_gc;
3952 DescriptorArray* desc = map()->instance_descriptors();
3953 PropertyDetails details = desc->GetDetails(descriptor);
3955 ASSERT(details.type() == FIELD);
3957 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
3958 if (details.representation().IsDouble()) {
3959 // Nothing more to be done.
3960 if (value->IsUninitialized()) return;
3961 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
3962 box->set_value(value->Number());
3964 FastPropertyAtPut(index, value);
// Stores |value| into the field described by |lookup|, first generalizing
// the field's representation/type (converting CONSTANT descriptors to
// fields) when the current descriptor cannot hold |value|.
3969 static void SetPropertyToField(LookupResult* lookup,
3970 Handle<Object> value) {
3971 if (lookup->type() == CONSTANT || !lookup->CanHoldValue(value)) {
3972 Representation field_representation = value->OptimalRepresentation();
3973 Handle<HeapType> field_type = value->OptimalType(
3974 lookup->isolate(), field_representation);
3975 JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()),
3976 lookup->GetDescriptorIndex(),
3977 field_representation, field_type,
3980 lookup->holder()->WriteToField(lookup->GetDescriptorIndex(), *value);
// Rewrites an existing own property as a plain data field with the given
// |attributes| and stores |value|.  Objects with too many fast properties
// are normalized first; slow-mode objects take the dictionary path.  When
// attributes differ from the existing descriptor, a fully generalized map
// copy is created instead of an in-place generalization.
3984 static void ConvertAndSetOwnProperty(LookupResult* lookup,
3986 Handle<Object> value,
3987 PropertyAttributes attributes) {
3988 Handle<JSObject> object(lookup->holder());
3989 if (object->TooManyFastProperties()) {
3990 JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
3993 if (!object->HasFastProperties()) {
3994 ReplaceSlowProperty(object, name, value, attributes);
3998 int descriptor_index = lookup->GetDescriptorIndex();
3999 if (lookup->GetAttributes() == attributes) {
// Same attributes: widening the field to tagged/Any suffices.
4000 JSObject::GeneralizeFieldRepresentation(
4001 object, descriptor_index, Representation::Tagged(),
4002 HeapType::Any(lookup->isolate()), FORCE_FIELD);
4004 Handle<Map> old_map(object->map());
4005 Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map,
4006 descriptor_index, FORCE_FIELD, attributes, "attributes mismatch");
4007 JSObject::MigrateToMap(object, new_map);
4010 object->WriteToField(descriptor_index, *value);
// Stores |value| with |attributes|: a matching-attribute store goes through
// the plain field path (ignoring uninitialized values); otherwise the
// property is converted to carry the new attributes.
4014 static void SetPropertyToFieldWithAttributes(LookupResult* lookup,
4016 Handle<Object> value,
4017 PropertyAttributes attributes) {
4018 if (lookup->GetAttributes() == attributes) {
4019 if (value->IsUninitialized()) return;
4020 SetPropertyToField(lookup, value);
4022 ConvertAndSetOwnProperty(lookup, name, value, attributes);
// Generic named-property store on a JSObject.  Handles access checks, global
// proxy forwarding, prototype-chain setters, read-only checks (throwing a
// TypeError in strict mode), map transitions, and Object.observe change
// records.  Returns the stored value on success or an empty handle on
// exception.
// NOTE(review): several original lines (case labels of the switch, some
// closing braces, the |name| parameter) are elided in this excerpt.
4027 MaybeHandle<Object> JSObject::SetPropertyForResult(
4028 Handle<JSObject> object,
4029 LookupResult* lookup,
4031 Handle<Object> value,
4032 PropertyAttributes attributes,
4033 StrictMode strict_mode,
4034 StoreFromKeyed store_mode) {
4035 Isolate* isolate = object->GetIsolate();
4037 // Make sure that the top context does not change when doing callbacks or
4038 // interceptor calls.
4039 AssertNoContextChange ncc(isolate);
4041 // Optimization for 2-byte strings often used as keys in a decompression
4042 // dictionary. We internalize these short keys to avoid constantly
4043 // reallocating them.
4044 if (name->IsString() && !name->IsInternalizedString() &&
4045 Handle<String>::cast(name)->length() <= 2) {
4046 name = isolate->factory()->InternalizeString(Handle<String>::cast(name));
4049 // Check access rights if needed.
4050 if (object->IsAccessCheckNeeded()) {
4051 if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
4052 return SetPropertyWithFailedAccessCheck(object, lookup, name, value,
// A global proxy delegates the store to its (global object) prototype; a
// detached proxy (null prototype) silently succeeds and returns the value.
4057 if (object->IsJSGlobalProxy()) {
4058 Handle<Object> proto(object->GetPrototype(), isolate);
4059 if (proto->IsNull()) return value;
4060 ASSERT(proto->IsJSGlobalObject());
4061 return SetPropertyForResult(Handle<JSObject>::cast(proto),
4062 lookup, name, value, attributes, strict_mode, store_mode);
4065 ASSERT(!lookup->IsFound() || lookup->holder() == *object ||
4066 lookup->holder()->map()->is_hidden_prototype());
// No own property: give setters on the prototype chain a chance first.
4068 if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) {
4070 Handle<Object> result_object;
4071 ASSIGN_RETURN_ON_EXCEPTION(
4072 isolate, result_object,
4073 SetPropertyViaPrototypes(
4074 object, name, value, attributes, strict_mode, &done),
4076 if (done) return result_object;
4079 if (!lookup->IsFound()) {
4080 // Neither properties nor transitions found.
4082 object, name, value, attributes, strict_mode, store_mode);
// Writing to a read-only property throws in strict mode only.
4085 if (lookup->IsProperty() && lookup->IsReadOnly()) {
4086 if (strict_mode == STRICT) {
4087 Handle<Object> args[] = { name, object };
4088 Handle<Object> error = isolate->factory()->NewTypeError(
4089 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
4090 return isolate->Throw<Object>(error);
// Capture the old value up front so Object.observe can report it later.
4096 Handle<Object> old_value = isolate->factory()->the_hole_value();
4097 bool is_observed = object->map()->is_observed() &&
4098 *name != isolate->heap()->hidden_string();
4099 if (is_observed && lookup->IsDataProperty()) {
4100 old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
4103 // This is a real property that is not read-only, or it is a
4104 // transition or null descriptor and there are no setters in the prototypes.
4105 MaybeHandle<Object> maybe_result = value;
4106 if (lookup->IsTransition()) {
4107 maybe_result = SetPropertyUsingTransition(handle(lookup->holder()), lookup,
4108 name, value, attributes);
// Dispatch on the kind of the existing property (NORMAL/FIELD/CONSTANT/
// CALLBACKS/INTERCEPTOR — case labels elided in this excerpt).
4110 switch (lookup->type()) {
4112 SetNormalizedProperty(handle(lookup->holder()), lookup, value);
4115 SetPropertyToField(lookup, value);
4118 // Only replace the constant if necessary.
4119 if (*value == lookup->GetConstant()) return value;
4120 SetPropertyToField(lookup, value);
4123 Handle<Object> callback_object(lookup->GetCallbackObject(), isolate);
4124 return SetPropertyWithCallback(object, name, value,
4125 handle(lookup->holder()),
4126 callback_object, strict_mode);
4129 maybe_result = SetPropertyWithInterceptor(
4130 handle(lookup->holder()), name, value, attributes, strict_mode);
4138 Handle<Object> result;
4139 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
// Emit Object.observe change records: "add" for transitions, "update" when
// the stored data value actually changed.
4142 if (lookup->IsTransition()) {
4143 EnqueueChangeRecord(object, "add", name, old_value);
4145 LookupResult new_lookup(isolate);
4146 object->LookupOwn(name, &new_lookup, true);
4147 if (new_lookup.IsDataProperty()) {
4148 Handle<Object> new_value =
4149 Object::GetPropertyOrElement(object, name).ToHandleChecked();
4150 if (!new_value->SameValue(*old_value)) {
4151 EnqueueChangeRecord(object, "update", name, old_value);
4161 // Set a real own property, even if it is READ_ONLY. If the property is not
4162 // present, add it with attributes NONE. This code is an exact clone of
4163 // SetProperty, with the check for IsReadOnly and the check for a
4164 // callback setter removed. The two lines looking up the LookupResult
4165 // result are also added. If one of the functions is changed, the other
// should be changed in the same way.
// Defines or redefines an own property, bypassing read-only checks and
// prototype-chain setters (close clone of SetPropertyForResult — see the
// comment preceding this function).  Used for Object.defineProperty-style
// stores; also emits Object.observe "add"/"reconfigure"/"update" records.
// NOTE(review): switch case labels, some parameters and closing braces are
// elided in this excerpt.
4167 MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes(
4168 Handle<JSObject> object,
4170 Handle<Object> value,
4171 PropertyAttributes attributes,
4172 ValueType value_type,
4174 ExtensibilityCheck extensibility_check,
4175 StoreFromKeyed store_from_keyed,
4176 ExecutableAccessorInfoHandling handling) {
4177 Isolate* isolate = object->GetIsolate();
4179 // Make sure that the top context does not change when doing callbacks or
4180 // interceptor calls.
4181 AssertNoContextChange ncc(isolate);
4183 LookupResult lookup(isolate);
4184 object->LookupOwn(name, &lookup, true);
4185 if (!lookup.IsFound()) {
4186 object->map()->LookupTransition(*object, *name, &lookup);
4189 // Check access rights if needed.
4190 if (object->IsAccessCheckNeeded()) {
4191 if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
4192 return SetPropertyWithFailedAccessCheck(object, &lookup, name, value,
// Global proxies forward to their global-object prototype, as in
// SetPropertyForResult.
4197 if (object->IsJSGlobalProxy()) {
4198 Handle<Object> proto(object->GetPrototype(), isolate);
4199 if (proto->IsNull()) return value;
4200 ASSERT(proto->IsJSGlobalObject());
4201 return SetOwnPropertyIgnoreAttributes(Handle<JSObject>::cast(proto),
4202 name, value, attributes, value_type, mode, extensibility_check);
// Interceptors and callback descriptors are resolved down to the underlying
// real named property before deciding how to store.
4205 if (lookup.IsInterceptor() ||
4206 (lookup.IsDescriptorOrDictionary() && lookup.type() == CALLBACKS)) {
4207 object->LookupOwnRealNamedProperty(name, &lookup);
4210 // Check for accessor in prototype chain removed here in clone.
4211 if (!lookup.IsFound()) {
4212 object->map()->LookupTransition(*object, *name, &lookup);
4213 TransitionFlag flag = lookup.IsFound()
4214 ? OMIT_TRANSITION : INSERT_TRANSITION;
4215 // Neither properties nor transitions found.
4216 return AddProperty(object, name, value, attributes, SLOPPY,
4217 store_from_keyed, extensibility_check, value_type, mode, flag);
// Record previous value/attributes so observers can be notified accurately.
4220 Handle<Object> old_value = isolate->factory()->the_hole_value();
4221 PropertyAttributes old_attributes = ABSENT;
4222 bool is_observed = object->map()->is_observed() &&
4223 *name != isolate->heap()->hidden_string();
4224 if (is_observed && lookup.IsProperty()) {
4225 if (lookup.IsDataProperty()) {
4226 old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
4228 old_attributes = lookup.GetAttributes();
4231 bool executed_set_prototype = false;
4233 // Check of IsReadOnly removed from here in clone.
4234 if (lookup.IsTransition()) {
4235 Handle<Object> result;
4236 ASSIGN_RETURN_ON_EXCEPTION(
4238 SetPropertyUsingTransition(
4239 handle(lookup.holder()), &lookup, name, value, attributes),
// Dispatch on the existing property kind (labels elided in this excerpt).
4242 switch (lookup.type()) {
4244 ReplaceSlowProperty(object, name, value, attributes);
4247 SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4250 // Only replace the constant if necessary.
4251 if (lookup.GetAttributes() != attributes ||
4252 *value != lookup.GetConstant()) {
4253 SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4258 Handle<Object> callback(lookup.GetCallbackObject(), isolate);
4259 if (callback->IsExecutableAccessorInfo() &&
4260 handling == DONT_FORCE_FIELD) {
4261 Handle<Object> result;
4262 ASSIGN_RETURN_ON_EXCEPTION(
4264 JSObject::SetPropertyWithCallback(object,
4267 handle(lookup.holder()),
// Attributes differ: clone the accessor info with the new attributes rather
// than introducing a setter lookup; READ_ONLY simply drops the setter.
4272 if (attributes != lookup.GetAttributes()) {
4273 Handle<ExecutableAccessorInfo> new_data =
4274 Accessors::CloneAccessor(
4275 isolate, Handle<ExecutableAccessorInfo>::cast(callback));
4276 new_data->set_property_attributes(attributes);
4277 if (attributes & READ_ONLY) {
4278 // This way we don't have to introduce a lookup to the setter,
4279 // simply make it unavailable to reflect the attributes.
4280 new_data->clear_setter();
4283 SetPropertyCallback(object, name, new_data, attributes);
4286 // If we are setting the prototype of a function and are observed,
4287 // don't send change records because the prototype handles that
4289 executed_set_prototype = object->IsJSFunction() &&
4290 String::Equals(isolate->factory()->prototype_string(),
4291 Handle<String>::cast(name)) &&
4292 Handle<JSFunction>::cast(object)->should_have_prototype();
4295 ConvertAndSetOwnProperty(&lookup, name, value, attributes);
// Emit observation records reflecting what actually happened: add,
// reconfigure (attribute change), or update (value change).
4306 if (is_observed && !executed_set_prototype) {
4307 if (lookup.IsTransition()) {
4308 EnqueueChangeRecord(object, "add", name, old_value);
4309 } else if (old_value->IsTheHole()) {
4310 EnqueueChangeRecord(object, "reconfigure", name, old_value);
4312 LookupResult new_lookup(isolate);
4313 object->LookupOwn(name, &new_lookup, true);
4314 bool value_changed = false;
4315 if (new_lookup.IsDataProperty()) {
4316 Handle<Object> new_value =
4317 Object::GetPropertyOrElement(object, name).ToHandleChecked();
4318 value_changed = !old_value->SameValue(*new_value);
4320 if (new_lookup.GetAttributes() != old_attributes) {
4321 if (!value_changed) old_value = isolate->factory()->the_hole_value();
4322 EnqueueChangeRecord(object, "reconfigure", name, old_value);
4323 } else if (value_changed) {
4324 EnqueueChangeRecord(object, "update", name, old_value);
// Queries property attributes through a named interceptor.  Prefers the
// interceptor's query callback (which returns attributes directly as an
// Int32); otherwise falls back to the getter callback, reporting DONT_ENUM
// for any property it yields.  Returns an empty Maybe when the interceptor
// provides no answer.
4333 Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor(
4334 Handle<JSObject> holder,
4335 Handle<Object> receiver,
4336 Handle<Name> name) {
4337 // TODO(rossberg): Support symbols in the API.
4338 if (name->IsSymbol()) return Maybe<PropertyAttributes>(ABSENT);
4340 Isolate* isolate = holder->GetIsolate();
4341 HandleScope scope(isolate);
4343 // Make sure that the top context does not change when doing
4344 // callbacks or interceptor calls.
4345 AssertNoContextChange ncc(isolate);
4347 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
4348 PropertyCallbackArguments args(
4349 isolate, interceptor->data(), *receiver, *holder);
4350 if (!interceptor->query()->IsUndefined()) {
4351 v8::NamedPropertyQueryCallback query =
4352 v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
4354 ApiNamedPropertyAccess("interceptor-named-has", *holder, *name));
4355 v8::Handle<v8::Integer> result =
4356 args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
4357 if (!result.IsEmpty()) {
4358 ASSERT(result->IsInt32());
4359 return Maybe<PropertyAttributes>(
4360 static_cast<PropertyAttributes>(result->Int32Value()));
4362 } else if (!interceptor->getter()->IsUndefined()) {
4363 v8::NamedPropertyGetterCallback getter =
4364 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
4366 ApiNamedPropertyAccess("interceptor-named-get-has", *holder, *name));
4367 v8::Handle<v8::Value> result =
4368 args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
// Getter produced a value: the property exists but the interceptor cannot
// express attributes, so report DONT_ENUM.
4369 if (!result.IsEmpty()) return Maybe<PropertyAttributes>(DONT_ENUM);
// No answer from the interceptor at all.
4371 return Maybe<PropertyAttributes>();
// Returns the attributes of an own property, dispatching array-index names
// to the element path and everything else through a LookupIterator limited
// to own properties.
// NOTE(review): the local index declaration line is elided in this excerpt.
4375 PropertyAttributes JSReceiver::GetOwnPropertyAttributes(
4376 Handle<JSReceiver> object, Handle<Name> name) {
4377 // Check whether the name is an array index.
4379 if (object->IsJSObject() && name->AsArrayIndex(&index)) {
4380 return GetOwnElementAttribute(object, index);
4382 LookupIterator it(object, name, LookupIterator::CHECK_OWN);
4383 return GetPropertyAttributes(&it);
// Walks a LookupIterator and returns the attributes of the first property it
// resolves, handling proxies, interceptors, access checks and ordinary
// properties along the way.
4387 PropertyAttributes JSReceiver::GetPropertyAttributes(LookupIterator* it) {
4388 for (; it->IsFound(); it->Next()) {
4389 switch (it->state()) {
4390 case LookupIterator::NOT_FOUND:
4392 case LookupIterator::JSPROXY:
4393 return JSProxy::GetPropertyAttributesWithHandler(
4394 it->GetJSProxy(), it->GetReceiver(), it->name());
4395 case LookupIterator::INTERCEPTOR: {
4396 Maybe<PropertyAttributes> result =
4397 JSObject::GetPropertyAttributesWithInterceptor(
4398 it->GetHolder(), it->GetReceiver(), it->name());
// Interceptor may decline to answer; in that case keep iterating.
4399 if (result.has_value) return result.value;
4402 case LookupIterator::ACCESS_CHECK:
4403 if (it->HasAccess(v8::ACCESS_HAS)) break;
4404 return JSObject::GetPropertyAttributesWithFailedAccessCheck(it);
4405 case LookupIterator::PROPERTY:
4406 if (it->HasProperty()) return it->property_details().attributes();
// Returns the attributes of element |index|, performing the access check,
// forwarding through a global proxy, and consulting an indexed interceptor
// (except during bootstrapping) before falling back to the plain lookup.
4414 PropertyAttributes JSObject::GetElementAttributeWithReceiver(
4415 Handle<JSObject> object,
4416 Handle<JSReceiver> receiver,
4418 bool check_prototype) {
4419 Isolate* isolate = object->GetIsolate();
4421 // Check access rights if needed.
4422 if (object->IsAccessCheckNeeded()) {
4423 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
4424 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
4425 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// Detached global proxy (null prototype) reports ABSENT; otherwise forward
// to the global object.
4430 if (object->IsJSGlobalProxy()) {
4431 Handle<Object> proto(object->GetPrototype(), isolate);
4432 if (proto->IsNull()) return ABSENT;
4433 ASSERT(proto->IsJSGlobalObject());
4434 return JSObject::GetElementAttributeWithReceiver(
4435 Handle<JSObject>::cast(proto), receiver, index, check_prototype);
4438 // Check for lookup interceptor except when bootstrapping.
4439 if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4440 return JSObject::GetElementAttributeWithInterceptor(
4441 object, receiver, index, check_prototype);
4444 return GetElementAttributeWithoutInterceptor(
4445 object, receiver, index, check_prototype);
// Indexed analogue of GetPropertyAttributesWithInterceptor: uses the indexed
// interceptor's query callback for exact attributes, or its getter callback
// (reporting NONE for any element it yields), before falling back to the
// non-interceptor lookup.
4449 PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
4450 Handle<JSObject> object,
4451 Handle<JSReceiver> receiver,
4453 bool check_prototype) {
4454 Isolate* isolate = object->GetIsolate();
4455 HandleScope scope(isolate);
4457 // Make sure that the top context does not change when doing
4458 // callbacks or interceptor calls.
4459 AssertNoContextChange ncc(isolate);
4461 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4462 PropertyCallbackArguments args(
4463 isolate, interceptor->data(), *receiver, *object);
4464 if (!interceptor->query()->IsUndefined()) {
4465 v8::IndexedPropertyQueryCallback query =
4466 v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
4468 ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4469 v8::Handle<v8::Integer> result = args.Call(query, index);
4470 if (!result.IsEmpty())
4471 return static_cast<PropertyAttributes>(result->Int32Value());
4472 } else if (!interceptor->getter()->IsUndefined()) {
4473 v8::IndexedPropertyGetterCallback getter =
4474 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4476 ApiIndexedPropertyAccess(
4477 "interceptor-indexed-get-has", *object, index));
4478 v8::Handle<v8::Value> result = args.Call(getter, index);
4479 if (!result.IsEmpty()) return NONE;
// Interceptor gave no answer: do the regular element lookup.
4482 return GetElementAttributeWithoutInterceptor(
4483 object, receiver, index, check_prototype);
// Plain element-attribute lookup: asks the elements accessor first, then
// handles String-object character elements (READ_ONLY|DONT_DELETE), and
// finally, if requested, continues up the prototype chain (including the
// JSProxy [[GetOwnProperty]] simulation).
4487 PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
4488 Handle<JSObject> object,
4489 Handle<JSReceiver> receiver,
4491 bool check_prototype) {
4492 PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4493 receiver, object, index);
4494 if (attr != ABSENT) return attr;
4496 // Handle [] on String objects.
4497 if (object->IsStringObjectWithCharacterAt(index)) {
4498 return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
4501 if (!check_prototype) return ABSENT;
4503 Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
4504 if (proto->IsJSProxy()) {
4505 // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4506 return JSProxy::GetElementAttributeWithHandler(
4507 Handle<JSProxy>::cast(proto), receiver, index);
4509 if (proto->IsNull()) return ABSENT;
4510 return GetElementAttributeWithReceiver(
4511 Handle<JSObject>::cast(proto), receiver, index, true);
// Allocates a fresh normalized-map cache backed by a tenured fixed array of
// kEntries slots.
4515 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
4516 Handle<FixedArray> array(
4517 isolate->factory()->NewFixedArray(kEntries, TENURED));
4518 return Handle<NormalizedMapCache>::cast(array);
// Cache lookup: returns the cached normalized map for |fast_map| if the slot
// holds a map equivalent under the given normalization |mode|; otherwise an
// empty MaybeHandle (cache miss).
4522 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
4523 PropertyNormalizationMode mode) {
4524 DisallowHeapAllocation no_gc;
4525 Object* value = FixedArray::get(GetIndex(fast_map));
4526 if (!value->IsMap() ||
4527 !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
4528 return MaybeHandle<Map>();
4530 return handle(Map::cast(value));
// Stores |normalized_map| (must already be in dictionary mode) in the cache
// slot derived from |fast_map|.
4534 void NormalizedMapCache::Set(Handle<Map> fast_map,
4535 Handle<Map> normalized_map) {
4536 DisallowHeapAllocation no_gc;
4537 ASSERT(normalized_map->is_dictionary_map());
4538 FixedArray::set(GetIndex(fast_map), *normalized_map);
// Empties every cache entry.  NOTE(review): the loop body is elided in this
// excerpt.
4542 void NormalizedMapCache::Clear() {
4543 int entries = length();
4544 for (int i = 0; i != entries; i++) {
// Convenience wrapper: records |code| in the code cache of |object|'s map.
// NOTE(review): the |name| parameter line is elided in this excerpt.
4550 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4552 Handle<Code> code) {
4553 Handle<Map> map(object->map());
4554 Map::UpdateCodeCache(map, name, code);
// Converts a fast-properties object to dictionary (slow) mode: copies every
// own descriptor (constants, fields, callbacks) into a freshly allocated
// NameDictionary, shrinks the instance in place with a filler object, and
// installs the normalized map.  No-op if already normalized.
// NOTE(review): several case labels and closing braces are elided in this
// excerpt.
4558 void JSObject::NormalizeProperties(Handle<JSObject> object,
4559 PropertyNormalizationMode mode,
4560 int expected_additional_properties) {
4561 if (!object->HasFastProperties()) return;
4563 // The global object is always normalized.
4564 ASSERT(!object->IsGlobalObject());
4565 // JSGlobalProxy must never be normalized
4566 ASSERT(!object->IsJSGlobalProxy());
4568 Isolate* isolate = object->GetIsolate();
4569 HandleScope scope(isolate);
4570 Handle<Map> map(object->map());
4571 Handle<Map> new_map = Map::Normalize(map, mode);
4573 // Allocate new content.
4574 int real_size = map->NumberOfOwnDescriptors();
4575 int property_count = real_size;
4576 if (expected_additional_properties > 0) {
4577 property_count += expected_additional_properties;
4579 property_count += 2; // Make space for two more properties.
4581 Handle<NameDictionary> dictionary =
4582 NameDictionary::New(isolate, property_count);
// Copy each descriptor into the dictionary; enumeration order is preserved
// via the i + 1 enumeration index.
4584 Handle<DescriptorArray> descs(map->instance_descriptors());
4585 for (int i = 0; i < real_size; i++) {
4586 PropertyDetails details = descs->GetDetails(i);
4587 switch (details.type()) {
4589 Handle<Name> key(descs->GetKey(i));
4590 Handle<Object> value(descs->GetConstant(i), isolate);
4591 PropertyDetails d = PropertyDetails(
4592 details.attributes(), NORMAL, i + 1);
4593 dictionary = NameDictionary::Add(dictionary, key, value, d);
4597 Handle<Name> key(descs->GetKey(i));
4598 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
4599 Handle<Object> value(
4600 object->RawFastPropertyAt(index), isolate);
4602 PropertyDetails(details.attributes(), NORMAL, i + 1);
4603 dictionary = NameDictionary::Add(dictionary, key, value, d);
4607 Handle<Name> key(descs->GetKey(i));
4608 Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4609 PropertyDetails d = PropertyDetails(
4610 details.attributes(), CALLBACKS, i + 1);
4611 dictionary = NameDictionary::Add(dictionary, key, value, d);
4624 // Copy the next enumeration index from instance descriptor.
4625 dictionary->SetNextEnumerationIndex(real_size + 1);
4627 // From here on we cannot fail and we shouldn't GC anymore.
4628 DisallowHeapAllocation no_allocation;
4630 // Resize the object in the heap if necessary.
4631 int new_instance_size = new_map->instance_size();
4632 int instance_size_delta = map->instance_size() - new_instance_size;
4633 ASSERT(instance_size_delta >= 0);
4634 Heap* heap = isolate->heap();
4635 heap->CreateFillerObjectAt(object->address() + new_instance_size,
4636 instance_size_delta);
4637 heap->AdjustLiveBytes(object->address(),
4638 -instance_size_delta,
4639 Heap::FROM_MUTATOR);
4641 // We are storing the new map using release store after creating a filler for
4642 // the left-over space to avoid races with the sweeper thread.
4643 object->synchronized_set_map(*new_map);
4645 object->set_properties(*dictionary);
4647 isolate->counters()->props_to_dictionary()->Increment();
4650 if (FLAG_trace_normalization) {
4651 PrintF("Object properties have been normalized:\n");
// Converts a dictionary-mode object back to fast properties: builds a new
// descriptor array (functions become constants, plain values become tagged
// fields, accessors become callbacks), allocates out-of-object storage for
// fields that do not fit in-object, and installs the new map.  Bails out if
// the object is already fast or has too many properties for descriptors.
// NOTE(review): a few declaration lines (e.g. Handle<Name> key) are elided
// in this excerpt.
4658 void JSObject::TransformToFastProperties(Handle<JSObject> object,
4659 int unused_property_fields) {
4660 if (object->HasFastProperties()) return;
4661 ASSERT(!object->IsGlobalObject());
4662 Isolate* isolate = object->GetIsolate();
4663 Factory* factory = isolate->factory();
4664 Handle<NameDictionary> dictionary(object->property_dictionary());
4666 // Make sure we preserve dictionary representation if there are too many
4668 int number_of_elements = dictionary->NumberOfElements();
4669 if (number_of_elements > kMaxNumberOfDescriptors) return;
4671 if (number_of_elements != dictionary->NextEnumerationIndex()) {
4672 NameDictionary::DoGenerateNewEnumerationIndices(dictionary);
4675 int instance_descriptor_length = 0;
4676 int number_of_fields = 0;
4678 // Compute the length of the instance descriptor.
4679 int capacity = dictionary->Capacity();
4680 for (int i = 0; i < capacity; i++) {
4681 Object* k = dictionary->KeyAt(i);
4682 if (dictionary->IsKey(k)) {
4683 Object* value = dictionary->ValueAt(i);
4684 PropertyType type = dictionary->DetailsAt(i).type();
4685 ASSERT(type != FIELD);
4686 instance_descriptor_length++;
// Non-function NORMAL entries will need a real field slot; functions are
// stored as constants instead.
4687 if (type == NORMAL && !value->IsJSFunction()) {
4688 number_of_fields += 1;
4693 int inobject_props = object->map()->inobject_properties();
4695 // Allocate new map.
4696 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
4697 new_map->set_dictionary_map(false);
// Fast path: no properties at all — just swap in the empty layout.
4699 if (instance_descriptor_length == 0) {
4700 DisallowHeapAllocation no_gc;
4701 ASSERT_LE(unused_property_fields, inobject_props);
4702 // Transform the object.
4703 new_map->set_unused_property_fields(inobject_props);
4704 object->set_map(*new_map);
4705 object->set_properties(isolate->heap()->empty_fixed_array());
4706 // Check that it really works.
4707 ASSERT(object->HasFastProperties());
4711 // Allocate the instance descriptor.
4712 Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
4713 isolate, instance_descriptor_length);
4715 int number_of_allocated_fields =
4716 number_of_fields + unused_property_fields - inobject_props;
4717 if (number_of_allocated_fields < 0) {
4718 // There is enough inobject space for all fields (including unused).
4719 number_of_allocated_fields = 0;
4720 unused_property_fields = inobject_props - number_of_fields;
4723 // Allocate the fixed array for the fields.
4724 Handle<FixedArray> fields = factory->NewFixedArray(
4725 number_of_allocated_fields);
4727 // Fill in the instance descriptor and the fields.
4728 int current_offset = 0;
4729 for (int i = 0; i < capacity; i++) {
4730 Object* k = dictionary->KeyAt(i);
4731 if (dictionary->IsKey(k)) {
4732 Object* value = dictionary->ValueAt(i);
4734 if (k->IsSymbol()) {
4735 key = handle(Symbol::cast(k));
4737 // Ensure the key is a unique name before writing into the
4738 // instance descriptor.
4739 key = factory->InternalizeString(handle(String::cast(k)));
4742 PropertyDetails details = dictionary->DetailsAt(i);
4743 int enumeration_index = details.dictionary_index();
4744 PropertyType type = details.type();
4746 if (value->IsJSFunction()) {
4747 ConstantDescriptor d(key,
4748 handle(value, isolate),
4749 details.attributes());
4750 descriptors->Set(enumeration_index - 1, &d);
4751 } else if (type == NORMAL) {
// Fields go in-object while space remains, then spill into the separate
// fixed array at offset (current_offset - inobject_props).
4752 if (current_offset < inobject_props) {
4753 object->InObjectPropertyAtPut(current_offset,
4755 UPDATE_WRITE_BARRIER);
4757 int offset = current_offset - inobject_props;
4758 fields->set(offset, value);
4760 FieldDescriptor d(key,
4762 details.attributes(),
4763 // TODO(verwaest): value->OptimalRepresentation();
4764 Representation::Tagged());
4765 descriptors->Set(enumeration_index - 1, &d);
4766 } else if (type == CALLBACKS) {
4767 CallbacksDescriptor d(key,
4768 handle(value, isolate),
4769 details.attributes());
4770 descriptors->Set(enumeration_index - 1, &d);
4776 ASSERT(current_offset == number_of_fields);
4778 descriptors->Sort();
4780 DisallowHeapAllocation no_gc;
4781 new_map->InitializeDescriptors(*descriptors);
4782 new_map->set_unused_property_fields(unused_property_fields);
4784 // Transform the object.
4785 object->set_map(*new_map);
4787 object->set_properties(*fields);
4788 ASSERT(object->IsJSObject());
4790 // Check that it really works.
4791 ASSERT(object->HasFastProperties());
// Replaces the object's elements backing store with the map's initial
// (empty) elements.  Sloppy-arguments objects must not take this path.
4795 void JSObject::ResetElements(Handle<JSObject> object) {
4796 Heap* heap = object->GetIsolate()->heap();
4797 CHECK(object->map() != heap->sloppy_arguments_elements_map());
4798 object->set_elements(object->map()->GetInitialElements());
// Copies the first |length| elements of a fast backing store (FixedArray or
// FixedDoubleArray) into |dictionary|, skipping holes; doubles are boxed as
// HeapNumbers.  Returns the (possibly reallocated) dictionary.
// NOTE(review): the |length| parameter line is elided in this excerpt.
4802 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
4803 Handle<FixedArrayBase> array,
4805 Handle<SeededNumberDictionary> dictionary) {
4806 Isolate* isolate = array->GetIsolate();
4807 Factory* factory = isolate->factory();
4808 bool has_double_elements = array->IsFixedDoubleArray();
4809 for (int i = 0; i < length; i++) {
4810 Handle<Object> value;
4811 if (has_double_elements) {
4812 Handle<FixedDoubleArray> double_array =
4813 Handle<FixedDoubleArray>::cast(array);
4814 if (double_array->is_the_hole(i)) {
4815 value = factory->the_hole_value();
4817 value = factory->NewHeapNumber(double_array->get_scalar(i));
4820 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
// Holes are not stored in the dictionary — only present elements.
4822 if (!value->IsTheHole()) {
4823 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4825 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
// Converts the object's fast elements into a SeededNumberDictionary backing
// store (dictionary elements mode), sized from the current usage, and
// installs the DICTIONARY_ELEMENTS transition map.  Returns the dictionary
// (or the existing one if elements are already dictionary-mode).  Not valid
// for external/typed arrays.
4832 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4833 Handle<JSObject> object) {
4834 ASSERT(!object->HasExternalArrayElements() &&
4835 !object->HasFixedTypedArrayElements());
4836 Isolate* isolate = object->GetIsolate();
4838 // Find the backing store.
4839 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
4841 (array->map() == isolate->heap()->sloppy_arguments_elements_map());
// For sloppy-arguments objects the real store is slot 1 of the wrapper.
4843 array = handle(FixedArrayBase::cast(
4844 Handle<FixedArray>::cast(array)->get(1)));
4846 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
4848 ASSERT(object->HasFastSmiOrObjectElements() ||
4849 object->HasFastDoubleElements() ||
4850 object->HasFastArgumentsElements());
4851 // Compute the effective length and allocate a new backing store.
4852 int length = object->IsJSArray()
4853 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
4855 int old_capacity = 0;
4856 int used_elements = 0;
4857 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
4858 Handle<SeededNumberDictionary> dictionary =
4859 SeededNumberDictionary::New(isolate, used_elements);
4861 dictionary = CopyFastElementsToDictionary(array, length, dictionary);
4863 // Switch to using the dictionary as the backing storage for elements.
4865 FixedArray::cast(object->elements())->set(1, *dictionary);
4867 // Set the new map first to satify the elements type assert in
4869 Handle<Map> new_map =
4870 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
4872 JSObject::MigrateToMap(object, new_map);
4873 object->set_elements(*dictionary);
4876 isolate->counters()->elements_to_dictionary()->Increment();
4879 if (FLAG_trace_normalization) {
4880 PrintF("Object elements have been normalized:\n");
4885 ASSERT(object->HasDictionaryElements() ||
4886 object->HasDictionaryArgumentsElements());
// Produces a random non-zero identity hash that fits in a Smi.  Retries on a
// zero draw (bounded attempts), then forces 1 as a last resort so callers
// can use 0 as "no hash".
// NOTE(review): the declaration of the attempt counter and hash variable is
// elided in this excerpt.
4891 static Smi* GenerateIdentityHash(Isolate* isolate) {
4895 // Generate a random 32-bit hash value but limit range to fit
4897 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
4899 } while (hash_value == 0 && attempts < 30);
4900 hash_value = hash_value != 0 ? hash_value : 1; // never return 0
4902 return Smi::FromInt(hash_value);
// Stores |hash| as the object's identity hash under the reserved hidden
// identity-hash key.  Not valid for global proxies (they keep the hash
// inline).
4906 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
4907 ASSERT(!object->IsJSGlobalProxy());
4908 Isolate* isolate = object->GetIsolate();
4909 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// Shared helper for proxy-like objects that store their identity hash in a
// dedicated |hash| field: returns the existing Smi hash or generates and
// caches a new one.
4913 template<typename ProxyType>
4914 static Handle<Smi> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) {
4915 Isolate* isolate = proxy->GetIsolate();
4917 Handle<Object> maybe_hash(proxy->hash(), isolate);
4918 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4920 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4921 proxy->set_hash(*hash);
// Reads the identity hash without allocating: global proxies keep it in
// their hash field, other objects under the hidden identity-hash property.
// Returns undefined when no hash has been assigned yet.
4926 Object* JSObject::GetIdentityHash() {
4927 DisallowHeapAllocation no_gc;
4928 Isolate* isolate = GetIsolate();
4929 if (IsJSGlobalProxy()) {
4930 return JSGlobalProxy::cast(this)->hash();
4932 Object* stored_value =
4933 GetHiddenProperty(isolate->factory()->identity_hash_string());
4934 return stored_value->IsSmi()
4936 : isolate->heap()->undefined_value();
// Returns the object's identity hash, generating and storing one on first
// use.  Global proxies delegate to the inline-field helper.
4940 Handle<Smi> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
4941 if (object->IsJSGlobalProxy()) {
4942 return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object));
4945 Isolate* isolate = object->GetIsolate();
4947 Handle<Object> maybe_hash(object->GetIdentityHash(), isolate);
4948 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4950 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4951 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// JSProxy keeps its identity hash directly in its hash field.
4956 Object* JSProxy::GetIdentityHash() {
4957 return this->hash();
// Lazily creates the proxy's identity hash via the shared inline-field
// helper.
4961 Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
4962 return GetOrCreateIdentityHashHelper(proxy);
// Looks up a hidden property by unique name without allocating.  Global
// proxies forward to their global-object prototype (the-hole if detached).
// An inline Smi in the hidden-properties slot represents only the identity
// hash; otherwise the slot holds an ObjectHashTable that is consulted.
// Returns the-hole when the property is absent.
4966 Object* JSObject::GetHiddenProperty(Handle<Name> key) {
4967 DisallowHeapAllocation no_gc;
4968 ASSERT(key->IsUniqueName());
4969 if (IsJSGlobalProxy()) {
4970 // JSGlobalProxies store their hash internally.
4971 ASSERT(*key != GetHeap()->identity_hash_string());
4972 // For a proxy, use the prototype as target object.
4973 Object* proxy_parent = GetPrototype();
4974 // If the proxy is detached, return undefined.
4975 if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
4976 ASSERT(proxy_parent->IsJSGlobalObject());
4977 return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
4979 ASSERT(!IsJSGlobalProxy());
4980 Object* inline_value = GetHiddenPropertiesHashTable();
4982 if (inline_value->IsSmi()) {
4983 // Handle inline-stored identity hash.
4984 if (*key == GetHeap()->identity_hash_string()) {
4985 return inline_value;
4987 return GetHeap()->the_hole_value();
4991 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4993 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4994 Object* entry = hashtable->Lookup(key);
// Stores a hidden property under a unique name.  Global proxies forward to
// their global-object prototype (undefined if detached).  The identity hash
// is stored inline as a Smi while no hash table exists; any other key forces
// creation of (or insertion into) the hidden ObjectHashTable, which is
// written back if Put reallocated it.
// NOTE(review): the |key| parameter line is elided in this excerpt.
4999 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
5001 Handle<Object> value) {
5002 Isolate* isolate = object->GetIsolate();
5004 ASSERT(key->IsUniqueName());
5005 if (object->IsJSGlobalProxy()) {
5006 // JSGlobalProxies store their hash internally.
5007 ASSERT(*key != *isolate->factory()->identity_hash_string());
5008 // For a proxy, use the prototype as target object.
5009 Handle<Object> proxy_parent(object->GetPrototype(), isolate);
5010 // If the proxy is detached, return undefined.
5011 if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
5012 ASSERT(proxy_parent->IsJSGlobalObject());
5013 return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
5015 ASSERT(!object->IsJSGlobalProxy());
5017 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
5019 // If there is no backing store yet, store the identity hash inline.
5020 if (value->IsSmi() &&
5021 *key == *isolate->factory()->identity_hash_string() &&
5022 (inline_value->IsUndefined() || inline_value->IsSmi())) {
5023 return JSObject::SetHiddenPropertiesHashTable(object, value);
5026 Handle<ObjectHashTable> hashtable =
5027 GetOrCreateHiddenPropertiesHashtable(object);
5029 // If it was found, check if the key is already in the dictionary.
5030 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
5032 if (*new_table != *hashtable) {
5033 // If adding the key expanded the dictionary (i.e., Add returned a new
5034 // dictionary), store it back to the object.
5035 SetHiddenPropertiesHashTable(object, new_table);
5038 // Return this to mark success.
// Removes a hidden property by unique name.  Global proxies forward to their
// prototype; inline-stored identity hashes and missing tables make this a
// no-op (inline hashes are never deleted).
5043 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
5044 Isolate* isolate = object->GetIsolate();
5045 ASSERT(key->IsUniqueName());
5047 if (object->IsJSGlobalProxy()) {
5048 Handle<Object> proto(object->GetPrototype(), isolate);
5049 if (proto->IsNull()) return;
5050 ASSERT(proto->IsJSGlobalObject());
5051 return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
5054 Object* inline_value = object->GetHiddenPropertiesHashTable();
5056 // We never delete (inline-stored) identity hashes.
5057 ASSERT(*key != *isolate->factory()->identity_hash_string());
5058 if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
5060 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
5061 bool was_present = false;
5062 ObjectHashTable::Remove(hashtable, key, &was_present);
5066 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
5067 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
5068 LookupIterator it(object, hidden, LookupIterator::CHECK_OWN_REAL);
5069 return GetPropertyAttributes(&it) != ABSENT;
5073 Object* JSObject::GetHiddenPropertiesHashTable() {
5074 ASSERT(!IsJSGlobalProxy());
5075 if (HasFastProperties()) {
5076 // If the object has fast properties, check whether the first slot
5077 // in the descriptor array matches the hidden string. Since the
5078 // hidden strings hash code is zero (and no other name has hash
5079 // code zero) it will always occupy the first entry if present.
5080 DescriptorArray* descriptors = this->map()->instance_descriptors();
5081 if (descriptors->number_of_descriptors() > 0) {
5082 int sorted_index = descriptors->GetSortedKeyIndex(0);
5083 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
5084 sorted_index < map()->NumberOfOwnDescriptors()) {
5085 ASSERT(descriptors->GetType(sorted_index) == FIELD);
5086 ASSERT(descriptors->GetDetails(sorted_index).representation().
5087 IsCompatibleForLoad(Representation::Tagged()));
5088 FieldIndex index = FieldIndex::ForDescriptor(this->map(),
5090 return this->RawFastPropertyAt(index);
5092 return GetHeap()->undefined_value();
5095 return GetHeap()->undefined_value();
5098 Isolate* isolate = GetIsolate();
5099 LookupResult result(isolate);
5100 LookupOwnRealNamedProperty(isolate->factory()->hidden_string(), &result);
5101 if (result.IsFound()) {
5102 ASSERT(result.IsNormal());
5103 ASSERT(result.holder() == this);
5104 Object* value = GetNormalizedProperty(&result);
5105 if (!value->IsTheHole()) return value;
5107 return GetHeap()->undefined_value();
5111 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
5112 Handle<JSObject> object) {
5113 Isolate* isolate = object->GetIsolate();
5115 static const int kInitialCapacity = 4;
5116 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
5117 if (inline_value->IsHashTable()) {
5118 return Handle<ObjectHashTable>::cast(inline_value);
5121 Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
5122 isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);
5124 if (inline_value->IsSmi()) {
5125 // We were storing the identity hash inline and now allocated an actual
5126 // dictionary. Put the identity hash into the new dictionary.
5127 hashtable = ObjectHashTable::Put(hashtable,
5128 isolate->factory()->identity_hash_string(),
5132 JSObject::SetOwnPropertyIgnoreAttributes(
5134 isolate->factory()->hidden_string(),
5137 OPTIMAL_REPRESENTATION,
5139 OMIT_EXTENSIBILITY_CHECK).Assert();
5145 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
5146 Handle<Object> value) {
5147 ASSERT(!object->IsJSGlobalProxy());
5149 Isolate* isolate = object->GetIsolate();
5151 // We can store the identity hash inline iff there is no backing store
5152 // for hidden properties yet.
5153 ASSERT(JSObject::HasHiddenProperties(object) != value->IsSmi());
5154 if (object->HasFastProperties()) {
5155 // If the object has fast properties, check whether the first slot
5156 // in the descriptor array matches the hidden string. Since the
5157 // hidden strings hash code is zero (and no other name has hash
5158 // code zero) it will always occupy the first entry if present.
5159 DescriptorArray* descriptors = object->map()->instance_descriptors();
5160 if (descriptors->number_of_descriptors() > 0) {
5161 int sorted_index = descriptors->GetSortedKeyIndex(0);
5162 if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
5163 && sorted_index < object->map()->NumberOfOwnDescriptors()) {
5164 object->WriteToField(sorted_index, *value);
5170 SetOwnPropertyIgnoreAttributes(object,
5171 isolate->factory()->hidden_string(),
5174 OPTIMAL_REPRESENTATION,
5176 OMIT_EXTENSIBILITY_CHECK).Assert();
5181 Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
5184 // Check own property, ignore interceptor.
5185 Isolate* isolate = object->GetIsolate();
5186 LookupResult result(isolate);
5187 object->LookupOwnRealNamedProperty(name, &result);
5188 if (!result.IsFound()) return isolate->factory()->true_value();
5190 // Normalize object if needed.
5191 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5193 return DeleteNormalizedProperty(object, name, mode);
5197 MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
5198 Handle<JSObject> object, Handle<Name> name) {
5199 Isolate* isolate = object->GetIsolate();
5201 // TODO(rossberg): Support symbols in the API.
5202 if (name->IsSymbol()) return isolate->factory()->false_value();
5204 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
5205 if (!interceptor->deleter()->IsUndefined()) {
5206 v8::NamedPropertyDeleterCallback deleter =
5207 v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
5209 ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
5210 PropertyCallbackArguments args(
5211 isolate, interceptor->data(), *object, *object);
5212 v8::Handle<v8::Boolean> result =
5213 args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
5214 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5215 if (!result.IsEmpty()) {
5216 ASSERT(result->IsBoolean());
5217 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5218 result_internal->VerifyApiCallResultType();
5219 // Rebox CustomArguments::kReturnValueOffset before returning.
5220 return handle(*result_internal, isolate);
5223 Handle<Object> result =
5224 DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
5229 MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
5230 Handle<JSObject> object,
5232 Isolate* isolate = object->GetIsolate();
5233 Factory* factory = isolate->factory();
5235 // Make sure that the top context does not change when doing
5236 // callbacks or interceptor calls.
5237 AssertNoContextChange ncc(isolate);
5239 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
5240 if (interceptor->deleter()->IsUndefined()) return factory->false_value();
5241 v8::IndexedPropertyDeleterCallback deleter =
5242 v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
5244 ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
5245 PropertyCallbackArguments args(
5246 isolate, interceptor->data(), *object, *object);
5247 v8::Handle<v8::Boolean> result = args.Call(deleter, index);
5248 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5249 if (!result.IsEmpty()) {
5250 ASSERT(result->IsBoolean());
5251 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5252 result_internal->VerifyApiCallResultType();
5253 // Rebox CustomArguments::kReturnValueOffset before returning.
5254 return handle(*result_internal, isolate);
5256 MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
5257 object, index, NORMAL_DELETION);
5258 return delete_result;
5262 MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
5265 Isolate* isolate = object->GetIsolate();
5266 Factory* factory = isolate->factory();
5268 // Check access rights if needed.
5269 if (object->IsAccessCheckNeeded() &&
5270 !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
5271 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
5272 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5273 return factory->false_value();
5276 if (object->IsStringObjectWithCharacterAt(index)) {
5277 if (mode == STRICT_DELETION) {
5278 // Deleting a non-configurable property in strict mode.
5279 Handle<Object> name = factory->NewNumberFromUint(index);
5280 Handle<Object> args[2] = { name, object };
5281 Handle<Object> error =
5282 factory->NewTypeError("strict_delete_property",
5283 HandleVector(args, 2));
5284 isolate->Throw(*error);
5285 return Handle<Object>();
5287 return factory->false_value();
5290 if (object->IsJSGlobalProxy()) {
5291 Handle<Object> proto(object->GetPrototype(), isolate);
5292 if (proto->IsNull()) return factory->false_value();
5293 ASSERT(proto->IsJSGlobalObject());
5294 return DeleteElement(Handle<JSObject>::cast(proto), index, mode);
5297 Handle<Object> old_value;
5298 bool should_enqueue_change_record = false;
5299 if (object->map()->is_observed()) {
5300 should_enqueue_change_record = HasOwnElement(object, index);
5301 if (should_enqueue_change_record) {
5302 if (!GetOwnElementAccessorPair(object, index).is_null()) {
5303 old_value = Handle<Object>::cast(factory->the_hole_value());
5305 old_value = Object::GetElement(
5306 isolate, object, index).ToHandleChecked();
5311 // Skip interceptor if forcing deletion.
5312 MaybeHandle<Object> maybe_result;
5313 if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
5314 maybe_result = DeleteElementWithInterceptor(object, index);
5316 maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
5318 Handle<Object> result;
5319 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
5321 if (should_enqueue_change_record && !HasOwnElement(object, index)) {
5322 Handle<String> name = factory->Uint32ToString(index);
5323 EnqueueChangeRecord(object, "delete", name, old_value);
5330 MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
5333 Isolate* isolate = object->GetIsolate();
5334 // ECMA-262, 3rd, 8.6.2.5
5335 ASSERT(name->IsName());
5337 // Check access rights if needed.
5338 if (object->IsAccessCheckNeeded() &&
5339 !isolate->MayNamedAccess(object, name, v8::ACCESS_DELETE)) {
5340 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
5341 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5342 return isolate->factory()->false_value();
5345 if (object->IsJSGlobalProxy()) {
5346 Object* proto = object->GetPrototype();
5347 if (proto->IsNull()) return isolate->factory()->false_value();
5348 ASSERT(proto->IsJSGlobalObject());
5349 return JSGlobalObject::DeleteProperty(
5350 handle(JSGlobalObject::cast(proto)), name, mode);
5354 if (name->AsArrayIndex(&index)) {
5355 return DeleteElement(object, index, mode);
5358 LookupResult lookup(isolate);
5359 object->LookupOwn(name, &lookup, true);
5360 if (!lookup.IsFound()) return isolate->factory()->true_value();
5361 // Ignore attributes if forcing a deletion.
5362 if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
5363 if (mode == STRICT_DELETION) {
5364 // Deleting a non-configurable property in strict mode.
5365 Handle<Object> args[2] = { name, object };
5366 Handle<Object> error = isolate->factory()->NewTypeError(
5367 "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
5368 isolate->Throw(*error);
5369 return Handle<Object>();
5371 return isolate->factory()->false_value();
5374 Handle<Object> old_value = isolate->factory()->the_hole_value();
5375 bool is_observed = object->map()->is_observed() &&
5376 *name != isolate->heap()->hidden_string();
5377 if (is_observed && lookup.IsDataProperty()) {
5378 old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
5380 Handle<Object> result;
5382 // Check for interceptor.
5383 if (lookup.IsInterceptor()) {
5384 // Skip interceptor if forcing a deletion.
5385 if (mode == FORCE_DELETION) {
5386 result = DeletePropertyPostInterceptor(object, name, mode);
5388 ASSIGN_RETURN_ON_EXCEPTION(
5390 DeletePropertyWithInterceptor(object, name),
5394 // Normalize object if needed.
5395 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5396 // Make sure the properties are normalized before removing the entry.
5397 result = DeleteNormalizedProperty(object, name, mode);
5400 if (is_observed && !HasOwnProperty(object, name)) {
5401 EnqueueChangeRecord(object, "delete", name, old_value);
5408 MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5411 if (object->IsJSProxy()) {
5412 return JSProxy::DeleteElementWithHandler(
5413 Handle<JSProxy>::cast(object), index, mode);
5415 return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
5419 MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5422 if (object->IsJSProxy()) {
5423 return JSProxy::DeletePropertyWithHandler(
5424 Handle<JSProxy>::cast(object), name, mode);
5426 return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
5430 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5433 ASSERT(IsFastObjectElementsKind(kind) ||
5434 kind == DICTIONARY_ELEMENTS);
5435 if (IsFastObjectElementsKind(kind)) {
5436 int length = IsJSArray()
5437 ? Smi::cast(JSArray::cast(this)->length())->value()
5438 : elements->length();
5439 for (int i = 0; i < length; ++i) {
5440 Object* element = elements->get(i);
5441 if (!element->IsTheHole() && element == object) return true;
5445 SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5446 if (!key->IsUndefined()) return true;
5452 // Check whether this object references another object.
5453 bool JSObject::ReferencesObject(Object* obj) {
5454 Map* map_of_this = map();
5455 Heap* heap = GetHeap();
5456 DisallowHeapAllocation no_allocation;
5458 // Is the object the constructor for this object?
5459 if (map_of_this->constructor() == obj) {
5463 // Is the object the prototype for this object?
5464 if (map_of_this->prototype() == obj) {
5468 // Check if the object is among the named properties.
5469 Object* key = SlowReverseLookup(obj);
5470 if (!key->IsUndefined()) {
5474 // Check if the object is among the indexed properties.
5475 ElementsKind kind = GetElementsKind();
5477 // Raw pixels and external arrays do not reference other
5479 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5480 case EXTERNAL_##TYPE##_ELEMENTS: \
5481 case TYPE##_ELEMENTS: \
5484 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5485 #undef TYPED_ARRAY_CASE
5487 case FAST_DOUBLE_ELEMENTS:
5488 case FAST_HOLEY_DOUBLE_ELEMENTS:
5490 case FAST_SMI_ELEMENTS:
5491 case FAST_HOLEY_SMI_ELEMENTS:
5494 case FAST_HOLEY_ELEMENTS:
5495 case DICTIONARY_ELEMENTS: {
5496 FixedArray* elements = FixedArray::cast(this->elements());
5497 if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5500 case SLOPPY_ARGUMENTS_ELEMENTS: {
5501 FixedArray* parameter_map = FixedArray::cast(elements());
5502 // Check the mapped parameters.
5503 int length = parameter_map->length();
5504 for (int i = 2; i < length; ++i) {
5505 Object* value = parameter_map->get(i);
5506 if (!value->IsTheHole() && value == obj) return true;
5508 // Check the arguments.
5509 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5510 kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5511 FAST_HOLEY_ELEMENTS;
5512 if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5517 // For functions check the context.
5518 if (IsJSFunction()) {
5519 // Get the constructor function for arguments array.
5520 JSObject* arguments_boilerplate =
5521 heap->isolate()->context()->native_context()->
5522 sloppy_arguments_boilerplate();
5523 JSFunction* arguments_function =
5524 JSFunction::cast(arguments_boilerplate->map()->constructor());
5526 // Get the context and don't check if it is the native context.
5527 JSFunction* f = JSFunction::cast(this);
5528 Context* context = f->context();
5529 if (context->IsNativeContext()) {
5533 // Check the non-special context slots.
5534 for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5535 // Only check JS objects.
5536 if (context->get(i)->IsJSObject()) {
5537 JSObject* ctxobj = JSObject::cast(context->get(i));
5538 // If it is an arguments array check the content.
5539 if (ctxobj->map()->constructor() == arguments_function) {
5540 if (ctxobj->ReferencesObject(obj)) {
5543 } else if (ctxobj == obj) {
5549 // Check the context extension (if any) if it can have references.
5550 if (context->has_extension() && !context->IsCatchContext()) {
5551 // With harmony scoping, a JSFunction may have a global context.
5552 // TODO(mvstanton): walk into the ScopeInfo.
5553 if (FLAG_harmony_scoping && context->IsGlobalContext()) {
5557 return JSObject::cast(context->extension())->ReferencesObject(obj);
5561 // No references to object.
5566 MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
5567 Isolate* isolate = object->GetIsolate();
5569 if (!object->map()->is_extensible()) return object;
5571 if (object->IsAccessCheckNeeded() &&
5572 !isolate->MayNamedAccess(
5573 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5574 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5575 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5576 return isolate->factory()->false_value();
5579 if (object->IsJSGlobalProxy()) {
5580 Handle<Object> proto(object->GetPrototype(), isolate);
5581 if (proto->IsNull()) return object;
5582 ASSERT(proto->IsJSGlobalObject());
5583 return PreventExtensions(Handle<JSObject>::cast(proto));
5586 // It's not possible to seal objects with external array elements
5587 if (object->HasExternalArrayElements() ||
5588 object->HasFixedTypedArrayElements()) {
5589 Handle<Object> error =
5590 isolate->factory()->NewTypeError(
5591 "cant_prevent_ext_external_array_elements",
5592 HandleVector(&object, 1));
5593 return isolate->Throw<Object>(error);
5596 // If there are fast elements we normalize.
5597 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
5598 ASSERT(object->HasDictionaryElements() ||
5599 object->HasDictionaryArgumentsElements());
5601 // Make sure that we never go back to fast case.
5602 dictionary->set_requires_slow_elements();
5604 // Do a map transition, other objects with this map may still
5606 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5607 Handle<Map> new_map = Map::Copy(handle(object->map()));
5609 new_map->set_is_extensible(false);
5610 JSObject::MigrateToMap(object, new_map);
5611 ASSERT(!object->map()->is_extensible());
5613 if (object->map()->is_observed()) {
5614 EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
5615 isolate->factory()->the_hole_value());
5621 template<typename Dictionary>
5622 static void FreezeDictionary(Dictionary* dictionary) {
5623 int capacity = dictionary->Capacity();
5624 for (int i = 0; i < capacity; i++) {
5625 Object* k = dictionary->KeyAt(i);
5626 if (dictionary->IsKey(k)) {
5627 PropertyDetails details = dictionary->DetailsAt(i);
5628 int attrs = DONT_DELETE;
5629 // READ_ONLY is an invalid attribute for JS setters/getters.
5630 if (details.type() == CALLBACKS) {
5631 Object* v = dictionary->ValueAt(i);
5632 if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
5633 if (!v->IsAccessorPair()) attrs |= READ_ONLY;
5637 details = details.CopyAddAttributes(
5638 static_cast<PropertyAttributes>(attrs));
5639 dictionary->DetailsAtPut(i, details);
5645 MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
5646 // Freezing sloppy arguments should be handled elsewhere.
5647 ASSERT(!object->HasSloppyArgumentsElements());
5648 ASSERT(!object->map()->is_observed());
5650 if (object->map()->is_frozen()) return object;
5652 Isolate* isolate = object->GetIsolate();
5653 if (object->IsAccessCheckNeeded() &&
5654 !isolate->MayNamedAccess(
5655 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5656 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5657 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5658 return isolate->factory()->false_value();
5661 if (object->IsJSGlobalProxy()) {
5662 Handle<Object> proto(object->GetPrototype(), isolate);
5663 if (proto->IsNull()) return object;
5664 ASSERT(proto->IsJSGlobalObject());
5665 return Freeze(Handle<JSObject>::cast(proto));
5668 // It's not possible to freeze objects with external array elements
5669 if (object->HasExternalArrayElements() ||
5670 object->HasFixedTypedArrayElements()) {
5671 Handle<Object> error =
5672 isolate->factory()->NewTypeError(
5673 "cant_prevent_ext_external_array_elements",
5674 HandleVector(&object, 1));
5675 return isolate->Throw<Object>(error);
5678 Handle<SeededNumberDictionary> new_element_dictionary;
5679 if (!object->elements()->IsDictionary()) {
5680 int length = object->IsJSArray()
5681 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5682 : object->elements()->length();
5686 object->GetElementsCapacityAndUsage(&capacity, &used);
5687 new_element_dictionary = SeededNumberDictionary::New(isolate, used);
5689 // Move elements to a dictionary; avoid calling NormalizeElements to avoid
5690 // unnecessary transitions.
5691 new_element_dictionary = CopyFastElementsToDictionary(
5692 handle(object->elements()), length, new_element_dictionary);
5694 // No existing elements, use a pre-allocated empty backing store
5695 new_element_dictionary =
5696 isolate->factory()->empty_slow_element_dictionary();
5700 Handle<Map> old_map(object->map(), isolate);
5701 int transition_index = old_map->SearchTransition(
5702 isolate->heap()->frozen_symbol());
5703 if (transition_index != TransitionArray::kNotFound) {
5704 Handle<Map> transition_map(old_map->GetTransition(transition_index));
5705 ASSERT(transition_map->has_dictionary_elements());
5706 ASSERT(transition_map->is_frozen());
5707 ASSERT(!transition_map->is_extensible());
5708 JSObject::MigrateToMap(object, transition_map);
5709 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5710 // Create a new descriptor array with fully-frozen properties
5711 Handle<Map> new_map = Map::CopyForFreeze(old_map);
5712 JSObject::MigrateToMap(object, new_map);
5714 // Slow path: need to normalize properties for safety
5715 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5717 // Create a new map, since other objects with this map may be extensible.
5718 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5719 Handle<Map> new_map = Map::Copy(handle(object->map()));
5721 new_map->set_is_extensible(false);
5722 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5723 JSObject::MigrateToMap(object, new_map);
5725 // Freeze dictionary-mode properties
5726 FreezeDictionary(object->property_dictionary());
5729 ASSERT(object->map()->has_dictionary_elements());
5730 if (!new_element_dictionary.is_null()) {
5731 object->set_elements(*new_element_dictionary);
5734 if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
5735 SeededNumberDictionary* dictionary = object->element_dictionary();
5736 // Make sure we never go back to the fast case
5737 dictionary->set_requires_slow_elements();
5738 // Freeze all elements in the dictionary
5739 FreezeDictionary(dictionary);
5746 void JSObject::SetObserved(Handle<JSObject> object) {
5747 ASSERT(!object->IsJSGlobalProxy());
5748 ASSERT(!object->IsJSGlobalObject());
5749 Isolate* isolate = object->GetIsolate();
5750 Handle<Map> new_map;
5751 Handle<Map> old_map(object->map(), isolate);
5752 ASSERT(!old_map->is_observed());
5753 int transition_index = old_map->SearchTransition(
5754 isolate->heap()->observed_symbol());
5755 if (transition_index != TransitionArray::kNotFound) {
5756 new_map = handle(old_map->GetTransition(transition_index), isolate);
5757 ASSERT(new_map->is_observed());
5758 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5759 new_map = Map::CopyForObserved(old_map);
5761 new_map = Map::Copy(old_map);
5762 new_map->set_is_observed();
5764 JSObject::MigrateToMap(object, new_map);
5768 Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
5769 Representation representation,
5771 Isolate* isolate = object->GetIsolate();
5772 Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
5773 return Object::NewStorageFor(isolate, raw_value, representation);
5777 template<class ContextObject>
5778 class JSObjectWalkVisitor {
5780 JSObjectWalkVisitor(ContextObject* site_context, bool copying,
5781 JSObject::DeepCopyHints hints)
5782 : site_context_(site_context),
5786 MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);
5789 MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
5790 Handle<JSObject> object,
5791 Handle<JSObject> value) {
5792 Handle<AllocationSite> current_site = site_context()->EnterNewScope();
5793 MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
5794 site_context()->ExitScope(current_site, value);
5795 return copy_of_value;
5798 inline ContextObject* site_context() { return site_context_; }
5799 inline Isolate* isolate() { return site_context()->isolate(); }
5801 inline bool copying() const { return copying_; }
5804 ContextObject* site_context_;
5805 const bool copying_;
5806 const JSObject::DeepCopyHints hints_;
5810 template <class ContextObject>
5811 MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
5812 Handle<JSObject> object) {
5813 Isolate* isolate = this->isolate();
5814 bool copying = this->copying();
5815 bool shallow = hints_ == JSObject::kObjectIsShallowArray;
5818 StackLimitCheck check(isolate);
5820 if (check.HasOverflowed()) {
5821 isolate->StackOverflow();
5822 return MaybeHandle<JSObject>();
5826 if (object->map()->is_deprecated()) {
5827 JSObject::MigrateInstance(object);
5830 Handle<JSObject> copy;
5832 Handle<AllocationSite> site_to_pass;
5833 if (site_context()->ShouldCreateMemento(object)) {
5834 site_to_pass = site_context()->current();
5836 copy = isolate->factory()->CopyJSObjectWithAllocationSite(
5837 object, site_to_pass);
5842 ASSERT(copying || copy.is_identical_to(object));
5844 ElementsKind kind = copy->GetElementsKind();
5845 if (copying && IsFastSmiOrObjectElementsKind(kind) &&
5846 FixedArray::cast(copy->elements())->map() ==
5847 isolate->heap()->fixed_cow_array_map()) {
5848 isolate->counters()->cow_arrays_created_runtime()->Increment();
5852 HandleScope scope(isolate);
5854 // Deep copy own properties.
5855 if (copy->HasFastProperties()) {
5856 Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
5857 int limit = copy->map()->NumberOfOwnDescriptors();
5858 for (int i = 0; i < limit; i++) {
5859 PropertyDetails details = descriptors->GetDetails(i);
5860 if (details.type() != FIELD) continue;
5861 FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
5862 Handle<Object> value(object->RawFastPropertyAt(index), isolate);
5863 if (value->IsJSObject()) {
5864 ASSIGN_RETURN_ON_EXCEPTION(
5866 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5869 Representation representation = details.representation();
5870 value = Object::NewStorageFor(isolate, value, representation);
5873 copy->FastPropertyAtPut(index, *value);
5877 Handle<FixedArray> names =
5878 isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties());
5879 copy->GetOwnPropertyNames(*names, 0);
5880 for (int i = 0; i < names->length(); i++) {
5881 ASSERT(names->get(i)->IsString());
5882 Handle<String> key_string(String::cast(names->get(i)));
5883 PropertyAttributes attributes =
5884 JSReceiver::GetOwnPropertyAttributes(copy, key_string);
5885 // Only deep copy fields from the object literal expression.
5886 // In particular, don't try to copy the length attribute of
5888 if (attributes != NONE) continue;
5889 Handle<Object> value =
5890 Object::GetProperty(copy, key_string).ToHandleChecked();
5891 if (value->IsJSObject()) {
5892 Handle<JSObject> result;
5893 ASSIGN_RETURN_ON_EXCEPTION(
5895 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5898 // Creating object copy for literals. No strict mode needed.
5899 JSObject::SetProperty(
5900 copy, key_string, result, NONE, SLOPPY).Assert();
5906 // Deep copy own elements.
5907 // Pixel elements cannot be created using an object literal.
5908 ASSERT(!copy->HasExternalArrayElements());
5910 case FAST_SMI_ELEMENTS:
5912 case FAST_HOLEY_SMI_ELEMENTS:
5913 case FAST_HOLEY_ELEMENTS: {
5914 Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
5915 if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
5917 for (int i = 0; i < elements->length(); i++) {
5918 ASSERT(!elements->get(i)->IsJSObject());
5922 for (int i = 0; i < elements->length(); i++) {
5923 Handle<Object> value(elements->get(i), isolate);
5924 ASSERT(value->IsSmi() ||
5925 value->IsTheHole() ||
5926 (IsFastObjectElementsKind(copy->GetElementsKind())));
5927 if (value->IsJSObject()) {
5928 Handle<JSObject> result;
5929 ASSIGN_RETURN_ON_EXCEPTION(
5931 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5934 elements->set(i, *result);
5941 case DICTIONARY_ELEMENTS: {
5942 Handle<SeededNumberDictionary> element_dictionary(
5943 copy->element_dictionary());
5944 int capacity = element_dictionary->Capacity();
5945 for (int i = 0; i < capacity; i++) {
5946 Object* k = element_dictionary->KeyAt(i);
5947 if (element_dictionary->IsKey(k)) {
5948 Handle<Object> value(element_dictionary->ValueAt(i), isolate);
5949 if (value->IsJSObject()) {
5950 Handle<JSObject> result;
5951 ASSIGN_RETURN_ON_EXCEPTION(
5953 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5956 element_dictionary->ValueAtPut(i, *result);
5963 case SLOPPY_ARGUMENTS_ELEMENTS:
5968 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5969 case EXTERNAL_##TYPE##_ELEMENTS: \
5970 case TYPE##_ELEMENTS: \
5972 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5973 #undef TYPED_ARRAY_CASE
5975 case FAST_DOUBLE_ELEMENTS:
5976 case FAST_HOLEY_DOUBLE_ELEMENTS:
5977 // No contained objects, nothing to do.
5986 MaybeHandle<JSObject> JSObject::DeepWalk(
5987 Handle<JSObject> object,
5988 AllocationSiteCreationContext* site_context) {
5989 JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
5991 MaybeHandle<JSObject> result = v.StructureWalk(object);
5992 Handle<JSObject> for_assert;
5993 ASSERT(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
5998 MaybeHandle<JSObject> JSObject::DeepCopy(
5999 Handle<JSObject> object,
6000 AllocationSiteUsageContext* site_context,
6001 DeepCopyHints hints) {
6002 JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
6003 MaybeHandle<JSObject> copy = v.StructureWalk(object);
6004 Handle<JSObject> for_assert;
6005 ASSERT(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
6010 Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
6012 Isolate* isolate = object->GetIsolate();
6013 LookupResult lookup(isolate);
6015 DisallowHeapAllocation no_allocation;
6016 object->LookupRealNamedProperty(key, &lookup);
6018 Handle<Object> result = isolate->factory()->undefined_value();
6019 if (lookup.IsFound() && !lookup.IsTransition()) {
6020 switch (lookup.type()) {
6022 result = GetNormalizedProperty(
6023 Handle<JSObject>(lookup.holder(), isolate), &lookup);
6026 result = FastPropertyAt(Handle<JSObject>(lookup.holder(), isolate),
6027 lookup.representation(),
6028 lookup.GetFieldIndex());
6031 result = Handle<Object>(lookup.GetConstant(), isolate);
6045 // Tests for the fast common case for property enumeration:
6046 // - This object and all prototypes has an enum cache (which means that
6047 // it is no proxy, has no interceptors and needs no access checks).
6048 // - This object has no elements.
6049 // - No prototype has enumerable properties/elements.
// Walks this object's prototype chain (up to null) and returns false as soon
// as any link disqualifies the fast for-in path; see the comment block above
// for the exact conditions being tested.
6050 bool JSReceiver::IsSimpleEnum() {
6051 Heap* heap = GetHeap();
6052 for (Object* o = this;
6053 o != heap->null_value();
6054 o = JSObject::cast(o)->GetPrototype()) {
// Proxies and other non-JSObject receivers cannot use the enum cache.
6055 if (!o->IsJSObject()) return false;
6056 JSObject* curr = JSObject::cast(o);
6057 int enum_length = curr->map()->EnumLength();
// A map that has never populated its enum cache is not "simple".
6058 if (enum_length == kInvalidEnumCacheSentinel) return false;
6059 if (curr->IsAccessCheckNeeded()) return false;
// Interceptors would already have forced the sentinel above.
6060 ASSERT(!curr->HasNamedInterceptor());
6061 ASSERT(!curr->HasIndexedInterceptor());
// Elements are not covered by the enum cache.
6062 if (curr->NumberOfEnumElements() > 0) return false;
// Prototypes must contribute no enumerable properties at all.
6063 if (curr != this && enum_length != 0) return false;
// Returns whether |key| should be excluded under |filter|: SYMBOLIC drops all
// symbols, PRIVATE_SYMBOL drops private symbols only, STRING drops
// non-symbol (string) keys. (Elided view: the per-branch returns are not
// visible here.)
6069 static bool FilterKey(Object* key, PropertyAttributes filter) {
6070 if ((filter & SYMBOLIC) && key->IsSymbol()) {
6074 if ((filter & PRIVATE_SYMBOL) &&
6075 key->IsSymbol() && Symbol::cast(key)->is_private()) {
6079 if ((filter & STRING) && !key->IsSymbol()) {
// Counts descriptors whose attributes pass |filter| (and whose key survives
// FilterKey). |which| selects between all descriptors in the array and only
// this map's own descriptors.
6087 int Map::NumberOfDescribedProperties(DescriptorFlag which,
6088 PropertyAttributes filter) {
6090 DescriptorArray* descs = instance_descriptors();
6091 int limit = which == ALL_DESCRIPTORS
6092 ? descs->number_of_descriptors()
6093 : NumberOfOwnDescriptors();
6094 for (int i = 0; i < limit; i++) {
// A descriptor counts only if no filter bit matches its attributes and
// its key is not filtered out (e.g. symbols under SYMBOLIC).
6095 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
6096 !FilterKey(descs->GetKey(i), filter)) {
// Returns one past the highest field index used by this map's own FIELD
// descriptors, i.e. the next free slot for a new field.
6104 int Map::NextFreePropertyIndex() {
6106 int number_of_own_descriptors = NumberOfOwnDescriptors();
6107 DescriptorArray* descs = instance_descriptors();
6108 for (int i = 0; i < number_of_own_descriptors; i++) {
// Only FIELD descriptors occupy property slots; constants/callbacks don't.
6109 if (descs->GetType(i) == FIELD) {
6110 int current_index = descs->GetFieldIndex(i);
6111 if (current_index > max_index) max_index = current_index;
6114 return max_index + 1;
// Fills |result| with this receiver's own property named |name|. Global
// proxies forward to their (hidden) global object; when
// |search_hidden_prototypes| is set, the search continues through hidden
// prototypes (e.g. String wrappers) as well.
6118 void JSReceiver::LookupOwn(
6119 Handle<Name> name, LookupResult* result, bool search_hidden_prototypes) {
// LookupResult stores raw pointers; no GC may happen while it's live.
6120 DisallowHeapAllocation no_gc;
6121 ASSERT(name->IsName());
6123 if (IsJSGlobalProxy()) {
6124 Object* proto = GetPrototype();
// A detached global proxy has a null prototype: nothing to find.
6125 if (proto->IsNull()) return result->NotFound();
6126 ASSERT(proto->IsJSGlobalObject());
6127 return JSReceiver::cast(proto)->LookupOwn(
6128 name, result, search_hidden_prototypes);
// Proxies report a handler-based result rather than a concrete property.
6132 result->HandlerResult(JSProxy::cast(this));
6136 // Do not use inline caching if the object is a non-global object
6137 // that requires access checks.
6138 if (IsAccessCheckNeeded()) {
6139 result->DisallowCaching();
6142 JSObject* js_object = JSObject::cast(this);
6144 // Check for lookup interceptor except when bootstrapping.
6145 if (js_object->HasNamedInterceptor() &&
6146 !GetIsolate()->bootstrapper()->IsActive()) {
6147 result->InterceptorResult(js_object);
6151 js_object->LookupOwnRealNamedProperty(name, result);
6152 if (result->IsFound() || !search_hidden_prototypes) return;
// Not found: keep walking only through hidden prototypes, which behave
// like part of the receiver itself.
6154 Object* proto = js_object->GetPrototype();
6155 if (!proto->IsJSReceiver()) return;
6156 JSReceiver* receiver = JSReceiver::cast(proto);
6157 if (receiver->map()->is_hidden_prototype()) {
6158 receiver->LookupOwn(name, result, search_hidden_prototypes);
// Full prototype-chain lookup of |name|, per ES3 8.6.2.4 [[GetProperty]]:
// try each object's own properties in turn until found or the chain ends.
6163 void JSReceiver::Lookup(Handle<Name> name, LookupResult* result) {
6164 DisallowHeapAllocation no_gc;
6165 // Ecma-262 3rd 8.6.2.4
6166 Handle<Object> null_value = GetIsolate()->factory()->null_value();
6167 for (Object* current = this;
6168 current != *null_value;
6169 current = JSObject::cast(current)->GetPrototype()) {
// Own lookup only; hidden prototypes are covered by the outer walk.
6170 JSReceiver::cast(current)->LookupOwn(name, result, false);
6171 if (result->IsFound()) return;
// Debug helper (used via ASSERT in GetKeys): a key array is valid when every
// element is a string or a number.
6177 static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
6178 int len = array->length();
6179 for (int i = 0; i < len; i++) {
6180 Object* e = array->get(i);
6181 if (!(e->IsString() || e->IsNumber())) return false;
// Returns |array| truncated to |length| elements; reuses the array untouched
// when it is already exactly that long, otherwise copies the prefix into a
// freshly allocated FixedArray.
6187 static Handle<FixedArray> ReduceFixedArrayTo(
6188 Handle<FixedArray> array, int length) {
6189 ASSERT(array->length() >= length);
6190 if (array->length() == length) return array;
6192 Handle<FixedArray> new_array =
6193 array->GetIsolate()->factory()->NewFixedArray(length);
6194 for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
// Returns the enumerable own property keys of |object|. Fast-properties
// objects use (and, when |cache_result| is set, populate) the map's enum
// cache; dictionary-mode objects copy keys out of the property dictionary.
6199 static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
6200 bool cache_result) {
6201 Isolate* isolate = object->GetIsolate();
6202 if (object->HasFastProperties()) {
6203 int own_property_count = object->map()->EnumLength();
6204 // If the enum length of the given map is set to kInvalidEnumCache, this
6205 // means that the map itself has never used the present enum cache. The
6206 // first step to using the cache is to set the enum length of the map by
6207 // counting the number of own descriptors that are not DONT_ENUM or
6209 if (own_property_count == kInvalidEnumCacheSentinel) {
6210 own_property_count = object->map()->NumberOfDescribedProperties(
6211 OWN_DESCRIPTORS, DONT_SHOW);
// When EnumLength was already set, it must agree with a fresh count.
6213 ASSERT(own_property_count == object->map()->NumberOfDescribedProperties(
6214 OWN_DESCRIPTORS, DONT_SHOW));
6217 if (object->map()->instance_descriptors()->HasEnumCache()) {
6218 DescriptorArray* desc = object->map()->instance_descriptors();
6219 Handle<FixedArray> keys(desc->GetEnumCache(), isolate);
6221 // In case the number of properties required in the enum are actually
6222 // present, we can reuse the enum cache. Otherwise, this means that the
6223 // enum cache was generated for a previous (smaller) version of the
6224 // Descriptor Array. In that case we regenerate the enum cache.
6225 if (own_property_count <= keys->length()) {
6226 if (cache_result) object->map()->SetEnumLength(own_property_count);
6227 isolate->counters()->enum_cache_hits()->Increment();
6228 return ReduceFixedArrayTo(keys, own_property_count);
6232 Handle<Map> map(object->map());
// No descriptors at all: trivially empty, and cacheable as length 0.
6234 if (map->instance_descriptors()->IsEmpty()) {
6235 isolate->counters()->enum_cache_hits()->Increment();
6236 if (cache_result) map->SetEnumLength(0);
6237 return isolate->factory()->empty_fixed_array();
// Cache miss: build the key array (and, if possible, field indices).
6240 isolate->counters()->enum_cache_misses()->Increment();
6242 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
6243 own_property_count);
// Parallel array of load-by-field-index smis; discarded (set to a null
// handle below) as soon as a non-FIELD descriptor is encountered.
6244 Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
6245 own_property_count);
6247 Handle<DescriptorArray> descs =
6248 Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
6250 int size = map->NumberOfOwnDescriptors();
6253 for (int i = 0; i < size; i++) {
6254 PropertyDetails details = descs->GetDetails(i);
6255 Object* key = descs->GetKey(i);
// Only enumerable, non-symbol keys participate in for-in.
6256 if (!(details.IsDontEnum() || key->IsSymbol())) {
6257 storage->set(index, key);
6258 if (!indices.is_null()) {
6259 if (details.type() != FIELD) {
6260 indices = Handle<FixedArray>();
6262 FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
6263 int load_by_field_index = field_index.GetLoadByFieldIndex();
6264 indices->set(index, Smi::FromInt(load_by_field_index));
6270 ASSERT(index == storage->length());
// Install the freshly built keys (and indices, or Smi 0 when indices
// were abandoned) into the descriptor array's enum-cache bridge.
6272 Handle<FixedArray> bridge_storage =
6273 isolate->factory()->NewFixedArray(
6274 DescriptorArray::kEnumCacheBridgeLength);
6275 DescriptorArray* desc = object->map()->instance_descriptors();
6276 desc->SetEnumCache(*bridge_storage,
6278 indices.is_null() ? Object::cast(Smi::FromInt(0))
6279 : Object::cast(*indices));
6281 object->map()->SetEnumLength(own_property_count);
// Slow path: dictionary-mode properties.
6285 Handle<NameDictionary> dictionary(object->property_dictionary());
6286 int length = dictionary->NumberOfEnumElements();
6288 return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
6290 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
6291 dictionary->CopyEnumKeysTo(*storage);
// Collects the enumerable keys of |object| (and, unless |type| is OWN_ONLY,
// of its whole prototype chain): element keys first, then property keys,
// consulting proxies and interceptors, and honoring access checks. May
// return an exception (via MaybeHandle) if a proxy trap or interceptor
// throws.
6297 MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
6298 KeyCollectionType type) {
// Referenced so debug-only helper doesn't trip unused-function warnings.
6299 USE(ContainsOnlyValidKeys);
6300 Isolate* isolate = object->GetIsolate();
6301 Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
// The sloppy-arguments constructor is used below to recognize arguments
// objects, whose keys must never be cached.
6302 Handle<JSObject> arguments_boilerplate = Handle<JSObject>(
6303 isolate->context()->native_context()->sloppy_arguments_boilerplate(),
6305 Handle<JSFunction> arguments_function = Handle<JSFunction>(
6306 JSFunction::cast(arguments_boilerplate->map()->constructor()),
6309 // Only collect keys if access is permitted.
6310 for (Handle<Object> p = object;
6311 *p != isolate->heap()->null_value();
6312 p = Handle<Object>(p->GetPrototype(isolate), isolate)) {
// Proxies delegate key collection to the JS-level enumerate trap.
6313 if (p->IsJSProxy()) {
6314 Handle<JSProxy> proxy(JSProxy::cast(*p), isolate);
6315 Handle<Object> args[] = { proxy };
6316 Handle<Object> names;
6317 ASSIGN_RETURN_ON_EXCEPTION(
6319 Execution::Call(isolate,
6320 isolate->proxy_enumerate(),
6325 ASSIGN_RETURN_ON_EXCEPTION(
6327 FixedArray::AddKeysFromArrayLike(
6328 content, Handle<JSObject>::cast(names)),
6333 Handle<JSObject> current(JSObject::cast(*p), isolate);
6335 // Check access rights if required.
6336 if (current->IsAccessCheckNeeded() &&
6337 !isolate->MayNamedAccess(
6338 current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
// Failed access check: report it and stop collecting further keys.
6339 isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
6340 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
6344 // Compute the element keys.
6345 Handle<FixedArray> element_keys =
6346 isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
6347 current->GetEnumElementKeys(*element_keys);
6348 ASSIGN_RETURN_ON_EXCEPTION(
6350 FixedArray::UnionOfKeys(content, element_keys),
6352 ASSERT(ContainsOnlyValidKeys(content));
6354 // Add the element keys from the interceptor.
6355 if (current->HasIndexedInterceptor()) {
6356 Handle<JSObject> result;
// Interceptor failures are deliberately ignored (best-effort add).
6357 if (JSObject::GetKeysForIndexedInterceptor(
6358 current, object).ToHandle(&result)) {
6359 ASSIGN_RETURN_ON_EXCEPTION(
6361 FixedArray::AddKeysFromArrayLike(content, result),
6364 ASSERT(ContainsOnlyValidKeys(content));
6367 // We can cache the computed property keys if access checks are
6368 // not needed and no interceptors are involved.
6370 // We do not use the cache if the object has elements and
6371 // therefore it does not make sense to cache the property names
6372 // for arguments objects. Arguments objects will always have
6374 // Wrapped strings have elements, but don't have an elements
6375 // array or dictionary. So the fast inline test for whether to
6376 // use the cache says yes, so we should not create a cache.
6377 bool cache_enum_keys =
6378 ((current->map()->constructor() != *arguments_function) &&
6379 !current->IsJSValue() &&
6380 !current->IsAccessCheckNeeded() &&
6381 !current->HasNamedInterceptor() &&
6382 !current->HasIndexedInterceptor());
6383 // Compute the property keys and cache them if possible.
6384 ASSIGN_RETURN_ON_EXCEPTION(
6386 FixedArray::UnionOfKeys(
6387 content, GetEnumPropertyKeys(current, cache_enum_keys)),
6389 ASSERT(ContainsOnlyValidKeys(content));
6391 // Add the property keys from the interceptor.
6392 if (current->HasNamedInterceptor()) {
6393 Handle<JSObject> result;
6394 if (JSObject::GetKeysForNamedInterceptor(
6395 current, object).ToHandle(&result)) {
6396 ASSIGN_RETURN_ON_EXCEPTION(
6398 FixedArray::AddKeysFromArrayLike(content, result),
6401 ASSERT(ContainsOnlyValidKeys(content));
6404 // If we only want own properties we bail out after the first
6406 if (type == OWN_ONLY) break;
6412 // Try to update an accessor in an elements dictionary. Return true if the
6413 // update succeeded, and false otherwise.
6414 static bool UpdateGetterSetterInDictionary(
6415 SeededNumberDictionary* dictionary,
6419 PropertyAttributes attributes) {
6420 int entry = dictionary->FindEntry(index);
6421 if (entry != SeededNumberDictionary::kNotFound) {
6422 Object* result = dictionary->ValueAt(entry);
6423 PropertyDetails details = dictionary->DetailsAt(entry);
// Only an existing CALLBACKS entry holding an AccessorPair can be updated
// in place; anything else forces the caller onto the slow path.
6424 if (details.type() == CALLBACKS && result->IsAccessorPair()) {
6425 ASSERT(!details.IsDontDelete());
// Attributes changed: rewrite the details before updating components.
6426 if (details.attributes() != attributes) {
6427 dictionary->DetailsAtPut(
6429 PropertyDetails(attributes, CALLBACKS, index));
// Reuse the existing pair; only the getter/setter slots change.
6431 AccessorPair::cast(result)->SetComponents(getter, setter);
// Installs a getter/setter pair for an element index, dispatching on the
// object's elements kind: fast/typed-array kinds and dictionaries each need
// different handling before the generic SetElementCallback fallback at the
// bottom.
6439 void JSObject::DefineElementAccessor(Handle<JSObject> object,
6441 Handle<Object> getter,
6442 Handle<Object> setter,
6443 PropertyAttributes attributes,
6444 v8::AccessControl access_control) {
6445 switch (object->GetElementsKind()) {
6446 case FAST_SMI_ELEMENTS:
6448 case FAST_DOUBLE_ELEMENTS:
6449 case FAST_HOLEY_SMI_ELEMENTS:
6450 case FAST_HOLEY_ELEMENTS:
6451 case FAST_HOLEY_DOUBLE_ELEMENTS:
6454 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6455 case EXTERNAL_##TYPE##_ELEMENTS: \
6456 case TYPE##_ELEMENTS: \
6458 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6459 #undef TYPED_ARRAY_CASE
6460 // Ignore getters and setters on pixel and external array elements.
6463 case DICTIONARY_ELEMENTS:
// Fast path: mutate an existing accessor pair in the dictionary.
6464 if (UpdateGetterSetterInDictionary(object->element_dictionary(),
6472 case SLOPPY_ARGUMENTS_ELEMENTS: {
6473 // Ascertain whether we have read-only properties or an existing
6474 // getter/setter pair in an arguments elements dictionary backing
6476 FixedArray* parameter_map = FixedArray::cast(object->elements());
6477 uint32_t length = parameter_map->length();
// Slots 0/1 of the parameter map are context and arguments backing
// store; aliased parameters live at index + 2.
6479 index < (length - 2) ? parameter_map->get(index + 2) : NULL;
6480 if (probe == NULL || probe->IsTheHole()) {
6481 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
6482 if (arguments->IsDictionary()) {
6483 SeededNumberDictionary* dictionary =
6484 SeededNumberDictionary::cast(arguments);
6485 if (UpdateGetterSetterInDictionary(dictionary,
// Generic path: build a fresh AccessorPair and install it as a callback.
6498 Isolate* isolate = object->GetIsolate();
6499 Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
6500 accessors->SetComponents(*getter, *setter);
6501 accessors->set_access_flags(access_control);
6503 SetElementCallback(object, index, accessors, attributes);
// Returns an AccessorPair to use when (re)defining an accessor named |name|:
// a copy of the existing pair if the property already holds one, otherwise a
// brand-new empty pair.
6507 Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object,
6508 Handle<Name> name) {
6509 Isolate* isolate = object->GetIsolate();
6510 LookupResult result(isolate);
6511 object->LookupOwnRealNamedProperty(name, &result);
6512 if (result.IsPropertyCallbacks()) {
6513 // Note that the result can actually have IsDontDelete() == true when we
6514 // e.g. have to fall back to the slow case while adding a setter after
6515 // successfully reusing a map transition for a getter. Nevertheless, this is
6516 // OK, because the assertion only holds for the whole addition of both
6517 // accessors, not for the addition of each part. See first comment in
6518 // DefinePropertyAccessor below.
6519 Object* obj = result.GetCallbackObject();
// Copy (not share) the pair so mutation doesn't leak into other maps.
6520 if (obj->IsAccessorPair()) {
6521 return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate));
6524 return isolate->factory()->NewAccessorPair();
// Defines a named accessor property. Tries the fast map-transition path
// first (fast-properties object, default access control, a real getter or
// setter to install); otherwise falls back to installing an AccessorPair via
// the slow SetPropertyCallback path.
6528 void JSObject::DefinePropertyAccessor(Handle<JSObject> object,
6530 Handle<Object> getter,
6531 Handle<Object> setter,
6532 PropertyAttributes attributes,
6533 v8::AccessControl access_control) {
6534 // We could assert that the property is configurable here, but we would need
6535 // to do a lookup, which seems to be a bit of overkill.
// Both components null means only the attributes change: no fast path.
6536 bool only_attribute_changes = getter->IsNull() && setter->IsNull();
6537 if (object->HasFastProperties() && !only_attribute_changes &&
6538 access_control == v8::DEFAULT &&
6539 (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors)) {
// A null component is trivially "ok"; a setter is only attempted after
// the getter succeeded (or was null), so failures fall through together.
6540 bool getterOk = getter->IsNull() ||
6541 DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes);
6542 bool setterOk = !getterOk || setter->IsNull() ||
6543 DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes);
6544 if (getterOk && setterOk) return;
// Slow path: normalized property with an explicit AccessorPair callback.
6547 Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name);
6548 accessors->SetComponents(*getter, *setter);
6549 accessors->set_access_flags(access_control);
6551 SetPropertyCallback(object, name, accessors, attributes);
// Returns whether dictionary elements appear only in this map's prototype
// chain (not on the receiver itself). Used to decide whether element stores
// must consider slow-mode prototypes.
6555 bool Map::DictionaryElementsInPrototypeChainOnly() {
6556 Heap* heap = GetHeap();
// Receiver itself is already in dictionary mode: condition can't hold.
6558 if (IsDictionaryElementsKind(elements_kind())) {
6562 for (Object* prototype = this->prototype();
6563 prototype != heap->null_value();
6564 prototype = prototype->GetPrototype(GetIsolate())) {
6565 if (prototype->IsJSProxy()) {
6566 // Be conservative, don't walk into proxies.
6570 if (IsDictionaryElementsKind(
6571 JSObject::cast(prototype)->map()->elements_kind())) {
// Installs |structure| (an AccessorPair or AccessorInfo) as a CALLBACKS
// entry for element |index|, normalizing the elements to dictionary mode
// first and clearing keyed-store ICs when the mode just changed.
6580 void JSObject::SetElementCallback(Handle<JSObject> object,
6582 Handle<Object> structure,
6583 PropertyAttributes attributes) {
6584 Heap* heap = object->GetHeap();
6585 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6587 // Normalize elements to make this operation simple.
6588 bool had_dictionary_elements = object->HasDictionaryElements();
6589 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
6590 ASSERT(object->HasDictionaryElements() ||
6591 object->HasDictionaryArgumentsElements());
6592 // Update the dictionary with the new CALLBACKS property.
6593 dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
// Callbacks must always be found via the dictionary, never the fast path.
6595 dictionary->set_requires_slow_elements();
6597 // Update the dictionary backing store on the object.
6598 if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
6599 // Also delete any parameter alias.
6601 // TODO(kmillikin): when deleting the last parameter alias we could
6602 // switch to a direct backing store without the parameter map. This
6603 // would allow GC of the context.
6604 FixedArray* parameter_map = FixedArray::cast(object->elements());
6605 if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
6606 parameter_map->set(index + 2, heap->the_hole_value());
// Slot 1 of the parameter map holds the arguments backing store.
6608 parameter_map->set(1, *dictionary);
6610 object->set_elements(*dictionary);
6612 if (!had_dictionary_elements) {
6613 // KeyedStoreICs (at least the non-generic ones) need a reset.
6614 heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Installs |structure| as a CALLBACKS property named |name|, normalizing the
// object's properties first. Global objects additionally get a fresh map
// (and global deopts) so stale property-cell ICs cannot be reused.
6620 void JSObject::SetPropertyCallback(Handle<JSObject> object,
6622 Handle<Object> structure,
6623 PropertyAttributes attributes) {
6624 // Normalize object to make this operation simple.
6625 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
6627 // For the global object allocate a new map to invalidate the global inline
6628 // caches which have a global property cell reference directly in the code.
6629 if (object->IsGlobalObject()) {
6630 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
6631 ASSERT(new_map->is_dictionary_map());
6632 object->set_map(*new_map);
6634 // When running crankshaft, changing the map is not enough. We
6635 // need to deoptimize all functions that rely on this global
6637 Deoptimizer::DeoptimizeGlobalObject(*object);
6640 // Update the dictionary with the new CALLBACKS property.
6641 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6642 SetNormalizedProperty(object, name, structure, details);
// Public entry point for defining a getter/setter on |object| under |name|
// (which may be an array index). Handles access checks, global-proxy
// forwarding, and Object.observe change records, then dispatches to the
// element or named accessor path.
6646 void JSObject::DefineAccessor(Handle<JSObject> object,
6648 Handle<Object> getter,
6649 Handle<Object> setter,
6650 PropertyAttributes attributes,
6651 v8::AccessControl access_control) {
6652 Isolate* isolate = object->GetIsolate();
6653 // Check access rights if needed.
6654 if (object->IsAccessCheckNeeded() &&
6655 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6656 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6657 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// Global proxy: define the accessor on the real global object instead.
6661 if (object->IsJSGlobalProxy()) {
6662 Handle<Object> proto(object->GetPrototype(), isolate);
6663 if (proto->IsNull()) return;
6664 ASSERT(proto->IsJSGlobalObject());
6665 DefineAccessor(Handle<JSObject>::cast(proto),
6674 // Make sure that the top context does not change when doing callbacks or
6675 // interceptor calls.
6676 AssertNoContextChange ncc(isolate);
6678 // Try to flatten before operating on the string.
6679 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
6682 bool is_element = name->AsArrayIndex(&index);
// For Object.observe: capture the old value (unless it was already an
// accessor) so the change record can carry it.
6684 Handle<Object> old_value = isolate->factory()->the_hole_value();
6685 bool is_observed = object->map()->is_observed() &&
6686 *name != isolate->heap()->hidden_string();
6687 bool preexists = false;
6690 preexists = HasOwnElement(object, index);
6691 if (preexists && GetOwnElementAccessorPair(object, index).is_null()) {
6693 Object::GetElement(isolate, object, index).ToHandleChecked();
6696 LookupResult lookup(isolate);
6697 object->LookupOwn(name, &lookup, true);
6698 preexists = lookup.IsProperty();
6699 if (preexists && lookup.IsDataProperty()) {
6701 Object::GetPropertyOrElement(object, name).ToHandleChecked();
// Dispatch on whether |name| parsed as an array index.
6707 DefineElementAccessor(
6708 object, index, getter, setter, attributes, access_control);
6710 DefinePropertyAccessor(
6711 object, name, getter, setter, attributes, access_control);
// Observed objects record whether this was an add or a reconfigure.
6715 const char* type = preexists ? "reconfigure" : "add";
6716 EnqueueChangeRecord(object, type, name, old_value);
// Attempts to reuse an existing map transition whose target already has the
// same accessor (component + attributes) at |target_descriptor|. On match,
// migrates |self| to the transitioned map; otherwise signals the caller to
// take the slow path.
6721 static bool TryAccessorTransition(Handle<JSObject> self,
6722 Handle<Map> transitioned_map,
6723 int target_descriptor,
6724 AccessorComponent component,
6725 Handle<Object> accessor,
6726 PropertyAttributes attributes) {
6727 DescriptorArray* descs = transitioned_map->instance_descriptors();
6728 PropertyDetails details = descs->GetDetails(target_descriptor);
6730 // If the transition target was not callbacks, fall back to the slow case.
6731 if (details.type() != CALLBACKS) return false;
6732 Object* descriptor = descs->GetCallbacksObject(target_descriptor);
6733 if (!descriptor->IsAccessorPair()) return false;
6735 Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
6736 PropertyAttributes target_attributes = details.attributes();
6738 // Reuse transition if adding same accessor with same attributes.
// Identity comparison: the very same accessor function object is required.
6739 if (target_accessor == *accessor && target_attributes == attributes) {
6740 JSObject::MigrateToMap(self, transitioned_map);
6744 // If either not the same accessor, or not the same attributes, fall back to
// Fast-properties accessor definition: tries (1) recognizing the identical
// accessor already in place, (2) reusing an existing map transition, and
// finally (3) inserting a new CALLBACKS descriptor with a transition.
// Returns false when the caller must fall back to the slow (normalized)
// path.
6750 bool JSObject::DefineFastAccessor(Handle<JSObject> object,
6752 AccessorComponent component,
6753 Handle<Object> accessor,
6754 PropertyAttributes attributes) {
6755 ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
6756 Isolate* isolate = object->GetIsolate();
6757 LookupResult result(isolate);
6758 object->LookupOwn(name, &result);
// An existing non-callback property cannot be upgraded on the fast path.
6760 if (result.IsFound() && !result.IsPropertyCallbacks()) {
6764 // Return success if the same accessor with the same attributes already exist.
6765 AccessorPair* source_accessors = NULL;
6766 if (result.IsPropertyCallbacks()) {
6767 Object* callback_value = result.GetCallbackObject();
6768 if (callback_value->IsAccessorPair()) {
6769 source_accessors = AccessorPair::cast(callback_value);
6770 Object* entry = source_accessors->get(component);
6771 if (entry == *accessor && result.GetAttributes() == attributes) {
6778 int descriptor_number = result.GetDescriptorIndex();
6780 object->map()->LookupTransition(*object, *name, &result);
6782 if (result.IsFound()) {
6783 Handle<Map> target(result.GetTransitionTarget());
// Transition targets keep the descriptor count: this replaces the pair's
// other component rather than adding a new descriptor.
6784 ASSERT(target->NumberOfOwnDescriptors() ==
6785 object->map()->NumberOfOwnDescriptors());
6786 // This works since descriptors are sorted in order of addition.
6787 ASSERT(object->map()->instance_descriptors()->
6788 GetKey(descriptor_number) == *name);
6789 return TryAccessorTransition(object, target, descriptor_number,
6790 component, accessor, attributes);
6793 // If not, lookup a transition.
6794 object->map()->LookupTransition(*object, *name, &result);
6796 // If there is a transition, try to follow it.
6797 if (result.IsFound()) {
6798 Handle<Map> target(result.GetTransitionTarget());
// A fresh property lands in the transition target's last-added slot.
6799 int descriptor_number = target->LastAdded();
6800 ASSERT(Name::Equals(name,
6801 handle(target->instance_descriptors()->GetKey(descriptor_number))));
6802 return TryAccessorTransition(object, target, descriptor_number,
6803 component, accessor, attributes);
6807 // If there is no transition yet, add a transition to the a new accessor pair
6808 // containing the accessor. Allocate a new pair if there were no source
6809 // accessors. Otherwise, copy the pair and modify the accessor.
6810 Handle<AccessorPair> accessors = source_accessors != NULL
6811 ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
6812 : isolate->factory()->NewAccessorPair();
6813 accessors->set(component, *accessor);
6815 CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
6816 Handle<Map> new_map = Map::CopyInsertDescriptor(
6817 handle(object->map()), &new_accessors_desc, INSERT_TRANSITION);
6819 JSObject::MigrateToMap(object, new_map);
// Installs a native AccessorInfo callback (from the embedder API) on
// |object| under info->name(), which may be an array index. Returns
// undefined when the accessor cannot be installed (access check failure,
// JSArray element, typed-array element, non-configurable property), the
// object itself on success paths shown, or an exception.
6824 MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
6825 Handle<AccessorInfo> info) {
6826 Isolate* isolate = object->GetIsolate();
6827 Factory* factory = isolate->factory();
6828 Handle<Name> name(Name::cast(info->name()));
6830 // Check access rights if needed.
6831 if (object->IsAccessCheckNeeded() &&
6832 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6833 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6834 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6835 return factory->undefined_value();
// Global proxy: install on the real global object instead.
6838 if (object->IsJSGlobalProxy()) {
6839 Handle<Object> proto(object->GetPrototype(), isolate);
6840 if (proto->IsNull()) return object;
6841 ASSERT(proto->IsJSGlobalObject());
6842 return SetAccessor(Handle<JSObject>::cast(proto), info);
6845 // Make sure that the top context does not change when doing callbacks or
6846 // interceptor calls.
6847 AssertNoContextChange ncc(isolate);
6849 // Try to flatten before operating on the string.
6850 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
6853 bool is_element = name->AsArrayIndex(&index);
// Element accessors on JSArrays are not supported through this API.
6856 if (object->IsJSArray()) return factory->undefined_value();
6858 // Accessors overwrite previous callbacks (cf. with getters/setters).
6859 switch (object->GetElementsKind()) {
6860 case FAST_SMI_ELEMENTS:
6862 case FAST_DOUBLE_ELEMENTS:
6863 case FAST_HOLEY_SMI_ELEMENTS:
6864 case FAST_HOLEY_ELEMENTS:
6865 case FAST_HOLEY_DOUBLE_ELEMENTS:
6868 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6869 case EXTERNAL_##TYPE##_ELEMENTS: \
6870 case TYPE##_ELEMENTS: \
6872 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6873 #undef TYPED_ARRAY_CASE
6874 // Ignore getters and setters on pixel and external array
6876 return factory->undefined_value();
6878 case DICTIONARY_ELEMENTS:
6880 case SLOPPY_ARGUMENTS_ELEMENTS:
6885 SetElementCallback(object, index, info, info->property_attributes());
// Named (non-element) property path.
6888 LookupResult result(isolate);
6889 object->LookupOwn(name, &result, true);
6890 // ES5 forbids turning a property into an accessor if it's not
6891 // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
6892 if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
6893 return factory->undefined_value();
6896 SetPropertyCallback(object, name, info, info->property_attributes());
// Looks up the getter or setter (per |component|) for |name| on |object|,
// including prototypes; returns undefined when no accessor is found or
// access is denied. Elements are searched through dictionary-mode element
// stores; named properties via LookupOwn on each chain link.
6903 MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
6905 AccessorComponent component) {
6906 Isolate* isolate = object->GetIsolate();
6908 // Make sure that the top context does not change when doing callbacks or
6909 // interceptor calls.
6910 AssertNoContextChange ncc(isolate);
6912 // Check access rights if needed.
6913 if (object->IsAccessCheckNeeded() &&
6914 !isolate->MayNamedAccess(object, name, v8::ACCESS_HAS)) {
6915 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
6916 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6917 return isolate->factory()->undefined_value();
6920 // Make the lookup and include prototypes.
// Element path: |name| parsed as an array index.
6922 if (name->AsArrayIndex(&index)) {
6923 for (Handle<Object> obj = object;
6925 obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
// Only dictionary element stores can hold element accessors.
6926 if (obj->IsJSObject() && JSObject::cast(*obj)->HasDictionaryElements()) {
6927 JSObject* js_object = JSObject::cast(*obj);
6928 SeededNumberDictionary* dictionary = js_object->element_dictionary();
6929 int entry = dictionary->FindEntry(index);
6930 if (entry != SeededNumberDictionary::kNotFound) {
6931 Object* element = dictionary->ValueAt(entry);
6932 if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
6933 element->IsAccessorPair()) {
6934 return handle(AccessorPair::cast(element)->GetComponent(component),
// Named-property path: walk the prototype chain with own lookups.
6941 for (Handle<Object> obj = object;
6943 obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
6944 LookupResult result(isolate);
6945 JSReceiver::cast(*obj)->LookupOwn(name, &result);
6946 if (result.IsFound()) {
// A read-only property on the chain shadows any accessor further up.
6947 if (result.IsReadOnly()) return isolate->factory()->undefined_value();
6948 if (result.IsPropertyCallbacks()) {
// NOTE: shadows the loop variable |obj| with the callback object.
6949 Object* obj = result.GetCallbackObject();
6950 if (obj->IsAccessorPair()) {
6951 return handle(AccessorPair::cast(obj)->GetComponent(component),
6958 return isolate->factory()->undefined_value();
// Linear search for a property whose value equals |value|; returns the
// property's key, or undefined when no fast-mode property matches.
// Dictionary-mode objects delegate to the property dictionary.
6962 Object* JSObject::SlowReverseLookup(Object* value) {
6963 if (HasFastProperties()) {
6964 int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
6965 DescriptorArray* descs = map()->instance_descriptors();
6966 for (int i = 0; i < number_of_own_descriptors; i++) {
6967 if (descs->GetType(i) == FIELD) {
6969 RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i));
// Double fields store unboxed HeapNumbers: compare numerically, not
// by object identity.
6970 if (descs->GetDetails(i).representation().IsDouble()) {
6971 ASSERT(property->IsHeapNumber());
6972 if (value->IsNumber() && property->Number() == value->Number()) {
6973 return descs->GetKey(i);
6975 } else if (property == value) {
6976 return descs->GetKey(i);
// Constants are compared by identity as well.
6978 } else if (descs->GetType(i) == CONSTANT) {
6979 if (descs->GetConstant(i) == value) {
6980 return descs->GetKey(i);
6984 return GetHeap()->undefined_value();
6986 return property_dictionary()->SlowReverseLookup(value);
// Allocates a new map of the same instance type with |instance_size|,
// copying prototype/constructor/bit fields, then resetting per-map state in
// bit_field3 (descriptor ownership/count, enum cache, deprecation, slack
// tracking) so the copy starts clean.
6991 Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
6992 Handle<Map> result = map->GetIsolate()->factory()->NewMap(
6993 map->instance_type(), instance_size);
6994 result->set_prototype(map->prototype());
6995 result->set_constructor(map->constructor());
6996 result->set_bit_field(map->bit_field());
6997 result->set_bit_field2(map->bit_field2());
6998 int new_bit_field3 = map->bit_field3();
// The copy owns its (empty) descriptors and has none yet.
6999 new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
7000 new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
7001 new_bit_field3 = EnumLengthBits::update(new_bit_field3,
7002 kInvalidEnumCacheSentinel);
7003 new_bit_field3 = Deprecated::update(new_bit_field3, false);
// Dictionary maps keep their stability bit as-is; fast maps start stable.
7004 if (!map->is_dictionary_map()) {
7005 new_bit_field3 = IsUnstable::update(new_bit_field3, false);
7007 new_bit_field3 = ConstructionCount::update(new_bit_field3,
7008 JSFunction::kNoSlackTracking);
7009 result->set_bit_field3(new_bit_field3);
// Returns the dictionary-mode (normalized) map for |fast_map|, serving it
// from the per-context NormalizedMapCache when possible, otherwise creating
// and caching a shared normalized copy. Marks the fast map's layout as
// no-longer-leaf either way.
7014 Handle<Map> Map::Normalize(Handle<Map> fast_map,
7015 PropertyNormalizationMode mode) {
7016 ASSERT(!fast_map->is_dictionary_map());
7018 Isolate* isolate = fast_map->GetIsolate();
7019 Handle<NormalizedMapCache> cache(
7020 isolate->context()->native_context()->normalized_map_cache());
7022 Handle<Map> new_map;
7023 if (cache->Get(fast_map, mode).ToHandle(&new_map)) {
7025 if (FLAG_verify_heap) {
7026 new_map->SharedMapVerify();
7029 #ifdef ENABLE_SLOW_ASSERTS
7030 if (FLAG_enable_slow_asserts) {
7031 // The cached map should match newly created normalized map bit-by-bit,
7032 // except for the code cache, which can contain some ics which can be
7033 // applied to the shared map.
7034 Handle<Map> fresh = Map::CopyNormalized(
7035 fast_map, mode, SHARED_NORMALIZED_MAP);
// Compare the regions before and after the code-cache/dependent-code
// words, which are legitimately allowed to differ.
7037 ASSERT(memcmp(fresh->address(),
7039 Map::kCodeCacheOffset) == 0);
7040 STATIC_ASSERT(Map::kDependentCodeOffset ==
7041 Map::kCodeCacheOffset + kPointerSize);
7042 int offset = Map::kDependentCodeOffset + kPointerSize;
7043 ASSERT(memcmp(fresh->address() + offset,
7044 new_map->address() + offset,
7045 Map::kSize - offset) == 0);
// Cache miss: create, cache, and count a new shared normalized map.
7049 new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
7050 cache->Set(fast_map, new_map);
7051 isolate->counters()->normalized_maps()->Increment();
7053 fast_map->NotifyLeafMapLayoutChange();
// Copies |map| into dictionary mode. CLEAR_INOBJECT_PROPERTIES shrinks the
// instance size by dropping in-object property slots; |sharing| marks
// whether the result lives in the shared normalized-map cache.
7058 Handle<Map> Map::CopyNormalized(Handle<Map> map,
7059 PropertyNormalizationMode mode,
7060 NormalizedMapSharingMode sharing) {
7061 int new_instance_size = map->instance_size();
7062 if (mode == CLEAR_INOBJECT_PROPERTIES) {
7063 new_instance_size -= map->inobject_properties() * kPointerSize;
7066 Handle<Map> result = RawCopy(map, new_instance_size);
7068 if (mode != CLEAR_INOBJECT_PROPERTIES) {
7069 result->set_inobject_properties(map->inobject_properties());
7072 result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
7073 result->set_dictionary_map(true);
7074 result->set_migration_target(false);
// Shared maps must satisfy the shared-map invariants under heap verify.
7077 if (FLAG_verify_heap && result->is_shared()) {
7078 result->SharedMapVerify();
// Copies |map| keeping its layout (instance size, in-object and
// pre-allocated fields) but without any descriptors or code cache; the
// source map is notified its layout is no longer leaf.
7086 Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
7087 Handle<Map> result = RawCopy(map, map->instance_size());
7089 // Please note instance_type and instance_size are set when allocated.
7090 result->set_inobject_properties(map->inobject_properties());
7091 result->set_unused_property_fields(map->unused_property_fields());
7093 result->set_pre_allocated_property_fields(
7094 map->pre_allocated_property_fields());
7095 result->set_is_shared(false);
7096 result->ClearCodeCache(map->GetHeap());
7097 map->NotifyLeafMapLayoutChange();
// Adds |descriptor| by appending it to |map|'s own (shared) descriptor
// array and creating a child map that uses the same array, linked to |map|
// via a simple transition. Only valid when |map| owns its descriptors.
7102 Handle<Map> Map::ShareDescriptor(Handle<Map> map,
7103 Handle<DescriptorArray> descriptors,
7104 Descriptor* descriptor) {
7105 // Sanity check. This path is only to be taken if the map owns its descriptor
7106 // array, implying that its NumberOfOwnDescriptors equals the number of
7107 // descriptors in the descriptor array.
7108 ASSERT(map->NumberOfOwnDescriptors() ==
7109 map->instance_descriptors()->number_of_descriptors());
7111 Handle<Map> result = CopyDropDescriptors(map);
7112 Handle<Name> name = descriptor->GetKey();
// Register the transition from |map| to |result| under the new key before
// mutating any state, since CopyInsert may allocate.
7113 Handle<TransitionArray> transitions =
7114 TransitionArray::CopyInsert(map, name, result, SIMPLE_TRANSITION);
7116 // Ensure there's space for the new descriptor in the shared descriptor array.
7117 if (descriptors->NumberOfSlackDescriptors() == 0) {
7118 int old_size = descriptors->number_of_descriptors();
7119 if (old_size == 0) {
7120 descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
// NOTE(review): the else-branch grows the existing array — a slack of 1 for
// small arrays, otherwise half the current size; some brace/else lines are
// elided in this extraction.
7122 EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
7123 descriptors = handle(map->instance_descriptors());
7127 // Commit the state atomically.
7128 DisallowHeapAllocation no_gc;
7130 descriptors->Append(descriptor);
7131 result->SetBackPointer(*map);
7132 result->InitializeDescriptors(*descriptors);
7134 ASSERT(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
// Ownership of the descriptor array moves to the child map.
7136 map->set_transitions(*transitions);
7137 map->set_owns_descriptors(false);
// Creates a copy of |map| that uses |descriptors| as its descriptor array.
// With INSERT_TRANSITION (and room for more transitions) the copy is hooked
// into |map|'s transition tree under |maybe_name|; otherwise the copy is
// free-floating and the descriptors are widened to Tagged/Any.
7143 Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
7144 Handle<DescriptorArray> descriptors,
7145 TransitionFlag flag,
7146 MaybeHandle<Name> maybe_name,
7147 SimpleTransitionFlag simple_flag) {
7148 ASSERT(descriptors->IsSortedNoDuplicates());
7150 Handle<Map> result = CopyDropDescriptors(map);
7151 result->InitializeDescriptors(*descriptors);
7153 if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
// A name is mandatory when inserting a transition (declaration of the local
// Handle<Name> name is elided in this extraction).
7155 CHECK(maybe_name.ToHandle(&name));
7156 Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
7157 map, name, result, simple_flag);
7158 map->set_transitions(*transitions);
7159 result->SetBackPointer(*map);
// Else: without a transition the new map is detached from the tree, so all
// field representations/types are generalized to stay valid for any value.
7161 int length = descriptors->number_of_descriptors();
7162 for (int i = 0; i < length; i++) {
7163 descriptors->SetRepresentation(i, Representation::Tagged());
7164 if (descriptors->GetDetails(i).type() == FIELD) {
7165 descriptors->SetValue(i, HeapType::Any());
7174 // Since this method is used to rewrite an existing transition tree, it can
7175 // always insert transitions without checking.
// Installs |descriptors| on a copy of |map| and exposes descriptors up to
// and including |new_descriptor| (an int parameter whose line is elided in
// this extraction), inserting a simple transition from |map|.
7176 Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
7178 Handle<DescriptorArray> descriptors) {
7179 ASSERT(descriptors->IsSortedNoDuplicates());
7181 Handle<Map> result = CopyDropDescriptors(map);
7183 result->InitializeDescriptors(*descriptors);
7184 result->SetNumberOfOwnDescriptors(new_descriptor + 1);
7186 int unused_property_fields = map->unused_property_fields();
// A new FIELD descriptor consumes one property slot; when none are left,
// account for a fresh batch of JSObject::kFieldsAdded slots.
7187 if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
7188 unused_property_fields = map->unused_property_fields() - 1;
7189 if (unused_property_fields < 0) {
7190 unused_property_fields += JSObject::kFieldsAdded;
7194 result->set_unused_property_fields(unused_property_fields);
// The descriptor array remains owned by the original tree.
7195 result->set_owns_descriptors(false);
7197 Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
7198 Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
7199 map, name, result, SIMPLE_TRANSITION);
7201 map->set_transitions(*transitions);
7202 result->SetBackPointer(*map);
// Copies |map| with a different elements kind. When a transition is to be
// inserted and |map| owns its descriptors, the descriptor array is shared
// with the new map; otherwise a full copy splits the descriptor array.
7208 Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind,
7209 TransitionFlag flag) {
7210 if (flag == INSERT_TRANSITION) {
// An existing elements transition may only be shadowed by a dictionary or
// external-array kind (and then the new kind must be one of those too).
7211 ASSERT(!map->HasElementsTransition() ||
7212 ((map->elements_transition_map()->elements_kind() ==
7213 DICTIONARY_ELEMENTS ||
7214 IsExternalArrayElementsKind(
7215 map->elements_transition_map()->elements_kind())) &&
7216 (kind == DICTIONARY_ELEMENTS ||
7217 IsExternalArrayElementsKind(kind))));
7218 ASSERT(!IsFastElementsKind(kind) ||
7219 IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
7220 ASSERT(kind != map->elements_kind());
// A transition is only inserted when requested and not already present.
7223 bool insert_transition =
7224 flag == INSERT_TRANSITION && !map->HasElementsTransition();
7226 if (insert_transition && map->owns_descriptors()) {
7227 // In case the map owned its own descriptors, share the descriptors and
7228 // transfer ownership to the new map.
7229 Handle<Map> new_map = CopyDropDescriptors(map);
7231 SetElementsTransitionMap(map, new_map);
7233 new_map->set_elements_kind(kind);
7234 new_map->InitializeDescriptors(map->instance_descriptors());
7235 new_map->SetBackPointer(*map);
7236 map->set_owns_descriptors(false);
7240 // In case the map did not own its own descriptors, a split is forced by
7241 // copying the map; creating a new descriptor array cell.
7242 // Create a new free-floating map only if we are not allowed to store it.
7243 Handle<Map> new_map = Copy(map);
7245 new_map->set_elements_kind(kind);
7247 if (insert_transition) {
7248 SetElementsTransitionMap(map, new_map);
7249 new_map->SetBackPointer(*map);
// Creates the observed counterpart of |map|, linked to it via a transition
// keyed on the observed symbol. Descriptor ownership transfers to the new
// map when |map| owned its descriptors.
7256 Handle<Map> Map::CopyForObserved(Handle<Map> map) {
7257 ASSERT(!map->is_observed());
7259 Isolate* isolate = map->GetIsolate();
7261 // In case the map owned its own descriptors, share the descriptors and
7262 // transfer ownership to the new map.
7263 Handle<Map> new_map;
7264 if (map->owns_descriptors()) {
7265 new_map = CopyDropDescriptors(map);
// Else: a full copy splits the descriptor array (else-line elided here).
7267 new_map = Copy(map);
// Record the transition before flipping any observable state.
7270 Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
7271 map, isolate->factory()->observed_symbol(), new_map, FULL_TRANSITION);
7273 map->set_transitions(*transitions);
7275 new_map->set_is_observed();
7277 if (map->owns_descriptors()) {
7278 new_map->InitializeDescriptors(map->instance_descriptors());
7279 map->set_owns_descriptors(false);
7282 new_map->SetBackPointer(*map);
7287 Handle<Map> Map::Copy(Handle<Map> map) {
7288 Handle<DescriptorArray> descriptors(map->instance_descriptors());
7289 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
7290 Handle<DescriptorArray> new_descriptors =
7291 DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors);
7292 return CopyReplaceDescriptors(
7293 map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>());
// Creates a fresh map based on |constructor|'s initial map, enlarged by
// |extra_inobject_properties| in-object slots (clamped so the instance size
// never exceeds JSObject::kMaxInstanceSize).
7297 Handle<Map> Map::Create(Handle<JSFunction> constructor,
7298 int extra_inobject_properties) {
7299 Handle<Map> copy = Copy(handle(constructor->initial_map()));
7301 // Check that we do not overflow the instance size when adding the
7302 // extra inobject properties.
7303 int instance_size_delta = extra_inobject_properties * kPointerSize;
7304 int max_instance_size_delta =
7305 JSObject::kMaxInstanceSize - copy->instance_size();
7306 int max_extra_properties = max_instance_size_delta >> kPointerSizeLog2;
7308 // If the instance size overflows, we allocate as many properties as we can as
7309 // inobject properties.
7310 if (extra_inobject_properties > max_extra_properties) {
7311 instance_size_delta = max_instance_size_delta;
7312 extra_inobject_properties = max_extra_properties;
7315 // Adjust the map with the extra inobject properties.
7316 int inobject_properties =
7317 copy->inobject_properties() + extra_inobject_properties;
7318 copy->set_inobject_properties(inobject_properties);
// All in-object slots start out unused.
7319 copy->set_unused_property_fields(inobject_properties);
7320 copy->set_instance_size(copy->instance_size() + instance_size_delta);
// The visitor id depends on the instance size, so recompute it last.
7321 copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
7326 Handle<Map> Map::CopyForFreeze(Handle<Map> map) {
7327 int num_descriptors = map->NumberOfOwnDescriptors();
7328 Isolate* isolate = map->GetIsolate();
7329 Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
7330 handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN);
7331 Handle<Map> new_map = CopyReplaceDescriptors(
7332 map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol());
7334 new_map->set_is_extensible(false);
7335 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
7340 Handle<Map> Map::CopyAddDescriptor(Handle<Map> map,
7341 Descriptor* descriptor,
7342 TransitionFlag flag) {
7343 Handle<DescriptorArray> descriptors(map->instance_descriptors());
7345 // Ensure the key is unique.
7346 descriptor->KeyToUniqueName();
7348 if (flag == INSERT_TRANSITION &&
7349 map->owns_descriptors() &&
7350 map->CanHaveMoreTransitions()) {
7351 return ShareDescriptor(map, descriptors, descriptor);
7354 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7355 descriptors, map->NumberOfOwnDescriptors(), 1);
7356 new_descriptors->Append(descriptor);
7358 return CopyReplaceDescriptors(
7359 map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION);
7363 Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map,
7364 Descriptor* descriptor,
7365 TransitionFlag flag) {
7366 Handle<DescriptorArray> old_descriptors(map->instance_descriptors());
7368 // Ensure the key is unique.
7369 descriptor->KeyToUniqueName();
7371 // We replace the key if it is already present.
7372 int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
7373 if (index != DescriptorArray::kNotFound) {
7374 return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
7376 return CopyAddDescriptor(map, descriptor, flag);
7380 Handle<DescriptorArray> DescriptorArray::CopyUpTo(
7381 Handle<DescriptorArray> desc,
7382 int enumeration_index,
7384 return DescriptorArray::CopyUpToAddAttributes(
7385 desc, enumeration_index, NONE, slack);
// Copies the first |enumeration_index| descriptors into a new array with
// |slack| spare slots (the int slack parameter line is elided in this
// extraction), OR-ing |attributes| into each copied descriptor's details.
7389 Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
7390 Handle<DescriptorArray> desc,
7391 int enumeration_index,
7392 PropertyAttributes attributes,
// Nothing to copy and no slack requested: reuse the canonical empty array.
7394 if (enumeration_index + slack == 0) {
7395 return desc->GetIsolate()->factory()->empty_descriptor_array();
7398 int size = enumeration_index;
7400 Handle<DescriptorArray> descriptors =
7401 DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
7402 DescriptorArray::WhitenessWitness witness(*descriptors);
// Slow path: rebuild each descriptor with the extra attributes applied.
7404 if (attributes != NONE) {
7405 for (int i = 0; i < size; ++i) {
7406 Object* value = desc->GetValue(i);
7407 PropertyDetails details = desc->GetDetails(i);
7408 int mask = DONT_DELETE | DONT_ENUM;
7409 // READ_ONLY is an invalid attribute for JS setters/getters.
7410 if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
// For non-accessor properties READ_ONLY is also allowed in the mask
// (the mask |= READ_ONLY line is elided in this extraction).
7413 details = details.CopyAddAttributes(
7414 static_cast<PropertyAttributes>(attributes & mask));
7415 Descriptor inner_desc(handle(desc->GetKey(i)),
7416 handle(value, desc->GetIsolate()),
7418 descriptors->Set(i, &inner_desc, witness);
// Fast path (else-branch): raw copy when no attributes are added.
7421 for (int i = 0; i < size; ++i) {
7422 descriptors->CopyFrom(i, *desc, witness);
// A partial copy may violate sort order, so re-sort in that case.
7426 if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();
// Replaces the descriptor at |insertion_index| (which must carry the same
// key as |descriptor|) in a copy of |map|'s descriptor array.
7432 Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
7433 Handle<DescriptorArray> descriptors,
7434 Descriptor* descriptor,
7435 int insertion_index,
7436 TransitionFlag flag) {
7437 // Ensure the key is unique.
7438 descriptor->KeyToUniqueName();
7440 Handle<Name> key = descriptor->GetKey();
7441 ASSERT(*key == descriptors->GetKey(insertion_index));
7443 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7444 descriptors, map->NumberOfOwnDescriptors());
7446 new_descriptors->Replace(insertion_index, descriptor);
// Replacing the last descriptor qualifies as a simple transition; the two
// ternary result lines are elided in this extraction.
7448 SimpleTransitionFlag simple_flag =
7449 (insertion_index == descriptors->number_of_descriptors() - 1)
7452 return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
// Stores |code| under |name| in |map|'s code cache, lazily allocating the
// CodeCache struct on first use (the Handle<Name> name parameter line is
// elided in this extraction).
7456 void Map::UpdateCodeCache(Handle<Map> map,
7458 Handle<Code> code) {
7459 Isolate* isolate = map->GetIsolate();
7460 HandleScope scope(isolate);
7461 // Allocate the code cache if not present.
// An empty cache is represented by a FixedArray placeholder.
7462 if (map->code_cache()->IsFixedArray()) {
7463 Handle<Object> result = isolate->factory()->NewCodeCache();
7464 map->set_code_cache(*result);
7467 // Update the code cache.
7468 Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
7469 CodeCache::Update(code_cache, name, code);
7473 Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
7474 // Do a lookup if a code cache exists.
7475 if (!code_cache()->IsFixedArray()) {
7476 return CodeCache::cast(code_cache())->Lookup(name, flags);
7478 return GetHeap()->undefined_value();
7483 int Map::IndexInCodeCache(Object* name, Code* code) {
7484 // Get the internal index if a code cache exists.
7485 if (!code_cache()->IsFixedArray()) {
7486 return CodeCache::cast(code_cache())->GetIndex(name, code);
7492 void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
7493 // No GC is supposed to happen between a call to IndexInCodeCache and
7494 // RemoveFromCodeCache so the code cache must be there.
7495 ASSERT(!code_cache()->IsFixedArray());
7496 CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
7500 // An iterator over all map transitions in an descriptor array, reusing the
7501 // constructor field of the map while it is running. Negative values in
7502 // the constructor field indicate an active map transition iteration. The
7503 // original constructor is restored after iterating over all entries.
7504 class IntrusiveMapTransitionIterator {
// NOTE(review): access-specifier lines (public:/private:) are elided in
// this extraction.
7506 IntrusiveMapTransitionIterator(
7507 Map* map, TransitionArray* transition_array, Object* constructor)
7509 transition_array_(transition_array),
7510 constructor_(constructor) { }
// Marks iteration as started by storing Smi(-1) in the constructor slot;
// a no-op (checked by the ASSERT) when iteration is already in progress.
7512 void StartIfNotStarted() {
7513 ASSERT(!(*IteratorField())->IsSmi() || IsIterating());
7514 if (!(*IteratorField())->IsSmi()) {
7515 ASSERT(*IteratorField() == constructor_);
7516 *IteratorField() = Smi::FromInt(-1);
// A negative Smi in the constructor slot encodes an active map-transition
// iteration (index = -value - 1).
7520 bool IsIterating() {
7521 return (*IteratorField())->IsSmi() &&
7522 Smi::cast(*IteratorField())->value() < 0;
// Next(): returns the next transition target and advances the encoded
// index, or restores the original constructor when exhausted.
7526 ASSERT(IsIterating());
7527 int value = Smi::cast(*IteratorField())->value();
7528 int index = -value - 1;
7529 int number_of_transitions = transition_array_->number_of_transitions();
7530 while (index < number_of_transitions) {
7531 *IteratorField() = Smi::FromInt(value - 1);
7532 return transition_array_->GetTarget(index);
// Exhausted: put the real constructor back and signal the end.
7535 *IteratorField() = constructor_;
// The constructor slot of |map_| doubles as the iteration cursor.
7540 Object** IteratorField() {
7541 return HeapObject::RawField(map_, Map::kConstructorOffset);
7545 TransitionArray* transition_array_;
7546 Object* constructor_;
7550 // An iterator over all prototype transitions, reusing the constructor field
7551 // of the map while it is running. Positive values in the constructor field
7552 // indicate an active prototype transition iteration. The original constructor
7553 // is restored after iterating over all entries.
7554 class IntrusivePrototypeTransitionIterator {
7556 IntrusivePrototypeTransitionIterator(
7557 Map* map, HeapObject* proto_trans, Object* constructor)
7558 : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }
// Marks iteration as started by storing Smi(0) in the constructor slot.
7560 void StartIfNotStarted() {
7561 if (!(*IteratorField())->IsSmi()) {
7562 ASSERT(*IteratorField() == constructor_);
7563 *IteratorField() = Smi::FromInt(0);
// A non-negative Smi encodes an active prototype-transition iteration,
// disambiguating it from the map-transition iterator's negative encoding.
7567 bool IsIterating() {
7568 return (*IteratorField())->IsSmi() &&
7569 Smi::cast(*IteratorField())->value() >= 0;
// Next(): returns the next transition target and bumps the stored index,
// or restores the original constructor when exhausted.
7573 ASSERT(IsIterating());
7574 int transitionNumber = Smi::cast(*IteratorField())->value();
7575 if (transitionNumber < NumberOfTransitions()) {
7576 *IteratorField() = Smi::FromInt(transitionNumber + 1);
7577 return GetTransition(transitionNumber);
7579 *IteratorField() = constructor_;
// The constructor slot of |map_| doubles as the iteration cursor.
7584 Object** IteratorField() {
7585 return HeapObject::RawField(map_, Map::kConstructorOffset);
// Reads the entry count stored in the prototype-transition FixedArray.
7588 int NumberOfTransitions() {
7589 FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7590 Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
7591 return Smi::cast(num)->value();
7594 Map* GetTransition(int transitionNumber) {
7595 FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7596 return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
// Entries live after a fixed header, kProtoTransitionElementsPerEntry
// slots apart.
7599 int IndexFor(int transitionNumber) {
7600 return Map::kProtoTransitionHeaderSize +
7601 Map::kProtoTransitionMapOffset +
7602 transitionNumber * Map::kProtoTransitionElementsPerEntry;
7606 HeapObject* proto_trans_;
7607 Object* constructor_;
7611 // To traverse the transition tree iteratively, we have to store two kinds of
7612 // information in a map: The parent map in the traversal and which children of a
7613 // node have already been visited. To do this without additional memory, we
7614 // temporarily reuse two fields with known values:
7616 // (1) The map of the map temporarily holds the parent, and is restored to the
7617 // meta map afterwards.
7619 // (2) The info which children have already been visited depends on which part
7620 // of the map we currently iterate. We use the constructor field of the
7621 // map to store the current index. We can do that because the constructor
7622 // is the same for all involved maps.
7624 // (a) If we currently follow normal map transitions, we temporarily store
7625 // the current index in the constructor field, and restore it to the
7626 // original constructor afterwards. Note that a single descriptor can
7627 // have 0, 1, or 2 transitions.
7629 // (b) If we currently follow prototype transitions, we temporarily store
7630 // the current index in the constructor field, and restore it to the
7631 // original constructor afterwards.
7633 // Note that the child iterator is just a concatenation of two iterators: One
7634 // iterating over map transitions and one iterating over prototype transitions.
// Helper view over Map that supports iterative (stack-free) traversal of
// the transition tree via pointer reversal; see the comment block above.
7635 class TraversableMap : public Map {
7637 // Record the parent in the traversal within this map. Note that this destroys
// the map's own map pointer until GetAndResetParent() restores it.
7639 void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }
7641 // Reset the current map's map, returning the parent previously stored in it.
7642 TraversableMap* GetAndResetParent() {
7643 TraversableMap* old_parent = static_cast<TraversableMap*>(map());
// Restore the meta map that every Map normally has as its map.
7644 set_map_no_write_barrier(GetHeap()->meta_map());
7648 // If we have an unvisited child map, return that one and advance. If we have
7649 // none, return NULL and restore the overwritten constructor field.
7650 TraversableMap* ChildIteratorNext(Object* constructor) {
7651 if (!HasTransitionArray()) return NULL;
7653 TransitionArray* transition_array = transitions();
// Prototype transitions are visited first, then regular map transitions;
// both iterators keep their cursor in this map's constructor slot.
7654 if (transition_array->HasPrototypeTransitions()) {
7655 HeapObject* proto_transitions =
7656 transition_array->GetPrototypeTransitions();
7657 IntrusivePrototypeTransitionIterator proto_iterator(this,
7660 proto_iterator.StartIfNotStarted();
7661 if (proto_iterator.IsIterating()) {
7662 Map* next = proto_iterator.Next();
7663 if (next != NULL) return static_cast<TraversableMap*>(next);
7667 IntrusiveMapTransitionIterator transition_iterator(this,
7670 transition_iterator.StartIfNotStarted();
7671 if (transition_iterator.IsIterating()) {
7672 Map* next = transition_iterator.Next();
7673 if (next != NULL) return static_cast<TraversableMap*>(next);
7681 // Traverse the transition tree in postorder without using the C++ stack by
7682 // doing pointer reversal.
7683 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
7684 // Make sure that we do not allocate in the callback.
7685 DisallowHeapAllocation no_allocation;
7687 TraversableMap* current = static_cast<TraversableMap*>(this);
7688 // Get the root constructor here to restore it later when finished iterating
// over the maps (the shared constructor is what the intrusive iterators
// temporarily overwrite with their Smi cursors).
7690 Object* root_constructor = constructor();
// Loop (header line elided in this extraction): descend into unvisited
// children, storing the parent in the child's map slot ...
7692 TraversableMap* child = current->ChildIteratorNext(root_constructor);
7693 if (child != NULL) {
7694 child->SetParent(current);
// ... and when a node has no unvisited children, invoke the callback
// (postorder) and climb back to the restored parent, stopping at the root.
7697 TraversableMap* parent = current->GetAndResetParent();
7698 callback(current, data);
7699 if (current == this) break;
// Stores |code| under |name|: NORMAL-kind code goes into a lazily allocated
// hash table, all other kinds into the linear default cache.
7706 void CodeCache::Update(
7707 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7708 // The number of monomorphic stubs for normal load/store/call IC's can grow to
7709 // a large number and therefore they need to go into a hash table. They are
7710 // used to load global properties from cells.
7711 if (code->type() == Code::NORMAL) {
7712 // Make sure that a hash table is allocated for the normal load code cache.
7713 if (code_cache->normal_type_cache()->IsUndefined()) {
7714 Handle<Object> result =
7715 CodeCacheHashTable::New(code_cache->GetIsolate(),
7716 CodeCacheHashTable::kInitialSize);
7717 code_cache->set_normal_type_cache(*result);
7719 UpdateNormalTypeCache(code_cache, name, code);
// Else-branch (brace line elided): non-NORMAL code uses the default cache.
7721 ASSERT(code_cache->default_cache()->IsFixedArray());
7722 UpdateDefaultCache(code_cache, name, code);
// Inserts |name|→|code| into the linear default cache, reusing a deleted
// (null) slot or replacing a same-name/same-stripped-flags entry when
// possible, and growing the backing FixedArray otherwise.
7727 void CodeCache::UpdateDefaultCache(
7728 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7729 // When updating the default code cache we disregard the type encoded in the
7730 // flags. This allows call constant stubs to overwrite call field
// stubs (continuation line elided in this extraction).
7732 Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());
7734 // First check whether we can update existing code cache without
// growing it (continuation line elided).
7736 Handle<FixedArray> cache = handle(code_cache->default_cache());
7737 int length = cache->length();
// The in-place scan must not allocate; early returns below leave the
// cache array unchanged in size.
7739 DisallowHeapAllocation no_alloc;
7740 int deleted_index = -1;
7741 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7742 Object* key = cache->get(i);
// Null marks a deleted entry: remember the first one for reuse.
7743 if (key->IsNull()) {
7744 if (deleted_index < 0) deleted_index = i;
// Undefined marks the end of the used region: store here (or in the
// remembered deleted slot) and return.
7747 if (key->IsUndefined()) {
7748 if (deleted_index >= 0) i = deleted_index;
7749 cache->set(i + kCodeCacheEntryNameOffset, *name);
7750 cache->set(i + kCodeCacheEntryCodeOffset, *code);
// Same name with matching type-stripped flags: overwrite in place.
7753 if (name->Equals(Name::cast(key))) {
7755 Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
7756 if (Code::RemoveTypeFromFlags(found) == flags) {
7757 cache->set(i + kCodeCacheEntryCodeOffset, *code);
7763 // Reached the end of the code cache. If there were deleted
7764 // elements, reuse the space for the first of them.
7765 if (deleted_index >= 0) {
7766 cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
7767 cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);
7772 // Extend the code cache with some new entries (at least one). Must be a
7773 // multiple of the entry size.
7774 int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
7775 new_length = new_length - new_length % kCodeCacheEntrySize;
7776 ASSERT((new_length % kCodeCacheEntrySize) == 0);
7777 cache = FixedArray::CopySize(cache, new_length);
7779 // Add the (name, code) pair to the new cache.
7780 cache->set(length + kCodeCacheEntryNameOffset, *name);
7781 cache->set(length + kCodeCacheEntryCodeOffset, *code);
7782 code_cache->set_default_cache(*cache);
7786 void CodeCache::UpdateNormalTypeCache(
7787 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7788 // Adding a new entry can cause a new cache to be allocated.
7789 Handle<CodeCacheHashTable> cache(
7790 CodeCacheHashTable::cast(code_cache->normal_type_cache()));
7791 Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
7792 code_cache->set_normal_type_cache(*new_cache);
7796 Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
7797 Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
7798 if (result->IsCode()) {
7799 if (Code::cast(result)->flags() == flags) return result;
7800 return GetHeap()->undefined_value();
7802 return LookupNormalTypeCache(name, flags);
7806 Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
7807 FixedArray* cache = default_cache();
7808 int length = cache->length();
7809 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7810 Object* key = cache->get(i + kCodeCacheEntryNameOffset);
7811 // Skip deleted elements.
7812 if (key->IsNull()) continue;
7813 if (key->IsUndefined()) return key;
7814 if (name->Equals(Name::cast(key))) {
7815 Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
7816 if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
7821 return GetHeap()->undefined_value();
7825 Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
7826 if (!normal_type_cache()->IsUndefined()) {
7827 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7828 return cache->Lookup(name, flags);
7830 return GetHeap()->undefined_value();
7835 int CodeCache::GetIndex(Object* name, Code* code) {
7836 if (code->type() == Code::NORMAL) {
7837 if (normal_type_cache()->IsUndefined()) return -1;
7838 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7839 return cache->GetIndex(Name::cast(name), code->flags());
7842 FixedArray* array = default_cache();
7843 int len = array->length();
7844 for (int i = 0; i < len; i += kCodeCacheEntrySize) {
7845 if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
// Removes the entry identified by |index| (a token produced by GetIndex):
// a hash-table entry for NORMAL-kind code, otherwise the code-slot index in
// the default cache.
7851 void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
7852 if (code->type() == Code::NORMAL) {
7853 ASSERT(!normal_type_cache()->IsUndefined());
7854 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7855 ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
7856 cache->RemoveByIndex(index);
// Else-branch (brace line elided): clear the default-cache entry.
7858 FixedArray* array = default_cache();
7859 ASSERT(array->length() >= index && array->get(index)->IsCode());
7860 // Use null instead of undefined for deleted elements to distinguish
7861 // deleted elements from unused elements. This distinction is used
7862 // when looking up in the cache and when updating the cache.
7863 ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
// |index| points at the code slot, so the name slot is right before it.
7864 array->set_null(index - 1); // Name.
7865 array->set_null(index); // Code.
7870 // The key in the code cache hash table consists of the property name and the
7871 // code object. The actual match is on the name and the code flags. If a key
7872 // is created using the flags and not a code object it can only be used for
7873 // lookup not to create a new entry.
7874 class CodeCacheHashTableKey : public HashTableKey {
// Lookup-only constructor: carries flags but no code object.
7876 CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
7877 : name_(name), flags_(flags), code_() { }
// Insertion constructor: the flags are derived from the code itself.
7879 CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
7880 : name_(name), flags_(code->flags()), code_(code) { }
// Stored keys are (name, code) FixedArray pairs; match on name + flags.
7882 bool IsMatch(Object* other) V8_OVERRIDE {
7883 if (!other->IsFixedArray()) return false;
7884 FixedArray* pair = FixedArray::cast(other);
7885 Name* name = Name::cast(pair->get(0));
7886 Code::Flags flags = Code::cast(pair->get(1))->flags();
// Flags mismatch: not a match (return-false line elided here).
7887 if (flags != flags_) {
7890 return name_->Equals(name);
// Hash combines the name hash with the raw flags bits.
7893 static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
7894 return name->Hash() ^ flags;
7897 uint32_t Hash() V8_OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }
7899 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
7900 FixedArray* pair = FixedArray::cast(obj);
7901 Name* name = Name::cast(pair->get(0));
7902 Code* code = Code::cast(pair->get(1));
7903 return NameFlagsHashHelper(name, code->flags());
// Materializes the stored key: a fresh (name, code) pair. Requires the
// insertion constructor (code_ must be present).
7906 MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
7907 Handle<Code> code = code_.ToHandleChecked();
7908 Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
7909 pair->set(0, *name_);
7910 pair->set(1, *code);
7917 // TODO(jkummerow): We should be able to get by without this.
7918 MaybeHandle<Code> code_;
7922 Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
7923 DisallowHeapAllocation no_alloc;
7924 CodeCacheHashTableKey key(handle(name), flags);
7925 int entry = FindEntry(&key);
7926 if (entry == kNotFound) return GetHeap()->undefined_value();
7927 return get(EntryToIndex(entry) + 1);
7931 Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
7932 Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
7933 CodeCacheHashTableKey key(name, code);
7935 Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);
7937 int entry = new_cache->FindInsertionEntry(key.Hash());
7938 Handle<Object> k = key.AsHandle(cache->GetIsolate());
7940 new_cache->set(EntryToIndex(entry), *k);
7941 new_cache->set(EntryToIndex(entry) + 1, *code);
7942 new_cache->ElementAdded();
7947 int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
7948 DisallowHeapAllocation no_alloc;
7949 CodeCacheHashTableKey key(handle(name), flags);
7950 int entry = FindEntry(&key);
7951 return (entry == kNotFound) ? -1 : entry;
7955 void CodeCacheHashTable::RemoveByIndex(int index) {
7957 Heap* heap = GetHeap();
7958 set(EntryToIndex(index), heap->the_hole_value());
7959 set(EntryToIndex(index) + 1, heap->the_hole_value());
// Stores |code| under the (maps, flags) key, lazily allocating the backing
// hash table and writing back the possibly reallocated table.
7964 void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
7965 MapHandleList* maps,
7967 Handle<Code> code) {
7968 Isolate* isolate = code_cache->GetIsolate();
7969 if (code_cache->cache()->IsUndefined()) {
7970 Handle<PolymorphicCodeCacheHashTable> result =
7971 PolymorphicCodeCacheHashTable::New(
7973 PolymorphicCodeCacheHashTable::kInitialSize);
7974 code_cache->set_cache(*result);
// Else-branch (brace elided): with an existing table, the entry must not
// already be present.
7976 // This entry shouldn't be contained in the cache yet.
7977 ASSERT(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
7978 ->Lookup(maps, flags)->IsUndefined());
7980 Handle<PolymorphicCodeCacheHashTable> hash_table =
7981 handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
// Put may grow the table, so re-install its return value.
7982 Handle<PolymorphicCodeCacheHashTable> new_cache =
7983 PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
7984 code_cache->set_cache(*new_cache);
7988 Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
7989 Code::Flags flags) {
7990 if (!cache()->IsUndefined()) {
7991 PolymorphicCodeCacheHashTable* hash_table =
7992 PolymorphicCodeCacheHashTable::cast(cache());
7993 return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
7995 return GetIsolate()->factory()->undefined_value();
8000 // Despite their name, object of this class are not stored in the actual
8001 // hash table; instead they're temporarily used for lookups. It is therefore
8002 // safe to have a weak (non-owning) pointer to a MapList as a member field.
8003 class PolymorphicCodeCacheHashTableKey : public HashTableKey {
8005 // Callers must ensure that |maps| outlives the newly constructed object.
8006 PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
8008 code_flags_(code_flags) {}
// Stored keys are FixedArrays of [flags, map...]; decode and compare.
8010 bool IsMatch(Object* other) V8_OVERRIDE {
8011 MapHandleList other_maps(kDefaultListAllocationSize);
8013 FromObject(other, &other_flags, &other_maps);
8014 if (code_flags_ != other_flags) return false;
8015 if (maps_->length() != other_maps.length()) return false;
8016 // Compare just the hashes first because it's faster.
8017 int this_hash = MapsHashHelper(maps_, code_flags_);
8018 int other_hash = MapsHashHelper(&other_maps, other_flags);
8019 if (this_hash != other_hash) return false;
8021 // Full comparison: for each map in maps_, look for an equivalent map in
8022 // other_maps. This implementation is slow, but probably good enough for
8023 // now because the lists are short (<= 4 elements currently).
8024 for (int i = 0; i < maps_->length(); ++i) {
8025 bool match_found = false;
8026 for (int j = 0; j < other_maps.length(); ++j) {
8027 if (*(maps_->at(i)) == *(other_maps.at(j))) {
8032 if (!match_found) return false;
// Order-insensitive hash: XOR of map hashes, seeded with the flags.
8037 static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
8038 uint32_t hash = code_flags;
8039 for (int i = 0; i < maps->length(); ++i) {
8040 hash ^= maps->at(i)->Hash();
8045 uint32_t Hash() V8_OVERRIDE {
8046 return MapsHashHelper(maps_, code_flags_);
8049 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
8050 MapHandleList other_maps(kDefaultListAllocationSize);
8052 FromObject(obj, &other_flags, &other_maps);
8053 return MapsHashHelper(&other_maps, other_flags);
8056 MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
8057 // The maps in |maps_| must be copied to a newly allocated FixedArray,
8058 // both because the referenced MapList is short-lived, and because C++
8059 // objects can't be stored in the heap anyway.
8060 Handle<FixedArray> list =
8061 isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
8062 list->set(0, Smi::FromInt(code_flags_));
8063 for (int i = 0; i < maps_->length(); ++i) {
8064 list->set(i + 1, *maps_->at(i));
// Decodes a stored [flags, map...] FixedArray into flags + a map list.
8070 static MapHandleList* FromObject(Object* obj,
8072 MapHandleList* maps) {
8073 FixedArray* list = FixedArray::cast(obj);
8075 *code_flags = Smi::cast(list->get(0))->value();
8076 for (int i = 1; i < list->length(); ++i) {
8077 maps->Add(Handle<Map>(Map::cast(list->get(i))));
8082 MapHandleList* maps_; // weak.
8084 static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
// Returns the cached Code for (maps, code kind/flags), or undefined when
// no entry exists. Allocation-free: the key lives on the C++ stack.
8088 Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
8090 DisallowHeapAllocation no_alloc;
8091 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
8092 int entry = FindEntry(&key);
8093 if (entry == kNotFound) return GetHeap()->undefined_value();
// Value is stored in the slot following the packed key.
8094 return get(EntryToIndex(entry) + 1);
// Inserts (key, code) into the cache, growing the table when necessary,
// and returns the (possibly reallocated) table handle.
8098 Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
8099 Handle<PolymorphicCodeCacheHashTable> hash_table,
8100 MapHandleList* maps,
8102 Handle<Code> code) {
8103 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
8104 Handle<PolymorphicCodeCacheHashTable> cache =
8105 EnsureCapacity(hash_table, 1, &key);
8106 int entry = cache->FindInsertionEntry(key.Hash());
// AsHandle allocates, so it must happen before the raw set() calls below.
8108 Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());
8109 cache->set(EntryToIndex(entry), *obj);
8110 cache->set(EntryToIndex(entry) + 1, *code);
8111 cache->ElementAdded();
// Shrinks this array in place by right-trimming its tail; never grows.
8116 void FixedArray::Shrink(int new_length) {
8117 ASSERT(0 <= new_length && new_length <= length());
8118 if (new_length < length()) {
8119 RightTrimFixedArray<Heap::FROM_MUTATOR>(
8120 GetHeap(), this, length() - new_length);
// Returns |content| extended with the keys of |array|'s elements, using the
// array's elements accessor. May fail (MaybeHandle) on exception.
8125 MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
8126 Handle<FixedArray> content,
8127 Handle<JSObject> array) {
8128 ASSERT(array->IsJSArray() || array->HasSloppyArgumentsElements());
8129 ElementsAccessor* accessor = array->GetElementsAccessor();
8130 Handle<FixedArray> result;
8131 ASSIGN_RETURN_ON_EXCEPTION(
8132 array->GetIsolate(), result,
8133 accessor->AddElementsToFixedArray(array, array, content),
// Slow-mode sanity check: keys must be numbers or names.
8136 #ifdef ENABLE_SLOW_ASSERTS
8137 if (FLAG_enable_slow_asserts) {
8138 DisallowHeapAllocation no_allocation;
8139 for (int i = 0; i < result->length(); i++) {
8140 Object* current = result->get(i);
8141 ASSERT(current->IsNumber() || current->IsName());
// Returns the union of the keys in |first| and |second|; receiver/holder are
// null since no JS object is involved in this variant.
8149 MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
8150 Handle<FixedArray> second) {
8151 ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
8152 Handle<FixedArray> result;
8153 ASSIGN_RETURN_ON_EXCEPTION(
8154 first->GetIsolate(), result,
8155 accessor->AddElementsToFixedArray(
8156 Handle<Object>::null(), // receiver
8157 Handle<JSObject>::null(), // holder
8159 Handle<FixedArrayBase>::cast(second)),
8162 #ifdef ENABLE_SLOW_ASSERTS
8163 if (FLAG_enable_slow_asserts) {
8164 DisallowHeapAllocation no_allocation;
8165 for (int i = 0; i < result->length(); i++) {
8166 Object* current = result->get(i);
8167 ASSERT(current->IsNumber() || current->IsName());
// Allocates a new array of |new_length| and copies min(old length,
// new_length) elements over, preserving the source array's map.
8175 Handle<FixedArray> FixedArray::CopySize(
8176 Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
8177 Isolate* isolate = array->GetIsolate();
8178 if (new_length == 0) return isolate->factory()->empty_fixed_array();
8179 Handle<FixedArray> result =
8180 isolate->factory()->NewFixedArray(new_length, pretenure);
8182 DisallowHeapAllocation no_gc;
8183 int len = array->length();
8184 if (new_length < len) len = new_length;
8185 // We are taking the map from the old fixed array so the map is sure to
8186 // be an immortal immutable object.
8187 result->set_map_no_write_barrier(array->map());
8188 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
8189 for (int i = 0; i < len; i++) {
8190 result->set(i, array->get(i), mode);
// Copies |len| elements starting at |pos| into |dest| starting at |dest_pos|.
8196 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
8197 DisallowHeapAllocation no_gc;
8198 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
8199 for (int index = 0; index < len; index++) {
8200 dest->set(dest_pos+index, get(pos+index), mode);
// Element-wise identity comparison (pointer/Smi equality per slot).
8206 bool FixedArray::IsEqualTo(FixedArray* other) {
8207 if (length() != other->length()) return false;
8208 for (int i = 0 ; i < length(); ++i) {
8209 if (get(i) != other->get(i)) return false;
// Allocates a DescriptorArray with room for |number_of_descriptors| plus
// |slack| spare slots; the length and enum-cache slots are initialized here.
8216 Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
8217 int number_of_descriptors,
8219 ASSERT(0 <= number_of_descriptors);
8220 Factory* factory = isolate->factory();
8221 // Do not use DescriptorArray::cast on incomplete object.
8222 int size = number_of_descriptors + slack;
8223 if (size == 0) return factory->empty_descriptor_array();
8224 // Allocate the array of keys.
8225 Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));
8227 result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
// Smi zero in the enum-cache slot means "no enum cache".
8228 result->set(kEnumCacheIndex, Smi::FromInt(0));
8229 return Handle<DescriptorArray>::cast(result);
// Drops the enum cache by restoring the Smi-zero sentinel.
8233 void DescriptorArray::ClearEnumCache() {
8234 set(kEnumCacheIndex, Smi::FromInt(0));
// Replaces descriptor |index| in place, keeping its sorted-key index.
8238 void DescriptorArray::Replace(int index, Descriptor* descriptor) {
8239 descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
8240 Set(index, descriptor);
// Installs an enum cache via a bridge FixedArray that holds the cache of
// keys and (optionally) a cache of property indices.
8244 void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
8245 FixedArray* new_cache,
8246 Object* new_index_cache) {
8247 ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
8248 ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
8250 ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
8251 FixedArray::cast(bridge_storage)->
8252 set(kEnumCacheBridgeCacheIndex, new_cache);
8253 FixedArray::cast(bridge_storage)->
8254 set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
8255 set(kEnumCacheIndex, bridge_storage);
// Copies descriptor |index| from |src| into this array (key, value, details).
8259 void DescriptorArray::CopyFrom(int index,
8260 DescriptorArray* src,
8261 const WhitenessWitness& witness) {
8262 Object* value = src->GetValue(index);
8263 PropertyDetails details = src->GetDetails(index);
8264 Descriptor desc(handle(src->GetKey(index)),
8265 handle(value, src->GetIsolate()),
8267 Set(index, &desc, witness);
8271 // We need the whiteness witness since sort will reshuffle the entries in the
8272 // descriptor array. If the descriptor array were to be black, the shuffling
8273 // would move a slot that was already recorded as pointing into an evacuation
8274 // candidate. This would result in missing updates upon evacuation.
// Sorts the sorted-key index table by key hash using in-place heap sort
// (build max-heap, then repeatedly extract the max).
8275 void DescriptorArray::Sort() {
8276 // In-place heap sort.
8277 int len = number_of_descriptors();
8278 // Reset sorting since the descriptor array might contain invalid pointers.
8279 for (int i = 0; i < len; ++i) SetSortedKey(i, i);
8280 // Bottom-up max-heap construction.
8281 // Index of the last node with children
8282 const int max_parent_index = (len / 2) - 1;
8283 for (int i = max_parent_index; i >= 0; --i) {
8284 int parent_index = i;
8285 const uint32_t parent_hash = GetSortedKey(i)->Hash();
8286 while (parent_index <= max_parent_index) {
8287 int child_index = 2 * parent_index + 1;
8288 uint32_t child_hash = GetSortedKey(child_index)->Hash();
8289 if (child_index + 1 < len) {
8290 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8291 if (right_child_hash > child_hash) {
8293 child_hash = right_child_hash;
8296 if (child_hash <= parent_hash) break;
8297 SwapSortedKeys(parent_index, child_index);
8298 // Now element at child_index could be < its children.
8299 parent_index = child_index; // parent_hash remains correct.
8303 // Extract elements and create sorted array.
8304 for (int i = len - 1; i > 0; --i) {
8305 // Put max element at the back of the array.
8306 SwapSortedKeys(0, i);
8307 // Shift down the new top element.
8308 int parent_index = 0;
8309 const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
8310 const int max_parent_index = (i / 2) - 1;
8311 while (parent_index <= max_parent_index) {
8312 int child_index = parent_index * 2 + 1;
8313 uint32_t child_hash = GetSortedKey(child_index)->Hash();
8314 if (child_index + 1 < i) {
8315 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8316 if (right_child_hash > child_hash) {
8318 child_hash = right_child_hash;
8321 if (child_hash <= parent_hash) break;
8322 SwapSortedKeys(parent_index, child_index);
8323 parent_index = child_index;
8326 ASSERT(IsSortedNoDuplicates());
// Shallow copy of an accessor pair (getter and setter references).
8330 Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
8331 Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
8332 copy->set_getter(pair->getter());
8333 copy->set_setter(pair->setter());
// Returns the requested accessor, mapping the "absent" hole sentinel to
// undefined for callers.
8338 Object* AccessorPair::GetComponent(AccessorComponent component) {
8339 Object* accessor = get(component);
8340 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
// Allocates deoptimization input data with room for |deopt_entry_count|
// entries (layout computed by LengthFor). Count must be positive.
8344 Handle<DeoptimizationInputData> DeoptimizationInputData::New(
8346 int deopt_entry_count,
8347 PretenureFlag pretenure) {
8348 ASSERT(deopt_entry_count > 0);
8349 return Handle<DeoptimizationInputData>::cast(
8350 isolate->factory()->NewFixedArray(
8351 LengthFor(deopt_entry_count), pretenure));
// Allocates deoptimization output data; zero deopt points yields the
// canonical empty fixed array instead of an allocation.
8355 Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
8357 int number_of_deopt_points,
8358 PretenureFlag pretenure) {
8359 Handle<FixedArray> result;
8360 if (number_of_deopt_points == 0) {
8361 result = isolate->factory()->empty_fixed_array();
8363 result = isolate->factory()->NewFixedArray(
8364 LengthOfFixedArray(number_of_deopt_points), pretenure);
8366 return Handle<DeoptimizationOutputData>::cast(result);
// Slot-wise identity comparison of two descriptor arrays; empty arrays are
// handled first because the canonical empty array has a special layout.
8371 bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
8372 if (IsEmpty()) return other->IsEmpty();
8373 if (other->IsEmpty()) return false;
8374 if (length() != other->length()) return false;
8375 for (int i = 0; i < length(); ++i) {
8376 if (get(i) != other->get(i)) return false;
// Cheap sanity check used by robust traversal: the string must at least
// live inside this isolate's heap.
8383 bool String::LooksValid() {
8384 if (!GetIsolate()->heap()->Contains(this)) return false;
// Returns direct (pointer-based) access to this string's characters if it
// is flat; otherwise returns an empty FlatContent. Must be called with heap
// allocation disallowed since the returned pointers are raw.
8389 String::FlatContent String::GetFlatContent() {
8390 ASSERT(!AllowHeapAllocation::IsAllowed());
8391 int length = this->length();
8392 StringShape shape(this);
8393 String* string = this;
// A cons string is only flat if its right side is empty.
8395 if (shape.representation_tag() == kConsStringTag) {
8396 ConsString* cons = ConsString::cast(string);
8397 if (cons->second()->length() != 0) {
8398 return FlatContent();
8400 string = cons->first();
8401 shape = StringShape(string);
// Slices point into their parent at an offset; the parent itself is
// guaranteed not to be a cons or another slice.
8403 if (shape.representation_tag() == kSlicedStringTag) {
8404 SlicedString* slice = SlicedString::cast(string);
8405 offset = slice->offset();
8406 string = slice->parent();
8407 shape = StringShape(string);
8408 ASSERT(shape.representation_tag() != kConsStringTag &&
8409 shape.representation_tag() != kSlicedStringTag);
8411 if (shape.encoding_tag() == kOneByteStringTag) {
8412 const uint8_t* start;
8413 if (shape.representation_tag() == kSeqStringTag) {
8414 start = SeqOneByteString::cast(string)->GetChars();
8416 start = ExternalAsciiString::cast(string)->GetChars();
8418 return FlatContent(start + offset, length);
8420 ASSERT(shape.encoding_tag() == kTwoByteStringTag);
8422 if (shape.representation_tag() == kSeqStringTag) {
8423 start = SeqTwoByteString::cast(string)->GetChars();
8425 start = ExternalTwoByteString::cast(string)->GetChars();
8427 return FlatContent(start + offset, length);
// Converts [offset, offset+length) to a NUL-terminated UTF-8 buffer owned
// by the caller. Two passes: first measure the UTF-8 size, then encode.
8432 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8433 RobustnessFlag robust_flag,
8436 int* length_return) {
8437 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8438 return SmartArrayPointer<char>(NULL);
8440 Heap* heap = GetHeap();
8442 // Negative length means the to the end of the string.
8443 if (length < 0) length = kMaxInt - offset;
8445 // Compute the size of the UTF-8 string. Start at the specified offset.
8446 Access<ConsStringIteratorOp> op(
8447 heap->isolate()->objects_string_iterator());
8448 StringCharacterStream stream(this, op.value(), offset);
8449 int character_position = offset;
// |last| tracks the previous UTF-16 code unit so surrogate pairs are
// measured/encoded as one code point.
8451 int last = unibrow::Utf16::kNoPreviousCharacter;
8452 while (stream.HasMore() && character_position++ < offset + length) {
8453 uint16_t character = stream.GetNext();
8454 utf8_bytes += unibrow::Utf8::Length(character, last);
8458 if (length_return) {
8459 *length_return = utf8_bytes;
8462 char* result = NewArray<char>(utf8_bytes + 1);
8464 // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
8465 stream.Reset(this, offset);
8466 character_position = offset;
8467 int utf8_byte_position = 0;
8468 last = unibrow::Utf16::kNoPreviousCharacter;
8469 while (stream.HasMore() && character_position++ < offset + length) {
8470 uint16_t character = stream.GetNext();
// Embedded NULs are dropped-or-replaced here when disallowed
// (replacement elided in this listing).
8471 if (allow_nulls == DISALLOW_NULLS && character == 0) {
8474 utf8_byte_position +=
8475 unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
8478 result[utf8_byte_position] = 0;
8479 return SmartArrayPointer<char>(result);
// Convenience overload: convert the whole string (offset 0, full length).
8483 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8484 RobustnessFlag robust_flag,
8485 int* length_return) {
8486 return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
// Returns a raw uc16 pointer at |start| for two-byte representations.
// Cons strings have no contiguous data and fall through (unreachable).
8490 const uc16* String::GetTwoByteData(unsigned start) {
8491 ASSERT(!IsOneByteRepresentationUnderneath());
8492 switch (StringShape(this).representation_tag()) {
8494 return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8495 case kExternalStringTag:
8496 return ExternalTwoByteString::cast(this)->
8497 ExternalTwoByteStringGetData(start);
8498 case kSlicedStringTag: {
8499 SlicedString* slice = SlicedString::cast(this);
8500 return slice->parent()->GetTwoByteData(start + slice->offset());
8502 case kConsStringTag:
// Copies the whole string into a caller-owned uc16 buffer (one extra slot
// is allocated; terminator handling elided in this listing).
8511 SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
8512 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8513 return SmartArrayPointer<uc16>();
8515 Heap* heap = GetHeap();
8517 Access<ConsStringIteratorOp> op(
8518 heap->isolate()->objects_string_iterator());
8519 StringCharacterStream stream(this, op.value());
8521 uc16* result = NewArray<uc16>(length() + 1);
8524 while (stream.HasMore()) {
8525 uint16_t character = stream.GetNext();
8526 result[i++] = character;
8529 return SmartArrayPointer<uc16>(result);
// Raw pointer into this sequential two-byte string's payload at |start|.
8533 const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
8534 return reinterpret_cast<uc16*>(
8535 reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
// Walks the isolate's linked list of live Relocatables after a GC so each
// can refresh any raw pointers it caches.
8539 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8540 Relocatable* current = isolate->relocatable_top();
8541 while (current != NULL) {
8542 current->PostGarbageCollection();
8543 current = current->prev_;
8548 // Reserve space for statics needing saving and restoring.
8549 int Relocatable::ArchiveSpacePerThread() {
8550 return sizeof(Relocatable*); // NOLINT
8554 // Archive statics that are thread-local.
// Saves the current list head into |to| and clears it for the new thread.
8555 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8556 *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8557 isolate->set_relocatable_top(NULL);
8558 return to + ArchiveSpacePerThread();
8562 // Restore statics that are thread-local.
8563 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8564 isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8565 return from + ArchiveSpacePerThread();
// Visits the Relocatable list archived in |thread_storage|.
8569 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8570 Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8572 return thread_storage + ArchiveSpacePerThread();
// Visits the current thread's (isolate's) Relocatable list.
8576 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8577 Iterate(v, isolate->relocatable_top());
// Shared traversal: visit each instance from |top| down the prev_ chain.
8581 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8582 Relocatable* current = top;
8583 while (current != NULL) {
8584 current->IterateInstance(v);
8585 current = current->prev_;
// Reader over a flat string; registers as a Relocatable so its cached raw
// character pointer is refreshed after every GC (see PostGarbageCollection).
8590 FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
8591 : Relocatable(isolate),
8592 str_(str.location()),
8593 length_(str->length()) {
8594 PostGarbageCollection();
// Variant over a plain char buffer: no heap string, so nothing to relocate.
8598 FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
8599 : Relocatable(isolate),
8602 length_(input.length()),
8603 start_(input.start()) { }
// Re-derives start_ (and the encoding flag) from the possibly-moved string.
8606 void FlatStringReader::PostGarbageCollection() {
8607 if (str_ == NULL) return;
8608 Handle<String> str(str_);
8609 ASSERT(str->IsFlat());
8610 DisallowHeapAllocation no_gc;
8611 // This does not actually prevent the vector from being relocated later.
8612 String::FlatContent content = str->GetFlatContent();
8613 ASSERT(content.IsFlat());
8614 is_ascii_ = content.IsAscii();
8616 start_ = content.ToOneByteVector().start();
8618 start_ = content.ToUC16Vector().start();
// Prepares iteration over |cons_string| starting at |offset|. Deliberately
// sets up a "stack blown" state so the first Continue() restarts via Search.
8623 void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) {
8624 ASSERT(cons_string != NULL);
8625 root_ = cons_string;
8627 // Force stack blown condition to trigger restart.
8629 maximum_depth_ = kStackSize + depth_;
8630 ASSERT(StackBlown());
// Returns the next leaf string (writing its start offset to |offset_out|),
// or NULL when traversal is complete. Falls back to a full Search from the
// root when the bounded traversal stack has overflowed.
8634 String* ConsStringIteratorOp::Continue(int* offset_out) {
8635 ASSERT(depth_ != 0);
8636 ASSERT_EQ(0, *offset_out);
8637 bool blew_stack = StackBlown();
8638 String* string = NULL;
8639 // Get the next leaf if there is one.
8640 if (!blew_stack) string = NextLeaf(&blew_stack);
8641 // Restart search from root.
8643 ASSERT(string == NULL);
8644 string = Search(offset_out);
8646 // Ensure future calls return null immediately.
8647 if (string == NULL) Reset(NULL);
// Descends from root_ to the leaf containing the consumed_ offset,
// rebuilding the traversal stack along the way.
8652 String* ConsStringIteratorOp::Search(int* offset_out) {
8653 ConsString* cons_string = root_;
8654 // Reset the stack, pushing the root string.
8657 frames_[0] = cons_string;
8658 const int consumed = consumed_;
8661 // Loop until the string is found which contains the target offset.
8662 String* string = cons_string->first();
8663 int length = string->length();
8665 if (consumed < offset + length) {
8666 // Target offset is in the left branch.
8667 // Keep going if we're still in a ConString.
8668 type = string->map()->instance_type();
8669 if ((type & kStringRepresentationMask) == kConsStringTag) {
8670 cons_string = ConsString::cast(string);
8671 PushLeft(cons_string);
8674 // Tell the stack we're done descending.
8675 AdjustMaximumDepth();
8678 // Update progress through the string.
8680 // Keep going if we're still in a ConString.
8681 string = cons_string->second();
8682 type = string->map()->instance_type();
8683 if ((type & kStringRepresentationMask) == kConsStringTag) {
8684 cons_string = ConsString::cast(string);
8685 PushRight(cons_string);
8688 // Need this to be updated for the current string.
8689 length = string->length();
8690 // Account for the possibility of an empty right leaf.
8691 // This happens only if we have asked for an offset outside the string.
8693 // Reset so future operations will return null immediately.
8697 // Tell the stack we're done descending.
8698 AdjustMaximumDepth();
8699 // Pop stack so next iteration is in correct place.
8702 ASSERT(length != 0);
8703 // Adjust return values and exit.
8704 consumed_ = offset + length;
8705 *offset_out = consumed - offset;
// Advances to the next non-empty leaf using the traversal stack; sets
// *blew_stack when the bounded stack can no longer track the position.
8713 String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) {
8715 // Tree traversal complete.
8717 *blew_stack = false;
8720 // We've lost track of higher nodes.
8726 ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
8727 String* string = cons_string->second();
8728 int32_t type = string->map()->instance_type();
8729 if ((type & kStringRepresentationMask) != kConsStringTag) {
8730 // Pop stack so next iteration is in correct place.
8732 int length = string->length();
8733 // Could be a flattened ConsString.
8734 if (length == 0) continue;
8735 consumed_ += length;
8738 cons_string = ConsString::cast(string);
8739 PushRight(cons_string);
8740 // Need to traverse all the way left.
8743 string = cons_string->first();
8744 type = string->map()->instance_type();
8745 if ((type & kStringRepresentationMask) != kConsStringTag) {
8746 AdjustMaximumDepth();
8747 int length = string->length();
8748 ASSERT(length != 0);
8749 consumed_ += length;
8752 cons_string = ConsString::cast(string);
8753 PushLeft(cons_string);
// Character access on a cons string: iterative left-descent (not recursive)
// to avoid stack overflow on deeply unbalanced cons trees.
8761 uint16_t ConsString::ConsStringGet(int index) {
8762 ASSERT(index >= 0 && index < this->length());
8764 // Check for a flattened cons string
8765 if (second()->length() == 0) {
8766 String* left = first();
8767 return left->Get(index);
8770 String* string = String::cast(this);
8773 if (StringShape(string).IsCons()) {
8774 ConsString* cons_string = ConsString::cast(string);
8775 String* left = cons_string->first();
8776 if (left->length() > index) {
// Index falls in the right child; shift it and continue there.
8779 index -= left->length();
8780 string = cons_string->second();
8783 return string->Get(index);
// Character access on a slice: delegate to the parent at offset + index.
8792 uint16_t SlicedString::SlicedStringGet(int index) {
8793 return parent()->Get(offset() + index);
// Copies the characters [from, to) of |src| into the |sink| buffer, for
// either one- or two-byte sinks. Cons strings are handled by recursing on
// the shorter side and looping on the longer side, keeping recursion
// depth bounded for list-like (left-unbalanced) cons chains.
8797 template <typename sinkchar>
8798 void String::WriteToFlat(String* src,
8802 String* source = src;
8806 ASSERT(0 <= from && from <= to && to <= source->length());
8807 switch (StringShape(source).full_representation_tag()) {
8808 case kOneByteStringTag | kExternalStringTag: {
8810 ExternalAsciiString::cast(source)->GetChars() + from,
8814 case kTwoByteStringTag | kExternalStringTag: {
8816 ExternalTwoByteString::cast(source)->GetChars();
8822 case kOneByteStringTag | kSeqStringTag: {
8824 SeqOneByteString::cast(source)->GetChars() + from,
8828 case kTwoByteStringTag | kSeqStringTag: {
8830 SeqTwoByteString::cast(source)->GetChars() + from,
8834 case kOneByteStringTag | kConsStringTag:
8835 case kTwoByteStringTag | kConsStringTag: {
8836 ConsString* cons_string = ConsString::cast(source);
8837 String* first = cons_string->first();
8838 int boundary = first->length();
8839 if (to - boundary >= boundary - from) {
8840 // Right hand side is longer. Recurse over left.
8841 if (from < boundary) {
8842 WriteToFlat(first, sink, from, boundary);
8843 sink += boundary - from;
8849 source = cons_string->second();
8851 // Left hand side is longer. Recurse over right.
8852 if (to > boundary) {
8853 String* second = cons_string->second();
8854 // When repeatedly appending to a string, we get a cons string that
8855 // is unbalanced to the left, a list, essentially. We inline the
8856 // common case of sequential ascii right child.
8857 if (to - boundary == 1) {
8858 sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
8859 } else if (second->IsSeqOneByteString()) {
8860 CopyChars(sink + boundary - from,
8861 SeqOneByteString::cast(second)->GetChars(),
8865 sink + boundary - from,
8875 case kOneByteStringTag | kSlicedStringTag:
8876 case kTwoByteStringTag | kSlicedStringTag: {
8877 SlicedString* slice = SlicedString::cast(source);
8878 unsigned offset = slice->offset();
8879 WriteToFlat(slice->parent(), sink, from + offset, to + offset);
// Records the position of every '\n' in |src| into |line_ends|; when
// |include_ending_line| is set, a final unterminated line is counted too.
8888 template <typename SourceChar>
8889 static void CalculateLineEndsImpl(Isolate* isolate,
8890 List<int>* line_ends,
8891 Vector<const SourceChar> src,
8892 bool include_ending_line) {
8893 const int src_len = src.length();
8894 StringSearch<uint8_t, SourceChar> search(isolate, STATIC_ASCII_VECTOR("\n"));
8896 // Find and record line ends.
8898 while (position != -1 && position < src_len) {
8899 position = search.Search(src, position);
8900 if (position != -1) {
8901 line_ends->Add(position);
8903 } else if (include_ending_line) {
8904 // Even if the last line misses a line end, it is counted.
8905 line_ends->Add(src_len);
// Computes the line-end positions of |src| as a FixedArray of Smis,
// dispatching on the string's character width.
8912 Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
8913 bool include_ending_line) {
8915 // Rough estimate of line count based on a roughly estimated average
8916 // length of (unpacked) code.
8917 int line_count_estimate = src->length() >> 4;
8918 List<int> line_ends(line_count_estimate);
8919 Isolate* isolate = src->GetIsolate();
8920 { DisallowHeapAllocation no_allocation; // ensure vectors stay valid.
8921 // Dispatch on type of strings.
8922 String::FlatContent content = src->GetFlatContent();
8923 ASSERT(content.IsFlat());
8924 if (content.IsAscii()) {
8925 CalculateLineEndsImpl(isolate,
8927 content.ToOneByteVector(),
8928 include_ending_line);
8930 CalculateLineEndsImpl(isolate,
8932 content.ToUC16Vector(),
8933 include_ending_line);
8936 int line_count = line_ends.length();
8937 Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
8938 for (int i = 0; i < line_count; i++) {
8939 array->set(i, Smi::FromInt(line_ends[i]));
8945 // Compares the contents of two strings by reading and comparing
8946 // int-sized blocks of characters.
// Same-width fast path; falls back to character-at-a-time for the tail and
// (on strict-alignment hosts) for unaligned inputs.
8947 template <typename Char>
8948 static inline bool CompareRawStringContents(const Char* const a,
8949 const Char* const b,
8952 #ifndef V8_HOST_CAN_READ_UNALIGNED
8953 // If this architecture isn't comfortable reading unaligned ints
8954 // then we have to check that the strings are aligned before
8955 // comparing them blockwise.
8956 const int kAlignmentMask = sizeof(uint32_t) - 1; // NOLINT
8957 uint32_t pa_addr = reinterpret_cast<uint32_t>(a);
8958 uint32_t pb_addr = reinterpret_cast<uint32_t>(b);
8959 if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
8961 const int kStepSize = sizeof(int) / sizeof(Char); // NOLINT
8962 int endpoint = length - kStepSize;
8963 // Compare blocks until we reach near the end of the string.
8964 for (; i <= endpoint; i += kStepSize) {
8965 uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
8966 uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
8971 #ifndef V8_HOST_CAN_READ_UNALIGNED
8974 // Compare the remaining characters that didn't fit into a block.
8975 for (; i < length; i++) {
// Mixed-width comparator: compares one character at a time.
8984 template<typename Chars1, typename Chars2>
8985 class RawStringComparator : public AllStatic {
8987 static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8988 ASSERT(sizeof(Chars1) != sizeof(Chars2));
8989 for (int i = 0; i < len; i++) {
// Same-width specializations delegate to the blockwise fast path above.
9000 class RawStringComparator<uint16_t, uint16_t> {
9002 static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
9003 return CompareRawStringContents(a, b, len);
9009 class RawStringComparator<uint8_t, uint8_t> {
9011 static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
9012 return CompareRawStringContents(a, b, len);
// Compares two (possibly cons) strings segment by segment. Each State walks
// one string's flat segments via a ConsStringIteratorOp; Equals consumes
// min(remaining) characters per round with the width-appropriate comparator.
9017 class StringComparator {
9020 explicit inline State(ConsStringIteratorOp* op)
9021 : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}
// Positions the state at the first flat segment of |string|.
9023 inline void Init(String* string) {
9024 ConsString* cons_string = String::VisitFlat(this, string);
9025 op_->Reset(cons_string);
9026 if (cons_string != NULL) {
9028 string = op_->Next(&offset);
9029 String::VisitFlat(this, string, offset);
// Visitor callbacks: record the current segment's buffer and width.
9033 inline void VisitOneByteString(const uint8_t* chars, int length) {
9034 is_one_byte_ = true;
9039 inline void VisitTwoByteString(const uint16_t* chars, int length) {
9040 is_one_byte_ = false;
// Consumes |consumed| characters; advances within the current segment or,
// when exhausted, fetches the next leaf from the iterator.
9045 void Advance(int consumed) {
9046 ASSERT(consumed <= length_);
9048 if (length_ != consumed) {
9050 buffer8_ += consumed;
9052 buffer16_ += consumed;
9054 length_ -= consumed;
9059 String* next = op_->Next(&offset);
9060 ASSERT_EQ(0, offset);
9061 ASSERT(next != NULL);
9062 String::VisitFlat(this, next);
9065 ConsStringIteratorOp* const op_;
// buffer8_/buffer16_ alias the same segment pointer; is_one_byte_ selects.
9069 const uint8_t* buffer8_;
9070 const uint16_t* buffer16_;
9074 DISALLOW_IMPLICIT_CONSTRUCTORS(State);
9078 inline StringComparator(ConsStringIteratorOp* op_1,
9079 ConsStringIteratorOp* op_2)
// Compares |to_check| chars of the two states' current segments.
9084 template<typename Chars1, typename Chars2>
9085 static inline bool Equals(State* state_1, State* state_2, int to_check) {
9086 const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
9087 const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
9088 return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
// Full equality check: loops over overlapping flat segments, dispatching
// on each side's current character width.
9091 bool Equals(String* string_1, String* string_2) {
9092 int length = string_1->length();
9093 state_1_.Init(string_1);
9094 state_2_.Init(string_2);
9096 int to_check = Min(state_1_.length_, state_2_.length_);
9097 ASSERT(to_check > 0 && to_check <= length);
9099 if (state_1_.is_one_byte_) {
9100 if (state_2_.is_one_byte_) {
9101 is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
9103 is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
9106 if (state_2_.is_one_byte_) {
9107 is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
9109 is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
9113 if (!is_equal) return false;
9115 // Exit condition. Strings are equal.
9116 if (length == 0) return true;
9117 state_1_.Advance(to_check);
9118 state_2_.Advance(to_check);
9125 DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
// Content equality for strings that are not the same object. Fast negative
// checks (length, cached hashes, first char) precede the full comparison.
9129 bool String::SlowEquals(String* other) {
9130 DisallowHeapAllocation no_gc;
9131 // Fast check: negative check with lengths.
9133 if (len != other->length()) return false;
9134 if (len == 0) return true;
9136 // Fast check: if hash code is computed for both strings
9137 // a fast negative check can be performed.
9138 if (HasHashCode() && other->HasHashCode()) {
// Slow-assert: differing hashes must imply differing content.
9139 #ifdef ENABLE_SLOW_ASSERTS
9140 if (FLAG_enable_slow_asserts) {
9141 if (Hash() != other->Hash()) {
9142 bool found_difference = false;
9143 for (int i = 0; i < len; i++) {
9144 if (Get(i) != other->Get(i)) {
9145 found_difference = true;
9149 ASSERT(found_difference);
9153 if (Hash() != other->Hash()) return false;
9156 // We know the strings are both non-empty. Compare the first chars
9157 // before we try to flatten the strings.
9158 if (this->Get(0) != other->Get(0)) return false;
// Fast path: both sequential one-byte — raw blockwise compare.
9160 if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
9161 const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
9162 const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
9163 return CompareRawStringContents(str1, str2, len);
// General path: segment-wise comparison using the isolate's iterators.
9166 Isolate* isolate = GetIsolate();
9167 StringComparator comparator(isolate->objects_string_compare_iterator_a(),
9168 isolate->objects_string_compare_iterator_b());
9170 return comparator.Equals(this, other);
// Handle-based variant: may allocate, so it flattens both strings first and
// then compares their flat contents directly.
9174 bool String::SlowEquals(Handle<String> one, Handle<String> two) {
9175 // Fast check: negative check with lengths.
9176 int one_length = one->length();
9177 if (one_length != two->length()) return false;
9178 if (one_length == 0) return true;
9180 // Fast check: if hash code is computed for both strings
9181 // a fast negative check can be performed.
9182 if (one->HasHashCode() && two->HasHashCode()) {
9183 #ifdef ENABLE_SLOW_ASSERTS
9184 if (FLAG_enable_slow_asserts) {
9185 if (one->Hash() != two->Hash()) {
9186 bool found_difference = false;
9187 for (int i = 0; i < one_length; i++) {
9188 if (one->Get(i) != two->Get(i)) {
9189 found_difference = true;
9193 ASSERT(found_difference);
9197 if (one->Hash() != two->Hash()) return false;
9200 // We know the strings are both non-empty. Compare the first chars
9201 // before we try to flatten the strings.
9202 if (one->Get(0) != two->Get(0)) return false;
9204 one = String::Flatten(one);
9205 two = String::Flatten(two);
9207 DisallowHeapAllocation no_gc;
9208 String::FlatContent flat1 = one->GetFlatContent();
9209 String::FlatContent flat2 = two->GetFlatContent();
9211 if (flat1.IsAscii() && flat2.IsAscii()) {
9212 return CompareRawStringContents(flat1.ToOneByteVector().start(),
9213 flat2.ToOneByteVector().start(),
9216 for (int i = 0; i < one_length; i++) {
9217 if (flat1.Get(i) != flat2.Get(i)) return false;
9224 bool String::MarkAsUndetectable() {
9225 if (StringShape(this).IsInternalized()) return false;
9227 Map* map = this->map();
9228 Heap* heap = GetHeap();
9229 if (map == heap->string_map()) {
9230 this->set_map(heap->undetectable_string_map());
9232 } else if (map == heap->ascii_string_map()) {
9233 this->set_map(heap->undetectable_ascii_string_map());
9236 // Rest cannot be marked as undetectable
// Compares this (UTF-16) string against a UTF-8 byte sequence. Decodes
// |str| one code point at a time with unibrow::Utf8::ValueOf and compares
// against this string's code units, expanding supplementary code points
// into a lead/trail surrogate pair. If |allow_prefix_match| is true, |str|
// only needs to match a prefix of this string.
9241 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
9242 int slen = length();
9243 // Can't check exact length equality, but we can check bounds.
9244 int str_len = str.length();
9245 if (!allow_prefix_match &&
// A UTF-8 encoding of slen UTF-16 units can be at most
// slen * kMaxEncodedSize bytes long, so anything longer cannot match.
9247 str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
9251 unsigned remaining_in_str = static_cast<unsigned>(str_len);
9252 const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
9253 for (i = 0; i < slen && remaining_in_str > 0; i++) {
9254 unsigned cursor = 0;
9255 uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
9256 ASSERT(cursor > 0 && cursor <= remaining_in_str);
9257 if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
// Supplementary-plane code point: must match a surrogate pair here.
9258 if (i > slen - 1) return false;
9259 if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
9260 if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
9262 if (Get(i) != r) return false;
9264 utf8_data += cursor;
9265 remaining_in_str -= cursor;
// Equal only if both inputs were fully consumed (string fully matched
// unless a prefix match was requested).
9267 return (allow_prefix_match || i == slen) && remaining_in_str == 0;
// Compares this string against a one-byte character vector. Uses the flat
// one-byte fast path via CompareChars when available, otherwise falls back
// to a per-character comparison through Get().
9271 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
9272 int slen = length();
9273 if (str.length() != slen) return false;
9274 DisallowHeapAllocation no_gc;
9275 FlatContent content = GetFlatContent();
9276 if (content.IsAscii()) {
9277 return CompareChars(content.ToOneByteVector().start(),
9278 str.start(), slen) == 0;
9280 for (int i = 0; i < slen; i++) {
9281 if (Get(i) != static_cast<uint16_t>(str[i])) return false;
// Compares this string against a two-byte (UC16) character vector,
// mirroring IsOneByteEqualTo: flat two-byte fast path, else Get() loop.
9287 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
9288 int slen = length();
9289 if (str.length() != slen) return false;
9290 DisallowHeapAllocation no_gc;
9291 FlatContent content = GetFlatContent();
9292 if (content.IsTwoByte()) {
9293 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
9295 for (int i = 0; i < slen; i++) {
9296 if (Get(i) != str[i]) return false;
// StringHasher that feeds itself character data by visiting a string's
// flat segments, so cons strings can be hashed without flattening them.
9302 class IteratingStringHasher: public StringHasher {
// Computes the hash field for |string| with the given |seed|.
9304 static inline uint32_t Hash(String* string, uint32_t seed) {
9305 IteratingStringHasher hasher(string->length(), seed);
// Very short strings get a trivial (length-based) hash; nothing to visit.
9307 if (hasher.has_trivial_hash()) return hasher.GetHashField();
9308 ConsString* cons_string = String::VisitFlat(&hasher, string);
9309 // The string was flat.
9310 if (cons_string == NULL) return hasher.GetHashField();
9311 // This is a ConsString, iterate across it.
9312 ConsStringIteratorOp op(cons_string);
9314 while (NULL != (string = op.Next(&offset))) {
9315 String::VisitFlat(&hasher, string, offset);
9317 return hasher.GetHashField();
// Visitor callbacks invoked by String::VisitFlat for each flat segment.
9319 inline void VisitOneByteString(const uint8_t* chars, int length) {
9320 AddCharacters(chars, length);
9322 inline void VisitTwoByteString(const uint16_t* chars, int length) {
9323 AddCharacters(chars, length);
9327 inline IteratingStringHasher(int len, uint32_t seed)
9328 : StringHasher(len, seed) {
9330 DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
// Computes this string's hash field (seeded by the heap's hash seed),
// stores it on the object, and returns the hash value proper (the field
// with the flag bits shifted away).
9334 uint32_t String::ComputeAndSetHash() {
9335 // Should only be called if hash code has not yet been computed.
9336 ASSERT(!HasHashCode());
9338 // Store the hash code in the object.
9339 uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
9340 set_hash_field(field);
9342 // Check the hash code is there.
9343 ASSERT(HasHashCode());
9344 uint32_t result = field >> kHashShift;
9345 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
// Tries to interpret this string as an unsigned 32-bit array index.
// Returns true and stores the value in |index| on success. Rejects empty
// strings, strings longer than kMaxArrayIndexSize, non-digit characters,
// and values that would overflow 32 bits.
9350 bool String::ComputeArrayIndex(uint32_t* index) {
9351 int length = this->length();
9352 if (length == 0 || length > kMaxArrayIndexSize) return false;
9353 ConsStringIteratorOp op;
9354 StringCharacterStream stream(this, &op);
9355 uint16_t ch = stream.GetNext();
9357 // If the string begins with a '0' character, it must only consist
9358 // of it to be a legal array index.
9364 // Convert string to uint32 array index; character by character.
9366 if (d < 0 || d > 9) return false;
9367 uint32_t result = d;
9368 while (stream.HasMore()) {
9369 d = stream.GetNext() - '0';
9370 if (d < 0 || d > 9) return false;
9371 // Check that the new result is below the 32 bit limit.
// 429496729 == floor((2^32 - 1) / 10); if the next digit is > 5 even
// that bound would overflow, hence the extra -1.
9372 if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
9373 result = (result * 10) + d;
// Slow path of AsArrayIndex: for short strings the array index (if any)
// is cached inside the hash field, so compute the hash and decode it;
// longer strings are parsed digit-by-digit via ComputeArrayIndex.
9381 bool String::SlowAsArrayIndex(uint32_t* index) {
9382 if (length() <= kMaxCachedArrayIndexLength) {
9383 Hash(); // force computation of hash code
9384 uint32_t field = hash_field();
9385 if ((field & kIsNotArrayIndexMask) != 0) return false;
9386 // Isolate the array index from the full hash field.
9387 *index = ArrayIndexValueBits::decode(field);
9390 return ComputeArrayIndex(index);
// Truncates a sequential string in place to |new_length| characters,
// giving the freed tail back to the heap: either by lowering the new-space
// allocation top (when the string is the last new-space allocation) or by
// installing a filler object over the leftover bytes.
9395 Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
9396 int new_size, old_size;
9397 int old_length = string->length();
9398 if (old_length <= new_length) return string;
9400 if (string->IsSeqOneByteString()) {
9401 old_size = SeqOneByteString::SizeFor(old_length);
9402 new_size = SeqOneByteString::SizeFor(new_length);
9404 ASSERT(string->IsSeqTwoByteString());
9405 old_size = SeqTwoByteString::SizeFor(old_length);
9406 new_size = SeqTwoByteString::SizeFor(new_length);
9409 int delta = old_size - new_size;
9411 Address start_of_string = string->address();
9412 ASSERT_OBJECT_ALIGNED(start_of_string);
9413 ASSERT_OBJECT_ALIGNED(start_of_string + new_size);
9415 Heap* heap = string->GetHeap();
9416 NewSpace* newspace = heap->new_space();
9417 if (newspace->Contains(start_of_string) &&
9418 newspace->top() == start_of_string + old_size) {
9419 // Last allocated object in new space. Simply lower allocation top.
9420 newspace->set_top(start_of_string + new_size);
9422 // Sizes are pointer size aligned, so that we can use filler objects
9423 // that are a multiple of pointer size.
9424 heap->CreateFillerObjectAt(start_of_string + new_size, delta);
9426 heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
9428 // We are storing the new length using release store after creating a filler
9429 // for the left-over space to avoid races with the sweeper thread.
9430 string->synchronized_set_length(new_length);
9432 if (new_length == 0) return heap->isolate()->factory()->empty_string();
// Builds a hash field that encodes an array index directly: the index
// value and the string length are packed into their bit-field positions,
// leaving the "is not array index" flag clear.
9437 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
9438 // For array indexes mix the length into the hash as an array index could
9441 ASSERT(length <= String::kMaxArrayIndexSize);
9442 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
9443 (1 << String::kArrayIndexValueBits));
9445 value <<= String::ArrayIndexValueBits::kShift;
9446 value |= length << String::ArrayIndexLengthBits::kShift;
9448 ASSERT((value & String::kIsNotArrayIndexMask) == 0);
9449 ASSERT((length > String::kMaxCachedArrayIndexLength) ||
9450 (value & String::kContainsCachedArrayIndexMask) == 0);
// Produces the final hash field: an array-index encoding if the string
// was a valid index, a real hash for hashable lengths, or (for strings
// longer than kMaxHashCalcLength) the length itself used as the hash.
9455 uint32_t StringHasher::GetHashField() {
9456 if (length_ <= String::kMaxHashCalcLength) {
9457 if (is_array_index_) {
9458 return MakeArrayIndexHash(array_index_, length_);
9460 return (GetHashCore(raw_running_hash_) << String::kHashShift) |
9461 String::kIsNotArrayIndexMask;
9463 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
// Hashes a UTF-8 byte sequence as if it were the equivalent UTF-16 string,
// also reporting the UTF-16 length through |utf16_length_out|. Decodes one
// code point at a time, feeding surrogate pairs to the hasher for
// supplementary-plane characters, and keeps the array-index tracking
// up to date alongside the running hash.
9468 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
9470 int* utf16_length_out) {
9471 int vector_length = chars.length();
9472 // Handle some edge cases
9473 if (vector_length <= 1) {
// Zero- or one-byte input is necessarily one-byte characters only.
9474 ASSERT(vector_length == 0 ||
9475 static_cast<uint8_t>(chars.start()[0]) <=
9476 unibrow::Utf8::kMaxOneByteChar);
9477 *utf16_length_out = vector_length;
9478 return HashSequentialString(chars.start(), vector_length, seed);
9480 // Start with a fake length which won't affect computation.
9481 // It will be updated later.
9482 StringHasher hasher(String::kMaxArrayIndexSize, seed);
9483 unsigned remaining = static_cast<unsigned>(vector_length);
9484 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
9485 int utf16_length = 0;
9486 bool is_index = true;
9487 ASSERT(hasher.is_array_index_);
9488 while (remaining > 0) {
9489 unsigned consumed = 0;
9490 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
9491 ASSERT(consumed > 0 && consumed <= remaining);
9493 remaining -= consumed;
9494 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
9495 utf16_length += is_two_characters ? 2 : 1;
9496 // No need to keep hashing. But we do need to calculate utf16_length.
9497 if (utf16_length > String::kMaxHashCalcLength) continue;
9498 if (is_two_characters) {
9499 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
9500 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
9501 hasher.AddCharacter(c1);
9502 hasher.AddCharacter(c2);
9503 if (is_index) is_index = hasher.UpdateIndex(c1);
9504 if (is_index) is_index = hasher.UpdateIndex(c2);
9506 hasher.AddCharacter(c);
9507 if (is_index) is_index = hasher.UpdateIndex(c);
9510 *utf16_length_out = static_cast<int>(utf16_length);
9511 // Must set length here so that hash computation is correct.
9512 hasher.length_ = utf16_length;
9513 return hasher.GetHashField();
// Prints each character of the string to |file| with "%c" (so two-byte
// characters are narrowed by the printf conversion).
9517 void String::PrintOn(FILE* file) {
9518 int length = this->length();
9519 for (int i = 0; i < length; i++) {
9520 PrintF(file, "%c", Get(i));
// GC helper: shrinks the descriptor array's enum cache (and, if present,
// the enum indices cache) down to the number of live enumerable
// properties of |map|, or clears the cache entirely when none remain.
9525 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
9526 int live_enum = map->EnumLength();
9527 if (live_enum == kInvalidEnumCacheSentinel) {
9528 live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
9530 if (live_enum == 0) return descriptors->ClearEnumCache();
9532 FixedArray* enum_cache = descriptors->GetEnumCache();
9534 int to_trim = enum_cache->length() - live_enum;
9535 if (to_trim <= 0) return;
9536 RightTrimFixedArray<Heap::FROM_GC>(
9537 heap, descriptors->GetEnumCache(), to_trim);
9539 if (!descriptors->HasEnumIndicesCache()) return;
9540 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
9541 RightTrimFixedArray<Heap::FROM_GC>(heap, enum_indices_cache, to_trim);
// GC helper: right-trims |descriptors| so it holds exactly
// |number_of_own_descriptors| entries, trims its enum cache to match,
// and re-sorts the remaining descriptors.
9545 static void TrimDescriptorArray(Heap* heap,
9547 DescriptorArray* descriptors,
9548 int number_of_own_descriptors) {
9549 int number_of_descriptors = descriptors->number_of_descriptors_storage();
9550 int to_trim = number_of_descriptors - number_of_own_descriptors;
9551 if (to_trim == 0) return;
9553 RightTrimFixedArray<Heap::FROM_GC>(
9554 heap, descriptors, to_trim * DescriptorArray::kDescriptorSize);
9555 descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
9557 if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
9558 descriptors->Sort();
9562 // Clear a possible back pointer in case the transition leads to a dead map.
9563 // Return true in case a back pointer has been cleared and false otherwise.
// A map whose mark bit is set is still live and must keep its back pointer.
9564 static bool ClearBackPointer(Heap* heap, Map* target) {
9565 if (Marking::MarkBitFrom(target).Get()) return false;
9566 target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
9571 // TODO(mstarzinger): This method should be moved into MarkCompactCollector,
9572 // because it cannot be called from outside the GC and we already have methods
9573 // depending on the transitions layout in the GC anyways.
// Compacts this map's transition array during mark-compact: entries whose
// target map is dead (back pointer cleared) are dropped, live entries are
// slid left, the descriptor array is trimmed if its owner died, and the
// transition array is right-trimmed to its new size.
9574 void Map::ClearNonLiveTransitions(Heap* heap) {
9575 // If there are no transitions to be cleared, return.
9576 // TODO(verwaest) Should be an assert, otherwise back pointers are not
9577 // properly cleared.
9578 if (!HasTransitionArray()) return;
9580 TransitionArray* t = transitions();
9581 MarkCompactCollector* collector = heap->mark_compact_collector();
9583 int transition_index = 0;
9585 DescriptorArray* descriptors = instance_descriptors();
9586 bool descriptors_owner_died = false;
9588 // Compact all live descriptors to the left.
9589 for (int i = 0; i < t->number_of_transitions(); ++i) {
9590 Map* target = t->GetTarget(i);
9591 if (ClearBackPointer(heap, target)) {
// The dead target shared our descriptor array; remember to trim it.
9592 if (target->instance_descriptors() == descriptors) {
9593 descriptors_owner_died = true;
9596 if (i != transition_index) {
9597 Name* key = t->GetKey(i);
9598 t->SetKey(transition_index, key);
9599 Object** key_slot = t->GetKeySlot(transition_index);
9600 collector->RecordSlot(key_slot, key_slot, key);
9601 // Target slots do not need to be recorded since maps are not compacted.
9602 t->SetTarget(transition_index, t->GetTarget(i));
9608 // If there are no transitions to be cleared, return.
9609 // TODO(verwaest) Should be an assert, otherwise back pointers are not
9610 // properly cleared.
9611 if (transition_index == t->number_of_transitions()) return;
9613 int number_of_own_descriptors = NumberOfOwnDescriptors();
9615 if (descriptors_owner_died) {
9616 if (number_of_own_descriptors > 0) {
9617 TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
9618 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
// This map is now the sole owner of the (trimmed) descriptor array.
9619 set_owns_descriptors(true);
9621 ASSERT(descriptors == GetHeap()->empty_descriptor_array());
9625 // Note that we never eliminate a transition array, though we might right-trim
9626 // such that number_of_transitions() == 0. If this assumption changes,
9627 // TransitionArray::CopyInsert() will need to deal with the case that a
9628 // transition array disappeared during GC.
9629 int trim = t->number_of_transitions() - transition_index;
9631 RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition()
9632 ? trim : trim * TransitionArray::kTransitionSize);
9634 ASSERT(HasTransitionArray());
// NOTE(review): the signature line of this function is elided from this
// listing (gap in the embedded numbering); from the body this is
// presumably Map's hash function (e.g. int Map::Hash()) — confirm against
// the full source.
9639 // For performance reasons we only hash the 3 most variable fields of a map:
9640 // constructor, prototype and bit_field2.
9642 // Shift away the tag.
9643 int hash = (static_cast<uint32_t>(
9644 reinterpret_cast<uintptr_t>(constructor())) >> 2);
9646 // XOR-ing the prototype and constructor directly yields too many zero bits
9647 // when the two pointers are close (which is fairly common).
9648 // To avoid this we shift the prototype 4 bits relatively to the constructor.
9649 hash ^= (static_cast<uint32_t>(
9650 reinterpret_cast<uintptr_t>(prototype())) << 2);
9652 return hash ^ (hash >> 16) ^ bit_field2();
// Structural equivalence check for two maps: same constructor, prototype,
// instance type, bit fields, frozen state and instance-call-handler flag.
// NOTE(review): the line carrying the 'return' keyword is elided from this
// listing (gap in the embedded numbering).
9656 static bool CheckEquivalent(Map* first, Map* second) {
9658 first->constructor() == second->constructor() &&
9659 first->prototype() == second->prototype() &&
9660 first->instance_type() == second->instance_type() &&
9661 first->bit_field() == second->bit_field() &&
9662 first->bit_field2() == second->bit_field2() &&
9663 first->is_frozen() == second->is_frozen() &&
9664 first->has_instance_call_handler() == second->has_instance_call_handler();
// Two maps are transition-equivalent when they are structurally equivalent.
9668 bool Map::EquivalentToForTransition(Map* other) {
9669 return CheckEquivalent(this, other);
// Normalization equivalence additionally requires matching in-object
// property counts (treated as zero when CLEAR_INOBJECT_PROPERTIES).
9673 bool Map::EquivalentToForNormalization(Map* other,
9674 PropertyNormalizationMode mode) {
9675 int properties = mode == CLEAR_INOBJECT_PROPERTIES
9676 ? 0 : other->inobject_properties();
9677 return CheckEquivalent(this, other) && inobject_properties() == properties;
// Visits the pointer-bearing entries of the constant pool: code entries
// via VisitCodeEntry and heap pointers via VisitPointer.
9681 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9682 ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR);
9683 while (!code_iter.is_finished()) {
9684 v->VisitCodeEntry(reinterpret_cast<Address>(
9685 RawFieldOfElementAt(code_iter.next_index())));
9688 ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR);
9689 while (!heap_iter.is_finished()) {
9690 v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
// Resets all pointer entries of the constant pool to safe defaults:
// CODE_PTR entries point at the kIllegal builtin's entry, HEAP_PTR
// entries at the undefined value. Walks every layout section.
9695 void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
9696 Type type[] = { CODE_PTR, HEAP_PTR };
9697 Address default_value[] = {
9698 isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
9699 reinterpret_cast<Address>(isolate->heap()->undefined_value()) };
9701 for (int i = 0; i < 2; ++i) {
9702 for (int s = 0; s <= final_section(); ++s) {
9703 LayoutSection section = static_cast<LayoutSection>(s);
9704 if (number_of_entries(type[i], section) > 0) {
9705 int offset = OffsetOfElementAt(first_index(type[i], section));
9707 reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
9709 number_of_entries(type[i], section));
// Visits a JSFunction's body: ordinary pointer fields before and after the
// code entry, and the code entry itself through VisitCodeEntry because it
// is not a tagged pointer.
9716 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9717 // Iterate over all fields in the body but take care in dealing with
9719 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9720 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9721 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
// Marks this function for (non-concurrent) optimization by installing the
// kCompileOptimized builtin as its code.
9725 void JSFunction::MarkForOptimization() {
9726 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9727 ASSERT(!IsOptimized());
9728 ASSERT(shared()->allows_lazy_compilation() ||
9729 code()->optimizable());
9730 ASSERT(!shared()->is_generator());
9731 set_code_no_write_barrier(
9732 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9733 // No write barrier required, since the builtin is part of the root set.
// Marks this function for concurrent (background) optimization by
// installing the kCompileOptimizedConcurrent builtin, with optional
// tracing.
9737 void JSFunction::MarkForConcurrentOptimization() {
9738 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9739 ASSERT(!IsOptimized());
9740 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9741 ASSERT(!shared()->is_generator());
9742 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9743 if (FLAG_trace_concurrent_recompilation) {
9744 PrintF(" ** Marking ");
9746 PrintF(" for concurrent recompilation.\n");
9748 set_code_no_write_barrier(
9749 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9750 // No write barrier required, since the builtin is part of the root set.
// Records that this function has been handed to the concurrent
// recompilation queue by installing the kInOptimizationQueue builtin.
9754 void JSFunction::MarkInOptimizationQueue() {
9755 // We can only arrive here via the concurrent-recompilation builtin. If
9756 // break points were set, the code would point to the lazy-compile builtin.
9757 ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
9758 ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
9759 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9760 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9761 if (FLAG_trace_concurrent_recompilation) {
9762 PrintF(" ** Queueing ");
9764 PrintF(" for concurrent recompilation.\n");
9766 set_code_no_write_barrier(
9767 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9768 // No write barrier required, since the builtin is part of the root set.
// Appends one entry {native context, optimized code, literals, OSR ast id}
// to the shared function info's optimized-code map, creating the map if it
// does not exist yet. The old backing array (if any) is copied, optionally
// zapped for the heap verifier, and replaced wholesale.
9772 void SharedFunctionInfo::AddToOptimizedCodeMap(
9773 Handle<SharedFunctionInfo> shared,
9774 Handle<Context> native_context,
9776 Handle<FixedArray> literals,
9777 BailoutId osr_ast_id) {
9778 Isolate* isolate = shared->GetIsolate();
9779 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
9780 ASSERT(native_context->IsNativeContext());
9781 STATIC_ASSERT(kEntryLength == 4);
9782 Handle<FixedArray> new_code_map;
9783 Handle<Object> value(shared->optimized_code_map(), isolate);
9785 if (value->IsSmi()) {
9786 // No optimized code map.
9787 ASSERT_EQ(0, Smi::cast(*value)->value());
9788 // Create 3 entries per context {context, code, literals}.
9789 new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
9790 old_length = kEntriesStart;
9792 // Copy old map and append one new entry.
9793 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
// The entry being added must not already be present.
9794 ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
9795 old_length = old_code_map->length();
9796 new_code_map = FixedArray::CopySize(
9797 old_code_map, old_length + kEntryLength);
9798 // Zap the old map for the sake of the heap verifier.
9799 if (Heap::ShouldZapGarbage()) {
9800 Object** data = old_code_map->data_start();
9801 MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
// Fill in the new entry at the end of the (new) array.
9804 new_code_map->set(old_length + kContextOffset, *native_context);
9805 new_code_map->set(old_length + kCachedCodeOffset, *code);
9806 new_code_map->set(old_length + kLiteralsOffset, *literals);
9807 new_code_map->set(old_length + kOsrAstIdOffset,
9808 Smi::FromInt(osr_ast_id.ToInt()));
// Debug-mode sanity check over every entry of the new map.
9811 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9812 ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
9813 ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9814 ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9815 Code::OPTIMIZED_FUNCTION);
9816 ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9817 ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9820 shared->set_optimized_code_map(*new_code_map);
// Returns the literals array stored at |index| + 1 in the optimized-code
// map (|index| addresses the entry's code slot).
9824 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9825 ASSERT(index > kEntriesStart);
9826 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9828 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9829 ASSERT_NE(NULL, cached_literals);
9830 return cached_literals;
// Returns the optimized Code object stored at |index| in the
// optimized-code map.
9836 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9837 ASSERT(index > kEntriesStart);
9838 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9839 Code* code = Code::cast(code_map->get(index));
9840 ASSERT_NE(NULL, code);
// Drops the whole optimized-code map (resets the field to Smi 0), first
// evicting it from the code flusher if it was enqueued there.
9845 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9846 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9848 // If the next map link slot is already used then the function was
9849 // enqueued with code flushing and we remove it now.
9850 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9851 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9852 flusher->EvictOptimizedCodeMap(this);
9855 ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
9856 set_optimized_code_map(Smi::FromInt(0));
// Removes every entry whose cached code equals |optimized_code| from the
// optimized-code map by compacting surviving entries to the left, then
// right-trims the array (clearing the map entirely if it becomes empty).
// |reason| is used only for --trace-opt output.
9860 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
9861 const char* reason) {
9862 DisallowHeapAllocation no_gc;
9863 if (optimized_code_map()->IsSmi()) return;
9865 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9866 int dst = kEntriesStart;
9867 int length = code_map->length();
9868 for (int src = kEntriesStart; src < length; src += kEntryLength) {
9869 ASSERT(code_map->get(src)->IsNativeContext());
9870 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
9871 // Evict the src entry by not copying it to the dst entry.
9872 if (FLAG_trace_opt) {
9873 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9875 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
9879 PrintF(" (osr ast id %d)]\n", osr.ToInt());
9883 // Keep the src entry by copying it to the dst entry.
9885 code_map->set(dst + kContextOffset,
9886 code_map->get(src + kContextOffset));
9887 code_map->set(dst + kCachedCodeOffset,
9888 code_map->get(src + kCachedCodeOffset));
9889 code_map->set(dst + kLiteralsOffset,
9890 code_map->get(src + kLiteralsOffset));
9891 code_map->set(dst + kOsrAstIdOffset,
9892 code_map->get(src + kOsrAstIdOffset));
9894 dst += kEntryLength;
9897 if (dst != length) {
9898 // Always trim even when array is cleared because of heap verifier.
9899 RightTrimFixedArray<Heap::FROM_MUTATOR>(GetHeap(), code_map, length - dst);
9900 if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
// GC-side trim of the optimized-code map by |shrink_by| slots (a whole
// number of entries); clears the map entirely if nothing remains.
9905 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9906 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9907 ASSERT(shrink_by % kEntryLength == 0);
9908 ASSERT(shrink_by <= code_map->length() - kEntriesStart);
9909 // Always trim even when array is cleared because of heap verifier.
9910 RightTrimFixedArray<Heap::FROM_GC>(GetHeap(), code_map, shrink_by);
9911 if (code_map->length() == kEntriesStart) {
9912 ClearOptimizedCodeMap();
// Prepares an object for use as a prototype: ensures it has fast
// properties (global objects are skipped).
9917 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
9918 if (object->IsGlobalObject()) return;
9920 // Make sure prototypes are fast objects and their maps have the bit set
9921 // so they remain fast.
9922 if (!object->HasFastProperties()) {
9923 TransformToFastProperties(object, 0);
// Builds and installs the native context's cache of initial JSArray maps:
// starting from |initial_map| (which must have the initial fast elements
// kind), follows or creates elements-kind transitions for every faster-to-
// slower kind in sequence and records each map in a fixed array indexed by
// elements kind.
9928 Handle<Object> CacheInitialJSArrayMaps(
9929 Handle<Context> native_context, Handle<Map> initial_map) {
9930 // Replace all of the cached initial array maps in the native context with
9931 // the appropriate transitioned elements kind maps.
9932 Factory* factory = native_context->GetIsolate()->factory();
9933 Handle<FixedArray> maps = factory->NewFixedArrayWithHoles(
9934 kElementsKindCount, TENURED);
9936 Handle<Map> current_map = initial_map;
9937 ElementsKind kind = current_map->elements_kind();
9938 ASSERT(kind == GetInitialFastElementsKind());
9939 maps->set(kind, *current_map);
9940 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
9941 i < kFastElementsKindCount; ++i) {
9942 Handle<Map> new_map;
9943 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
// Reuse an existing elements transition if one is already present,
// otherwise create a new transitioned map.
9944 if (current_map->HasElementsTransition()) {
9945 new_map = handle(current_map->elements_transition_map());
9946 ASSERT(new_map->elements_kind() == next_kind);
9948 new_map = Map::CopyAsElementsKind(
9949 current_map, next_kind, INSERT_TRANSITION);
9951 maps->set(next_kind, *new_map);
9952 current_map = new_map;
9954 native_context->set_js_array_maps(*maps);
// Sets the prototype used for instances created by |function|. If an
// initial map already exists it is copied with the new prototype (with
// special handling for the global Array function and deoptimization of
// code depending on the old initial map); otherwise the value is parked in
// the prototype_or_initial_map field until an initial map is needed.
9959 void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
9960 Handle<Object> value) {
9961 Isolate* isolate = function->GetIsolate();
9963 ASSERT(value->IsJSReceiver());
9965 // First some logic for the map of the prototype to make sure it is in fast
9967 if (value->IsJSObject()) {
9968 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
9971 // Now some logic for the maps of the objects that are created by using this
9972 // function as a constructor.
9973 if (function->has_initial_map()) {
9974 // If the function has allocated the initial map replace it with a
9975 // copy containing the new prototype. Also complete any in-object
9976 // slack tracking that is in progress at this point because it is
9977 // still tracking the old copy.
9978 if (function->IsInobjectSlackTrackingInProgress()) {
9979 function->CompleteInobjectSlackTracking();
9981 Handle<Map> initial_map(function->initial_map(), isolate);
9982 Handle<Map> new_map = Map::Copy(initial_map);
9983 new_map->set_prototype(*value);
9985 // If the function is used as the global Array function, cache the
9986 // initial map (and transitioned versions) in the native context.
9987 Context* native_context = function->context()->native_context();
9988 Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
9989 if (array_function->IsJSFunction() &&
9990 *function == JSFunction::cast(array_function)) {
9991 CacheInitialJSArrayMaps(handle(native_context, isolate), new_map);
9994 function->set_initial_map(*new_map);
9996 // Deoptimize all code that embeds the previous initial map.
9997 initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
9998 isolate, DependentCode::kInitialMapChangedGroup);
10000 // Put the value in the initial map field until an initial map is
10001 // needed. At that point, a new initial map is created and the
10002 // prototype is put into the initial map where it belongs.
10003 function->set_prototype_or_initial_map(*value);
// The instanceof stub caches function/map pairs; invalidate it.
10005 isolate->heap()->ClearInstanceofCache();
// Sets |function|.prototype. Non-JSReceiver values are stored in a fresh
// map's constructor field (per ES5.1 13.2.2) and construction falls back
// to the initial object prototype; otherwise the value itself becomes the
// instance prototype via SetInstancePrototype.
10009 void JSFunction::SetPrototype(Handle<JSFunction> function,
10010 Handle<Object> value) {
10011 ASSERT(function->should_have_prototype());
10012 Handle<Object> construct_prototype = value;
10014 // If the value is not a JSReceiver, store the value in the map's
10015 // constructor field so it can be accessed. Also, set the prototype
10016 // used for constructing objects to the original object prototype.
10017 // See ECMA-262 13.2.2.
10018 if (!value->IsJSReceiver()) {
10019 // Copy the map so this does not affect unrelated functions.
10020 // Remove map transitions because they point to maps with a
10021 // different prototype.
10022 Handle<Map> new_map = Map::Copy(handle(function->map()));
10024 JSObject::MigrateToMap(function, new_map);
10025 new_map->set_constructor(*value);
10026 new_map->set_non_instance_prototype(true);
10027 Isolate* isolate = new_map->GetIsolate();
10028 construct_prototype = handle(
10029 isolate->context()->native_context()->initial_object_prototype(),
10032 function->map()->set_non_instance_prototype(false);
10035 return SetInstancePrototype(function, construct_prototype);
// Switches this function to the strict-mode-appropriate map without a
// prototype. Succeeds trivially if already prototype-less; refuses (via
// the map check below) functions not on the plain function map.
10039 bool JSFunction::RemovePrototype() {
10040 Context* native_context = context()->native_context();
10041 Map* no_prototype_map = shared()->strict_mode() == SLOPPY
10042 ? native_context->sloppy_function_without_prototype_map()
10043 : native_context->strict_function_without_prototype_map();
10045 if (map() == no_prototype_map) return true;
10048 if (map() != (shared()->strict_mode() == SLOPPY
10049 ? native_context->sloppy_function_map()
10050 : native_context->strict_function_map())) {
10055 set_map(no_prototype_map);
10056 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
// Lazily creates |function|'s initial map: picks instance type/size from
// the shared info (generators get JSGeneratorObject), fetches or allocates
// the prototype (optimizing objects along its prototype chain), links map
// and function together, and starts in-object slack tracking for
// non-generators.
10061 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
10062 if (function->has_initial_map()) return;
10063 Isolate* isolate = function->GetIsolate();
10065 // First create a new map with the size and number of in-object properties
10066 // suggested by the function.
10067 InstanceType instance_type;
10069 int in_object_properties;
10070 if (function->shared()->is_generator()) {
10071 instance_type = JS_GENERATOR_OBJECT_TYPE;
10072 instance_size = JSGeneratorObject::kSize;
10073 in_object_properties = 0;
10075 instance_type = JS_OBJECT_TYPE;
10076 instance_size = function->shared()->CalculateInstanceSize();
10077 in_object_properties = function->shared()->CalculateInObjectProperties();
10079 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
10081 // Fetch or allocate prototype.
10082 Handle<Object> prototype;
10083 if (function->has_instance_prototype()) {
10084 prototype = handle(function->instance_prototype(), isolate);
// Walk the prototype chain (stopping at null or a proxy) and make each
// object prototype-fast.
10085 for (Handle<Object> p = prototype; !p->IsNull() && !p->IsJSProxy();
10086 p = Object::GetPrototype(isolate, p)) {
10087 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(p));
10090 prototype = isolate->factory()->NewFunctionPrototype(function);
10092 map->set_inobject_properties(in_object_properties);
10093 map->set_unused_property_fields(in_object_properties);
10094 map->set_prototype(*prototype);
10095 ASSERT(map->has_fast_object_elements());
10097 // Finally link initial map and constructor function.
10098 function->set_initial_map(*map);
10099 map->set_constructor(*function);
10101 if (!function->shared()->is_generator()) {
10102 function->StartInobjectSlackTracking();
// Forwards the instance class name to the shared function info.
10107 void JSFunction::SetInstanceClassName(String* name) {
10108 shared()->set_instance_class_name(name);
// Prints the function's debug name (as a C string) to |out|.
10112 void JSFunction::PrintName(FILE* out) {
10113 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
10114 PrintF(out, "%s", name.get());
// Extracts the native context stored at its fixed slot in a literals array.
10118 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
10119 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
10123 // The filter is a pattern that matches function names in this way:
10124 // "*" all; the default
10125 // "-" all but the top-level function
10126 // "-name" all but the function "name"
10127 // "" only the top-level function
10128 // "name" only the function "name"
10129 // "name*" only functions starting with "name"
// Returns whether this function's debug name matches the filter pattern
// described above. Negative patterns start with '-'; a trailing '*' turns
// the comparison into a prefix match (IsUtf8EqualTo's allow_prefix_match).
10130 bool JSFunction::PassesFilter(const char* raw_filter) {
10131 if (*raw_filter == '*') return true;
10132 String* name = shared()->DebugName();
10133 Vector<const char> filter = CStrVector(raw_filter);
10134 if (filter.length() == 0) return name->length() == 0;
10135 if (filter[0] == '-') {
10136 // Negative filter.
10137 if (filter.length() == 1) {
10138 return (name->length() != 0);
10139 } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
10142 if (filter[filter.length() - 1] == '*' &&
10143 name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
10148 } else if (name->IsUtf8EqualTo(filter)) {
10151 if (filter[filter.length() - 1] == '*' &&
10152 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
// Initializes an Oddball's fields: internalizes |to_string|, and stores
// the to-number value and the oddball kind.
10159 void Oddball::Initialize(Isolate* isolate,
10160 Handle<Oddball> oddball,
10161 const char* to_string,
10162 Handle<Object> to_number,
10164 Handle<String> internalized_to_string =
10165 isolate->factory()->InternalizeUtf8String(to_string);
10166 oddball->set_to_string(*internalized_to_string);
10167 oddball->set_to_number(*to_number);
10168 oddball->set_kind(kind);
// Lazily computes and caches the script's line-end positions. Scripts
// without a string source get an empty array; otherwise the computed
// array is marked copy-on-write before being stored.
10172 void Script::InitLineEnds(Handle<Script> script) {
10173 if (!script->line_ends()->IsUndefined()) return;
10175 Isolate* isolate = script->GetIsolate();
10177 if (!script->source()->IsString()) {
10178 ASSERT(script->source()->IsUndefined());
10179 Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
10180 script->set_line_ends(*empty);
10181 ASSERT(script->line_ends()->IsFixedArray());
10185 Handle<String> src(String::cast(script->source()), isolate);
10187 Handle<FixedArray> array = String::CalculateLineEnds(src, true);
10189 if (*array != isolate->heap()->empty_fixed_array()) {
10190 array->set_map(isolate->heap()->fixed_cow_array_map());
10193 script->set_line_ends(*array);
10194 ASSERT(script->line_ends()->IsFixedArray());
// Returns the 0-based column of |code_pos| within its line, or -1 if the
// position maps to no line. The first line is offset by the script's
// column_offset (scripts embedded mid-line, e.g. inline event handlers).
10198 int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
10199 int line_number = GetLineNumber(script, code_pos);
10200 if (line_number == -1) return -1;
10202 DisallowHeapAllocation no_allocation;
10203 FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
10204 line_number = line_number - script->line_offset()->value();
10205 if (line_number == 0) return code_pos + script->column_offset()->value();
// Column = distance from the character following the previous line's end.
10206 int prev_line_end_pos =
10207 Smi::cast(line_ends_array->get(line_number - 1))->value();
10208 return code_pos - (prev_line_end_pos + 1);
// Binary-searches the cached line_ends array for the line containing
// |code_pos|; returns the line number biased by the script's line_offset,
// or -1 when there are no line ends at all.
// NOTE(review): listing gaps (original lines 10218, 10221-10223, 10227-10231)
// hide the loop body that narrows [left, right); do not edit without the
// full file.
10212 int Script::GetLineNumberWithArray(int code_pos) {
10213 DisallowHeapAllocation no_allocation;
10214 ASSERT(line_ends()->IsFixedArray());
10215 FixedArray* line_ends_array = FixedArray::cast(line_ends());
10216 int line_ends_len = line_ends_array->length();
10217 if (line_ends_len == 0) return -1;
10219 if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
10220 return line_offset()->value();
10224 int right = line_ends_len;
10225 while (int half = (right - left) / 2) {
10226 if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
10232 return right + line_offset()->value();
// Handle-taking overload: ensures line_ends is initialized, then delegates
// to the fast array-based lookup.
10236 int Script::GetLineNumber(Handle<Script> script, int code_pos) {
10237 InitLineEnds(script);
10238 return script->GetLineNumberWithArray(code_pos);
// Raw overload: uses the cached line_ends if present; otherwise falls back
// to a linear scan of the source counting '\n' up to |code_pos|. Returns -1
// for sourceless scripts. NOTE(review): the declaration of `line` and the
// final return (original lines 10248, 10250, 10255-10256) are missing from
// this listing.
10242 int Script::GetLineNumber(int code_pos) {
10243 DisallowHeapAllocation no_allocation;
10244 if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);
10246 // Slow mode: we do not have line_ends. We have to iterate through source.
10247 if (!source()->IsString()) return -1;
10249 String* source_string = String::cast(source());
10251 int len = source_string->length();
10252 for (int pos = 0; pos < len; pos++) {
10253 if (pos == code_pos) break;
10254 if (source_string->Get(pos) == '\n') line++;
// Calls the JS-side `nameOrSourceURL` method on the script wrapper and
// returns its result; returns undefined if the call throws. Used so a
// //# sourceURL annotation can override the script name for debugging.
10260 Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
10261 Isolate* isolate = script->GetIsolate();
10262 Handle<String> name_or_source_url_key =
10263 isolate->factory()->InternalizeOneByteString(
10264 STATIC_ASCII_VECTOR("nameOrSourceURL"));
10265 Handle<JSObject> script_wrapper = Script::GetWrapper(script);
10266 Handle<Object> property = Object::GetProperty(
10267 script_wrapper, name_or_source_url_key).ToHandleChecked();
10268 ASSERT(property->IsJSFunction());
10269 Handle<JSFunction> method = Handle<JSFunction>::cast(property);
10270 Handle<Object> result;
10271 // Do not check against pending exception, since this function may be called
10272 // when an exception has already been pending.
10273 if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
10274 return isolate->factory()->undefined_value();
10280 // Wrappers for scripts are kept alive and cached in weak global
10281 // handles referred from foreign objects held by the scripts as long as
10282 // they are used. When they are not used anymore, the garbage
10283 // collector will call the weak callback on the global handle
10284 // associated with the wrapper and get rid of both the wrapper and the
// Weak callback for the wrapper cache: unlinks the script's foreign pointer
// back to the global handle and destroys the handle so the wrapper can be
// collected. data.GetParameter() is the handle's slot address, stashed when
// the weak handle was created in Script::GetWrapper.
10286 static void ClearWrapperCache(
10287 const v8::WeakCallbackData<v8::Value, void>& data) {
10288 Object** location = reinterpret_cast<Object**>(data.GetParameter());
10289 JSValue* wrapper = JSValue::cast(*location);
10290 Foreign* foreign = Script::cast(wrapper->value())->wrapper();
10291 ASSERT_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location));
10292 foreign->set_foreign_address(0);
10293 GlobalHandles::Destroy(location);
10294 Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
10295 isolate->counters()->script_wrappers()->Decrement();
// Returns the JS wrapper object for |script|, creating and caching it on
// first use. The cache is a weak global handle whose slot address is stored
// in the script's `wrapper` Foreign; ClearWrapperCache (above) resets it
// when the wrapper dies.
10299 Handle<JSObject> Script::GetWrapper(Handle<Script> script) {
10300 if (script->wrapper()->foreign_address() != NULL) {
10301 // Return a handle for the existing script wrapper from the cache.
10302 return Handle<JSValue>(
10303 *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address()));
10305 Isolate* isolate = script->GetIsolate();
10306 // Construct a new script wrapper.
10307 isolate->counters()->script_wrappers()->Increment();
10308 Handle<JSFunction> constructor = isolate->script_function();
10309 Handle<JSValue> result =
10310 Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));
10312 result->set_value(*script);
10314 // Create a new weak global handle and use it to cache the wrapper
10315 // for future use. The cache will automatically be cleared by the
10316 // garbage collector when it is not used anymore.
10317 Handle<Object> handle = isolate->global_handles()->Create(*result);
10318 GlobalHandles::MakeWeak(handle.location(),
10319 reinterpret_cast<void*>(handle.location()),
10320 &ClearWrapperCache);
10321 script->wrapper()->set_foreign_address(
10322 reinterpret_cast<Address>(handle.location()));
// Debug name: the explicit name if it is a non-empty string, else the
// inferred name (e.g. from the assignment target).
10327 String* SharedFunctionInfo::DebugName() {
10328 Object* n = name();
10329 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
10330 return String::cast(n);
// True when the function has both a script and script source. Uses
// reinterpret_cast to avoid cast ASSERTs in contexts like stack dumps.
10334 bool SharedFunctionInfo::HasSourceCode() {
10335 return !script()->IsUndefined() &&
10336 !reinterpret_cast<Script*>(script())->source()->IsUndefined();
// Returns the function's source as a substring of the script source, or
// undefined when no source is available.
10340 Handle<Object> SharedFunctionInfo::GetSourceCode() {
10341 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
10342 Handle<String> source(String::cast(Script::cast(script())->source()));
10343 return GetIsolate()->factory()->NewSubString(
10344 source, start_position(), end_position());
// Whether the optimizing compiler may inline this function.
10348 bool SharedFunctionInfo::IsInlineable() {
10349 // Check that the function has a script associated with it.
10350 if (!script()->IsScript()) return false;
10351 if (optimization_disabled()) return false;
10352 // If we never ran this (unlikely) then lets try to optimize it.
10353 if (code()->kind() != Code::FUNCTION) return true;
10354 return code()->optimizable();
// Source extent in characters.
10358 int SharedFunctionInfo::SourceSize() {
10359 return end_position() - start_position();
// Instance size if this SFI is used as a constructor: header plus one
// pointer slot per expected property, clamped to the maximum instance size.
10363 int SharedFunctionInfo::CalculateInstanceSize() {
10364 int instance_size =
10365 JSObject::kHeaderSize +
10366 expected_nof_properties() * kPointerSize;
10367 if (instance_size > JSObject::kMaxInstanceSize) {
10368 instance_size = JSObject::kMaxInstanceSize;
10370 return instance_size;
// Number of in-object property slots implied by the (clamped) instance size.
10374 int SharedFunctionInfo::CalculateInObjectProperties() {
10375 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
10379 // Support function for printing the source code to a StringStream
10380 // without any allocation in the heap.
// Prints up to |max_length| characters of the function source (negative
// max_length means unlimited), preceded by "function <name>" for non-
// top-level functions. NOTE(review): the `int max_length` parameter line
// and several early returns/closers (original lines 10382, 10386-10388,
// 10397-10399, 10405-10407, 10411, 10413) are missing from this listing.
10381 void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator,
10383 // For some native functions there is no source.
10384 if (!HasSourceCode()) {
10385 accumulator->Add("<No Source>");
10389 // Get the source for the script which this function came from.
10390 // Don't use String::cast because we don't want more assertion errors while
10391 // we are already creating a stack dump.
10392 String* script_source =
10393 reinterpret_cast<String*>(Script::cast(script())->source());
10395 if (!script_source->LooksValid()) {
10396 accumulator->Add("<Invalid Source>");
10400 if (!is_toplevel()) {
10401 accumulator->Add("function ");
10402 Object* name = this->name();
10403 if (name->IsString() && String::cast(name)->length() > 0) {
10404 accumulator->PrintName(name);
// Emit the full body if it fits, otherwise a truncated prefix plus "...".
10408 int len = end_position() - start_position();
10409 if (len <= max_length || max_length < 0) {
10410 accumulator->Put(script_source, start_position(), end_position());
10412 accumulator->Put(script_source,
10414 start_position() + max_length);
10415 accumulator->Add("...\n");
// Returns true when two Code objects have identical instructions and
// identical relocation info — used to decide whether deopt data from a
// recompiled function can be grafted onto the existing code object.
10420 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
10421 if (code->instruction_size() != recompiled->instruction_size()) return false;
10422 ByteArray* code_relocation = code->relocation_info();
10423 ByteArray* recompiled_relocation = recompiled->relocation_info();
10424 int length = code_relocation->length();
10425 if (length != recompiled_relocation->length()) return false;
10426 int compare = memcmp(code_relocation->GetDataStartAddress(),
10427 recompiled_relocation->GetDataStartAddress(),
10429 return compare == 0;
// Attaches deoptimization support to this function's unoptimized code. If
// |recompiled| is byte-equivalent, only its deopt data is copied over
// (preserving existing IC state); otherwise the code is replaced wholesale.
10433 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
10434 ASSERT(!has_deoptimization_support());
10435 DisallowHeapAllocation no_allocation;
10436 Code* code = this->code();
10437 if (IsCodeEquivalent(code, recompiled)) {
10438 // Copy the deoptimization data from the recompiled code.
10439 code->set_deoptimization_data(recompiled->deoptimization_data());
10440 code->set_has_deoptimization_support(true);
10442 // TODO(3025757): In case the recompiled isn't equivalent to the
10443 // old code, we have to replace it. We should try to avoid this
10444 // altogether because it flushes valuable type feedback by
10445 // effectively resetting all IC state.
10446 ReplaceCode(recompiled);
10448 ASSERT(has_deoptimization_support());
// Permanently disables optimization for this function, recording |reason|
// for diagnostics and marking the current unoptimized code (if any)
// non-optimizable.
10452 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
10453 // Disable optimization for the shared function info and mark the
10454 // code as non-optimizable. The marker on the shared function info
10455 // is there because we flush non-optimized code thereby loosing the
10456 // non-optimizable information for the code. When the code is
10457 // regenerated and set on the shared function info it is marked as
10458 // non-optimizable if optimization is disabled for the shared
10460 set_optimization_disabled(true);
10461 set_bailout_reason(reason);
10462 // Code should be the lazy compilation stub or else unoptimized. If the
10463 // latter, disable optimization for the code too.
10464 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
10465 if (code()->kind() == Code::FUNCTION) {
10466 code()->set_optimizable(false);
10468 PROFILE(GetIsolate(), CodeDisableOptEvent(code(), this));
10469 if (FLAG_trace_opt) {
10470 PrintF("[disabled optimization for ");
// NOTE(review): original line 10471 (presumably printing the function name)
// is missing from this listing.
10472 PrintF(", reason: %s]\n", GetBailoutReason(reason));
// Debug helper: asserts (inside GetOutputInfo) that |id| has an entry in
// the unoptimized code's deopt output data; always returns true so it can
// be used inside ASSERT().
10477 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
10478 ASSERT(!id.IsNone());
10479 Code* unoptimized = code();
10480 DeoptimizationOutputData* data =
10481 DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
10482 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
10484 return true; // Return true if there was no ASSERT.
// Kicks off in-object slack tracking on this function's initial map: after
// kGenerousAllocationCount constructions, unused in-object property slots
// will be reclaimed (see CompleteInobjectSlackTracking). Runs at most once
// per map, never during snapshot serialization, and only when there is
// slack to reclaim.
10488 void JSFunction::StartInobjectSlackTracking() {
10489 ASSERT(has_initial_map() && !IsInobjectSlackTrackingInProgress());
10491 if (!FLAG_clever_optimizations) return;
10492 Map* map = initial_map();
10494 // Only initiate the tracking the first time.
10495 if (map->done_inobject_slack_tracking()) return;
10496 map->set_done_inobject_slack_tracking(true);
10498 // No tracking during the snapshot construction phase.
10499 Isolate* isolate = GetIsolate();
10500 if (isolate->serializer_enabled()) return;
10502 if (map->unused_property_fields() == 0) return;
10504 map->set_construction_count(kGenerousAllocationCount);
// Resets IC/type-feedback state when the function is reused in a new
// context: clears inline caches and the feedback vector, bumps the IC age,
// and re-enables optimization if it was only disabled by hitting the
// opt_count limit.
10508 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
10509 code()->ClearInlineCaches();
10510 // If we clear ICs, we need to clear the type feedback vector too, since
10511 // CallICs are synced with a feedback vector slot.
10512 ClearTypeFeedbackInfo();
10513 set_ic_age(new_ic_age);
10514 if (code()->kind() == Code::FUNCTION) {
10515 code()->set_profiler_ticks(0);
10516 if (optimization_disabled() &&
10517 opt_count() >= FLAG_max_opt_count) {
10518 // Re-enable optimizations if they were disabled due to opt_count limit.
10519 set_optimization_disabled(false);
10520 code()->set_optimizable(true);
10523 set_deopt_count(0);
// Transition-tree visitor: records the minimum unused-property-field count
// seen across all maps into *data (an int accumulator).
10528 static void GetMinInobjectSlack(Map* map, void* data) {
10529 int slack = map->unused_property_fields();
10530 if (*reinterpret_cast<int*>(data) > slack) {
10531 *reinterpret_cast<int*>(data) = slack;
// Transition-tree visitor: shrinks each map by *data slots, trimming
// in-object properties, unused fields, and the instance size together.
10536 static void ShrinkInstanceSize(Map* map, void* data) {
10537 int slack = *reinterpret_cast<int*>(data);
10538 map->set_inobject_properties(map->inobject_properties() - slack);
10539 map->set_unused_property_fields(map->unused_property_fields() - slack);
10540 map->set_instance_size(map->instance_size() - slack * kPointerSize);
10542 // Visitor id might depend on the instance size, recalculate it.
10543 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
// Finishes slack tracking: computes the minimum slack over the whole
// transition tree, then shrinks every map in the tree by that amount.
// NOTE(review): a `slack == 0` early-out (original lines 10556, 10559-10560)
// appears to be missing from this listing.
10547 void JSFunction::CompleteInobjectSlackTracking() {
10548 ASSERT(has_initial_map());
10549 Map* map = initial_map();
10551 ASSERT(map->done_inobject_slack_tracking());
10552 map->set_construction_count(kNoSlackTracking);
10554 int slack = map->unused_property_fields();
10555 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
10557 // Resize the initial map and all maps in its transition tree.
10558 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
// Looks up previously compiled optimized code for (native_context,
// osr_ast_id) in the optimized code map. Returns the FixedArray index of
// the cached code on a hit, -1 on a miss or when caching is disabled.
// NOTE(review): the trailing `return -1;` (after original line 10585) is
// missing from this listing.
10563 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10564 BailoutId osr_ast_id) {
10565 DisallowHeapAllocation no_gc;
10566 ASSERT(native_context->IsNativeContext());
10567 if (!FLAG_cache_optimized_code) return -1;
10568 Object* value = optimized_code_map();
// A Smi value means the map is empty (no entries yet).
10569 if (!value->IsSmi()) {
10570 FixedArray* optimized_code_map = FixedArray::cast(value);
10571 int length = optimized_code_map->length();
10572 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
10573 for (int i = kEntriesStart; i < length; i += kEntryLength) {
10574 if (optimized_code_map->get(i + kContextOffset) == native_context &&
10575 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10576 return i + kCachedCodeOffset;
10579 if (FLAG_trace_opt) {
10580 PrintF("[didn't find optimized code in optimized code map for ");
// Tables mapping VisitorSynchronization sync tags to their enum spellings
// and human-readable names, both generated from the same X-macro list so
// they stay in sync with the enum.
10589 #define DECLARE_TAG(ignore1, name, ignore2) name,
10590 const char* const VisitorSynchronization::kTags[
10591 VisitorSynchronization::kNumberOfSyncTags] = {
10592 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
10597 #define DECLARE_TAG(ignore1, ignore2, name) name,
10598 const char* const VisitorSynchronization::kTagNames[
10599 VisitorSynchronization::kNumberOfSyncTags] = {
10600 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Default ObjectVisitor handlers for the various relocation-info slot
// kinds. Each forwards the referenced object through VisitPointer and,
// where mutation is allowed, writes a moved object back into the slot.
// Code targets must not move (CHECK_EQ below).
10605 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10606 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
10607 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10608 Object* old_target = target;
10609 VisitPointer(&target);
10610 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Code-age stubs are visited but, per the listing, not written back here.
10614 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10615 ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10616 Object* stub = rinfo->code_age_stub();
10618 VisitPointer(&stub);
// Code entry slots (e.g. in JSFunction) are updated if the code moved.
10623 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10624 Object* code = Code::GetObjectFromEntryAddress(entry_address);
10625 Object* old_code = code;
10626 VisitPointer(&code);
10627 if (code != old_code) {
10628 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
// Cell slots are rewritten through RelocInfo when the cell moved.
10633 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10634 ASSERT(rinfo->rmode() == RelocInfo::CELL);
10635 Object* cell = rinfo->target_cell();
10636 Object* old_cell = cell;
10637 VisitPointer(&cell);
10638 if (cell != old_cell) {
10639 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
// Debug-break targets: only valid on patched return/debug-break-slot
// sequences; like code targets, they must not move.
10644 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10645 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10646 rinfo->IsPatchedReturnSequence()) ||
10647 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10648 rinfo->IsPatchedDebugBreakSlotSequence()));
10649 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10650 Object* old_target = target;
10651 VisitPointer(&target);
10652 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// NOTE(review): the body lines that visit/write back `p` (original lines
// 10659-10660) are missing from this listing.
10656 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10657 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10658 Object* p = rinfo->target_object();
10663 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10664 Address p = rinfo->target_reference();
10665 VisitExternalReference(&p);
// Drops all relocation info, making the code object un-relocatable (used
// when invalidating code).
10669 void Code::InvalidateRelocation() {
10670 set_relocation_info(GetHeap()->empty_byte_array());
// Overwrites every embedded object/cell reference with undefined so the
// dead code no longer keeps those objects alive.
10674 void Code::InvalidateEmbeddedObjects() {
10675 Object* undefined = GetHeap()->undefined_value();
10676 Cell* undefined_cell = GetHeap()->undefined_cell();
10677 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10678 RelocInfo::ModeMask(RelocInfo::CELL);
10679 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10680 RelocInfo::Mode mode = it.rinfo()->rmode();
10681 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10682 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
10683 } else if (mode == RelocInfo::CELL) {
10684 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
// Applies a relocation |delta| to every applicable reloc entry after the
// code object moved, then flushes the instruction cache once at the end.
10690 void Code::Relocate(intptr_t delta) {
10691 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10692 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10694 CPU::FlushICache(instruction_start(), instruction_size());
// Copies freshly assembled code from |desc| into this (white, i.e. not yet
// marked) code object: instructions, relocation info, then a pass that
// unboxes handle references into direct heap pointers and applies the
// address delta. I-cache is flushed once at the end.
10698 void Code::CopyFrom(const CodeDesc& desc) {
10699 ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);
10702 CopyBytes(instruction_start(), desc.buffer,
10703 static_cast<size_t>(desc.instr_size));
// Reloc info is emitted backwards at the end of the assembler buffer.
10706 CopyBytes(relocation_start(),
10707 desc.buffer + desc.buffer_size - desc.reloc_size,
10708 static_cast<size_t>(desc.reloc_size));
10710 // unbox handles and relocate
10711 intptr_t delta = instruction_start() - desc.buffer;
10712 int mode_mask = RelocInfo::kCodeTargetMask |
10713 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10714 RelocInfo::ModeMask(RelocInfo::CELL) |
10715 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
10716 RelocInfo::kApplyMask;
10717 // Needed to find target_object and runtime_entry on X64
10718 Assembler* origin = desc.origin;
10719 AllowDeferredHandleDereference embedding_raw_address;
10720 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10721 RelocInfo::Mode mode = it.rinfo()->rmode();
10722 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10723 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10724 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10725 } else if (mode == RelocInfo::CELL) {
10726 Handle<Cell> cell = it.rinfo()->target_cell_handle();
10727 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10728 } else if (RelocInfo::IsCodeTarget(mode)) {
10729 // rewrite code handles in inline cache targets to direct
10730 // pointers to the first instruction in the code object
10731 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10732 Code* code = Code::cast(*p);
10733 it.rinfo()->set_target_address(code->instruction_start(),
10734 SKIP_WRITE_BARRIER,
10735 SKIP_ICACHE_FLUSH);
10736 } else if (RelocInfo::IsRuntimeEntry(mode)) {
10737 Address p = it.rinfo()->target_runtime_entry(origin);
10738 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER,
10739 SKIP_ICACHE_FLUSH);
10740 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
10741 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
10742 Code* code = Code::cast(*p);
10743 it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH);
// Fallback: plain pc-relative entries just get the delta applied.
10745 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10748 CPU::FlushICache(instruction_start(), instruction_size());
10752 // Locate the source position which is closest to the address in the code. This
10753 // is using the source position information embedded in the relocation info.
10754 // The position returned is relative to the beginning of the script where the
10755 // source for this function is found.
// NOTE(review): the candidate update, it.next(), and final return
// (original lines after 10775) are missing from this listing.
10756 int Code::SourcePosition(Address pc) {
10757 int distance = kMaxInt;
10758 int position = RelocInfo::kNoPosition; // Initially no position found.
10759 // Run through all the relocation info to find the best matching source
10760 // position. All the code needs to be considered as the sequence of the
10761 // instructions in the code does not necessarily follow the same order as the
10763 RelocIterator it(this, RelocInfo::kPositionMask);
10764 while (!it.done()) {
10765 // Only look at positions after the current pc.
10766 if (it.rinfo()->pc() < pc) {
10767 // Get position and distance.
10769 int dist = static_cast<int>(pc - it.rinfo()->pc());
10770 int pos = static_cast<int>(it.rinfo()->data());
10771 // If this position is closer than the current candidate or if it has the
10772 // same distance as the current candidate and the position is higher then
10773 // this position is the new candidate.
10774 if ((dist < distance) ||
10775 (dist == distance && pos > position)) {
10786 // Same as Code::SourcePosition above except it only looks for statement
// positions: returns the closest statement position at or before the
// position for |pc|.
10788 int Code::SourceStatementPosition(Address pc) {
10789 // First find the position as close as possible using all position
10791 int position = SourcePosition(pc);
10792 // Now find the closest statement position before the position.
10793 int statement_position = 0;
10794 RelocIterator it(this, RelocInfo::kPositionMask);
10795 while (!it.done()) {
10796 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10797 int p = static_cast<int>(it.rinfo()->data());
10798 if (statement_position < p && p <= position) {
10799 statement_position = p;
10804 return statement_position;
// Looks up the safepoint (register/stack GC info) for |pc| in this code's
// safepoint table.
10808 SafepointEntry Code::GetSafepointEntry(Address pc) {
10809 SafepointTable table(this);
10810 return table.FindEntry(pc);
// Scans this IC stub's embedded objects and returns the n-th object whose
// map is |match_map|. NOTE(review): the miss return (NULL, original lines
// after 10823) is missing from this listing.
10814 Object* Code::FindNthObject(int n, Map* match_map) {
10815 ASSERT(is_inline_cache_stub());
10816 DisallowHeapAllocation no_allocation;
10817 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10818 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10819 RelocInfo* info = it.rinfo();
10820 Object* object = info->target_object();
10821 if (object->IsHeapObject()) {
10822 if (HeapObject::cast(object)->map() == match_map) {
10823 if (--n == 0) return object;
// First embedded AllocationSite, or NULL.
10831 AllocationSite* Code::FindFirstAllocationSite() {
10832 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10833 return (result != NULL) ? AllocationSite::cast(result) : NULL;
// First embedded Map, or NULL (maps have the meta map as their map).
10837 Map* Code::FindFirstMap() {
10838 Object* result = FindNthObject(1, GetHeap()->meta_map());
10839 return (result != NULL) ? Map::cast(result) : NULL;
// Replaces embedded objects in order: the k-th object matching
// pattern.find_[k]'s map is replaced by pattern.replace_[k]; stops when all
// count_ patterns are consumed.
10843 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
10844 ASSERT(is_inline_cache_stub() || is_handler());
10845 DisallowHeapAllocation no_allocation;
10846 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10847 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
10848 int current_pattern = 0;
10849 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10850 RelocInfo* info = it.rinfo();
10851 Object* object = info->target_object();
10852 if (object->IsHeapObject()) {
10853 Map* map = HeapObject::cast(object)->map();
10854 if (map == *pattern.find_[current_pattern]) {
10855 info->set_target_object(*pattern.replace_[current_pattern]);
10856 if (++current_pattern == pattern.count_) return;
// Collects every Map embedded in this IC stub into |maps|.
10864 void Code::FindAllMaps(MapHandleList* maps) {
10865 ASSERT(is_inline_cache_stub());
10866 DisallowHeapAllocation no_allocation;
10867 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10868 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10869 RelocInfo* info = it.rinfo();
10870 Object* object = info->target_object();
10871 if (object->IsMap()) maps->Add(handle(Map::cast(object)));
// First HANDLER-kind code target in this IC stub (NULL return is in the
// lines this listing drops after 10883).
10876 Code* Code::FindFirstHandler() {
10877 ASSERT(is_inline_cache_stub());
10878 DisallowHeapAllocation no_allocation;
10879 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10880 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10881 RelocInfo* info = it.rinfo();
10882 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10883 if (code->kind() == Code::HANDLER) return code;
// Collects up to |length| handler code targets into |code_list|; returns
// true iff exactly |length| were found. NOTE(review): the declaration and
// increment of `i` (original lines 10893, 10902-10903) are missing from
// this listing.
10889 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
10890 ASSERT(is_inline_cache_stub());
10891 DisallowHeapAllocation no_allocation;
10892 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10894 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10895 if (i == length) return true;
10896 RelocInfo* info = it.rinfo();
10897 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10898 // IC stubs with handlers never contain non-handler code objects before
10899 // handler targets.
10900 if (code->kind() != Code::HANDLER) break;
10901 code_list->Add(Handle<Code>(code));
10904 return i == length;
// First embedded Name (property key) in this IC stub.
10908 Name* Code::FindFirstName() {
10909 ASSERT(is_inline_cache_stub());
10910 DisallowHeapAllocation no_allocation;
10911 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10912 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10913 RelocInfo* info = it.rinfo();
10914 Object* object = info->target_object();
10915 if (object->IsName()) return Name::cast(object);
// Clears all inline caches in this code object (NULL = any IC kind).
10921 void Code::ClearInlineCaches() {
10922 ClearInlineCaches(NULL);
// Clears only ICs of the given kind.
10926 void Code::ClearInlineCaches(Code::Kind kind) {
10927 ClearInlineCaches(&kind);
// Shared implementation: walks call-site reloc entries and resets each IC
// target (optionally filtered by kind) to its uninitialized stub.
10931 void Code::ClearInlineCaches(Code::Kind* kind) {
10932 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10933 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10934 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
10935 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10936 RelocInfo* info = it.rinfo();
10937 Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10938 if (target->is_inline_cache_stub()) {
10939 if (kind == NULL || *kind == target->kind()) {
10940 IC::Clear(this->GetIsolate(), info->pc(),
10941 info->host()->constant_pool());
// Resets every feedback-vector slot to the uninitialized sentinel, except
// AllocationSites which are deliberately preserved. NOTE(review): a
// `break;`/`default:` between the ALLOCATION_SITE case and the reset
// (original lines 10962-10964) is missing from this listing.
10948 void SharedFunctionInfo::ClearTypeFeedbackInfo() {
10949 FixedArray* vector = feedback_vector();
10950 Heap* heap = GetHeap();
10951 int length = vector->length();
10953 for (int i = 0; i < length; i++) {
10954 Object* obj = vector->get(i);
10955 if (obj->IsHeapObject()) {
10956 InstanceType instance_type =
10957 HeapObject::cast(obj)->map()->instance_type();
10958 switch (instance_type) {
10959 case ALLOCATION_SITE_TYPE:
10960 // AllocationSites are not cleared because they do not store
10961 // information that leaks.
10965 vector->set(i, TypeFeedbackInfo::RawUninitializedSentinel(heap),
10966 SKIP_WRITE_BARRIER);
// Maps a back-edge pc offset to its AST id by linear scan of the back-edge
// table; BailoutId::None() when not found.
10973 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
10974 DisallowHeapAllocation no_gc;
10975 ASSERT(kind() == FUNCTION);
10976 BackEdgeTable back_edges(this, &no_gc);
10977 for (uint32_t i = 0; i < back_edges.length(); i++) {
10978 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10980 return BailoutId::None();
// Inverse lookup: AST id -> pc offset. The id must exist (UNREACHABLE
// otherwise).
10984 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10985 DisallowHeapAllocation no_gc;
10986 ASSERT(kind() == FUNCTION);
10987 BackEdgeTable back_edges(this, &no_gc);
10988 for (uint32_t i = 0; i < back_edges.length(); i++) {
10989 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10991 UNREACHABLE(); // We expect to find the back edge.
// Patches a code-age sequence back to "young" (no age).
10996 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10997 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
// Patches a code-age sequence to the executed-once state.
11001 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
11002 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
11003 NO_MARKING_PARITY);
// Maps the special not-executed/executed-once pseudo-ages onto the
// effective ages used by flushing decisions. NOTE(review): the trailing
// `return age;` (original lines after 11013) is missing from this listing.
11007 static Code::Age EffectiveAge(Code::Age age) {
11008 if (age == Code::kNotExecutedCodeAge) {
11009 // Treat that's never been executed as old immediately.
11010 age = Code::kIsOldCodeAge;
11011 } else if (age == Code::kExecutedOnceCodeAge) {
11012 // Pre-age code that has only been executed once.
11013 age = Code::kPreAgedCodeAge;
// Ages this code by one step during GC marking; the parity check ensures
// the code is aged at most once per GC cycle. NOTE(review): the
// declaration of `age` (original line 11022) is missing from this listing.
11019 void Code::MakeOlder(MarkingParity current_parity) {
11020 byte* sequence = FindCodeAgeSequence();
11021 if (sequence != NULL) {
11023 MarkingParity code_parity;
11024 Isolate* isolate = GetIsolate();
11025 GetCodeAgeAndParity(isolate, sequence, &age, &code_parity);
11026 age = EffectiveAge(age);
11027 if (age != kLastCodeAge && code_parity != current_parity) {
11028 PatchPlatformCodeAge(isolate,
11030 static_cast<Age>(age + 1),
// Old code is a candidate for flushing.
11037 bool Code::IsOld() {
11038 return GetAge() >= kIsOldCodeAge;
// Locates the code-age patch sequence (at the prologue), or NULL when this
// code kind is not aged (e.g. functions with debug break slots).
11042 byte* Code::FindCodeAgeSequence() {
11043 return FLAG_age_code &&
11044 prologue_offset() != Code::kPrologueOffsetNotSet &&
11045 (kind() == OPTIMIZED_FUNCTION ||
11046 (kind() == FUNCTION && !has_debug_break_slots()))
11047 ? instruction_start() + prologue_offset()
// Effective age (pseudo-ages folded in).
11052 Code::Age Code::GetAge() {
11053 return EffectiveAge(GetRawAge());
// Raw age as read from the code-age sequence; kNoAgeCodeAge when there is
// no sequence. NOTE(review): the declaration of `age` and the `return age;`
// (original lines 11062, 11065) are missing from this listing.
11057 Code::Age Code::GetRawAge() {
11058 byte* sequence = FindCodeAgeSequence();
11059 if (sequence == NULL) {
11060 return kNoAgeCodeAge;
11063 MarkingParity parity;
11064 GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity);
// Decodes (age, parity) from the code-age stub |code| points at, by
// comparing against every generated Make*CodeYoungAgain* builtin plus the
// two executed-marker stubs. NOTE(review): the `Code* stub;` declaration,
// `return`s after each match, and the final UNREACHABLE (dropped lines
// around 11073, 11079-11080, 11085-11086, 11093-11094, 11099-11101) are
// missing from this listing.
11069 void Code::GetCodeAgeAndParity(Code* code, Age* age,
11070 MarkingParity* parity) {
11071 Isolate* isolate = code->GetIsolate();
11072 Builtins* builtins = isolate->builtins();
11074 #define HANDLE_CODE_AGE(AGE) \
11075 stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
11076 if (code == stub) { \
11077 *age = k##AGE##CodeAge; \
11078 *parity = EVEN_MARKING_PARITY; \
11081 stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
11082 if (code == stub) { \
11083 *age = k##AGE##CodeAge; \
11084 *parity = ODD_MARKING_PARITY; \
11087 CODE_AGE_LIST(HANDLE_CODE_AGE)
11088 #undef HANDLE_CODE_AGE
11089 stub = *builtins->MarkCodeAsExecutedOnce();
11090 if (code == stub) {
11091 *age = kNotExecutedCodeAge;
11092 *parity = NO_MARKING_PARITY;
11095 stub = *builtins->MarkCodeAsExecutedTwice();
11096 if (code == stub) {
11097 *age = kExecutedOnceCodeAge;
11098 *parity = NO_MARKING_PARITY;
// Inverse of the above: returns the builtin stub encoding (age, parity),
// selected via a switch over the age (switch header is among the dropped
// lines around 11107).
11105 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
11106 Builtins* builtins = isolate->builtins();
11108 #define HANDLE_CODE_AGE(AGE) \
11109 case k##AGE##CodeAge: { \
11110 Code* stub = parity == EVEN_MARKING_PARITY \
11111 ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
11112 : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
11115 CODE_AGE_LIST(HANDLE_CODE_AGE)
11116 #undef HANDLE_CODE_AGE
11117 case kNotExecutedCodeAge: {
11118 ASSERT(parity == NO_MARKING_PARITY);
11119 return *builtins->MarkCodeAsExecutedOnce();
11121 case kExecutedOnceCodeAge: {
11122 ASSERT(parity == NO_MARKING_PARITY);
11123 return *builtins->MarkCodeAsExecutedTwice();
// Prints the assembler comment preceding the runtime entry whose
// deoptimization id equals |bailout_id| — a best-effort hint at where and
// why this code deopts. Relies on COMMENT reloc entries appearing before
// their RUNTIME_ENTRY in iteration order.
11133 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
11134 const char* last_comment = NULL;
11135 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
11136 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
11137 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11138 RelocInfo* info = it.rinfo();
11139 if (info->rmode() == RelocInfo::COMMENT) {
11140 last_comment = reinterpret_cast<const char*>(info->data());
11141 } else if (last_comment != NULL) {
11142 if ((bailout_id == Deoptimizer::GetDeoptimizationId(
11143 GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
11144 (bailout_id == Deoptimizer::GetDeoptimizationId(
11145 GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
11146 CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
11147 PrintF(out, " %s\n", last_comment);
// True iff |pc| is a registered deopt point in this (optimized) code's
// deoptimization input data. Pc entries of -1 mark unusable slots. The
// final `return false;` is among the lines this listing drops.
11155 bool Code::CanDeoptAt(Address pc) {
11156 DeoptimizationInputData* deopt_data =
11157 DeoptimizationInputData::cast(deoptimization_data());
11158 Address code_start_address = instruction_start();
11159 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
11160 if (deopt_data->Pc(i)->value() == -1) continue;
11161 Address address = code_start_address + deopt_data->Pc(i)->value();
11162 if (address == pc) return true;
11168 // Identify kind of code.
11169 const char* Code::Kind2String(Kind kind) {
11171 #define CASE(name) case name: return #name;
11172 CODE_KIND_LIST(CASE)
11174 case NUMBER_OF_KINDS: break;
11181 #ifdef ENABLE_DISASSEMBLER
// Pretty-prints the deoptimization input data table: one row per deopt
// point (index, ast id, argc, pc) and, when --print-code-verbose is on,
// the full frame-translation command stream for each point.
// NOTE(review): several interior lines (break statements, closing braces,
// some PrintF arguments) appear to be elided from this listing.
11183 void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
11184 disasm::NameConverter converter;
11185 int deopt_count = DeoptCount();
11186 PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
11187 if (0 == deopt_count) return;
11189 PrintF(out, "%6s  %6s  %6s %6s %12s\n", "index", "ast id", "argc", "pc",
11190 FLAG_print_code_verbose ? "commands" : "");
11191 for (int i = 0; i < deopt_count; i++) {
11192 PrintF(out, "%6d  %6d  %6d %6d",
11195 ArgumentsStackHeight(i)->value(),
// Without the verbose flag only the summary row is printed.
11198 if (!FLAG_print_code_verbose) {
11202 // Print details of the frame translation.
11203 int translation_index = TranslationIndex(i)->value();
11204 TranslationIterator iterator(TranslationByteArray(), translation_index);
// Every translation stream starts with a BEGIN opcode carrying the
// frame counts.
11205 Translation::Opcode opcode =
11206 static_cast<Translation::Opcode>(iterator.Next());
11207 ASSERT(Translation::BEGIN == opcode);
11208 int frame_count = iterator.Next();
11209 int jsframe_count = iterator.Next();
11210 PrintF(out, "  %s {frame count=%d, js frame count=%d}\n",
11211 Translation::StringFor(opcode),
// Decode commands until the stream ends or the next BEGIN is reached.
11215 while (iterator.HasNext() &&
11216 Translation::BEGIN !=
11217 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
11218 PrintF(out, "%24s    %s ", "", Translation::StringFor(opcode));
11221 case Translation::BEGIN:
11225 case Translation::JS_FRAME: {
11226 int ast_id = iterator.Next();
11227 int function_id = iterator.Next();
11228 unsigned height = iterator.Next();
11229 PrintF(out, "{ast_id=%d, function=", ast_id);
// kSelfLiteralId means the frame's function is the code's own
// function rather than a literal-array entry.
11230 if (function_id != Translation::kSelfLiteralId) {
11231 Object* function = LiteralArray()->get(function_id);
11232 JSFunction::cast(function)->PrintName(out);
11234 PrintF(out, "<self>");
11236 PrintF(out, ", height=%u}", height);
11240 case Translation::COMPILED_STUB_FRAME: {
11241 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
11242 PrintF(out, "{kind=%d}", stub_kind);
11246 case Translation::ARGUMENTS_ADAPTOR_FRAME:
11247 case Translation::CONSTRUCT_STUB_FRAME: {
11248 int function_id = iterator.Next();
11249 JSFunction* function =
11250 JSFunction::cast(LiteralArray()->get(function_id));
11251 unsigned height = iterator.Next();
11252 PrintF(out, "{function=");
11253 function->PrintName(out);
11254 PrintF(out, ", height=%u}", height);
11258 case Translation::GETTER_STUB_FRAME:
11259 case Translation::SETTER_STUB_FRAME: {
11260 int function_id = iterator.Next();
11261 JSFunction* function =
11262 JSFunction::cast(LiteralArray()->get(function_id));
11263 PrintF(out, "{function=");
11264 function->PrintName(out);
// The remaining opcodes describe where a single input value lives:
// in a register, a stack slot, the literal array, or a materialized
// object.
11269 case Translation::REGISTER: {
11270 int reg_code = iterator.Next();
11271 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
11275 case Translation::INT32_REGISTER: {
11276 int reg_code = iterator.Next();
11277 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
11281 case Translation::UINT32_REGISTER: {
11282 int reg_code = iterator.Next();
11283 PrintF(out, "{input=%s (unsigned)}",
11284 converter.NameOfCPURegister(reg_code));
11288 case Translation::DOUBLE_REGISTER: {
11289 int reg_code = iterator.Next();
11290 PrintF(out, "{input=%s}",
11291 DoubleRegister::AllocationIndexToString(reg_code));
11295 case Translation::STACK_SLOT: {
11296 int input_slot_index = iterator.Next();
11297 PrintF(out, "{input=%d}", input_slot_index);
11301 case Translation::INT32_STACK_SLOT: {
11302 int input_slot_index = iterator.Next();
11303 PrintF(out, "{input=%d}", input_slot_index);
11307 case Translation::UINT32_STACK_SLOT: {
11308 int input_slot_index = iterator.Next();
11309 PrintF(out, "{input=%d (unsigned)}", input_slot_index);
11313 case Translation::DOUBLE_STACK_SLOT: {
11314 int input_slot_index = iterator.Next();
11315 PrintF(out, "{input=%d}", input_slot_index);
11319 case Translation::LITERAL: {
11320 unsigned literal_index = iterator.Next();
11321 PrintF(out, "{literal_id=%u}", literal_index);
11325 case Translation::DUPLICATED_OBJECT: {
11326 int object_index = iterator.Next();
11327 PrintF(out, "{object_index=%d}", object_index);
11331 case Translation::ARGUMENTS_OBJECT:
11332 case Translation::CAPTURED_OBJECT: {
11333 int args_length = iterator.Next();
11334 PrintF(out, "{length=%d}", args_length);
// Pretty-prints the deoptimization output data table (full-codegen deopt
// points): one row per point with ast id, decoded pc, and decoded state.
// NOTE(review): the closing braces of this function appear to be elided
// from this listing.
11344 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
11345 PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
11346 this->DeoptPoints());
11347 if (this->DeoptPoints() == 0) return;
11349 PrintF(out, "%6s  %8s  %s\n", "ast id", "pc", "state");
11350 for (int i = 0; i < this->DeoptPoints(); i++) {
// pc and state are bit-packed into a single Smi; decode both fields.
11351 int pc_and_state = this->PcAndState(i)->value();
11352 PrintF(out, "%6d  %8d  %s\n",
11353 this->AstId(i).ToInt(),
11354 FullCodeGenerator::PcField::decode(pc_and_state),
11355 FullCodeGenerator::State2String(
11356 FullCodeGenerator::StateField::decode(pc_and_state)));
// Maps an InlineCacheState value to its printable name.
// NOTE(review): the opening "switch (state) {" line and the function tail
// appear to be elided from this listing.
11361 const char* Code::ICState2String(InlineCacheState state) {
11363 case UNINITIALIZED: return "UNINITIALIZED";
11364 case PREMONOMORPHIC: return "PREMONOMORPHIC";
11365 case MONOMORPHIC: return "MONOMORPHIC";
11366 case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE";
11367 case POLYMORPHIC: return "POLYMORPHIC";
11368 case MEGAMORPHIC: return "MEGAMORPHIC";
11369 case GENERIC: return "GENERIC";
11370 case DEBUG_STUB: return "DEBUG_STUB";
// Maps a StubType value to its printable name.
// NOTE(review): the opening "switch (type) {" line and the function tail
// appear to be elided from this listing.
11377 const char* Code::StubType2String(StubType type) {
11379 case NORMAL: return "NORMAL";
11380 case FAST: return "FAST";
11382 UNREACHABLE();  // keep the compiler happy
// Prints the extra IC state for a code object of the given |kind|: a
// symbolic name when one is known (e.g. STRICT for keyed stores), else the
// raw integer value.
// NOTE(review): the opening switch line and some case/brace lines appear
// to be elided from this listing.
11387 void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
11388 PrintF(out, "extra_ic_state = ");
11389 const char* name = NULL;
11392 case KEYED_STORE_IC:
11393 if (extra == STRICT) name = "STRICT";
// Fall back to the numeric value when no symbolic name matched.
11398 if (name != NULL) {
11399 PrintF(out, "%s\n", name);
11401 PrintF(out, "%d\n", extra);
// Disassembles this code object to |out|: kind, stub/IC metadata, the
// instruction stream, deoptimization data, safepoint or back-edge tables,
// type feedback, and relocation info.
// NOTE(review): various interior lines (else branches, closing braces)
// appear to be elided from this listing.
11406 void Code::Disassemble(const char* name, FILE* out) {
11407 PrintF(out, "kind = %s\n", Kind2String(kind()));
11408 if (has_major_key()) {
11409 PrintF(out, "major_key = %s\n",
11410 CodeStub::MajorName(CodeStub::GetMajorKey(this), true));
11412 if (is_inline_cache_stub()) {
11413 PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
11414 PrintExtraICState(out, kind(), extra_ic_state());
11415 if (ic_state() == MONOMORPHIC) {
11416 PrintF(out, "type = %s\n", StubType2String(type()));
11418 if (is_compare_ic_stub()) {
11419 ASSERT(major_key() == CodeStub::CompareIC);
// Decode the compare IC's minor key into its operand/handler states.
11420 CompareIC::State left_state, right_state, handler_state;
11422 ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state,
11423 &handler_state, &op);
11424 PrintF(out, "compare_state = %s*%s -> %s\n",
11425 CompareIC::GetStateName(left_state),
11426 CompareIC::GetStateName(right_state),
11427 CompareIC::GetStateName(handler_state));
11428 PrintF(out, "compare_operation = %s\n", Token::Name(op));
11431 if ((name != NULL) && (name[0] != '\0')) {
11432 PrintF(out, "name = %s\n", name);
11434 if (kind() == OPTIMIZED_FUNCTION) {
11435 PrintF(out, "stack_slots = %d\n", stack_slots());
// Dump the raw machine instructions.
11438 PrintF(out, "Instructions (size = %d)\n", instruction_size());
11439 Disassembler::Decode(out, this);
// Full-codegen code carries output data; optimized code carries input data.
11442 if (kind() == FUNCTION) {
11443 DeoptimizationOutputData* data =
11444 DeoptimizationOutputData::cast(this->deoptimization_data());
11445 data->DeoptimizationOutputDataPrint(out);
11446 } else if (kind() == OPTIMIZED_FUNCTION) {
11447 DeoptimizationInputData* data =
11448 DeoptimizationInputData::cast(this->deoptimization_data());
11449 data->DeoptimizationInputDataPrint(out);
// Crankshafted code has a safepoint table; full-codegen code has a
// back-edge table instead.
11453 if (is_crankshafted()) {
11454 SafepointTable table(this);
11455 PrintF(out, "Safepoints (size = %u)\n", table.size());
11456 for (unsigned i = 0; i < table.length(); i++) {
11457 unsigned pc_offset = table.GetPcOffset(i);
11458 PrintF(out, "%p  %4d  ", (instruction_start() + pc_offset), pc_offset);
11459 table.PrintEntry(i, out);
11460 PrintF(out, " (sp -> fp)");
11461 SafepointEntry entry = table.GetEntry(i);
11462 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
11463 PrintF(out, "  %6d", entry.deoptimization_index());
11465 PrintF(out, "  <none>");
11467 if (entry.argument_count() > 0) {
11468 PrintF(out, " argc: %d", entry.argument_count());
11473 } else if (kind() == FUNCTION) {
11474 unsigned offset = back_edge_table_offset();
11475 // If there is no back edge table, the "table start" will be at or after
11476 // (due to alignment) the end of the instruction stream.
11477 if (static_cast<int>(offset) < instruction_size()) {
11478 DisallowHeapAllocation no_gc;
11479 BackEdgeTable back_edges(this, &no_gc);
11481 PrintF(out, "Back edges (size = %u)\n", back_edges.length());
11482 PrintF(out, "ast_id  pc_offset  loop_depth\n");
11484 for (uint32_t i = 0; i < back_edges.length(); i++) {
11485 PrintF(out, "%6d  %9u  %10u\n", back_edges.ast_id(i).ToInt(),
11486 back_edges.pc_offset(i),
11487 back_edges.loop_depth(i));
11492 #ifdef OBJECT_PRINT
11493 if (!type_feedback_info()->IsUndefined()) {
11494 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
11500 PrintF(out, "RelocInfo (size = %d)\n", relocation_size());
11501 for (RelocIterator it(this); !it.done(); it.next()) {
11502 it.rinfo()->Print(GetIsolate(), out);
11506 #endif // ENABLE_DISASSEMBLER
// Grows (or shrinks) a fast-elements backing store to |capacity|, choosing
// a SMI or object elements kind based on |smi_mode| and the current kind,
// copying the old elements over, and transitioning the object's map when
// the kind changes. Returns the new backing store.
// NOTE(review): some parameter lines (capacity/length) and interior braces
// appear to be elided from this listing.
11509 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
11510 Handle<JSObject> object,
11513 SetFastElementsCapacitySmiMode smi_mode) {
11514 // We should never end in here with a pixel or external array.
11515 ASSERT(!object->HasExternalArrayElements());
11517 // Allocate a new fast elements backing store.
11518 Handle<FixedArray> new_elements =
11519 object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
11521 ElementsKind elements_kind = object->GetElementsKind();
11522 ElementsKind new_elements_kind;
11523 // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
11524 // or if it's allowed and the old elements array contained only SMIs.
11525 bool has_fast_smi_elements =
11526 (smi_mode == kForceSmiElements) ||
11527 ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
11528 if (has_fast_smi_elements) {
// Holeyness is preserved across the transition.
11529 if (IsHoleyElementsKind(elements_kind)) {
11530 new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
11532 new_elements_kind = FAST_SMI_ELEMENTS;
11535 if (IsHoleyElementsKind(elements_kind)) {
11536 new_elements_kind = FAST_HOLEY_ELEMENTS;
11538 new_elements_kind = FAST_ELEMENTS;
11541 Handle<FixedArrayBase> old_elements(object->elements());
11542 ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
11543 accessor->CopyElements(object, new_elements, elements_kind);
// Sloppy-arguments objects keep their parameter map and only swap the
// arguments store (slot 1); everything else installs the new elements
// (and possibly a transitioned map) directly.
11545 if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
11546 Handle<Map> new_map = (new_elements_kind != elements_kind)
11547 ? GetElementsTransitionMap(object, new_elements_kind)
11548 : handle(object->map());
11549 JSObject::ValidateElements(object);
11550 JSObject::SetMapAndElements(object, new_map, new_elements);
11552 // Transition through the allocation site as well if present.
11553 JSObject::UpdateAllocationSite(object, new_elements_kind);
11555 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11556 parameter_map->set(1, *new_elements);
11559 if (FLAG_trace_elements_transitions) {
11560 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11561 object->GetElementsKind(), new_elements);
// Arrays also record the new length.
11564 if (object->IsJSArray()) {
11565 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11567 return new_elements;
// Replaces the object's elements with a FixedDoubleArray of |capacity|,
// transitioning the map to the corresponding fast-double elements kind
// (holey if the old kind was holey) and copying the old elements over.
// NOTE(review): the capacity/length parameter lines and some braces appear
// to be elided from this listing.
11571 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11574 // We should never end in here with a pixel or external array.
11575 ASSERT(!object->HasExternalArrayElements());
11577 Handle<FixedArrayBase> elems =
11578 object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11580 ElementsKind elements_kind = object->GetElementsKind();
// Unlike the FixedArray variant, sloppy arguments are not supported here.
11581 CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11582 ElementsKind new_elements_kind = elements_kind;
11583 if (IsHoleyElementsKind(elements_kind)) {
11584 new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11586 new_elements_kind = FAST_DOUBLE_ELEMENTS;
11589 Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11591 Handle<FixedArrayBase> old_elements(object->elements());
11592 ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11593 accessor->CopyElements(object, elems, elements_kind);
11595 JSObject::ValidateElements(object);
11596 JSObject::SetMapAndElements(object, new_map, elems);
11598 if (FLAG_trace_elements_transitions) {
11599 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11600 object->GetElementsKind(), elems);
// Arrays also record the new length.
11603 if (object->IsJSArray()) {
11604 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
// Allocates hole-initialized backing storage for |array| with the given
// capacity and length via the factory.
11610 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
11611 ASSERT(capacity >= 0);
11612 array->GetIsolate()->factory()->NewJSArrayStorage(
11613 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
// Grows |array| so that both its capacity and length are at least
// |required_size|, delegating to the elements accessor for its kind.
11617 void JSArray::Expand(Handle<JSArray> array, int required_size) {
11618 ElementsAccessor* accessor = array->GetElementsAccessor();
11619 accessor->SetCapacityAndLength(array, required_size, required_size);
11623 // Returns false if the passed-in index is marked non-configurable,
11624 // which will cause the ES5 truncation operation to halt, and thus
11625 // no further old values need be collected.
// Used by observed-array length truncation: records the element's old
// value (or the hole for accessor properties) and its index into the
// output lists.
// NOTE(review): the index parameter line and the trailing "return true;"
// appear to be elided from this listing.
11626 static bool GetOldValue(Isolate* isolate,
11627 Handle<JSObject> object,
11629 List<Handle<Object> >* old_values,
11630 List<uint32_t>* indices) {
11631 PropertyAttributes attributes =
11632 JSReceiver::GetOwnElementAttribute(object, index);
11633 ASSERT(attributes != ABSENT);
11634 if (attributes == DONT_DELETE) return false;
11635 Handle<Object> value;
// Accessor-backed elements contribute the hole so the change record can
// elide "oldValue".
11636 if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) {
11637 value = Handle<Object>::cast(isolate->factory()->the_hole_value());
11639 value = Object::GetElement(isolate, object, index).ToHandleChecked();
11641 old_values->Add(value);
11642 indices->Add(index);
// Notifies Object.observe of an array splice by calling the JS-side
// observers_enqueue_splice hook with (array, index, deleted, addCount).
// NOTE(review): the index parameter line and the trailing call arguments
// appear to be elided from this listing.
11646 static void EnqueueSpliceRecord(Handle<JSArray> object,
11648 Handle<JSArray> deleted,
11649 uint32_t add_count) {
11650 Isolate* isolate = object->GetIsolate();
11651 HandleScope scope(isolate);
11652 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
11653 Handle<Object> add_count_object =
11654 isolate->factory()->NewNumberFromUint(add_count);
11656 Handle<Object> args[] =
11657 { object, index_object, deleted, add_count_object };
11659 Execution::Call(isolate,
11660 Handle<JSFunction>(isolate->observers_enqueue_splice()),
11661 isolate->factory()->undefined_value(),
// Tells Object.observe that a compound splice operation on |object| is
// starting, so individual change records are batched.
// NOTE(review): the trailing call arguments appear to be elided from this
// listing.
11667 static void BeginPerformSplice(Handle<JSArray> object) {
11668 Isolate* isolate = object->GetIsolate();
11669 HandleScope scope(isolate);
11670 Handle<Object> args[] = { object };
11672 Execution::Call(isolate,
11673 Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11674 isolate->factory()->undefined_value(),
// Counterpart of BeginPerformSplice: tells Object.observe that the
// compound splice operation on |object| has finished.
// NOTE(review): the trailing call arguments appear to be elided from this
// listing.
11680 static void EndPerformSplice(Handle<JSArray> object) {
11681 Isolate* isolate = object->GetIsolate();
11682 HandleScope scope(isolate);
11683 Handle<Object> args[] = { object };
11685 Execution::Call(isolate,
11686 Handle<JSFunction>(isolate->observers_end_perform_splice()),
11687 isolate->factory()->undefined_value(),
// Sets the length of |array|. For unobserved arrays this delegates
// directly to the elements accessor. For observed arrays it first collects
// the old values of elements that will be removed, performs the length
// change, then emits "delete"/"update" change records and a splice record.
// NOTE(review): interior lines (some braces, the final return) appear to
// be elided from this listing.
11693 MaybeHandle<Object> JSArray::SetElementsLength(
11694 Handle<JSArray> array,
11695 Handle<Object> new_length_handle) {
11696 // We should never end in here with a pixel or external array.
11697 ASSERT(array->AllowsSetElementsLength());
11698 if (!array->map()->is_observed()) {
// Fast path: no observers, just set the length.
11699 return array->GetElementsAccessor()->SetLength(array, new_length_handle);
11702 Isolate* isolate = array->GetIsolate();
11703 List<uint32_t> indices;
11704 List<Handle<Object> > old_values;
11705 Handle<Object> old_length_handle(array->length(), isolate);
11706 uint32_t old_length = 0;
11707 CHECK(old_length_handle->ToArrayIndex(&old_length));
11708 uint32_t new_length = 0;
11709 CHECK(new_length_handle->ToArrayIndex(&new_length));
11711 static const PropertyAttributes kNoAttrFilter = NONE;
11712 int num_elements = array->NumberOfOwnElements(kNoAttrFilter);
11713 if (num_elements > 0) {
11714 if (old_length == static_cast<uint32_t>(num_elements)) {
11715 // Simple case for arrays without holes.
11716 for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
11717 if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
11720 // For sparse arrays, only iterate over existing elements.
11721 // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
11722 // the to-be-removed indices twice.
11723 Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
11724 array->GetOwnElementKeys(*keys, kNoAttrFilter);
11725 while (num_elements-- > 0) {
11726 uint32_t index = NumberToUint32(keys->get(num_elements));
11727 if (index < new_length) break;
11728 if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
// Actually apply the length change (may throw).
11733 Handle<Object> hresult;
11734 ASSIGN_RETURN_ON_EXCEPTION(
11736 array->GetElementsAccessor()->SetLength(array, new_length_handle),
// The accessor may have clamped the length; re-read it. If nothing
// changed there is nothing to report.
11739 CHECK(array->length()->ToArrayIndex(&new_length));
11740 if (old_length == new_length) return hresult;
11742 BeginPerformSplice(array);
11744 for (int i = 0; i < indices.length(); ++i) {
11745 // For deletions where the property was an accessor, old_values[i]
11746 // will be the hole, which instructs EnqueueChangeRecord to elide
11747 // the "oldValue" property.
11748 JSObject::EnqueueChangeRecord(
11749 array, "delete", isolate->factory()->Uint32ToString(indices[i]),
11752 JSObject::EnqueueChangeRecord(
11753 array, "update", isolate->factory()->length_string(),
11754 old_length_handle);
11756 EndPerformSplice(array);
// Build the "deleted" array for the splice record from the collected old
// values (skipping accessor holes), then enqueue the splice itself.
11758 uint32_t index = Min(old_length, new_length);
11759 uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
11760 uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
11761 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
11762 if (delete_count > 0) {
11763 for (int i = indices.length() - 1; i >= 0; i--) {
11764 // Skip deletions where the property was an accessor, leaving holes
11765 // in the array of old values.
11766 if (old_values[i]->IsTheHole()) continue;
11767 JSObject::SetElement(
11768 deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert();
11771 SetProperty(deleted, isolate->factory()->length_string(),
11772 isolate->factory()->NewNumberFromUint(delete_count),
11773 NONE, SLOPPY).Assert();
11776 EnqueueSpliceRecord(array, index, deleted, add_count);
// Looks up a cached prototype transition for |prototype| in this map's
// prototype-transition array. Returns a null handle if no cached
// transition exists.
11782 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11783 Handle<Object> prototype) {
11784 FixedArray* cache = map->GetPrototypeTransitions();
11785 int number_of_transitions = map->NumberOfProtoTransitions();
// Entries are (prototype, map) pairs stored after a fixed-size header.
11786 const int proto_offset =
11787 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11788 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11789 const int step = kProtoTransitionElementsPerEntry;
11790 for (int i = 0; i < number_of_transitions; i++) {
11791 if (cache->get(proto_offset + i * step) == *prototype) {
11792 Object* result = cache->get(map_offset + i * step);
11793 return Handle<Map>(Map::cast(result));
11796 return Handle<Map>();
// Caches |target_map| as the prototype transition of |map| for
// |prototype|, growing the transition array if needed. Shared maps and
// disabled caching return |map| unchanged.
// NOTE(review): the function's return statement(s) appear to be elided
// from this listing.
11800 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
11801 Handle<Object> prototype,
11802 Handle<Map> target_map) {
11803 ASSERT(target_map->IsMap());
11804 ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
11805 // Don't cache prototype transition if this map is shared.
11806 if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;
11808 const int step = kProtoTransitionElementsPerEntry;
11809 const int header = kProtoTransitionHeaderSize;
11811 Handle<FixedArray> cache(map->GetPrototypeTransitions());
11812 int capacity = (cache->length() - header) / step;
11813 int transitions = map->NumberOfProtoTransitions() + 1;
11815 if (transitions > capacity) {
// Cap the cache size; beyond the cap we simply stop caching.
11816 if (capacity > kMaxCachedPrototypeTransitions) return map;
11818 // Grow array by factor 2 over and above what we need.
11819 cache = FixedArray::CopySize(cache, transitions * 2 * step + header);
11821 SetPrototypeTransitions(map, cache);
11824 // Reload number of transitions as GC might shrink them.
11825 int last = map->NumberOfProtoTransitions();
11826 int entry = header + last * step;
11828 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
11829 cache->set(entry + kProtoTransitionMapOffset, *target_map);
11830 map->SetNumberOfProtoTransitions(last + 1);
// Overwrites the entire transition array with the hole, so stale pointers
// are detectable (e.g. by the heap verifier).
11836 void Map::ZapTransitions() {
11837 TransitionArray* transition_array = transitions();
11838 // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11839 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11840 Object** data = transition_array->data_start();
11841 Object* the_hole = GetHeap()->the_hole_value();
11842 int length = transition_array->length();
11843 for (int i = 0; i < length; i++) {
11844 data[i] = the_hole;
// Overwrites the prototype-transition cache with the hole (same intent as
// ZapTransitions, but using the fast MemsetPointer).
11849 void Map::ZapPrototypeTransitions() {
11850 FixedArray* proto_transitions = GetPrototypeTransitions();
11851 MemsetPointer(proto_transitions->data_start(),
11852 GetHeap()->the_hole_value(),
11853 proto_transitions->length());
// Registers a not-yet-finished compilation (via its object wrapper) as
// dependent on this map for the given dependency group, and records the
// map in the compilation's own dependency list for later cleanup.
11858 void Map::AddDependentCompilationInfo(Handle<Map> map,
11859 DependentCode::DependencyGroup group,
11860 CompilationInfo* info) {
11861 Handle<DependentCode> codes =
11862 DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
11863 group, info->object_wrapper());
// Insert may have reallocated the array; only write back if it changed.
11864 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
11865 info->dependencies(group)->Add(map, info->zone());
// Registers a finished code object as dependent on this map for the given
// dependency group.
11870 void Map::AddDependentCode(Handle<Map> map,
11871 DependentCode::DependencyGroup group,
11872 Handle<Code> code) {
11873 Handle<DependentCode> codes = DependentCode::Insert(
11874 Handle<DependentCode>(map->dependent_code()), group, code);
// Insert may have reallocated the array; only write back if it changed.
11875 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
// Adds an IC stub to this map's weak-IC dependency list. The first stub
// establishes the list head via AddDependentCode (may allocate); later
// stubs are linked through next_code_link without allocation.
// NOTE(review): the condition testing |n| and some braces appear to be
// elided from this listing.
11880 void Map::AddDependentIC(Handle<Map> map,
11881 Handle<Code> stub) {
11882 ASSERT(stub->next_code_link()->IsUndefined());
11883 int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
11885 // Slow path: insert the head of the list with possible heap allocation.
11886 Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
11888 // Fast path: link the stub to the existing head of the list without any
11889 // heap allocation.
11891 map->dependent_code()->AddToDependentICList(stub);
// Constructor: computes the per-group start indexes for |entries|.
11896 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
11897 Recompute(entries);
// Rebuilds the prefix-sum table: start_indexes_[g] is the index of the
// first entry of group g, and start_indexes_[kGroupCount] the total count.
11901 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11902 start_indexes_[0] = 0;
11903 for (int g = 1; g <= kGroupCount; g++) {
11904 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11905 start_indexes_[g] = start_indexes_[g - 1] + count;
// Returns the DependentCode list attached to |object|, which may be a
// PropertyCell, AllocationSite, or Map depending on the dependency group.
11910 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11911 DependencyGroup group) {
11912 AllowDeferredHandleDereference dependencies_are_safe;
11913 if (group == DependentCode::kPropertyCellChangedGroup) {
11914 return Handle<PropertyCell>::cast(object)->dependent_code();
11915 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
11916 group == DependentCode::kAllocationSiteTransitionChangedGroup) {
11917 return Handle<AllocationSite>::cast(object)->dependent_code();
11919 return Handle<Map>::cast(object)->dependent_code();
// Inserts |object| (a Code or CompilationInfo wrapper) into the given
// dependency group of |entries|, growing the backing array when full.
// Returns the (possibly new) array; a duplicate entry is a no-op.
// NOTE(review): the final return statement appears to be elided from this
// listing. The loop at 11944 clears the *old* array after copying — it
// operates on |entries| before the reassignment at 11954.
11923 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
11924 DependencyGroup group,
11925 Handle<Object> object) {
11926 GroupStartIndexes starts(*entries);
11927 int start = starts.at(group);
11928 int end = starts.at(group + 1);
11929 int number_of_entries = starts.number_of_entries();
11930 // Check for existing entry to avoid duplicates.
11931 for (int i = start; i < end; i++) {
11932 if (entries->object_at(i) == *object) return entries;
11934 if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
// Grow by ~25% beyond the minimum once past a small threshold.
11935 int capacity = kCodesStartIndex + number_of_entries + 1;
11936 if (capacity > 5) capacity = capacity * 5 / 4;
11937 Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
11938 FixedArray::CopySize(entries, capacity, TENURED));
11939 // The number of codes can change after GC.
11940 starts.Recompute(*entries);
11941 start = starts.at(group);
11942 end = starts.at(group + 1);
11943 number_of_entries = starts.number_of_entries();
11944 for (int i = 0; i < number_of_entries; i++) {
11945 entries->clear_at(i);
11947 // If the old fixed array was empty, we need to reset counters of the
11949 if (number_of_entries == 0) {
11950 for (int g = 0; g < kGroupCount; g++) {
11951 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
11954 entries = new_entries;
// Shift later groups right by one and write the new entry at the end of
// its group.
11956 entries->ExtendGroup(group);
11957 entries->set_object_at(end, *object);
11958 entries->set_number_of_entries(group, end + 1 - start);
// Replaces the CompilationInfo wrapper entry for |info| in |group| with
// the now-finished |code| object.
// NOTE(review): the |code| parameter line, a break after the replacement,
// and the debug-only guard around the trailing verification loop appear
// to be elided from this listing.
11963 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11964 CompilationInfo* info,
11966 DisallowHeapAllocation no_gc;
11967 AllowDeferredHandleDereference get_object_wrapper;
11968 Foreign* info_wrapper = *info->object_wrapper();
11969 GroupStartIndexes starts(this);
11970 int start = starts.at(group);
11971 int end = starts.at(group + 1);
11972 for (int i = start; i < end; i++) {
11973 if (object_at(i) == info_wrapper) {
11974 set_object_at(i, code);
// Verify no entry for this compilation remains in the group.
11980 for (int i = start; i < end; i++) {
11981 ASSERT(is_code_at(i) || compilation_info_at(i) != info);
// Removes the entry for an aborted compilation from |group|, compacting
// the array by pulling the last entry of each subsequent group into the
// gap so groups stay contiguous.
// NOTE(review): the info_pos declaration/assignment and the debug-only
// guard around the trailing verification loop appear to be elided from
// this listing.
11987 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
11988 CompilationInfo* info) {
11989 DisallowHeapAllocation no_allocation;
11990 AllowDeferredHandleDereference get_object_wrapper;
11991 Foreign* info_wrapper = *info->object_wrapper();
11992 GroupStartIndexes starts(this);
11993 int start = starts.at(group);
11994 int end = starts.at(group + 1);
11995 // Find compilation info wrapper.
11997 for (int i = start; i < end; i++) {
11998 if (object_at(i) == info_wrapper) {
12003 if (info_pos == -1) return;  // Not found.
12004 int gap = info_pos;
12005 // Use the last of each group to fill the gap in the previous group.
12006 for (int i = group; i < kGroupCount; i++) {
12007 int last_of_group = starts.at(i + 1) - 1;
12008 ASSERT(last_of_group >= gap);
12009 if (last_of_group == gap) continue;
12010 copy(last_of_group, gap);
12011 gap = last_of_group;
12013 ASSERT(gap == starts.number_of_entries() - 1);
12014 clear_at(gap);  // Clear last gap.
12015 set_number_of_entries(group, end - start - 1);
// Verify no entry for this compilation remains in the group.
12018 for (int i = start; i < end - 1; i++) {
12019 ASSERT(is_code_at(i) || compilation_info_at(i) != info);
// Walks a next_code_link-chained list starting at |head| and returns true
// if |code| is on it.
// NOTE(review): the trailing "return false;" appears to be elided from
// this listing.
12025 static bool CodeListContains(Object* head, Code* code) {
12026 while (!head->IsUndefined()) {
12027 if (head == code) return true;
12028 head = Code::cast(head)->next_code_link();
// Returns true if |code| is registered in |group|. The weak-IC group
// stores a linked list in a single slot; other groups store entries
// directly in the array.
// NOTE(review): the trailing "return false;" appears to be elided from
// this listing.
12034 bool DependentCode::Contains(DependencyGroup group, Code* code) {
12035 GroupStartIndexes starts(this);
12036 int start = starts.at(group);
12037 int end = starts.at(group + 1);
12038 if (group == kWeakICGroup) {
12039 return CodeListContains(object_at(start), code);
12041 for (int i = start; i < end; i++) {
12042 if (object_at(i) == code) return true;
// Marks every code entry in |group| for deoptimization (aborting any
// in-progress compilations in the group), removes the group's entries by
// compacting the array, and zaps the freed tail slots. Returns whether
// any code object was newly marked.
// NOTE(review): interior lines (isolate parameter, marked = true, the
// copy in the compaction loop, clear_at in the zap loop, the final
// return) appear to be elided from this listing.
12048 bool DependentCode::MarkCodeForDeoptimization(
12050 DependentCode::DependencyGroup group) {
12051 DisallowHeapAllocation no_allocation_scope;
12052 DependentCode::GroupStartIndexes starts(this);
12053 int start = starts.at(group);
12054 int end = starts.at(group + 1);
12055 int code_entries = starts.number_of_entries();
12056 if (start == end) return false;
12058 // Mark all the code that needs to be deoptimized.
12059 bool marked = false;
12060 for (int i = start; i < end; i++) {
12061 if (is_code_at(i)) {
12062 Code* code = code_at(i);
12063 if (!code->marked_for_deoptimization()) {
12064 code->set_marked_for_deoptimization(true);
// Non-code entries are pending compilations; abort them.
12068 CompilationInfo* info = compilation_info_at(i);
12069 info->AbortDueToDependencyChange();
12072 // Compact the array by moving all subsequent groups to fill in the new holes.
12073 for (int src = end, dst = start; src < code_entries; src++, dst++) {
12076 // Now the holes are at the end of the array, zap them for heap-verifier.
12077 int removed = end - start;
12078 for (int i = code_entries - removed; i < code_entries; i++) {
12081 set_number_of_entries(group, 0);
// Marks all code in |group| for deoptimization and, if anything was
// marked, triggers deoptimization of the marked code.
// NOTE(review): the isolate parameter line appears to be elided from this
// listing.
12086 void DependentCode::DeoptimizeDependentCodeGroup(
12088 DependentCode::DependencyGroup group) {
12089 ASSERT(AllowCodeDependencyChange::IsAllowed());
12090 DisallowHeapAllocation no_allocation_scope;
12091 bool marked = MarkCodeForDeoptimization(isolate, group);
12093 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
// Links |stub| into the weak-IC linked list stored in this array's
// kWeakICGroup slot, preferring to splice after the existing head so the
// array slot itself is not written.
12097 void DependentCode::AddToDependentICList(Handle<Code> stub) {
12098 DisallowHeapAllocation no_heap_allocation;
12099 GroupStartIndexes starts(this);
12100 int i = starts.at(kWeakICGroup);
12101 Object* head = object_at(i);
12102 // Try to insert the stub after the head of the list to minimize number of
12103 // writes to the DependentCode array, since a write to the array can make it
12104 // strong if it was alread marked by incremental marker.
12105 if (head->IsCode()) {
12106 stub->set_next_code_link(Code::cast(head)->next_code_link());
12107 Code::cast(head)->set_next_code_link(*stub);
// Empty list: the stub becomes the new head, written into the array.
12109 stub->set_next_code_link(head);
12110 set_object_at(i, *stub);
// Returns a map equal to |map| but with |prototype| as its prototype,
// reusing a cached prototype transition when available and creating (and
// caching) a fresh copy otherwise.
// NOTE(review): the return statement appears to be elided from this
// listing.
12115 Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
12116 Handle<Object> prototype) {
12117 Handle<Map> new_map = GetPrototypeTransition(map, prototype);
12118 if (new_map.is_null()) {
12119 new_map = Copy(map);
12120 PutPrototypeTransition(map, prototype, new_map);
12121 new_map->set_prototype(*prototype);
// Sets [[Prototype]] of |object| to |value|, enforcing the ES5 invariants:
// non-receiver/non-null values are silently ignored, non-extensible
// objects throw, and prototype cycles throw. Optionally skips hidden
// prototypes to set the prototype on the outermost visible object.
// NOTE(review): interior lines (a debug-only guard around |size|, some
// braces, the final return) appear to be elided from this listing.
12127 MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
12128 Handle<Object> value,
12129 bool skip_hidden_prototypes) {
// Recorded only to assert below that the operation does not resize the
// object.
12131 int size = object->Size();
12134 Isolate* isolate = object->GetIsolate();
12135 Heap* heap = isolate->heap();
12136 // Silently ignore the change if value is not a JSObject or null.
12137 // SpiderMonkey behaves this way.
12138 if (!value->IsJSReceiver() && !value->IsNull()) return value;
12140 // From 8.6.2 Object Internal Methods
12142 // In addition, if [[Extensible]] is false the value of the [[Class]] and
12143 // [[Prototype]] internal properties of the object may not be modified.
12145 // Implementation specific extensions that modify [[Class]], [[Prototype]]
12146 // or [[Extensible]] must not violate the invariants defined in the preceding
12148 if (!object->map()->is_extensible()) {
12149 Handle<Object> args[] = { object };
12150 Handle<Object> error = isolate->factory()->NewTypeError(
12151 "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
12152 return isolate->Throw<Object>(error);
12155 // Before we can set the prototype we need to be sure
12156 // prototype cycles are prevented.
12157 // It is sufficient to validate that the receiver is not in the new prototype
12159 for (Object* pt = *value;
12160 pt != heap->null_value();
12161 pt = pt->GetPrototype(isolate)) {
12162 if (JSReceiver::cast(pt) == *object) {
12164 Handle<Object> error = isolate->factory()->NewError(
12165 "cyclic_proto", HandleVector<Object>(NULL, 0));
12166 return isolate->Throw<Object>(error);
// Remember whether the old chain already had dictionary elements, so we
// can tell below if this change introduces them.
12170 bool dictionary_elements_in_chain =
12171 object->map()->DictionaryElementsInPrototypeChainOnly();
12172 Handle<JSObject> real_receiver = object;
12174 if (skip_hidden_prototypes) {
12175 // Find the first object in the chain whose prototype object is not
12176 // hidden and set the new prototype on that object.
12177 Object* current_proto = real_receiver->GetPrototype();
12178 while (current_proto->IsJSObject() &&
12179 JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
12180 real_receiver = handle(JSObject::cast(current_proto), isolate);
12181 current_proto = current_proto->GetPrototype(isolate);
12185 // Set the new prototype of the object.
12186 Handle<Map> map(real_receiver->map());
12188 // Nothing to do if prototype is already set.
12189 if (map->prototype() == *value) return value;
12191 if (value->IsJSObject()) {
12192 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
12195 Handle<Map> new_map = Map::TransitionToPrototype(map, value);
12196 ASSERT(new_map->prototype() == *value);
12197 JSObject::MigrateToMap(real_receiver, new_map);
12199 if (!dictionary_elements_in_chain &&
12200 new_map->DictionaryElementsInPrototypeChainOnly()) {
12201 // If the prototype chain didn't previously have element callbacks, then
12202 // KeyedStoreICs need to be cleared to ensure any that involve this
12204 object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Cached instanceof results may now be stale.
12207 heap->ClearInstanceofCache();
12208 ASSERT(size == object->Size());
// Ensures |object|'s elements backing store can accept |arg_count| values
// taken from the stack-allocated |args|, starting at slot |first_arg|.
// Delegates to the pointer-range overload of EnsureCanContainElements.
// NOTE(review): |args| (presumably an Arguments* parameter) is not visible in
// this listing — confirm against the full signature.
12213 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
12215 uint32_t first_arg,
12216 uint32_t arg_count,
12217 EnsureElementsMode mode) {
12218 // Elements in |Arguments| are ordered backwards (because they're on the
12219 // stack), but the method that's called here iterates over them in forward
// order, so compute the address of the lowest-index argument and hand the
// range off to the forward-iterating overload.
12221 return EnsureCanContainElements(
12222 object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
// Returns the AccessorPair (getter/setter) for the own property |name|, or an
// empty MaybeHandle if the property does not exist or is not an accessor.
// Names that parse as array indices are routed to the element lookup.
12226 MaybeHandle<AccessorPair> JSObject::GetOwnPropertyAccessorPair(
12227 Handle<JSObject> object,
12228 Handle<Name> name) {
12229 uint32_t index = 0;
12230 if (name->AsArrayIndex(&index)) {
12231 return GetOwnElementAccessorPair(object, index);
12234 Isolate* isolate = object->GetIsolate();
12235 LookupResult lookup(isolate);
// Only "real" named own properties are considered (no interceptors).
12236 object->LookupOwnRealNamedProperty(name, &lookup);
// An accessor property stores its AccessorPair as the callback object.
12238 if (lookup.IsPropertyCallbacks() &&
12239 lookup.GetCallbackObject()->IsAccessorPair()) {
12240 return handle(AccessorPair::cast(lookup.GetCallbackObject()), isolate);
12242 return MaybeHandle<AccessorPair>();
// Returns the AccessorPair for the indexed own property |index|, or an empty
// MaybeHandle if there is none (or if an indexed interceptor could shadow it).
12246 MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair(
12247 Handle<JSObject> object,
// A global proxy has no own elements; forward the lookup to the global
// object it proxies (its prototype), unless the proxy is detached (null).
12249 if (object->IsJSGlobalProxy()) {
12250 Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
12251 if (proto->IsNull()) return MaybeHandle<AccessorPair>();
12252 ASSERT(proto->IsJSGlobalObject());
12253 return GetOwnElementAccessorPair(Handle<JSObject>::cast(proto), index);
12256 // Check for lookup interceptor.
12257 if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();
// Otherwise ask the elements accessor for a registered getter/setter pair.
12259 return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
// Stores |value| at |index| on an object that has an indexed property
// interceptor. The embedder's setter callback is given first chance to handle
// the store; a non-empty callback result means it was handled and |value| is
// returned. Otherwise the store falls through to the non-interceptor path.
12263 MaybeHandle<Object> JSObject::SetElementWithInterceptor(
12264 Handle<JSObject> object,
12266 Handle<Object> value,
12267 PropertyAttributes attributes,
12268 StrictMode strict_mode,
12269 bool check_prototype,
12270 SetPropertyMode set_mode) {
12271 Isolate* isolate = object->GetIsolate();
12273 // Make sure that the top context does not change when doing
12274 // callbacks or interceptor calls.
12275 AssertNoContextChange ncc(isolate);
12277 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
12278 if (!interceptor->setter()->IsUndefined()) {
12279 v8::IndexedPropertySetterCallback setter =
12280 v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
12282 ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
12283 PropertyCallbackArguments args(isolate, interceptor->data(), *object,
12285 v8::Handle<v8::Value> result =
12286 args.Call(setter, index, v8::Utils::ToLocal(value));
// The callback may have scheduled an exception; propagate it before
// interpreting the result.
12287 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
12288 if (!result.IsEmpty()) return value;
// Interceptor declined (or had no setter): perform the ordinary store.
12291 return SetElementWithoutInterceptor(object, index, value, attributes,
// Loads element |index| through a callback |structure|, which is one of:
//  - ExecutableAccessorInfo: an embedder (API) getter callback,
//  - AccessorPair: a JavaScript getter (e.g. from __defineGetter__),
//  - DeclaredAccessorInfo: a declarative accessor description.
// |receiver| is the original receiver of the load; |holder| is the object on
// which the callback was found.
12298 MaybeHandle<Object> JSObject::GetElementWithCallback(
12299 Handle<JSObject> object,
12300 Handle<Object> receiver,
12301 Handle<Object> structure,
12303 Handle<Object> holder) {
12304 Isolate* isolate = object->GetIsolate();
12305 ASSERT(!structure->IsForeign());
12306 // api style callbacks.
12307 if (structure->IsExecutableAccessorInfo()) {
12308 Handle<ExecutableAccessorInfo> data =
12309 Handle<ExecutableAccessorInfo>::cast(structure);
12310 Object* fun_obj = data->getter();
12311 v8::AccessorGetterCallback call_fun =
12312 v8::ToCData<v8::AccessorGetterCallback>(fun_obj);
// A missing getter behaves as if the property were undefined.
12313 if (call_fun == NULL) return isolate->factory()->undefined_value();
12314 Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
// API getters receive the property name as a string, so stringify the index.
12315 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12316 Handle<String> key = isolate->factory()->NumberToString(number);
12317 LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
12318 PropertyCallbackArguments
12319 args(isolate, data->data(), *receiver, *holder_handle);
12320 v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
12321 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
12322 if (result.IsEmpty()) return isolate->factory()->undefined_value();
12323 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
12324 result_internal->VerifyApiCallResultType();
12325 // Rebox handle before return.
12326 return handle(*result_internal, isolate);
12329 // __defineGetter__ callback
12330 if (structure->IsAccessorPair()) {
12331 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
12333 if (getter->IsSpecFunction()) {
12334 // TODO(rossberg): nicer would be to cast to some JSCallable here...
12335 return GetPropertyWithDefinedGetter(
12336 receiver, Handle<JSReceiver>::cast(getter));
12338 // Getter is not a function.
12339 return isolate->factory()->undefined_value();
12342 if (structure->IsDeclaredAccessorInfo()) {
12343 return GetDeclaredAccessorProperty(
12344 receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
// Unknown callback structure — presumably unreachable (see UNREACHABLE in
// similar code paths); confirm against the full file.
12348 return MaybeHandle<Object>();
// Stores |value| at |index| through a callback |structure| (the store-side
// mirror of GetElementWithCallback). Handles API setters, JavaScript setters
// from AccessorPairs, and (currently as a no-op) declared accessors. In strict
// mode, a missing JavaScript setter throws a "no_setter_in_callback"
// TypeError; in sloppy mode the store is silently ignored.
12352 MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
12353 Handle<Object> structure,
12355 Handle<Object> value,
12356 Handle<JSObject> holder,
12357 StrictMode strict_mode) {
12358 Isolate* isolate = object->GetIsolate();
12360 // We should never get here to initialize a const with the hole
12361 // value since a const declaration would conflict with the setter.
12362 ASSERT(!value->IsTheHole());
12363 ASSERT(!structure->IsForeign());
12364 if (structure->IsExecutableAccessorInfo()) {
12365 // api style callbacks
12366 Handle<ExecutableAccessorInfo> data =
12367 Handle<ExecutableAccessorInfo>::cast(structure);
12368 Object* call_obj = data->setter();
12369 v8::AccessorSetterCallback call_fun =
12370 v8::ToCData<v8::AccessorSetterCallback>(call_obj);
// No setter registered: the store is a no-op that yields |value|.
12371 if (call_fun == NULL) return value;
// API setters receive the property name as a string, so stringify the index.
12372 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12373 Handle<String> key(isolate->factory()->NumberToString(number));
12374 LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
12375 PropertyCallbackArguments
12376 args(isolate, data->data(), *object, *holder);
12377 args.Call(call_fun,
12378 v8::Utils::ToLocal(key),
12379 v8::Utils::ToLocal(value));
12380 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
12384 if (structure->IsAccessorPair()) {
12385 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
12386 if (setter->IsSpecFunction()) {
12387 // TODO(rossberg): nicer would be to cast to some JSCallable here...
12388 return SetPropertyWithDefinedSetter(
12389 object, Handle<JSReceiver>::cast(setter), value);
// Accessor without a setter: silently ignore in sloppy mode, throw in strict.
12391 if (strict_mode == SLOPPY) return value;
12392 Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
12393 Handle<Object> args[2] = { key, holder };
12394 Handle<Object> error = isolate->factory()->NewTypeError(
12395 "no_setter_in_callback", HandleVector(args, 2));
12396 return isolate->Throw<Object>(error);
12400 // TODO(dcarney): Handle correctly.
12401 if (structure->IsDeclaredAccessorInfo()) return value;
12404 return MaybeHandle<Object>();
// Returns true iff this object uses sloppy-arguments elements whose backing
// arguments store (slot 1 of the parameter map) is NOT a dictionary.
12408 bool JSObject::HasFastArgumentsElements() {
12409 Heap* heap = GetHeap();
12410 if (!elements()->IsFixedArray()) return false;
12411 FixedArray* elements = FixedArray::cast(this->elements());
// Only sloppy-arguments objects use the sloppy_arguments_elements_map.
12412 if (elements->map() != heap->sloppy_arguments_elements_map()) {
// Slot 1 of the parameter map holds the actual arguments backing store.
12415 FixedArray* arguments = FixedArray::cast(elements->get(1));
12416 return !arguments->IsDictionary();
// Returns true iff this object uses sloppy-arguments elements whose backing
// arguments store (slot 1 of the parameter map) IS a dictionary.
// Exact complement of HasFastArgumentsElements for arguments objects.
12420 bool JSObject::HasDictionaryArgumentsElements() {
12421 Heap* heap = GetHeap();
12422 if (!elements()->IsFixedArray()) return false;
12423 FixedArray* elements = FixedArray::cast(this->elements());
12424 if (elements->map() != heap->sloppy_arguments_elements_map()) {
// Slot 1 of the parameter map holds the actual arguments backing store.
12427 FixedArray* arguments = FixedArray::cast(elements->get(1));
12428 return arguments->IsDictionary();
12432 // Adding n elements in fast case is O(n*n).
12433 // Note: revisit design to have dual undefined values to capture absent
// Stores |value| at |index| on an object with fast (Smi/object) or fast
// arguments elements. May grow the backing store, transition the elements
// kind (to holey, double, or generic), update a JSArray's length, or bail out
// to dictionary elements when the store would be too sparse.
12435 MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
12437 Handle<Object> value,
12438 StrictMode strict_mode,
12439 bool check_prototype) {
12440 ASSERT(object->HasFastSmiOrObjectElements() ||
12441 object->HasFastArgumentsElements());
12443 Isolate* isolate = object->GetIsolate();
12445 // Array optimizations rely on the prototype lookups of Array objects always
12446 // returning undefined. If there is a store to the initial prototype object,
12447 // make sure all of these optimizations are invalidated.
12448 if (isolate->is_initial_object_prototype(*object) ||
12449 isolate->is_initial_array_prototype(*object)) {
12450 object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
12451 DependentCode::kElementsCantBeAddedGroup);
// For sloppy-arguments objects, operate on the real arguments store (slot 1);
// otherwise make sure the elements array is writable (not COW).
12454 Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
12455 if (backing_store->map() ==
12456 isolate->heap()->sloppy_arguments_elements_map()) {
12457 backing_store = handle(FixedArray::cast(backing_store->get(1)));
12459 backing_store = EnsureWritableFastElements(object);
12461 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
// A store to a hole (or past capacity) may be intercepted by an accessor on
// the prototype chain.
12463 if (check_prototype &&
12464 (index >= capacity || backing_store->get(index)->IsTheHole())) {
12466 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12467 object, index, value, &found, strict_mode);
12468 if (found) return result;
12471 uint32_t new_capacity = capacity;
12472 // Check if the length property of this object needs to be updated.
12473 uint32_t array_length = 0;
12474 bool must_update_array_length = false;
12475 bool introduces_holes = true;
12476 if (object->IsJSArray()) {
12477 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
// Writing exactly at the current length appends without creating a hole.
12478 introduces_holes = index > array_length;
12479 if (index >= array_length) {
12480 must_update_array_length = true;
12481 array_length = index + 1;
12484 introduces_holes = index >= capacity;
12487 // If the array is growing, and it's not growth by a single element at the
12488 // end, make sure that the ElementsKind is HOLEY.
12489 ElementsKind elements_kind = object->GetElementsKind();
12490 if (introduces_holes &&
12491 IsFastElementsKind(elements_kind) &&
12492 !IsFastHoleyElementsKind(elements_kind)) {
12493 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12494 TransitionElementsKind(object, transitioned_kind);
12497 // Check if the capacity of the backing store needs to be increased, or if
12498 // a transition to slow elements is necessary.
12499 if (index >= capacity) {
12500 bool convert_to_slow = true;
// Growing by less than kMaxGap may stay fast if the heuristics allow it.
12501 if ((index - capacity) < kMaxGap) {
12502 new_capacity = NewElementsCapacity(index + 1);
12503 ASSERT(new_capacity > index);
12504 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12505 convert_to_slow = false;
12508 if (convert_to_slow) {
12509 NormalizeElements(object);
12510 return SetDictionaryElement(object, index, value, NONE, strict_mode,
12514 // Convert to fast double elements if appropriate.
12515 if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12516 // Consider fixing the boilerplate as well if we have one.
12517 ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12518 ? FAST_HOLEY_DOUBLE_ELEMENTS
12519 : FAST_DOUBLE_ELEMENTS;
12521 UpdateAllocationSite(object, to_kind);
12523 SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12524 FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12525 JSObject::ValidateElements(object);
12528 // Change elements kind from Smi-only to generic FAST if necessary.
12529 if (object->HasFastSmiElements() && !value->IsSmi()) {
12530 ElementsKind kind = object->HasFastHoleyElements()
12531 ? FAST_HOLEY_ELEMENTS
12534 UpdateAllocationSite(object, kind);
12535 Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12536 JSObject::MigrateToMap(object, new_map);
12537 ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
12539 // Increase backing store capacity if that's been decided previously.
12540 if (new_capacity != capacity) {
12541 SetFastElementsCapacitySmiMode smi_mode =
12542 value->IsSmi() && object->HasFastSmiElements()
12543 ? kAllowSmiElements
12544 : kDontAllowSmiElements;
12545 Handle<FixedArray> new_elements =
12546 SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12548 new_elements->set(index, *value);
12549 JSObject::ValidateElements(object);
12553 // Finally, set the new element and length.
12554 ASSERT(object->elements()->IsFixedArray());
12555 backing_store->set(index, *value);
12556 if (must_update_array_length) {
12557 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
// Stores |value| at |index| on an object with dictionary (slow) elements or
// dictionary-backed sloppy arguments. Handles callbacks on existing entries,
// read-only checks, aliased arguments entries, extensibility, JSArray length
// updates, and an opportunistic conversion back to fast elements.
12563 MaybeHandle<Object> JSObject::SetDictionaryElement(
12564 Handle<JSObject> object,
12566 Handle<Object> value,
12567 PropertyAttributes attributes,
12568 StrictMode strict_mode,
12569 bool check_prototype,
12570 SetPropertyMode set_mode) {
12571 ASSERT(object->HasDictionaryElements() ||
12572 object->HasDictionaryArgumentsElements());
12573 Isolate* isolate = object->GetIsolate();
12575 // Insert element in the dictionary.
12576 Handle<FixedArray> elements(FixedArray::cast(object->elements()));
12577 bool is_arguments =
12578 (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
// For sloppy arguments the dictionary lives in slot 1 of the parameter map.
12579 Handle<SeededNumberDictionary> dictionary(is_arguments
12580 ? SeededNumberDictionary::cast(elements->get(1))
12581 : SeededNumberDictionary::cast(*elements));
12583 int entry = dictionary->FindEntry(index);
12584 if (entry != SeededNumberDictionary::kNotFound) {
// The index already exists: update the entry in place.
12585 Handle<Object> element(dictionary->ValueAt(entry), isolate);
12586 PropertyDetails details = dictionary->DetailsAt(entry);
12587 if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
12588 return SetElementWithCallback(object, element, index, value, object,
12591 dictionary->UpdateMaxNumberKey(index);
12592 // If a value has not been initialized we allow writing to it even if it
12593 // is read-only (a declared const that has not been initialized). If a
12594 // value is being defined we skip attribute checks completely.
12595 if (set_mode == DEFINE_PROPERTY) {
12596 details = PropertyDetails(
12597 attributes, NORMAL, details.dictionary_index());
12598 dictionary->DetailsAtPut(entry, details);
12599 } else if (details.IsReadOnly() && !element->IsTheHole()) {
// Read-only violation: silent in sloppy mode, TypeError in strict mode.
12600 if (strict_mode == SLOPPY) {
12601 return isolate->factory()->undefined_value();
12603 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12604 Handle<Object> args[2] = { number, object };
12605 Handle<Object> error =
12606 isolate->factory()->NewTypeError("strict_read_only_property",
12607 HandleVector(args, 2));
12608 return isolate->Throw<Object>(error);
12611 // Elements of the arguments object in slow mode might be slow aliases.
12612 if (is_arguments && element->IsAliasedArgumentsEntry()) {
// An aliased entry forwards the write into the function's context slot.
12613 Handle<AliasedArgumentsEntry> entry =
12614 Handle<AliasedArgumentsEntry>::cast(element);
12615 Handle<Context> context(Context::cast(elements->get(0)));
12616 int context_index = entry->aliased_context_slot();
12617 ASSERT(!context->get(context_index)->IsTheHole());
12618 context->set(context_index, *value);
12619 // For elements that are still writable we keep slow aliasing.
12620 if (!details.IsReadOnly()) value = element;
12622 dictionary->ValueAtPut(entry, *value);
12625 // Index not already used. Look for an accessor in the prototype chain.
12627 if (check_prototype) {
12629 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12630 object, index, value, &found, strict_mode);
12631 if (found) return result;
12634 // When we set the is_extensible flag to false we always force the
12635 // element into dictionary mode (and force them to stay there).
12636 if (!object->map()->is_extensible()) {
// Adding to a non-extensible object: silent in sloppy mode, throw in strict.
12637 if (strict_mode == SLOPPY) {
12638 return isolate->factory()->undefined_value();
12640 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12641 Handle<String> name = isolate->factory()->NumberToString(number);
12642 Handle<Object> args[1] = { name };
12643 Handle<Object> error =
12644 isolate->factory()->NewTypeError("object_not_extensible",
12645 HandleVector(args, 1));
12646 return isolate->Throw<Object>(error);
// Insert a fresh entry; AddNumberEntry may reallocate the dictionary.
12650 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
12651 Handle<SeededNumberDictionary> new_dictionary =
12652 SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
12654 if (*dictionary != *new_dictionary) {
12655 if (is_arguments) {
12656 elements->set(1, *new_dictionary);
12658 object->set_elements(*new_dictionary);
12660 dictionary = new_dictionary;
12664 // Update the array length if this JSObject is an array.
12665 if (object->IsJSArray()) {
12666 JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
12670 // Attempt to put this object back in fast case.
12671 if (object->ShouldConvertToFastElements()) {
12672 uint32_t new_length = 0;
12673 if (object->IsJSArray()) {
12674 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
12676 new_length = dictionary->max_number_key() + 1;
12678 SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
12679 ? kAllowSmiElements
12680 : kDontAllowSmiElements;
12681 bool has_smi_only_elements = false;
12682 bool should_convert_to_fast_double_elements =
12683 object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
12684 if (has_smi_only_elements) {
12685 smi_mode = kForceSmiElements;
12688 if (should_convert_to_fast_double_elements) {
12689 SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
12691 SetFastElementsCapacityAndLength(object, new_length, new_length,
12694 JSObject::ValidateElements(object);
12696 if (FLAG_trace_normalization) {
12697 PrintF("Object elements are fast case again:\n");
// Stores |value| at |index| on an object with fast double (unboxed float64)
// elements. Non-number values force a transition to generic fast elements;
// out-of-range or hole-creating stores may transition to holey kinds, grow
// the store, or fall back to dictionary elements.
12705 MaybeHandle<Object> JSObject::SetFastDoubleElement(
12706 Handle<JSObject> object,
12708 Handle<Object> value,
12709 StrictMode strict_mode,
12710 bool check_prototype) {
12711 ASSERT(object->HasFastDoubleElements());
12713 Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
12714 uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
12716 // If storing to an element that isn't in the array, pass the store request
12717 // up the prototype chain before storing in the receiver's elements.
12718 if (check_prototype &&
12719 (index >= elms_length ||
12720 Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
12722 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12723 object, index, value, &found, strict_mode);
12724 if (found) return result;
12727 // If the value object is not a heap number, switch to fast elements and try
12729 bool value_is_smi = value->IsSmi();
12730 bool introduces_holes = true;
12731 uint32_t length = elms_length;
12732 if (object->IsJSArray()) {
// Writing exactly at the current length appends without creating a hole.
12733 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
12734 introduces_holes = index > length;
12736 introduces_holes = index >= elms_length;
// Non-numeric value cannot live in a double array: transition to generic
// fast elements and retry via SetFastElement.
12739 if (!value->IsNumber()) {
12740 SetFastElementsCapacityAndLength(object, elms_length, length,
12741 kDontAllowSmiElements);
12742 Handle<Object> result;
12743 ASSIGN_RETURN_ON_EXCEPTION(
12744 object->GetIsolate(), result,
12745 SetFastElement(object, index, value, strict_mode, check_prototype),
12747 JSObject::ValidateElements(object);
// Unbox the number: Smis hold their value directly, HeapNumbers in a field.
12751 double double_value = value_is_smi
12752 ? static_cast<double>(Handle<Smi>::cast(value)->value())
12753 : Handle<HeapNumber>::cast(value)->value();
12755 // If the array is growing, and it's not growth by a single element at the
12756 // end, make sure that the ElementsKind is HOLEY.
12757 ElementsKind elements_kind = object->GetElementsKind();
12758 if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
12759 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12760 TransitionElementsKind(object, transitioned_kind);
12763 // Check whether there is extra space in the fixed array.
12764 if (index < elms_length) {
// In-bounds store: write directly and bump the JSArray length if needed.
12765 Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
12766 elms->set(index, double_value);
12767 if (object->IsJSArray()) {
12768 // Update the length of the array if needed.
12769 uint32_t array_length = 0;
12771 Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12772 if (index >= array_length) {
12773 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
12779 // Allow gap in fast case.
12780 if ((index - elms_length) < kMaxGap) {
12781 // Try allocating extra space.
12782 int new_capacity = NewElementsCapacity(index+1);
12783 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12784 ASSERT(static_cast<uint32_t>(new_capacity) > index);
12785 SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
12786 FixedDoubleArray::cast(object->elements())->set(index, double_value);
12787 JSObject::ValidateElements(object);
12792 // Otherwise default to slow case.
12793 ASSERT(object->HasFastDoubleElements());
12794 ASSERT(object->map()->has_fast_double_elements());
12795 ASSERT(object->elements()->IsFixedDoubleArray() ||
12796 object->elements()->length() == 0);
// Too sparse to stay fast: normalize to dictionary elements and retry.
12798 NormalizeElements(object);
12799 ASSERT(object->HasDictionaryElements());
12800 return SetElement(object, index, value, NONE, strict_mode, check_prototype);
// Generic receiver-level element store: JSProxy receivers are routed through
// their handler trap, everything else through JSObject::SetElement.
12804 MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
12806 Handle<Object> value,
12807 PropertyAttributes attributes,
12808 StrictMode strict_mode) {
12809 if (object->IsJSProxy()) {
12810 return JSProxy::SetElementWithHandler(
12811 Handle<JSProxy>::cast(object), object, index, value, strict_mode);
12813 return JSObject::SetElement(
12814 Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
// Stores an element directly on |object| without consulting the prototype
// chain (check_prototype == false) and with no attributes (NONE).
// Not valid for external-array-backed objects (see ASSERT).
12818 MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
12820 Handle<Object> value,
12821 StrictMode strict_mode) {
12822 ASSERT(!object->HasExternalArrayElements());
12823 return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
// Top-level element store on a JSObject. Performs ToNumber coercion for typed
// arrays, access checks, global-proxy forwarding, normalization when
// attributes are requested, and — for observed objects — captures old state
// and enqueues Object.observe change/splice records around the actual store.
12827 MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
12829 Handle<Object> value,
12830 PropertyAttributes attributes,
12831 StrictMode strict_mode,
12832 bool check_prototype,
12833 SetPropertyMode set_mode) {
12834 Isolate* isolate = object->GetIsolate();
// Typed-array stores coerce non-number, non-undefined values to number first.
12836 if (object->HasExternalArrayElements() ||
12837 object->HasFixedTypedArrayElements()) {
12838 if (!value->IsNumber() && !value->IsUndefined()) {
12839 ASSIGN_RETURN_ON_EXCEPTION(
12841 Execution::ToNumber(isolate, value), Object);
12845 // Check access rights if needed.
12846 if (object->IsAccessCheckNeeded()) {
12847 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) {
12848 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
12849 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Global proxies delegate to the global object they proxy.
12854 if (object->IsJSGlobalProxy()) {
12855 Handle<Object> proto(object->GetPrototype(), isolate);
12856 if (proto->IsNull()) return value;
12857 ASSERT(proto->IsJSGlobalObject());
12858 return SetElement(Handle<JSObject>::cast(proto), index, value, attributes,
12864 // Don't allow element properties to be redefined for external arrays.
12865 if ((object->HasExternalArrayElements() ||
12866 object->HasFixedTypedArrayElements()) &&
12867 set_mode == DEFINE_PROPERTY) {
12868 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12869 Handle<Object> args[] = { object, number };
12870 Handle<Object> error = isolate->factory()->NewTypeError(
12871 "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
12872 return isolate->Throw<Object>(error);
12875 // Normalize the elements to enable attributes on the property.
12876 if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
12877 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
12878 // Make sure that we never go back to fast case.
12879 dictionary->set_requires_slow_elements();
// Fast path: unobserved objects skip all change-record bookkeeping.
12882 if (!object->map()->is_observed()) {
12883 return object->HasIndexedInterceptor()
12884 ? SetElementWithInterceptor(object, index, value, attributes,
12885 strict_mode, check_prototype, set_mode)
12886 : SetElementWithoutInterceptor(object, index, value, attributes,
12887 strict_mode, check_prototype, set_mode);
// Observed path: capture the pre-store state so records can describe the
// change afterwards.
12890 PropertyAttributes old_attributes =
12891 JSReceiver::GetOwnElementAttribute(object, index);
12892 Handle<Object> old_value = isolate->factory()->the_hole_value();
12893 Handle<Object> old_length_handle;
12894 Handle<Object> new_length_handle;
12896 if (old_attributes != ABSENT) {
// Only data properties have a readable old value; accessors are skipped.
12897 if (GetOwnElementAccessorPair(object, index).is_null()) {
12898 old_value = Object::GetElement(isolate, object, index).ToHandleChecked();
12900 } else if (object->IsJSArray()) {
12901 // Store old array length in case adding an element grows the array.
12902 old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12906 // Check for lookup interceptor
12907 Handle<Object> result;
12908 ASSIGN_RETURN_ON_EXCEPTION(
12910 object->HasIndexedInterceptor()
12911 ? SetElementWithInterceptor(
12912 object, index, value, attributes,
12913 strict_mode, check_prototype, set_mode)
12914 : SetElementWithoutInterceptor(
12915 object, index, value, attributes,
12916 strict_mode, check_prototype, set_mode),
// Compare post-store state with the captured pre-store state and enqueue
// the appropriate "add" / "update" / "reconfigure" / splice records.
12919 Handle<String> name = isolate->factory()->Uint32ToString(index);
12920 PropertyAttributes new_attributes = GetOwnElementAttribute(object, index);
12921 if (old_attributes == ABSENT) {
12922 if (object->IsJSArray() &&
12923 !old_length_handle->SameValue(
12924 Handle<JSArray>::cast(object)->length())) {
12925 new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12927 uint32_t old_length = 0;
12928 uint32_t new_length = 0;
12929 CHECK(old_length_handle->ToArrayIndex(&old_length));
12930 CHECK(new_length_handle->ToArrayIndex(&new_length));
// The add+length-update pair is reported as one splice operation.
12932 BeginPerformSplice(Handle<JSArray>::cast(object));
12933 EnqueueChangeRecord(object, "add", name, old_value);
12934 EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
12935 old_length_handle);
12936 EndPerformSplice(Handle<JSArray>::cast(object));
12937 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
12938 EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
12939 new_length - old_length);
12941 EnqueueChangeRecord(object, "add", name, old_value);
12943 } else if (old_value->IsTheHole()) {
12944 EnqueueChangeRecord(object, "reconfigure", name, old_value);
12946 Handle<Object> new_value =
12947 Object::GetElement(isolate, object, index).ToHandleChecked();
12948 bool value_changed = !old_value->SameValue(*new_value);
12949 if (old_attributes != new_attributes) {
12950 if (!value_changed) old_value = isolate->factory()->the_hole_value();
12951 EnqueueChangeRecord(object, "reconfigure", name, old_value);
12952 } else if (value_changed) {
12953 EnqueueChangeRecord(object, "update", name, old_value);
// Performs the actual element store after interceptors and observation have
// been handled, dispatching on the receiver's ElementsKind: fast Smi/object,
// fast double, external/fixed typed arrays, dictionary, or sloppy arguments.
12961 MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
12962 Handle<JSObject> object,
12964 Handle<Object> value,
12965 PropertyAttributes attributes,
12966 StrictMode strict_mode,
12967 bool check_prototype,
12968 SetPropertyMode set_mode) {
// Non-default attributes are only representable in dictionary elements.
12969 ASSERT(object->HasDictionaryElements() ||
12970 object->HasDictionaryArgumentsElements() ||
12971 (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
12972 Isolate* isolate = object->GetIsolate();
12973 if (FLAG_trace_external_array_abuse &&
12974 IsExternalArrayElementsKind(object->GetElementsKind())) {
12975 CheckArrayAbuse(object, "external elements write", index);
12977 if (FLAG_trace_js_array_abuse &&
12978 !IsExternalArrayElementsKind(object->GetElementsKind())) {
12979 if (object->IsJSArray()) {
12980 CheckArrayAbuse(object, "elements write", index, true);
// A store that would grow past a read-only JSArray length fails (silently in
// sloppy mode, with a TypeError in strict mode).
12983 if (object->IsJSArray() && JSArray::WouldChangeReadOnlyLength(
12984 Handle<JSArray>::cast(object), index)) {
12985 if (strict_mode == SLOPPY) {
12988 return JSArray::ReadOnlyLengthError(Handle<JSArray>::cast(object));
12991 switch (object->GetElementsKind()) {
12992 case FAST_SMI_ELEMENTS:
12993 case FAST_ELEMENTS:
12994 case FAST_HOLEY_SMI_ELEMENTS:
12995 case FAST_HOLEY_ELEMENTS:
12996 return SetFastElement(object, index, value, strict_mode, check_prototype);
12997 case FAST_DOUBLE_ELEMENTS:
12998 case FAST_HOLEY_DOUBLE_ELEMENTS:
12999 return SetFastDoubleElement(object, index, value, strict_mode,
// Each typed-array kind delegates to its own SetValue, which clamps/converts
// the numeric value per the element type.
13002 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13003 case EXTERNAL_##TYPE##_ELEMENTS: { \
13004 Handle<External##Type##Array> array( \
13005 External##Type##Array::cast(object->elements())); \
13006 return External##Type##Array::SetValue(array, index, value); \
13008 case TYPE##_ELEMENTS: { \
13009 Handle<Fixed##Type##Array> array( \
13010 Fixed##Type##Array::cast(object->elements())); \
13011 return Fixed##Type##Array::SetValue(array, index, value); \
13014 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13016 #undef TYPED_ARRAY_CASE
13018 case DICTIONARY_ELEMENTS:
13019 return SetDictionaryElement(object, index, value, attributes, strict_mode,
13022 case SLOPPY_ARGUMENTS_ELEMENTS: {
13023 Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
13024 uint32_t length = parameter_map->length();
// Slots [2, length) of the parameter map hold context-slot indices for
// arguments still aliased with the function's parameters.
13025 Handle<Object> probe = index < length - 2 ?
13026 Handle<Object>(parameter_map->get(index + 2), isolate) :
13028 if (!probe.is_null() && !probe->IsTheHole()) {
13029 Handle<Context> context(Context::cast(parameter_map->get(0)));
13030 int context_index = Handle<Smi>::cast(probe)->value();
13031 ASSERT(!context->get(context_index)->IsTheHole());
13032 context->set(context_index, *value);
13033 // Redefining attributes of an aliased element destroys fast aliasing.
13034 if (set_mode == SET_PROPERTY || attributes == NONE) return value;
13035 parameter_map->set_the_hole(index + 2);
13036 // For elements that are still writable we re-establish slow aliasing.
13037 if ((attributes & READ_ONLY) == 0) {
13038 value = Handle<Object>::cast(
13039 isolate->factory()->NewAliasedArgumentsEntry(context_index));
// Unaliased index: store into the backing arguments array (slot 1), which
// may itself be fast or dictionary-based.
13042 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
13043 if (arguments->IsDictionary()) {
13044 return SetDictionaryElement(object, index, value, attributes,
13049 return SetFastElement(object, index, value, strict_mode,
13054 // All possible cases have been handled above. Add a return to avoid the
13055 // complaints from the compiler.
13057 return isolate->factory()->null_value();
// Threshold ratio used by the allocation-site pretenuring heuristics —
// presumably the fraction of surviving mementos required before a site is
// tenured; confirm against the decision logic in heap code.
13061 const double AllocationSite::kPretenureRatio = 0.85;
// Resets this site's pretenuring state: decision back to kUndecided and both
// memento counters (found / created) back to zero.
13064 void AllocationSite::ResetPretenureDecision() {
13065 set_pretenure_decision(kUndecided);
13066 set_memento_found_count(0);
13067 set_memento_create_count(0);
// Maps this site's pretenure decision to an allocation flag: only an explicit
// kTenure decision yields TENURED; everything else (including zombie sites,
// per the comment below) allocates in new space.
13071 PretenureFlag AllocationSite::GetPretenureMode() {
13072 PretenureDecision mode = pretenure_decision();
13073 // Zombie objects "decide" to be untenured.
13074 return mode == kTenure ? TENURED : NOT_TENURED;
// Returns whether this site appears as the nested_site of any site on the
// heap's global allocation-sites list, i.e. it belongs to a nested literal.
// Only used for tracing (see the ASSERT on the flag).
13078 bool AllocationSite::IsNestedSite() {
13079 ASSERT(FLAG_trace_track_allocation_sites);
13080 Object* current = GetHeap()->allocation_sites_list();
// Walk the singly-linked list of all allocation sites via weak_next.
13081 while (current->IsAllocationSite()) {
13082 AllocationSite* current_site = AllocationSite::cast(current);
13083 if (current_site->nested_site() == this) {
13086 current = current_site->weak_next();
// Records elements-kind transition feedback for |site|. If the site points to
// a boilerplate JSArray literal, the boilerplate itself is transitioned (for
// small arrays only); otherwise just the site's recorded ElementsKind is
// generalized. Either way, dependent optimized code is deoptimized.
13092 void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
13093 ElementsKind to_kind) {
13094 Isolate* isolate = site->GetIsolate();
// Case 1: the site holds a JSArray boilerplate (array literal).
13096 if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
13097 Handle<JSArray> transition_info =
13098 handle(JSArray::cast(site->transition_info()));
13099 ElementsKind kind = transition_info->GetElementsKind();
13100 // if kind is holey ensure that to_kind is as well.
13101 if (IsHoleyElementsKind(kind)) {
13102 to_kind = GetHoleyElementsKind(to_kind);
13104 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
13105 // If the array is huge, it's not likely to be defined in a local
13106 // function, so we shouldn't make new instances of it very often.
13107 uint32_t length = 0;
13108 CHECK(transition_info->length()->ToArrayIndex(&length));
13109 if (length <= kMaximumArrayBytesToPretransition) {
13110 if (FLAG_trace_track_allocation_sites) {
13111 bool is_nested = site->IsNestedSite();
13113 "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
13114 reinterpret_cast<void*>(*site),
13115 is_nested ? "(nested)" : "",
13116 ElementsKindToString(kind),
13117 ElementsKindToString(to_kind));
13119 JSObject::TransitionElementsKind(transition_info, to_kind);
// Code compiled against the old boilerplate kind is now invalid.
13120 site->dependent_code()->DeoptimizeDependentCodeGroup(
13121 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Case 2: no boilerplate — generalize the site's recorded kind directly.
13125 ElementsKind kind = site->GetElementsKind();
13126 // if kind is holey ensure that to_kind is as well.
13127 if (IsHoleyElementsKind(kind)) {
13128 to_kind = GetHoleyElementsKind(to_kind);
13130 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
13131 if (FLAG_trace_track_allocation_sites) {
13132 PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
13133 reinterpret_cast<void*>(*site),
13134 ElementsKindToString(kind),
13135 ElementsKindToString(to_kind));
13137 site->SetElementsKind(to_kind);
13138 site->dependent_code()->DeoptimizeDependentCodeGroup(
13139 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Registers the compilation described by |info| as dependent on |site|, in the
// dependency group derived from the caller's reason.
// NOTE(review): the original line numbering jumps 13146 -> 13148, so the
// parameter that supplies |reason| (used below) is not visible in this
// listing.
13146 void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
13148 CompilationInfo* info) {
13149 DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
13150 Handle<DependentCode> dep(site->dependent_code());
13151 Handle<DependentCode> codes =
13152 DependentCode::Insert(dep, group, info->object_wrapper());
// Insert may return the same array when the entry already fits; only write
// back if a new backing store was produced.
13153 if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
// Also record the site on the CompilationInfo — presumably so the dependency
// can be finalized or dropped when compilation completes; confirm in
// CompilationInfo.
13154 info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
13158 const char* AllocationSite::PretenureDecisionName(PretenureDecision decision) {
13159 switch (decision) {
13160 case kUndecided: return "undecided";
13161 case kDontTenure: return "don't tenure";
13162 case kMaybeTenure: return "maybe tenure";
13163 case kTenure: return "tenure";
13164 case kZombie: return "zombie";
13165 default: UNREACHABLE();
13171 void JSObject::UpdateAllocationSite(Handle<JSObject> object,
13172 ElementsKind to_kind) {
13173 if (!object->IsJSArray()) return;
13175 Heap* heap = object->GetHeap();
13176 if (!heap->InNewSpace(*object)) return;
13178 Handle<AllocationSite> site;
13180 DisallowHeapAllocation no_allocation;
13182 AllocationMemento* memento = heap->FindAllocationMemento(*object);
13183 if (memento == NULL) return;
13185 // Walk through to the Allocation Site
13186 site = handle(memento->GetAllocationSite());
13188 AllocationSite::DigestTransitionFeedback(site, to_kind);
// Transitions |object|'s elements to |to_kind|, migrating the backing store
// when necessary (smi->double, double->object); pure map changes reuse the
// existing elements buffer.
// NOTE(review): the baked-in line numbers jump in several places below, so
// some early returns / closing braces of the original are elided here.
13192 void JSObject::TransitionElementsKind(Handle<JSObject> object,
13193 ElementsKind to_kind) {
13194 ElementsKind from_kind = object->map()->elements_kind();
// A holey source kind forces the target kind to be holey too (HOLEY->PACKED
// is never allowed).
13196 if (IsFastHoleyElementsKind(from_kind)) {
13197 to_kind = GetHoleyElementsKind(to_kind);
13200 if (from_kind == to_kind) return;
13201 // Don't update the site if to_kind isn't fast
13202 if (IsFastElementsKind(to_kind)) {
13203 UpdateAllocationSite(object, to_kind);
13206 Isolate* isolate = object->GetIsolate();
// These three cases need no new backing store: empty elements, smi/object
// kinds (same representation), or double -> holey double.
13207 if (object->elements() == isolate->heap()->empty_fixed_array() ||
13208 (IsFastSmiOrObjectElementsKind(from_kind) &&
13209 IsFastSmiOrObjectElementsKind(to_kind)) ||
13210 (from_kind == FAST_DOUBLE_ELEMENTS &&
13211 to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
13212 ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
13213 // No change is needed to the elements() buffer, the transition
13214 // only requires a map change.
13215 Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
13216 MigrateToMap(object, new_map);
13217 if (FLAG_trace_elements_transitions) {
13218 Handle<FixedArrayBase> elms(object->elements());
13219 PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
// Remaining cases require allocating a new backing store of the same
// capacity and copying the elements over.
13224 Handle<FixedArrayBase> elms(object->elements());
13225 uint32_t capacity = static_cast<uint32_t>(elms->length());
13226 uint32_t length = capacity;
13228 if (object->IsJSArray()) {
13229 Object* raw_length = Handle<JSArray>::cast(object)->length();
13230 if (raw_length->IsUndefined()) {
13231 // If length is undefined, then JSArray is being initialized and has no
13232 // elements, assume a length of zero.
13235 CHECK(raw_length->ToArrayIndex(&length));
13239 if (IsFastSmiElementsKind(from_kind) &&
13240 IsFastDoubleElementsKind(to_kind)) {
13241 SetFastDoubleElementsCapacityAndLength(object, capacity, length);
13242 JSObject::ValidateElements(object);
13246 if (IsFastDoubleElementsKind(from_kind) &&
13247 IsFastObjectElementsKind(to_kind)) {
13248 SetFastElementsCapacityAndLength(object, capacity, length,
13249 kDontAllowSmiElements);
13250 JSObject::ValidateElements(object);
13254 // This method should never be called for any other case than the ones
13261 bool Map::IsValidElementsTransition(ElementsKind from_kind,
13262 ElementsKind to_kind) {
13263 // Transitions can't go backwards.
13264 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
13268 // Transitions from HOLEY -> PACKED are not allowed.
13269 return !IsFastHoleyElementsKind(from_kind) ||
13270 IsFastHoleyElementsKind(to_kind);
// Grows the array's length to index + 1 when a store at |index| goes past the
// current length.
// NOTE(review): the original line numbering jumps 13274 -> 13276, so the
// parameter declaring |index| (used below) is not visible in this listing.
13274 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
13276 Handle<Object> value) {
13277 uint32_t old_len = 0;
13278 CHECK(array->length()->ToArrayIndex(&old_len));
13279 // Check to see if we need to update the length. For now, we make
13280 // sure that the length stays within 32-bits (unsigned).
// index == 0xffffffff is excluded: that value is not a valid array index, so
// length is never bumped past 2^32 - 1.
13281 if (index >= old_len && index != 0xffffffff) {
13282 Handle<Object> len = array->GetIsolate()->factory()->NewNumber(
13283 static_cast<double>(index) + 1);
13284 array->set_length(*len);
13289 bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) {
13290 Isolate* isolate = jsarray_map->GetIsolate();
13291 ASSERT(!jsarray_map->is_dictionary_map());
13292 LookupResult lookup(isolate);
13293 Handle<Name> length_string = isolate->factory()->length_string();
13294 jsarray_map->LookupDescriptor(NULL, *length_string, &lookup);
13295 return lookup.IsReadOnly();
13299 bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array,
13301 uint32_t length = 0;
13302 CHECK(array->length()->ToArrayIndex(&length));
13303 if (length <= index) {
13304 Isolate* isolate = array->GetIsolate();
13305 LookupResult lookup(isolate);
13306 Handle<Name> length_string = isolate->factory()->length_string();
13307 array->LookupOwnRealNamedProperty(length_string, &lookup);
13308 return lookup.IsReadOnly();
13314 MaybeHandle<Object> JSArray::ReadOnlyLengthError(Handle<JSArray> array) {
13315 Isolate* isolate = array->GetIsolate();
13316 Handle<Name> length = isolate->factory()->length_string();
13317 Handle<Object> args[2] = { length, array };
13318 Handle<Object> error = isolate->factory()->NewTypeError(
13319 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
13320 return isolate->Throw<Object>(error);
// Loads element |index| through the object's indexed interceptor; if the
// interceptor does not produce a value, falls back to the object's own
// elements and then to the prototype chain.
// NOTE(review): the line numbering jumps below (e.g. 13326 -> 13328), so the
// |index| parameter and some closing braces are not visible in this listing.
13324 MaybeHandle<Object> JSObject::GetElementWithInterceptor(
13325 Handle<JSObject> object,
13326 Handle<Object> receiver,
13328 Isolate* isolate = object->GetIsolate();
13330 // Make sure that the top context does not change when doing
13331 // callbacks or interceptor calls.
13332 AssertNoContextChange ncc(isolate);
13334 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
13335 if (!interceptor->getter()->IsUndefined()) {
13336 v8::IndexedPropertyGetterCallback getter =
13337 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
// NOTE(review): the LOG(isolate, ...) line that wraps this call is elided.
13339 ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
13340 PropertyCallbackArguments
13341 args(isolate, interceptor->data(), *receiver, *object);
13342 v8::Handle<v8::Value> result = args.Call(getter, index);
// The embedder callback may have scheduled an exception; propagate it.
13343 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
13344 if (!result.IsEmpty()) {
13345 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13346 result_internal->VerifyApiCallResultType();
13347 // Rebox handle before return.
13348 return handle(*result_internal, isolate);
// Interceptor declined: consult the object's own elements.
13352 ElementsAccessor* handler = object->GetElementsAccessor();
13353 Handle<Object> result;
13354 ASSIGN_RETURN_ON_EXCEPTION(
13355 isolate, result, handler->Get(receiver, object, index),
13357 if (!result->IsTheHole()) return result;
// Not present locally: continue the lookup on the prototype chain.
13359 Handle<Object> proto(object->GetPrototype(), isolate);
13360 if (proto->IsNull()) return isolate->factory()->undefined_value();
13361 return Object::GetElementWithReceiver(isolate, proto, receiver, index);
13365 bool JSObject::HasDenseElements() {
13368 GetElementsCapacityAndUsage(&capacity, &used);
13369 return (capacity == 0) || (used > (capacity / 2));
// Reports the capacity of the element backing store and how many slots are
// actually occupied, per elements kind.  Used by the fast/slow heuristics
// below.
// NOTE(review): baked-in line numbers jump in several places, so some break
// statements / closing braces of the original switch are elided here.
13373 void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
13377 FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
13378 FixedArray* backing_store = NULL;
13379 switch (GetElementsKind()) {
13380 case SLOPPY_ARGUMENTS_ELEMENTS:
// Sloppy-arguments objects store the real backing store at slot 1 of the
// parameter map; it may itself be a dictionary.
13381 backing_store_base =
13382 FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
13383 backing_store = FixedArray::cast(backing_store_base);
13384 if (backing_store->IsDictionary()) {
13385 SeededNumberDictionary* dictionary =
13386 SeededNumberDictionary::cast(backing_store);
13387 *capacity = dictionary->Capacity();
13388 *used = dictionary->NumberOfElements();
13392 case FAST_SMI_ELEMENTS:
13393 case FAST_ELEMENTS:
// Packed kinds on a JSArray: length() is an exact usage count.
13395 *capacity = backing_store_base->length();
13396 *used = Smi::cast(JSArray::cast(this)->length())->value();
13399 // Fall through if packing is not guaranteed.
13400 case FAST_HOLEY_SMI_ELEMENTS:
13401 case FAST_HOLEY_ELEMENTS:
// Holey kinds: count non-hole slots explicitly.
13402 backing_store = FixedArray::cast(backing_store_base);
13403 *capacity = backing_store->length();
13404 for (int i = 0; i < *capacity; ++i) {
13405 if (!backing_store->get(i)->IsTheHole()) ++(*used);
13408 case DICTIONARY_ELEMENTS: {
13409 SeededNumberDictionary* dictionary = element_dictionary();
13410 *capacity = dictionary->Capacity();
13411 *used = dictionary->NumberOfElements();
13414 case FAST_DOUBLE_ELEMENTS:
13416 *capacity = backing_store_base->length();
13417 *used = Smi::cast(JSArray::cast(this)->length())->value();
13420 // Fall through if packing is not guaranteed.
13421 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13422 *capacity = elements()->length();
13423 if (*capacity == 0) break;
13424 FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
13425 for (int i = 0; i < *capacity; i++) {
13426 if (!elms->is_the_hole(i)) ++(*used);
13431 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13432 case EXTERNAL_##TYPE##_ELEMENTS: \
13433 case TYPE##_ELEMENTS: \
13435 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13436 #undef TYPED_ARRAY_CASE
13438 // External arrays are considered 100% used.
13439 FixedArrayBase* external_array = FixedArrayBase::cast(elements());
13440 *capacity = external_array->length();
13441 *used = external_array->length();
// Predicts whether storing at |key| would force this object's fast elements
// into dictionary (slow) mode: a jump past kMaxGap always does, and smaller
// growth is deferred to ShouldConvertToSlowElements.
// NOTE(review): the line numbering jumps 13448 -> 13450, so the declaration
// of |index| (used below) is not visible, and the trailing return for the
// non-fast/non-index case is also elided.
13448 bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
13450 if (HasFastElements() && key->ToArrayIndex(&index)) {
13451 Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
13452 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
13453 if (index >= capacity) {
// A write more than kMaxGap beyond the current capacity always goes slow.
13454 if ((index - capacity) >= kMaxGap) return true;
13455 uint32_t new_capacity = NewElementsCapacity(index + 1);
13456 return ShouldConvertToSlowElements(new_capacity);
13463 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
13464 STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
13465 kMaxUncheckedFastElementsLength);
13466 if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
13467 (new_capacity <= kMaxUncheckedFastElementsLength &&
13468 GetHeap()->InNewSpace(this))) {
13471 // If the fast-case backing storage takes up roughly three times as
13472 // much space (in machine words) as a dictionary backing storage
13473 // would, the object should have slow elements.
13474 int old_capacity = 0;
13475 int used_elements = 0;
13476 GetElementsCapacityAndUsage(&old_capacity, &used_elements);
13477 int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
13478 SeededNumberDictionary::kEntrySize;
13479 return 3 * dictionary_size <= new_capacity;
// Decides whether a dictionary-mode (or dictionary-arguments) object should
// convert back to fast elements, based on density and relative storage cost.
// NOTE(review): line numbers jump near 13509/13511, so the if/else that
// chooses between the JSArray length and max_number_key is partially elided.
13483 bool JSObject::ShouldConvertToFastElements() {
13484 ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
13485 // If the elements are sparse, we should not go back to fast case.
13486 if (!HasDenseElements()) return false;
13487 // An object requiring access checks is never allowed to have fast
13488 // elements. If it had fast elements we would skip security checks.
13489 if (IsAccessCheckNeeded()) return false;
13490 // Observed objects may not go to fast mode because they rely on map checks,
13491 // and for fast element accesses we sometimes check element kinds only.
13492 if (map()->is_observed()) return false;
// Locate the dictionary: for sloppy-arguments objects it lives at slot 1 of
// the parameter map, otherwise elements() is the dictionary itself.
13494 FixedArray* elements = FixedArray::cast(this->elements());
13495 SeededNumberDictionary* dictionary = NULL;
13496 if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
13497 dictionary = SeededNumberDictionary::cast(elements->get(1));
13499 dictionary = SeededNumberDictionary::cast(elements);
13501 // If an element has been added at a very high index in the elements
13502 // dictionary, we cannot go back to fast case.
13503 if (dictionary->requires_slow_elements()) return false;
13504 // If the dictionary backing storage takes up roughly half as much
13505 // space (in machine words) as a fast-case backing storage would,
13506 // the object should have fast elements.
13507 uint32_t array_size = 0;
13509 CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
13511 array_size = dictionary->max_number_key();
13513 uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
13514 SeededNumberDictionary::kEntrySize;
13515 return 2 * dictionary_size >= array_size;
// Scans a dictionary-mode object's values to decide whether it should convert
// to FAST_DOUBLE elements (all values numbers, at least one non-Smi).  Also
// reports via |has_smi_only_elements| whether every numeric value was a Smi.
// NOTE(review): line numbers jump after 13533 and 13538, so loop-closing
// braces and the final return of the original are elided in this listing.
13519 bool JSObject::ShouldConvertToFastDoubleElements(
13520 bool* has_smi_only_elements) {
13521 *has_smi_only_elements = false;
13522 if (HasSloppyArgumentsElements()) return false;
13523 if (FLAG_unbox_double_arrays) {
13524 ASSERT(HasDictionaryElements());
13525 SeededNumberDictionary* dictionary = element_dictionary();
13526 bool found_double = false;
13527 for (int i = 0; i < dictionary->Capacity(); i++) {
13528 Object* key = dictionary->KeyAt(i);
13529 if (key->IsNumber()) {
13530 Object* value = dictionary->ValueAt(i);
// Any non-number value rules out a double-array representation immediately.
13531 if (!value->IsNumber()) return false;
13532 if (!value->IsSmi()) {
13533 found_double = true;
13537 *has_smi_only_elements = !found_double;
13538 return found_double;
13545 // Certain compilers request function template instantiation when they
13546 // see the definition of the other template functions in the
13547 // class. This requires us to have the template functions put
13548 // together, so even though this function belongs in objects-debug.cc,
13549 // we keep it here instead to satisfy certain compilers.
13550 #ifdef OBJECT_PRINT
// Debug helper: dumps every key/value pair of the dictionary to |out|.
// NOTE(review): line numbers jump after 13556/13559/13561, so the PrintF
// separators, the else branch, and closing braces are elided in this listing.
13551 template<typename Derived, typename Shape, typename Key>
13552 void Dictionary<Derived, Shape, Key>::Print(FILE* out) {
13553 int capacity = DerivedHashTable::Capacity();
13554 for (int i = 0; i < capacity; i++) {
13555 Object* k = DerivedHashTable::KeyAt(i);
// Skip empty/deleted slots; only real keys are printed.
13556 if (DerivedHashTable::IsKey(k)) {
13558 if (k->IsString()) {
13559 String::cast(k)->StringPrint(out);
13561 k->ShortPrint(out);
13564 ValueAt(i)->ShortPrint(out);
// Copies every value stored under a real key into |elements|, packed from
// index 0; asserts the destination is filled exactly.
// NOTE(review): the line numbering jumps 13573 -> 13575, so the declaration
// of |pos| (used below) is not visible in this listing.
13572 template<typename Derived, typename Shape, typename Key>
13573 void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
13575 int capacity = DerivedHashTable::Capacity();
// The write-barrier mode is captured once while allocation is disallowed.
13576 DisallowHeapAllocation no_gc;
13577 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13578 for (int i = 0; i < capacity; i++) {
13579 Object* k = Dictionary::KeyAt(i);
13580 if (Dictionary::IsKey(k)) {
13581 elements->set(pos++, ValueAt(i), mode);
13584 ASSERT(pos == elements->length());
13588 InterceptorInfo* JSObject::GetNamedInterceptor() {
13589 ASSERT(map()->has_named_interceptor());
13590 JSFunction* constructor = JSFunction::cast(map()->constructor());
13591 ASSERT(constructor->shared()->IsApiFunction());
13593 constructor->shared()->get_api_func_data()->named_property_handler();
13594 return InterceptorInfo::cast(result);
13598 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13599 ASSERT(map()->has_indexed_interceptor());
13600 JSFunction* constructor = JSFunction::cast(map()->constructor());
13601 ASSERT(constructor->shared()->IsApiFunction());
13603 constructor->shared()->get_api_func_data()->indexed_property_handler();
13604 return InterceptorInfo::cast(result);
// Loads property |name| through |holder|'s named interceptor.  Returns an
// empty MaybeHandle when the interceptor declines (no getter, or empty
// result), letting the caller continue the regular lookup.
13608 MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
13609 Handle<JSObject> holder,
13610 Handle<Object> receiver,
13611 Handle<Name> name) {
13612 Isolate* isolate = holder->GetIsolate();
13614 // TODO(rossberg): Support symbols in the API.
13615 if (name->IsSymbol()) return isolate->factory()->undefined_value();
13617 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor(), isolate);
13618 Handle<String> name_string = Handle<String>::cast(name);
13620 if (interceptor->getter()->IsUndefined()) return MaybeHandle<Object>();
13622 v8::NamedPropertyGetterCallback getter =
13623 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
// NOTE(review): the LOG(isolate, ...) line wrapping this call is elided in
// this listing.
13625 ApiNamedPropertyAccess("interceptor-named-get", *holder, *name));
13626 PropertyCallbackArguments
13627 args(isolate, interceptor->data(), *receiver, *holder);
13628 v8::Handle<v8::Value> result =
13629 args.Call(getter, v8::Utils::ToLocal(name_string));
// The embedder callback may have scheduled an exception; propagate it.
13630 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
13631 if (result.IsEmpty()) return MaybeHandle<Object>();
13633 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13634 result_internal->VerifyApiCallResultType();
13635 // Rebox handle before return
13636 return handle(*result_internal, isolate);
13640 // Compute the property keys from the interceptor.
13641 // TODO(rossberg): support symbols in API, and filter here if needed.
// Invokes the named interceptor's enumerator callback and reboxes its result
// (a JS array of names) in the current isolate.  Returns an empty
// MaybeHandle when the embedder produced no result.
// NOTE(review): line numbers jump after 13654 and 13659, so the closing
// brace of the if and the #endif for ENABLE_EXTRA_CHECKS are elided here.
13642 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor(
13643 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13644 Isolate* isolate = receiver->GetIsolate();
13645 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
13646 PropertyCallbackArguments
13647 args(isolate, interceptor->data(), *receiver, *object);
13648 v8::Handle<v8::Object> result;
13649 if (!interceptor->enumerator()->IsUndefined()) {
13650 v8::NamedPropertyEnumeratorCallback enum_fun =
13651 v8::ToCData<v8::NamedPropertyEnumeratorCallback>(
13652 interceptor->enumerator());
13653 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
13654 result = args.Call(enum_fun);
13656 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13657 #if ENABLE_EXTRA_CHECKS
13658 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13659 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13661 // Rebox before returning.
13662 return handle(*v8::Utils::OpenHandle(*result), isolate);
13666 // Compute the element keys from the interceptor.
// Indexed-interceptor twin of GetKeysForNamedInterceptor: calls the
// enumerator callback and reboxes its array of indices.
// NOTE(review): line numbers jump after 13679 and 13684, so the closing
// brace of the if and the #endif for ENABLE_EXTRA_CHECKS are elided here.
13667 MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor(
13668 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13669 Isolate* isolate = receiver->GetIsolate();
13670 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
13671 PropertyCallbackArguments
13672 args(isolate, interceptor->data(), *receiver, *object);
13673 v8::Handle<v8::Object> result;
13674 if (!interceptor->enumerator()->IsUndefined()) {
13675 v8::IndexedPropertyEnumeratorCallback enum_fun =
13676 v8::ToCData<v8::IndexedPropertyEnumeratorCallback>(
13677 interceptor->enumerator());
13678 LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
13679 result = args.Call(enum_fun);
13681 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13682 #if ENABLE_EXTRA_CHECKS
13683 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13684 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13686 // Rebox before returning.
13687 return handle(*v8::Utils::OpenHandle(*result), isolate);
// Returns whether |object| itself has a real (non-interceptor) property named
// |key|.  Failed access checks report and (per the elided lines) bail out.
// NOTE(review): line numbers jump after 13699, so the early return and the
// closing braces of the access-check branch are elided in this listing.
13691 bool JSObject::HasRealNamedProperty(Handle<JSObject> object,
13692 Handle<Name> key) {
13693 Isolate* isolate = object->GetIsolate();
// SealHandleScope: this query must not allocate any new handles.
13694 SealHandleScope shs(isolate);
13695 // Check access rights if needed.
13696 if (object->IsAccessCheckNeeded()) {
13697 if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) {
13698 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
13699 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
13704 LookupResult result(isolate);
13705 object->LookupOwnRealNamedProperty(key, &result);
13706 return result.IsFound() && !result.IsInterceptor();
// Returns whether |object| itself has a real element at |index|, following a
// JSGlobalProxy to its global object first.
// NOTE(review): line numbers jump after 13717, so the early return and the
// closing braces of the access-check branch are elided in this listing.
13710 bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) {
13711 Isolate* isolate = object->GetIsolate();
13712 HandleScope scope(isolate);
13713 // Check access rights if needed.
13714 if (object->IsAccessCheckNeeded()) {
13715 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
13716 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
13717 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// A global proxy delegates the query to the global object it wraps.
13722 if (object->IsJSGlobalProxy()) {
13723 HandleScope scope(isolate);
13724 Handle<Object> proto(object->GetPrototype(), isolate);
13725 if (proto->IsNull()) return false;
13726 ASSERT(proto->IsJSGlobalObject());
13727 return HasRealElementProperty(Handle<JSObject>::cast(proto), index);
13730 return GetElementAttributeWithoutInterceptor(
13731 object, object, index, false) != ABSENT;
// Returns whether |object| itself has property |key| implemented via
// callbacks (accessors), ignoring interceptors.
// NOTE(review): line numbers jump after 13743, so the early return and the
// closing braces of the access-check branch are elided in this listing.
13735 bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
13736 Handle<Name> key) {
13737 Isolate* isolate = object->GetIsolate();
// SealHandleScope: this query must not allocate any new handles.
13738 SealHandleScope shs(isolate);
13739 // Check access rights if needed.
13740 if (object->IsAccessCheckNeeded()) {
13741 if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) {
13742 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
13743 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
13748 LookupResult result(isolate);
13749 object->LookupOwnRealNamedProperty(key, &result);
13750 return result.IsPropertyCallbacks();
13754 int JSObject::NumberOfOwnProperties(PropertyAttributes filter) {
13755 if (HasFastProperties()) {
13756 Map* map = this->map();
13757 if (filter == NONE) return map->NumberOfOwnDescriptors();
13758 if (filter & DONT_ENUM) {
13759 int result = map->EnumLength();
13760 if (result != kInvalidEnumCacheSentinel) return result;
13762 return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
13764 return property_dictionary()->NumberOfElementsFilterAttributes(filter);
// Swaps entries i and j in this array and, when |numbers| is a distinct
// array, the corresponding Smi entries in |numbers| as well (used by the
// pair-sorting helpers below).
// NOTE(review): the line numbering jumps 13769 -> 13772, so the two set()
// calls that swap this array's own entries are not visible in this listing.
13768 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
13769 Object* temp = get(i);
13772 if (this != numbers) {
13773 temp = numbers->get(i);
13774 numbers->set(i, Smi::cast(numbers->get(j)));
13775 numbers->set(j, Smi::cast(temp));
// Insertion sort of |content| keyed by the numeric values in |numbers|,
// swapping the two arrays in lock-step via SwapPairs.
// NOTE(review): line numbers jump 13781 -> 13783 and 13783 -> 13786, so the
// |len| parameter and the inner while-loop header/decrement are elided here.
13780 static void InsertionSortPairs(FixedArray* content,
13781 FixedArray* numbers,
13783 for (int i = 1; i < len; i++) {
// Bubble the element at j left while its key is smaller than its neighbor's.
13786 (NumberToUint32(numbers->get(j - 1)) >
13787 NumberToUint32(numbers->get(j)))) {
13788 content->SwapPairs(numbers, j - 1, j);
// In-place heap sort of |content| keyed by |numbers| (parallel arrays),
// using SwapPairs to keep them synchronized.
// NOTE(review): line numbers jump in several places (e.g. around 13807-13811
// and after 13830), so some break statements, else branches and closing
// braces of the original are elided in this listing.
13795 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
13796 // In-place heap sort.
13797 ASSERT(content->length() == numbers->length());
13799 // Bottom-up max-heap construction.
13800 for (int i = 1; i < len; ++i) {
13801 int child_index = i;
// Sift the newly added element up until the max-heap property holds.
13802 while (child_index > 0) {
13803 int parent_index = ((child_index + 1) >> 1) - 1;
13804 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13805 uint32_t child_value = NumberToUint32(numbers->get(child_index));
13806 if (parent_value < child_value) {
13807 content->SwapPairs(numbers, parent_index, child_index);
13811 child_index = parent_index;
13815 // Extract elements and create sorted array.
13816 for (int i = len - 1; i > 0; --i) {
13817 // Put max element at the back of the array.
13818 content->SwapPairs(numbers, 0, i);
13819 // Sift down the new top element.
13820 int parent_index = 0;
13822 int child_index = ((parent_index + 1) << 1) - 1;
13823 if (child_index >= i) break;
13824 uint32_t child1_value = NumberToUint32(numbers->get(child_index));
13825 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
13826 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
// Choose the larger child (or the only child) to compare against the parent.
13827 if (child_index + 1 >= i || child1_value > child2_value) {
13828 if (parent_value > child1_value) break;
13829 content->SwapPairs(numbers, parent_index, child_index);
13830 parent_index = child_index;
13832 if (parent_value > child2_value) break;
13833 content->SwapPairs(numbers, parent_index, child_index + 1);
13834 parent_index = child_index + 1;
13841 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
// Strategy: insertion sort for small arrays; a linear-time in-place cycle
// sort when the keys form a contiguous index range; heap sort otherwise.
// NOTE(review): line numbers jump in several places (after 13846, 13851,
// 13864, 13871), so the small-length threshold check, loop braces and the
// else branch selecting HeapSortPairs are elided in this listing.
13842 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13843 ASSERT(this->length() == numbers->length());
13844 // For small arrays, simply use insertion sort.
13846 InsertionSortPairs(this, numbers, len);
13849 // Check the range of indices.
13850 uint32_t min_index = NumberToUint32(numbers->get(0));
13851 uint32_t max_index = min_index;
13853 for (i = 1; i < len; i++) {
13854 if (NumberToUint32(numbers->get(i)) < min_index) {
13855 min_index = NumberToUint32(numbers->get(i));
13856 } else if (NumberToUint32(numbers->get(i)) > max_index) {
13857 max_index = NumberToUint32(numbers->get(i));
13860 if (max_index - min_index + 1 == len) {
13861 // Indices form a contiguous range, unless there are duplicates.
13862 // Do an in-place linear time sort assuming distinct numbers, but
13863 // avoid hanging in case they are not.
13864 for (i = 0; i < len; i++) {
13867 // While the current element at i is not at its correct position p,
13868 // swap the elements at these two positions.
13869 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13871 SwapPairs(numbers, i, p);
13875 HeapSortPairs(this, numbers, len);
13881 // Fill in the names of own properties into the supplied storage. The main
13882 // purpose of this function is to provide reflection information for the object
// Fast-mode objects copy keys out of the map's descriptor array, applying
// |filter| and FilterKey; slow-mode objects delegate to the dictionary.
// NOTE(review): line numbers jump after 13893 and 13897, so closing braces,
// the else branch, and some CopyKeysTo arguments are elided in this listing.
13884 void JSObject::GetOwnPropertyNames(
13885 FixedArray* storage, int index, PropertyAttributes filter) {
13886 ASSERT(storage->length() >= (NumberOfOwnProperties(filter) - index));
13887 if (HasFastProperties()) {
13888 int real_size = map()->NumberOfOwnDescriptors();
13889 DescriptorArray* descs = map()->instance_descriptors();
13890 for (int i = 0; i < real_size; i++) {
13891 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
13892 !FilterKey(descs->GetKey(i), filter)) {
13893 storage->set(index++, descs->GetKey(i));
13897 property_dictionary()->CopyKeysTo(storage,
13900 NameDictionary::UNSORTED);
// Counts own elements matching |filter| by running the key-collection pass
// with a NULL storage array (count-only mode).
13905 int JSObject::NumberOfOwnElements(PropertyAttributes filter) {
13906 return GetOwnElementKeys(NULL, filter);
// Counts own enumerable elements; short-circuits for plain fast-object
// arrays whose JSArray length (or backing-store length) is zero.
// NOTE(review): line numbers jump after 13917, so closing braces of the
// fast-case branch are elided in this listing.
13910 int JSObject::NumberOfEnumElements() {
13911 // Fast case for objects with no elements.
// JSValue is excluded because a wrapped string contributes its characters
// as elements (see GetOwnElementKeys).
13912 if (!IsJSValue() && HasFastObjectElements()) {
13913 uint32_t length = IsJSArray() ?
13914 static_cast<uint32_t>(
13915 Smi::cast(JSArray::cast(this)->length())->value()) :
13916 static_cast<uint32_t>(FixedArray::cast(elements())->length());
13917 if (length == 0) return 0;
13919 // Compute the number of enumerable elements.
13920 return NumberOfOwnElements(static_cast<PropertyAttributes>(DONT_ENUM));
// Collects this object's own element indices into |storage| (as Smis) and
// returns how many were found.  With storage == NULL it only counts.  Handles
// every elements kind, plus the string characters of a wrapped JSValue.
// NOTE(review): baked-in line numbers jump in many places below, so counter
// increments, break statements and closing braces of the original switch are
// elided in this listing.
13924 int JSObject::GetOwnElementKeys(FixedArray* storage,
13925 PropertyAttributes filter) {
13927 switch (GetElementsKind()) {
13928 case FAST_SMI_ELEMENTS:
13929 case FAST_ELEMENTS:
13930 case FAST_HOLEY_SMI_ELEMENTS:
13931 case FAST_HOLEY_ELEMENTS: {
13932 int length = IsJSArray() ?
13933 Smi::cast(JSArray::cast(this)->length())->value() :
13934 FixedArray::cast(elements())->length();
13935 for (int i = 0; i < length; i++) {
13936 if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
13937 if (storage != NULL) {
13938 storage->set(counter, Smi::FromInt(i));
13943 ASSERT(!storage || storage->length() >= counter);
13946 case FAST_DOUBLE_ELEMENTS:
13947 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13948 int length = IsJSArray() ?
13949 Smi::cast(JSArray::cast(this)->length())->value() :
13950 FixedArrayBase::cast(elements())->length();
13951 for (int i = 0; i < length; i++) {
13952 if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
13953 if (storage != NULL) {
13954 storage->set(counter, Smi::FromInt(i));
13959 ASSERT(!storage || storage->length() >= counter);
13963 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13964 case EXTERNAL_##TYPE##_ELEMENTS: \
13965 case TYPE##_ELEMENTS: \
13967 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13968 #undef TYPED_ARRAY_CASE
// Typed/external arrays: every index from 0 to length-1 is present.
13970 int length = FixedArrayBase::cast(elements())->length();
13971 while (counter < length) {
13972 if (storage != NULL) {
13973 storage->set(counter, Smi::FromInt(counter));
13977 ASSERT(!storage || storage->length() >= counter);
13981 case DICTIONARY_ELEMENTS: {
13982 if (storage != NULL) {
13983 element_dictionary()->CopyKeysTo(storage,
13985 SeededNumberDictionary::SORTED);
13987 counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
13990 case SLOPPY_ARGUMENTS_ELEMENTS: {
13991 FixedArray* parameter_map = FixedArray::cast(elements());
// Slots 0 and 1 of the parameter map hold context and backing store; the
// mapped entries start at offset 2.
13992 int mapped_length = parameter_map->length() - 2;
13993 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
13994 if (arguments->IsDictionary()) {
13995 // Copy the keys from arguments first, because Dictionary::CopyKeysTo
13996 // will insert in storage starting at index 0.
13997 SeededNumberDictionary* dictionary =
13998 SeededNumberDictionary::cast(arguments);
13999 if (storage != NULL) {
14000 dictionary->CopyKeysTo(
14001 storage, filter, SeededNumberDictionary::UNSORTED);
14003 counter += dictionary->NumberOfElementsFilterAttributes(filter);
14004 for (int i = 0; i < mapped_length; ++i) {
14005 if (!parameter_map->get(i + 2)->IsTheHole()) {
14006 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// Keys from the dictionary and the mapped entries were interleaved
// unsorted; sort them into index order.
14010 if (storage != NULL) storage->SortPairs(storage, counter);
// Non-dictionary backing store: merge mapped entries with plain arguments.
14013 int backing_length = arguments->length();
14015 for (; i < mapped_length; ++i) {
14016 if (!parameter_map->get(i + 2)->IsTheHole()) {
14017 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
14019 } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
14020 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
14024 for (; i < backing_length; ++i) {
14025 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// A wrapped string contributes one element key per character.
14033 if (this->IsJSValue()) {
14034 Object* val = JSValue::cast(this)->value();
14035 if (val->IsString()) {
14036 String* str = String::cast(val);
14038 for (int i = 0; i < str->length(); i++) {
14039 storage->set(counter + i, Smi::FromInt(i));
14042 counter += str->length();
14045 ASSERT(!storage || storage->length() == counter);
// Collects the object's enumerable element indices into |storage| (filtering
// out DONT_ENUM entries); returns the number of keys written.
14050 int JSObject::GetEnumElementKeys(FixedArray* storage) {
14051 return GetOwnElementKeys(storage, static_cast<PropertyAttributes>(DONT_ENUM));
14055 // StringKey simply carries a string object as key.
// Hash-table key wrapper around a raw String*: hashes once on construction
// and compares by string equality with a fast hash pre-check.
// NOTE(review): line numbers jump in several places (e.g. 14058 -> 14060,
// after 14075), so the member initializer for the wrapped string and the
// private member declarations are elided in this listing.
14056 class StringKey : public HashTableKey {
14058 explicit StringKey(String* string) :
14060 hash_(HashForObject(string)) { }
14062 bool IsMatch(Object* string) {
14063 // We know that all entries in a hash table had their hash keys created.
14064 // Use that knowledge to have fast failure.
14065 if (hash_ != HashForObject(string)) {
14068 return string_->Equals(String::cast(string));
14071 uint32_t Hash() { return hash_; }
14073 uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }
// AsObject returns the key itself; no allocation needed, so |heap| is unused.
14075 Object* AsObject(Heap* heap) { return string_; }
14082 // StringSharedKeys are used as keys in the eval cache.
14083 class StringSharedKey : public HashTableKey {
// A key is the tuple (source, shared function info, strict mode,
// scope position).  Stored entries are 4-element FixedArrays with the
// layout: [0]=shared, [1]=source, [2]=strict mode, [3]=scope position.
// NOTE(review): several lines of this class body are elided in this
// excerpt (member initializers, closing braces).
14085 StringSharedKey(Handle<String> source,
14086 Handle<SharedFunctionInfo> shared,
14087 StrictMode strict_mode,
14088 int scope_position)
14091 strict_mode_(strict_mode),
14092 scope_position_(scope_position) { }
14094 bool IsMatch(Object* other) V8_OVERRIDE {
14095 DisallowHeapAllocation no_allocation;
14096 if (!other->IsFixedArray()) return false;
14097 FixedArray* other_array = FixedArray::cast(other);
14098 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
// Pointer identity on the SharedFunctionInfo is the cheap first check.
14099 if (shared != *shared_) return false;
14100 int strict_unchecked = Smi::cast(other_array->get(2))->value();
14101 ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
14102 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
14103 if (strict_mode != strict_mode_) return false;
14104 int scope_position = Smi::cast(other_array->get(3))->value();
14105 if (scope_position != scope_position_) return false;
14106 String* source = String::cast(other_array->get(1));
14107 return source->Equals(*source_);
14110 static uint32_t StringSharedHashHelper(String* source,
14111 SharedFunctionInfo* shared,
14112 StrictMode strict_mode,
14113 int scope_position) {
14114 uint32_t hash = source->Hash();
14115 if (shared->HasSourceCode()) {
14116 // Instead of using the SharedFunctionInfo pointer in the hash
14117 // code computation, we use a combination of the hash of the
14118 // script source code and the start position of the calling scope.
14119 // We do this to ensure that the cache entries can survive garbage
14121 Script* script(Script::cast(shared->script()));
14122 hash ^= String::cast(script->source())->Hash();
// Strict-mode code hashes differently so sloppy/strict never collide
// on the same source text.
14123 if (strict_mode == STRICT) hash ^= 0x8000;
14124 hash += scope_position;
14129 uint32_t Hash() V8_OVERRIDE {
14130 return StringSharedHashHelper(*source_, *shared_, strict_mode_,
14134 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
14135 DisallowHeapAllocation no_allocation;
14136 FixedArray* other_array = FixedArray::cast(obj);
14137 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
14138 String* source = String::cast(other_array->get(1));
14139 int strict_unchecked = Smi::cast(other_array->get(2))->value();
14140 ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
14141 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
14142 int scope_position = Smi::cast(other_array->get(3))->value();
14143 return StringSharedHashHelper(
14144 source, shared, strict_mode, scope_position);
// Materializes the key as the 4-element FixedArray described above.
14148 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
14149 Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
14150 array->set(0, *shared_);
14151 array->set(1, *source_);
14152 array->set(2, Smi::FromInt(strict_mode_));
14153 array->set(3, Smi::FromInt(scope_position_));
14158 Handle<String> source_;
14159 Handle<SharedFunctionInfo> shared_;
14160 StrictMode strict_mode_;
14161 int scope_position_;
14165 // RegExpKey carries the source and flags of a regular expression as key.
14166 class RegExpKey : public HashTableKey {
// NOTE(review): parts of this class body (the string_ initializer, the
// Smi* flags_ member declaration, closing braces) are elided here.
14168 RegExpKey(Handle<String> string, JSRegExp::Flags flags)
// Flags are stored pre-boxed as a Smi so IsMatch can compare directly.
14170 flags_(Smi::FromInt(flags.value())) { }
14172 // Rather than storing the key in the hash table, a pointer to the
14173 // stored value is stored where the key should be. IsMatch then
14174 // compares the search key to the found object, rather than comparing
14176 bool IsMatch(Object* obj) V8_OVERRIDE {
14177 FixedArray* val = FixedArray::cast(obj);
14178 return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
14179 && (flags_ == val->get(JSRegExp::kFlagsIndex));
14182 uint32_t Hash() V8_OVERRIDE { return RegExpHash(*string_, flags_); }
14184 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
14185 // Plain hash maps, which is where regexp keys are used, don't
14186 // use this function.
// Deliberately unreachable: converting an empty MaybeHandle aborts.
14188 return MaybeHandle<Object>().ToHandleChecked();
14191 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
14192 FixedArray* val = FixedArray::cast(obj);
14193 return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
14194 Smi::cast(val->get(JSRegExp::kFlagsIndex)));
// Hash mixes the pattern string's hash with the flag bits.
14197 static uint32_t RegExpHash(String* string, Smi* flags) {
14198 return string->Hash() + flags->value();
14201 Handle<String> string_;
14206 Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
14207 if (hash_field_ == 0) Hash();
14208 return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
14212 Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
14213 if (hash_field_ == 0) Hash();
14214 return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
14219 const uint8_t* SubStringKey<uint8_t>::GetChars() {
14220 return string_->IsSeqOneByteString()
14221 ? SeqOneByteString::cast(*string_)->GetChars()
14222 : ExternalAsciiString::cast(*string_)->GetChars();
14227 const uint16_t* SubStringKey<uint16_t>::GetChars() {
14228 return string_->IsSeqTwoByteString()
14229 ? SeqTwoByteString::cast(*string_)->GetChars()
14230 : ExternalTwoByteString::cast(*string_)->GetChars();
14235 Handle<Object> SubStringKey<uint8_t>::AsHandle(Isolate* isolate) {
14236 if (hash_field_ == 0) Hash();
14237 Vector<const uint8_t> chars(GetChars() + from_, length_);
14238 return isolate->factory()->NewOneByteInternalizedString(chars, hash_field_);
14243 Handle<Object> SubStringKey<uint16_t>::AsHandle(Isolate* isolate) {
14244 if (hash_field_ == 0) Hash();
14245 Vector<const uint16_t> chars(GetChars() + from_, length_);
14246 return isolate->factory()->NewTwoByteInternalizedString(chars, hash_field_);
14251 bool SubStringKey<uint8_t>::IsMatch(Object* string) {
14252 Vector<const uint8_t> chars(GetChars() + from_, length_);
14253 return String::cast(string)->IsOneByteEqualTo(chars);
14258 bool SubStringKey<uint16_t>::IsMatch(Object* string) {
14259 Vector<const uint16_t> chars(GetChars() + from_, length_);
14260 return String::cast(string)->IsTwoByteEqualTo(chars);
// Explicitly instantiate both SubStringKey flavors so the out-of-line
// specializations above are emitted in this translation unit.
14264 template class SubStringKey<uint8_t>;
14265 template class SubStringKey<uint16_t>;
14268 // InternalizedStringKey carries a string/internalized-string object as key.
14269 class InternalizedStringKey : public HashTableKey {
// NOTE(review): parts of this class body (local Handle<Map> declaration,
// early returns, closing braces) are elided in this excerpt.
14271 explicit InternalizedStringKey(Handle<String> string)
14272 : string_(string) { }
14274 virtual bool IsMatch(Object* string) V8_OVERRIDE {
14275 return String::cast(string)->Equals(*string_);
14278 virtual uint32_t Hash() V8_OVERRIDE { return string_->Hash(); }
14280 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
14281 return String::cast(other)->Hash();
14284 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
14285 // Internalize the string if possible.
14286 MaybeHandle<Map> maybe_map =
14287 isolate->factory()->InternalizedStringMapForString(string_);
14289 if (maybe_map.ToHandle(&map)) {
// In-place internalization: only the map changes, so no write barrier
// is required.
14290 string_->set_map_no_write_barrier(*map);
14291 ASSERT(string_->IsInternalizedString());
14294 // Otherwise allocate a new internalized string.
14295 return isolate->factory()->NewInternalizedStringImpl(
14296 string_, string_->length(), string_->hash_field());
14299 static uint32_t StringHash(Object* obj) {
14300 return String::cast(obj)->Hash();
14303 Handle<String> string_;
// Visits the table's prefix fields, i.e. everything stored before the
// first entry slot.
14307 template<typename Derived, typename Shape, typename Key>
14308 void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
14309 IteratePointers(v, 0, kElementsStartOffset);
// Visits every entry slot of the backing store, from the start of the
// elements region to the end of the array.
// NOTE(review): the first argument line of the IteratePointers call is
// elided in this excerpt.
14313 template<typename Derived, typename Shape, typename Key>
14314 void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
14316 kElementsStartOffset,
14317 kHeaderSize + length() * kPointerSize);
// Allocates a new hash table with room for |at_least_space_for| entries.
// With USE_CUSTOM_MINIMUM_CAPACITY the capacity is taken verbatim
// (and must be a power of two); otherwise it is rounded up by
// ComputeCapacity.  Aborts the process if the capacity would exceed
// kMaxCapacity.
// NOTE(review): the Isolate* parameter line is elided in this excerpt.
14321 template<typename Derived, typename Shape, typename Key>
14322 Handle<Derived> HashTable<Derived, Shape, Key>::New(
14324 int at_least_space_for,
14325 MinimumCapacity capacity_option,
14326 PretenureFlag pretenure) {
14327 ASSERT(0 <= at_least_space_for);
14328 ASSERT(!capacity_option || IsPowerOf2(at_least_space_for));
14329 int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
14330 ? at_least_space_for
14331 : ComputeCapacity(at_least_space_for);
14332 if (capacity > HashTable::kMaxCapacity) {
14333 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
// The table is a FixedArray re-branded with the hash table map.
14336 Factory* factory = isolate->factory();
14337 int length = EntryToIndex(capacity);
14338 Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
14339 array->set_map_no_write_barrier(*factory->hash_table_map());
14340 Handle<Derived> table = Handle<Derived>::cast(array);
// Start empty: no live or deleted elements.
14342 table->SetNumberOfElements(0);
14343 table->SetNumberOfDeletedElements(0);
14344 table->SetCapacity(capacity);
14349 // Find entry for key otherwise return kNotFound.
14350 int NameDictionary::FindEntry(Handle<Name> key) {
// Non-unique names (e.g. non-internalized strings) take the generic
// hash-table probe path.
14351 if (!key->IsUniqueName()) {
14352 return DerivedHashTable::FindEntry(key);
14355 // Optimized for unique names. Knowledge of the key type allows:
14356 // 1. Move the check if the key is unique out of the loop.
14357 // 2. Avoid comparing hash codes in unique-to-unique comparison.
14358 // 3. Detect a case when a dictionary key is not unique but the key is.
14359 // In case of positive result the dictionary key may be replaced by the
14360 // internalized string with minimal performance penalty. It gives a chance
14361 // to perform further lookups in code stubs (and significant performance
14362 // boost a certain style of code).
14364 // EnsureCapacity will guarantee the hash table is never full.
// NOTE(review): the probe-loop header and the key-replacement store are
// elided in this excerpt.
14365 uint32_t capacity = Capacity();
14366 uint32_t entry = FirstProbe(key->Hash(), capacity);
14367 uint32_t count = 1;
14370 int index = EntryToIndex(entry);
14371 Object* element = get(index);
14372 if (element->IsUndefined()) break; // Empty entry.
// Unique names can be compared by pointer identity.
14373 if (*key == element) return entry;
14374 if (!element->IsUniqueName() &&
14375 !element->IsTheHole() &&
14376 Name::cast(element)->Equals(*key)) {
14377 // Replace a key that is a non-internalized string by the equivalent
14378 // internalized string for faster further lookups.
14382 ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(*key));
14383 entry = NextProbe(entry, count++, capacity);
// Re-inserts every live element of this table into |new_table| (which
// must have strictly larger capacity), then copies the element counts
// over.  Runs with heap allocation disallowed so raw pointers stay valid.
14389 template<typename Derived, typename Shape, typename Key>
14390 void HashTable<Derived, Shape, Key>::Rehash(
14391 Handle<Derived> new_table,
14393 ASSERT(NumberOfElements() < new_table->Capacity());
14395 DisallowHeapAllocation no_gc;
14396 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
14398 // Copy prefix to new array.
14399 for (int i = kPrefixStartIndex;
14400 i < kPrefixStartIndex + Shape::kPrefixSize;
14402 new_table->set(i, get(i), mode);
14405 // Rehash the elements.
// NOTE(review): the IsKey check/continue inside this loop is elided in
// this excerpt.
14406 int capacity = Capacity();
14407 for (int i = 0; i < capacity; i++) {
14408 uint32_t from_index = EntryToIndex(i);
14409 Object* k = get(from_index);
14411 uint32_t hash = HashTable::HashForObject(key, k);
14412 uint32_t insertion_index =
14413 EntryToIndex(new_table->FindInsertionEntry(hash));
// Copy the whole entry (key plus any payload slots).
14414 for (int j = 0; j < Shape::kEntrySize; j++) {
14415 new_table->set(insertion_index + j, get(from_index + j), mode);
// Deleted-element holes are not carried over to the new table.
14419 new_table->SetNumberOfElements(NumberOfElements());
14420 new_table->SetNumberOfDeletedElements(0);
// Walks the probe sequence for element |k| for up to |probe| steps and
// returns the entry where it would be found; returns |expected| early if
// the sequence reaches it, meaning the element is already well-placed.
// NOTE(review): the Key/Object*/int parameter lines and the final return
// are elided in this excerpt.
14424 template<typename Derived, typename Shape, typename Key>
14425 uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
14429 uint32_t expected) {
14430 uint32_t hash = HashTable::HashForObject(key, k);
14431 uint32_t capacity = Capacity();
14432 uint32_t entry = FirstProbe(hash, capacity);
14433 for (int i = 1; i < probe; i++) {
14434 if (entry == expected) return expected;
14435 entry = NextProbe(entry, i, capacity);
// Swaps the full entries (all kEntrySize slots) at |entry1| and |entry2|
// using a stack-allocated temporary buffer.
// NOTE(review): the uint32_t entry2 parameter line is elided in this
// excerpt.
14441 template<typename Derived, typename Shape, typename Key>
14442 void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
14444 WriteBarrierMode mode) {
14445 int index1 = EntryToIndex(entry1);
14446 int index2 = EntryToIndex(entry2);
// Stack buffer holds one complete entry during the three-way exchange.
14447 Object* temp[Shape::kEntrySize];
14448 for (int j = 0; j < Shape::kEntrySize; j++) {
14449 temp[j] = get(index1 + j);
14451 for (int j = 0; j < Shape::kEntrySize; j++) {
14452 set(index1 + j, get(index2 + j), mode);
14454 for (int j = 0; j < Shape::kEntrySize; j++) {
14455 set(index2 + j, temp[j], mode);
// Rehashes the table in place: repeatedly sweeps all entries, moving
// each element toward its canonical probe position, increasing the
// number of guaranteed-correct probe steps on every outer iteration
// until a sweep makes no more moves.
// NOTE(review): the |done| initialization/updates and loop closing
// braces are elided in this excerpt.
14460 template<typename Derived, typename Shape, typename Key>
14461 void HashTable<Derived, Shape, Key>::Rehash(Key key) {
14462 DisallowHeapAllocation no_gc;
14463 WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
14464 uint32_t capacity = Capacity();
14466 for (int probe = 1; !done; probe++) {
14467 // All elements at entries given by one of the first _probe_ probes
14468 // are placed correctly. Other elements might need to be moved.
14470 for (uint32_t current = 0; current < capacity; current++) {
14471 Object* current_key = get(EntryToIndex(current));
14472 if (IsKey(current_key)) {
14473 uint32_t target = EntryForProbe(key, current_key, probe, current);
14474 if (current == target) continue;
14475 Object* target_key = get(EntryToIndex(target));
14476 if (!IsKey(target_key) ||
14477 EntryForProbe(key, target_key, probe, target) != target) {
14478 // Put the current element into the correct position.
14479 Swap(current, target, mode);
14480 // The other element will be processed on the next iteration.
14483 // The place for the current element is occupied. Leave the element
14484 // for the next probe.
// Returns |table| unchanged if it can absorb |n| more elements while
// keeping enough free slots; otherwise allocates a larger table,
// rehashes into it and returns the new one.  Large tables surviving in
// old space are pretenured to avoid repeated promotion.
// NOTE(review): the int n / Key key parameter lines, the early-return
// brace and the New() capacity argument line are elided in this excerpt.
14493 template<typename Derived, typename Shape, typename Key>
14494 Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
14495 Handle<Derived> table,
14498 PretenureFlag pretenure) {
14499 Isolate* isolate = table->GetIsolate();
14500 int capacity = table->Capacity();
14501 int nof = table->NumberOfElements() + n;
14502 int nod = table->NumberOfDeletedElements();
14504 // 50% is still free after adding n elements and
14505 // at most 50% of the free elements are deleted elements.
14506 if (nod <= (capacity - nof) >> 1) {
14507 int needed_free = nof >> 1;
14508 if (nof + needed_free <= capacity) return table;
14511 const int kMinCapacityForPretenure = 256;
14512 bool should_pretenure = pretenure == TENURED ||
14513 ((capacity > kMinCapacityForPretenure) &&
14514 !isolate->heap()->InNewSpace(*table));
14515 Handle<Derived> new_table = HashTable::New(
14518 USE_DEFAULT_MINIMUM_CAPACITY,
14519 should_pretenure ? TENURED : NOT_TENURED);
14521 table->Rehash(new_table, key);
// Shrinks |table| when it is at most a quarter full (and holds at least
// 16 elements); otherwise returns it unchanged.  The replacement table
// is rehashed from the original.
// NOTE(review): the Key key parameter line, the |pretenure| declaration
// line and the New() capacity argument line are elided in this excerpt.
14526 template<typename Derived, typename Shape, typename Key>
14527 Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
14529 int capacity = table->Capacity();
14530 int nof = table->NumberOfElements();
14532 // Shrink to fit the number of elements if only a quarter of the
14533 // capacity is filled with elements.
14534 if (nof > (capacity >> 2)) return table;
14535 // Allocate a new dictionary with room for at least the current
14536 // number of elements. The allocation method will make sure that
14537 // there is extra room in the dictionary for additions. Don't go
14538 // lower than room for 16 elements.
14539 int at_least_room_for = nof;
14540 if (at_least_room_for < 16) return table;
14542 Isolate* isolate = table->GetIsolate();
14543 const int kMinCapacityForPretenure = 256;
14545 (at_least_room_for > kMinCapacityForPretenure) &&
14546 !isolate->heap()->InNewSpace(*table);
14547 Handle<Derived> new_table = HashTable::New(
14550 USE_DEFAULT_MINIMUM_CAPACITY,
14551 pretenure ? TENURED : NOT_TENURED);
14553 table->Rehash(new_table, key);
// Probes for the first free slot (undefined or the-hole) for an element
// with the given hash.  Termination relies on EnsureCapacity keeping the
// table from ever being completely full.
// NOTE(review): the loop header and final return are elided in this
// excerpt.
14558 template<typename Derived, typename Shape, typename Key>
14559 uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
14560 uint32_t capacity = Capacity();
14561 uint32_t entry = FirstProbe(hash, capacity);
14562 uint32_t count = 1;
14563 // EnsureCapacity will guarantee the hash table is never full.
14565 Object* element = KeyAt(entry);
14566 if (element->IsUndefined() || element->IsTheHole()) break;
14567 entry = NextProbe(entry, count++, capacity);
14573 // Force instantiation of template instances class.
14574 // Please note this list is compiler dependent.
// These explicit instantiation definitions ensure all HashTable and
// Dictionary specializations used across the codebase are emitted in
// this translation unit.
// NOTE(review): some continuation lines of these declarations are
// elided in this excerpt.
14576 template class HashTable<StringTable, StringTableShape, HashTableKey*>;
14578 template class HashTable<CompilationCacheTable,
14579 CompilationCacheShape,
14582 template class HashTable<MapCache, MapCacheShape, HashTableKey*>;
14584 template class HashTable<ObjectHashTable,
14585 ObjectHashTableShape,
14588 template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;
14590 template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;
14592 template class Dictionary<SeededNumberDictionary,
14593 SeededNumberDictionaryShape,
14596 template class Dictionary<UnseededNumberDictionary,
14597 UnseededNumberDictionaryShape,
14600 template Handle<SeededNumberDictionary>
14601 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14602 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14604 template Handle<UnseededNumberDictionary>
14605 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14606 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14608 template Handle<NameDictionary>
14609 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14610 New(Isolate*, int n, PretenureFlag pretenure);
14612 template Handle<SeededNumberDictionary>
14613 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14614 AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);
14616 template Handle<UnseededNumberDictionary>
14617 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14618 AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);
14621 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14622 SlowReverseLookup(Object* value);
14625 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14626 SlowReverseLookup(Object* value);
14629 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14630 SlowReverseLookup(Object* value);
14633 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14636 PropertyAttributes,
14637 Dictionary<SeededNumberDictionary,
14638 SeededNumberDictionaryShape,
14639 uint32_t>::SortMode);
14641 template Handle<Object>
14642 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
14643 Handle<NameDictionary>, int, JSObject::DeleteMode);
14645 template Handle<Object>
14646 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14647 DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);
14649 template Handle<NameDictionary>
14650 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14651 New(Isolate*, int, MinimumCapacity, PretenureFlag);
14653 template Handle<NameDictionary>
14654 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14655 Shrink(Handle<NameDictionary>, Handle<Name>);
14657 template Handle<SeededNumberDictionary>
14658 HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14659 Shrink(Handle<SeededNumberDictionary>, uint32_t);
14661 template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14665 PropertyAttributes,
14667 NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);
14670 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14671 NumberOfElementsFilterAttributes(PropertyAttributes);
14673 template Handle<NameDictionary>
14674 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
14675 Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);
14678 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14679 GenerateNewEnumerationIndices(Handle<NameDictionary>);
14682 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14683 NumberOfElementsFilterAttributes(PropertyAttributes);
14685 template Handle<SeededNumberDictionary>
14686 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14687 Add(Handle<SeededNumberDictionary>,
14692 template Handle<UnseededNumberDictionary>
14693 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14694 Add(Handle<UnseededNumberDictionary>,
14699 template Handle<SeededNumberDictionary>
14700 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14701 EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);
14703 template Handle<UnseededNumberDictionary>
14704 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14705 EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);
14707 template Handle<NameDictionary>
14708 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14709 EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);
14712 int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14713 NumberOfEnumElements();
14716 int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14717 NumberOfEnumElements();
14720 int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14721 FindEntry(uint32_t);
// Compacts a dictionary-elements object for sorting: defined values are
// re-keyed to consecutive indices starting at 0, undefineds are appended
// after them, and the number of defined values is returned.  Returns the
// Smi -1 ("bailout") when an element is a callback/read-only or a key
// would leave Smi range, so the caller falls back to a JS sort.
// NOTE(review): the bailout `return bailout;` statements, the pos/undefs
// counters' increments and the closing braces are elided in this excerpt.
14724 Handle<Object> JSObject::PrepareSlowElementsForSort(
14725 Handle<JSObject> object, uint32_t limit) {
14726 ASSERT(object->HasDictionaryElements());
14727 Isolate* isolate = object->GetIsolate();
14728 // Must stay in dictionary mode, either because of requires_slow_elements,
14729 // or because we are not going to sort (and therefore compact) all of the
14731 Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
14732 Handle<SeededNumberDictionary> new_dict =
14733 SeededNumberDictionary::New(isolate, dict->NumberOfElements());
14736 uint32_t undefs = 0;
14737 int capacity = dict->Capacity();
14738 Handle<Smi> bailout(Smi::FromInt(-1), isolate);
14739 // Entry to the new dictionary does not cause it to grow, as we have
14740 // allocated one that is large enough for all entries.
14741 DisallowHeapAllocation no_gc;
14742 for (int i = 0; i < capacity; i++) {
14743 Object* k = dict->KeyAt(i);
14744 if (!dict->IsKey(k)) continue;
14746 ASSERT(k->IsNumber());
14747 ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
14748 ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
14749 ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
14751 HandleScope scope(isolate);
14752 Handle<Object> value(dict->ValueAt(i), isolate);
14753 PropertyDetails details = dict->DetailsAt(i);
14754 if (details.type() == CALLBACKS || details.IsReadOnly()) {
14755 // Bail out and do the sorting of undefineds and array holes in JS.
14756 // Also bail out if the element is not supposed to be moved.
14760 uint32_t key = NumberToUint32(k);
14762 if (value->IsUndefined()) {
14764 } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14765 // Adding an entry with the key beyond smi-range requires
14766 // allocation. Bailout.
14769 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14770 new_dict, pos, value, details);
14771 ASSERT(result.is_identical_to(new_dict));
14775 } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
14776 // Adding an entry with the key beyond smi-range requires
14777 // allocation. Bailout.
14780 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14781 new_dict, key, value, details);
14782 ASSERT(result.is_identical_to(new_dict));
// Append the counted undefineds after the defined values.
14787 uint32_t result = pos;
14788 PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
14789 while (undefs > 0) {
14790 if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14791 // Adding an entry with the key beyond smi-range requires
14792 // allocation. Bailout.
14795 HandleScope scope(isolate);
14796 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14797 new_dict, pos, isolate->factory()->undefined_value(), no_details);
14798 ASSERT(result.is_identical_to(new_dict));
14804 object->set_elements(*new_dict);
14806 AllowHeapAllocation allocate_return_value;
14807 return isolate->factory()->NewNumberFromUint(result);
14811 // Collects all defined (non-hole) and non-undefined (array) elements at
14812 // the start of the elements array.
14813 // If the object is in dictionary mode, it is converted to fast elements
// Returns the number of defined, non-undefined values (as a Number), or
// Smi -1 for cases the caller must handle in JS (sloppy arguments,
// observed objects, and the slow-elements bailouts above).
// NOTE(review): counter increments, several closing braces and early
// returns are elided in this excerpt.
14815 Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
14817 Isolate* isolate = object->GetIsolate();
14818 if (object->HasSloppyArgumentsElements() ||
14819 object->map()->is_observed()) {
14820 return handle(Smi::FromInt(-1), isolate);
14823 if (object->HasDictionaryElements()) {
14824 // Convert to fast elements containing only the existing properties.
14825 // Ordering is irrelevant, since we are going to sort anyway.
14826 Handle<SeededNumberDictionary> dict(object->element_dictionary());
14827 if (object->IsJSArray() || dict->requires_slow_elements() ||
14828 dict->max_number_key() >= limit) {
14829 return JSObject::PrepareSlowElementsForSort(object, limit);
14831 // Convert to fast elements.
14833 Handle<Map> new_map =
14834 JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);
14836 PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
14837 NOT_TENURED: TENURED;
14838 Handle<FixedArray> fast_elements =
14839 isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
14840 dict->CopyValuesTo(*fast_elements);
14841 JSObject::ValidateElements(object);
14843 JSObject::SetMapAndElements(object, new_map, fast_elements);
14844 } else if (object->HasExternalArrayElements() ||
14845 object->HasFixedTypedArrayElements()) {
14846 // Typed arrays cannot have holes or undefined elements.
14847 return handle(Smi::FromInt(
14848 FixedArrayBase::cast(object->elements())->length()), isolate);
14849 } else if (!object->HasFastDoubleElements()) {
14850 EnsureWritableFastElements(object);
14852 ASSERT(object->HasFastSmiOrObjectElements() ||
14853 object->HasFastDoubleElements());
14855 // Collect holes at the end, undefined before that and the rest at the
14856 // start, and return the number of non-hole, non-undefined values.
14858 Handle<FixedArrayBase> elements_base(object->elements());
14859 uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
14860 if (limit > elements_length) {
14861 limit = elements_length ;
14864 return handle(Smi::FromInt(0), isolate);
14867 uint32_t result = 0;
14868 if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
14869 FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
14870 // Split elements into defined and the_hole, in that order.
14871 unsigned int holes = limit;
14872 // Assume most arrays contain no holes and undefined values, so minimize the
14873 // number of stores of non-undefined, non-the-hole values.
14874 for (unsigned int i = 0; i < holes; i++) {
14875 if (elements->is_the_hole(i)) {
14880 // Position i needs to be filled.
14881 while (holes > i) {
14882 if (elements->is_the_hole(holes)) {
// Move the defined value at |holes| down into the gap at |i|.
14885 elements->set(i, elements->get_scalar(holes));
// Fill the tail with holes.
14891 while (holes < limit) {
14892 elements->set_the_hole(holes);
14896 FixedArray* elements = FixedArray::cast(*elements_base);
14897 DisallowHeapAllocation no_gc;
14899 // Split elements into defined, undefined and the_hole, in that order. Only
14900 // count locations for undefined and the hole, and fill them afterwards.
14901 WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
14902 unsigned int undefs = limit;
14903 unsigned int holes = limit;
14904 // Assume most arrays contain no holes and undefined values, so minimize the
14905 // number of stores of non-undefined, non-the-hole values.
14906 for (unsigned int i = 0; i < undefs; i++) {
14907 Object* current = elements->get(i);
14908 if (current->IsTheHole()) {
14911 } else if (current->IsUndefined()) {
14916 // Position i needs to be filled.
14917 while (undefs > i) {
14918 current = elements->get(undefs);
14919 if (current->IsTheHole()) {
14922 } else if (current->IsUndefined()) {
14925 elements->set(i, current, write_barrier);
// Fill the counted undefined slots, then the hole slots, at the tail.
14931 while (undefs < holes) {
14932 elements->set_undefined(undefs);
14935 while (holes < limit) {
14936 elements->set_the_hole(holes);
14941 return isolate->factory()->NewNumberFromUint(result);
// Maps the elements backing-store instance type (external or fixed typed
// array) to the corresponding ExternalArrayType; returns -1 for any
// other instance type.
14945 ExternalArrayType JSTypedArray::type() {
14946 switch (elements()->map()->instance_type()) {
14947 #define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size) \
14948 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14949 case FIXED_##TYPE##_ARRAY_TYPE: \
14950 return kExternal##Type##Array;
14952 TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
14953 #undef INSTANCE_TYPE_TO_ARRAY_TYPE
14957 return static_cast<ExternalArrayType>(-1);
// Returns the per-element byte size of this typed array, derived from
// the backing store's instance type via the TYPED_ARRAYS macro table.
// NOTE(review): the `return size;` line of the macro body and the
// default case are elided in this excerpt.
14962 size_t JSTypedArray::element_size() {
14963 switch (elements()->map()->instance_type()) {
14964 #define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size) \
14965 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14968 TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
14969 #undef INSTANCE_TYPE_TO_ELEMENT_SIZE
// Stores |value| at |index| with Uint8Clamped semantics: Smis and heap
// numbers are clamped to [0, 255] (NaN and negatives become 0, doubles
// are rounded to nearest); undefined stores 0.  Out-of-range indices are
// silently ignored.  Returns the stored value as a Smi.
// NOTE(review): the uint32_t index parameter line and some closing
// braces are elided in this excerpt.
14978 Handle<Object> ExternalUint8ClampedArray::SetValue(
14979 Handle<ExternalUint8ClampedArray> array,
14981 Handle<Object> value) {
14982 uint8_t clamped_value = 0;
14983 if (index < static_cast<uint32_t>(array->length())) {
14984 if (value->IsSmi()) {
14985 int int_value = Handle<Smi>::cast(value)->value();
14986 if (int_value < 0) {
14988 } else if (int_value > 255) {
14989 clamped_value = 255;
14991 clamped_value = static_cast<uint8_t>(int_value);
14993 } else if (value->IsHeapNumber()) {
14994 double double_value = Handle<HeapNumber>::cast(value)->value();
// !(x > 0) is true for NaN as well as for values <= 0.
14995 if (!(double_value > 0)) {
14996 // NaN and less than zero clamp to zero.
14998 } else if (double_value > 255) {
14999 // Greater than 255 clamp to 255.
15000 clamped_value = 255;
15002 // Other doubles are rounded to the nearest integer.
15003 clamped_value = static_cast<uint8_t>(lrint(double_value));
15006 // Clamp undefined to zero (default). All other types have been
15007 // converted to a number type further up in the call chain.
15008 ASSERT(value->IsUndefined());
15010 array->set(index, clamped_value);
15012 return handle(Smi::FromInt(clamped_value), array->GetIsolate());
// Shared setter for the integral external-array types: converts |value|
// (Smi, heap number, or undefined-as-zero) to ValueType and stores it at
// |index| if in range.  Returns the stored value as a Number.
// NOTE(review): the Isolate*/uint32_t index parameter lines and closing
// braces are elided in this excerpt.
15016 template<typename ExternalArrayClass, typename ValueType>
15017 static Handle<Object> ExternalArrayIntSetter(
15019 Handle<ExternalArrayClass> receiver,
15021 Handle<Object> value) {
15022 ValueType cast_value = 0;
15023 if (index < static_cast<uint32_t>(receiver->length())) {
15024 if (value->IsSmi()) {
15025 int int_value = Handle<Smi>::cast(value)->value();
15026 cast_value = static_cast<ValueType>(int_value);
15027 } else if (value->IsHeapNumber()) {
15028 double double_value = Handle<HeapNumber>::cast(value)->value();
// DoubleToInt32 applies the ECMAScript ToInt32 truncation before the
// narrowing cast to ValueType.
15029 cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
15031 // Clamp undefined to zero (default). All other types have been
15032 // converted to a number type further up in the call chain.
15033 ASSERT(value->IsUndefined());
15035 receiver->set(index, cast_value);
15037 return isolate->factory()->NewNumberFromInt(cast_value);
15041 Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
15043 Handle<Object> value) {
15044 return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
15045 array->GetIsolate(), array, index, value);
15049 Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
15051 Handle<Object> value) {
15052 return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
15053 array->GetIsolate(), array, index, value);
15057 Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
15059 Handle<Object> value) {
15060 return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
15061 array->GetIsolate(), array, index, value);
15065 Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
15067 Handle<Object> value) {
15068 return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
15069 array->GetIsolate(), array, index, value);
15073 Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
15075 Handle<Object> value) {
15076 return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
15077 array->GetIsolate(), array, index, value);
15081 Handle<Object> ExternalUint32Array::SetValue(
15082 Handle<ExternalUint32Array> array,
15084 Handle<Object> value) {
15085 uint32_t cast_value = 0;
15086 if (index < static_cast<uint32_t>(array->length())) {
15087 if (value->IsSmi()) {
15088 int int_value = Handle<Smi>::cast(value)->value();
15089 cast_value = static_cast<uint32_t>(int_value);
15090 } else if (value->IsHeapNumber()) {
15091 double double_value = Handle<HeapNumber>::cast(value)->value();
15092 cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
15094 // Clamp undefined to zero (default). All other types have been
15095 // converted to a number type further up in the call chain.
15096 ASSERT(value->IsUndefined());
15098 array->set(index, cast_value);
15100 return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
15104 Handle<Object> ExternalFloat32Array::SetValue(
15105 Handle<ExternalFloat32Array> array,
15107 Handle<Object> value) {
15108 float cast_value = static_cast<float>(OS::nan_value());
15109 if (index < static_cast<uint32_t>(array->length())) {
15110 if (value->IsSmi()) {
15111 int int_value = Handle<Smi>::cast(value)->value();
15112 cast_value = static_cast<float>(int_value);
15113 } else if (value->IsHeapNumber()) {
15114 double double_value = Handle<HeapNumber>::cast(value)->value();
15115 cast_value = static_cast<float>(double_value);
15117 // Clamp undefined to NaN (default). All other types have been
15118 // converted to a number type further up in the call chain.
15119 ASSERT(value->IsUndefined());
15121 array->set(index, cast_value);
15123 return array->GetIsolate()->factory()->NewNumber(cast_value);
15127 Handle<Object> ExternalFloat64Array::SetValue(
15128 Handle<ExternalFloat64Array> array,
15130 Handle<Object> value) {
15131 double double_value = OS::nan_value();
15132 if (index < static_cast<uint32_t>(array->length())) {
15133 if (value->IsNumber()) {
15134 double_value = value->Number();
15136 // Clamp undefined to NaN (default). All other types have been
15137 // converted to a number type further up in the call chain.
15138 ASSERT(value->IsUndefined());
15140 array->set(index, double_value);
15142 return array->GetIsolate()->factory()->NewNumber(double_value);
15146 PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
15147 ASSERT(!HasFastProperties());
15148 Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
15149 return PropertyCell::cast(value);
15153 Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
15154 Handle<JSGlobalObject> global,
15155 Handle<Name> name) {
15156 ASSERT(!global->HasFastProperties());
15157 int entry = global->property_dictionary()->FindEntry(name);
15158 if (entry == NameDictionary::kNotFound) {
15159 Isolate* isolate = global->GetIsolate();
15160 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
15161 isolate->factory()->the_hole_value());
15162 PropertyDetails details(NONE, NORMAL, 0);
15163 details = details.AsDeleted();
15164 Handle<NameDictionary> dictionary = NameDictionary::Add(
15165 handle(global->property_dictionary()), name, cell, details);
15166 global->set_properties(*dictionary);
15169 Object* value = global->property_dictionary()->ValueAt(entry);
15170 ASSERT(value->IsPropertyCell());
15171 return handle(PropertyCell::cast(value));
15176 // This class is used for looking up two character strings in the string table.
15177 // If we don't have a hit we don't want to waste much time so we unroll the
15178 // string hash calculation loop here for speed. Doesn't work if the two
15179 // characters form a decimal integer, since such strings have a different hash
15181 class TwoCharHashTableKey : public HashTableKey {
15183 TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
15184 : c1_(c1), c2_(c2) {
15186 uint32_t hash = seed;
15188 hash += hash << 10;
15192 hash += hash << 10;
15196 hash ^= hash >> 11;
15197 hash += hash << 15;
15198 if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
15201 // If this assert fails then we failed to reproduce the two-character
15202 // version of the string hashing algorithm above. One reason could be
15203 // that we were passed two digits as characters, since the hash
15204 // algorithm is different in that case.
15205 uint16_t chars[2] = {c1, c2};
15206 uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
15207 hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
15208 ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
15212 bool IsMatch(Object* o) V8_OVERRIDE {
15213 if (!o->IsString()) return false;
15214 String* other = String::cast(o);
15215 if (other->length() != 2) return false;
15216 if (other->Get(0) != c1_) return false;
15217 return other->Get(1) == c2_;
15220 uint32_t Hash() V8_OVERRIDE { return hash_; }
15221 uint32_t HashForObject(Object* key) V8_OVERRIDE {
15222 if (!key->IsString()) return 0;
15223 return String::cast(key)->Hash();
15226 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
15227 // The TwoCharHashTableKey is only used for looking in the string
15228 // table, not for adding to it.
15230 return MaybeHandle<Object>().ToHandleChecked();
15240 MaybeHandle<String> StringTable::InternalizeStringIfExists(
15242 Handle<String> string) {
15243 if (string->IsInternalizedString()) {
15246 return LookupStringIfExists(isolate, string);
15250 MaybeHandle<String> StringTable::LookupStringIfExists(
15252 Handle<String> string) {
15253 Handle<StringTable> string_table = isolate->factory()->string_table();
15254 InternalizedStringKey key(string);
15255 int entry = string_table->FindEntry(&key);
15256 if (entry == kNotFound) {
15257 return MaybeHandle<String>();
15259 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
15260 ASSERT(StringShape(*result).IsInternalized());
15266 MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
15270 Handle<StringTable> string_table = isolate->factory()->string_table();
15271 TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
15272 int entry = string_table->FindEntry(&key);
15273 if (entry == kNotFound) {
15274 return MaybeHandle<String>();
15276 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
15277 ASSERT(StringShape(*result).IsInternalized());
15283 Handle<String> StringTable::LookupString(Isolate* isolate,
15284 Handle<String> string) {
15285 InternalizedStringKey key(string);
15286 return LookupKey(isolate, &key);
15290 Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
15291 Handle<StringTable> table = isolate->factory()->string_table();
15292 int entry = table->FindEntry(key);
15294 // String already in table.
15295 if (entry != kNotFound) {
15296 return handle(String::cast(table->KeyAt(entry)), isolate);
15299 // Adding new string. Grow table if needed.
15300 table = StringTable::EnsureCapacity(table, 1, key);
15302 // Create string object.
15303 Handle<Object> string = key->AsHandle(isolate);
15304 // There must be no attempts to internalize strings that could throw
15305 // InvalidStringLength error.
15306 CHECK(!string.is_null());
15308 // Add the new string and return it along with the string table.
15309 entry = table->FindInsertionEntry(key->Hash());
15310 table->set(EntryToIndex(entry), *string);
15311 table->ElementAdded();
15313 isolate->factory()->set_string_table(table);
15314 return Handle<String>::cast(string);
15318 Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
15319 Handle<Context> context) {
15320 Isolate* isolate = GetIsolate();
15321 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15322 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
15323 RelocInfo::kNoPosition);
15324 int entry = FindEntry(&key);
15325 if (entry == kNotFound) return isolate->factory()->undefined_value();
15326 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
15330 Handle<Object> CompilationCacheTable::LookupEval(Handle<String> src,
15331 Handle<Context> context,
15332 StrictMode strict_mode,
15333 int scope_position) {
15334 Isolate* isolate = GetIsolate();
15335 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15336 StringSharedKey key(src, shared, strict_mode, scope_position);
15337 int entry = FindEntry(&key);
15338 if (entry == kNotFound) return isolate->factory()->undefined_value();
15339 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
15343 Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
15344 JSRegExp::Flags flags) {
15345 Isolate* isolate = GetIsolate();
15346 DisallowHeapAllocation no_allocation;
15347 RegExpKey key(src, flags);
15348 int entry = FindEntry(&key);
15349 if (entry == kNotFound) return isolate->factory()->undefined_value();
15350 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
15354 Handle<CompilationCacheTable> CompilationCacheTable::Put(
15355 Handle<CompilationCacheTable> cache, Handle<String> src,
15356 Handle<Context> context, Handle<Object> value) {
15357 Isolate* isolate = cache->GetIsolate();
15358 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15359 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
15360 RelocInfo::kNoPosition);
15361 cache = EnsureCapacity(cache, 1, &key);
15362 Handle<Object> k = key.AsHandle(isolate);
15363 int entry = cache->FindInsertionEntry(key.Hash());
15364 cache->set(EntryToIndex(entry), *k);
15365 cache->set(EntryToIndex(entry) + 1, *value);
15366 cache->ElementAdded();
15371 Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
15372 Handle<CompilationCacheTable> cache, Handle<String> src,
15373 Handle<Context> context, Handle<SharedFunctionInfo> value,
15374 int scope_position) {
15375 Isolate* isolate = cache->GetIsolate();
15376 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15377 StringSharedKey key(src, shared, value->strict_mode(), scope_position);
15378 cache = EnsureCapacity(cache, 1, &key);
15379 Handle<Object> k = key.AsHandle(isolate);
15380 int entry = cache->FindInsertionEntry(key.Hash());
15381 cache->set(EntryToIndex(entry), *k);
15382 cache->set(EntryToIndex(entry) + 1, *value);
15383 cache->ElementAdded();
15388 Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
15389 Handle<CompilationCacheTable> cache, Handle<String> src,
15390 JSRegExp::Flags flags, Handle<FixedArray> value) {
15391 RegExpKey key(src, flags);
15392 cache = EnsureCapacity(cache, 1, &key);
15393 int entry = cache->FindInsertionEntry(key.Hash());
15394 // We store the value in the key slot, and compare the search key
15395 // to the stored value with a custon IsMatch function during lookups.
15396 cache->set(EntryToIndex(entry), *value);
15397 cache->set(EntryToIndex(entry) + 1, *value);
15398 cache->ElementAdded();
15403 void CompilationCacheTable::Remove(Object* value) {
15404 DisallowHeapAllocation no_allocation;
15405 Object* the_hole_value = GetHeap()->the_hole_value();
15406 for (int entry = 0, size = Capacity(); entry < size; entry++) {
15407 int entry_index = EntryToIndex(entry);
15408 int value_index = entry_index + 1;
15409 if (get(value_index) == value) {
15410 NoWriteBarrierSet(this, entry_index, the_hole_value);
15411 NoWriteBarrierSet(this, value_index, the_hole_value);
15419 // StringsKey used for HashTable where key is array of internalized strings.
15420 class StringsKey : public HashTableKey {
15422 explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }
15424 bool IsMatch(Object* strings) V8_OVERRIDE {
15425 FixedArray* o = FixedArray::cast(strings);
15426 int len = strings_->length();
15427 if (o->length() != len) return false;
15428 for (int i = 0; i < len; i++) {
15429 if (o->get(i) != strings_->get(i)) return false;
15434 uint32_t Hash() V8_OVERRIDE { return HashForObject(*strings_); }
15436 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
15437 FixedArray* strings = FixedArray::cast(obj);
15438 int len = strings->length();
15440 for (int i = 0; i < len; i++) {
15441 hash ^= String::cast(strings->get(i))->Hash();
15446 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { return strings_; }
15449 Handle<FixedArray> strings_;
15453 Object* MapCache::Lookup(FixedArray* array) {
15454 DisallowHeapAllocation no_alloc;
15455 StringsKey key(handle(array));
15456 int entry = FindEntry(&key);
15457 if (entry == kNotFound) return GetHeap()->undefined_value();
15458 return get(EntryToIndex(entry) + 1);
15462 Handle<MapCache> MapCache::Put(
15463 Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
15464 StringsKey key(array);
15466 Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
15467 int entry = new_cache->FindInsertionEntry(key.Hash());
15468 new_cache->set(EntryToIndex(entry), *array);
15469 new_cache->set(EntryToIndex(entry) + 1, *value);
15470 new_cache->ElementAdded();
15475 template<typename Derived, typename Shape, typename Key>
15476 Handle<Derived> Dictionary<Derived, Shape, Key>::New(
15478 int at_least_space_for,
15479 PretenureFlag pretenure) {
15480 ASSERT(0 <= at_least_space_for);
15481 Handle<Derived> dict = DerivedHashTable::New(isolate,
15482 at_least_space_for,
15483 USE_DEFAULT_MINIMUM_CAPACITY,
15486 // Initialize the next enumeration index.
15487 dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
15492 template<typename Derived, typename Shape, typename Key>
15493 void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
15494 Handle<Derived> dictionary) {
15495 Factory* factory = dictionary->GetIsolate()->factory();
15496 int length = dictionary->NumberOfElements();
15498 // Allocate and initialize iteration order array.
15499 Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
15500 for (int i = 0; i < length; i++) {
15501 iteration_order->set(i, Smi::FromInt(i));
15504 // Allocate array with enumeration order.
15505 Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);
15507 // Fill the enumeration order array with property details.
15508 int capacity = dictionary->Capacity();
15510 for (int i = 0; i < capacity; i++) {
15511 if (dictionary->IsKey(dictionary->KeyAt(i))) {
15512 int index = dictionary->DetailsAt(i).dictionary_index();
15513 enumeration_order->set(pos++, Smi::FromInt(index));
15517 // Sort the arrays wrt. enumeration order.
15518 iteration_order->SortPairs(*enumeration_order, enumeration_order->length());
15520 // Overwrite the enumeration_order with the enumeration indices.
15521 for (int i = 0; i < length; i++) {
15522 int index = Smi::cast(iteration_order->get(i))->value();
15523 int enum_index = PropertyDetails::kInitialIndex + i;
15524 enumeration_order->set(index, Smi::FromInt(enum_index));
15527 // Update the dictionary with new indices.
15528 capacity = dictionary->Capacity();
15530 for (int i = 0; i < capacity; i++) {
15531 if (dictionary->IsKey(dictionary->KeyAt(i))) {
15532 int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
15533 PropertyDetails details = dictionary->DetailsAt(i);
15534 PropertyDetails new_details = PropertyDetails(
15535 details.attributes(), details.type(), enum_index);
15536 dictionary->DetailsAtPut(i, new_details);
15540 // Set the next enumeration index.
15541 dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
15545 template<typename Derived, typename Shape, typename Key>
15546 Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
15547 Handle<Derived> dictionary, int n, Key key) {
15548 // Check whether there are enough enumeration indices to add n elements.
15549 if (Shape::kIsEnumerable &&
15550 !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
15551 // If not, we generate new indices for the properties.
15552 GenerateNewEnumerationIndices(dictionary);
15554 return DerivedHashTable::EnsureCapacity(dictionary, n, key);
15558 template<typename Derived, typename Shape, typename Key>
15559 Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
15560 Handle<Derived> dictionary,
15562 JSObject::DeleteMode mode) {
15563 Factory* factory = dictionary->GetIsolate()->factory();
15564 PropertyDetails details = dictionary->DetailsAt(entry);
15565 // Ignore attributes if forcing a deletion.
15566 if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
15567 return factory->false_value();
15570 dictionary->SetEntry(
15571 entry, factory->the_hole_value(), factory->the_hole_value());
15572 dictionary->ElementRemoved();
15573 return factory->true_value();
15577 template<typename Derived, typename Shape, typename Key>
15578 Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
15579 Handle<Derived> dictionary, Key key, Handle<Object> value) {
15580 int entry = dictionary->FindEntry(key);
15582 // If the entry is present set the value;
15583 if (entry != Dictionary::kNotFound) {
15584 dictionary->ValueAtPut(entry, *value);
15588 // Check whether the dictionary should be extended.
15589 dictionary = EnsureCapacity(dictionary, 1, key);
15591 USE(Shape::AsHandle(dictionary->GetIsolate(), key));
15593 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
15595 AddEntry(dictionary, key, value, details, dictionary->Hash(key));
15600 template<typename Derived, typename Shape, typename Key>
15601 Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
15602 Handle<Derived> dictionary,
15604 Handle<Object> value,
15605 PropertyDetails details) {
15606 // Valdate key is absent.
15607 SLOW_ASSERT((dictionary->FindEntry(key) == Dictionary::kNotFound));
15608 // Check whether the dictionary should be extended.
15609 dictionary = EnsureCapacity(dictionary, 1, key);
15611 AddEntry(dictionary, key, value, details, dictionary->Hash(key));
15616 // Add a key, value pair to the dictionary.
15617 template<typename Derived, typename Shape, typename Key>
15618 void Dictionary<Derived, Shape, Key>::AddEntry(
15619 Handle<Derived> dictionary,
15621 Handle<Object> value,
15622 PropertyDetails details,
15624 // Compute the key object.
15625 Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);
15627 uint32_t entry = dictionary->FindInsertionEntry(hash);
15628 // Insert element at empty or deleted entry
15629 if (!details.IsDeleted() &&
15630 details.dictionary_index() == 0 &&
15631 Shape::kIsEnumerable) {
15632 // Assign an enumeration index to the property and update
15633 // SetNextEnumerationIndex.
15634 int index = dictionary->NextEnumerationIndex();
15635 details = PropertyDetails(details.attributes(), details.type(), index);
15636 dictionary->SetNextEnumerationIndex(index + 1);
15638 dictionary->SetEntry(entry, k, value, details);
15639 ASSERT((dictionary->KeyAt(entry)->IsNumber() ||
15640 dictionary->KeyAt(entry)->IsName()));
15641 dictionary->ElementAdded();
15645 void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
15646 DisallowHeapAllocation no_allocation;
15647 // If the dictionary requires slow elements an element has already
15648 // been added at a high index.
15649 if (requires_slow_elements()) return;
15650 // Check if this index is high enough that we should require slow
15652 if (key > kRequiresSlowElementsLimit) {
15653 set_requires_slow_elements();
15656 // Update max key value.
15657 Object* max_index_object = get(kMaxNumberKeyIndex);
15658 if (!max_index_object->IsSmi() || max_number_key() < key) {
15659 FixedArray::set(kMaxNumberKeyIndex,
15660 Smi::FromInt(key << kRequiresSlowElementsTagSize));
15665 Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
15666 Handle<SeededNumberDictionary> dictionary,
15668 Handle<Object> value,
15669 PropertyDetails details) {
15670 dictionary->UpdateMaxNumberKey(key);
15671 SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound);
15672 return Add(dictionary, key, value, details);
15676 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
15677 Handle<UnseededNumberDictionary> dictionary,
15679 Handle<Object> value) {
15680 SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound);
15681 return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
15685 Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
15686 Handle<SeededNumberDictionary> dictionary,
15688 Handle<Object> value) {
15689 dictionary->UpdateMaxNumberKey(key);
15690 return AtPut(dictionary, key, value);
15694 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
15695 Handle<UnseededNumberDictionary> dictionary,
15697 Handle<Object> value) {
15698 return AtPut(dictionary, key, value);
15702 Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
15703 Handle<SeededNumberDictionary> dictionary,
15705 Handle<Object> value,
15706 PropertyDetails details) {
15707 int entry = dictionary->FindEntry(key);
15708 if (entry == kNotFound) {
15709 return AddNumberEntry(dictionary, key, value, details);
15711 // Preserve enumeration index.
15712 details = PropertyDetails(details.attributes(),
15714 dictionary->DetailsAt(entry).dictionary_index());
15715 Handle<Object> object_key =
15716 SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
15717 dictionary->SetEntry(entry, object_key, value, details);
15722 Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
15723 Handle<UnseededNumberDictionary> dictionary,
15725 Handle<Object> value) {
15726 int entry = dictionary->FindEntry(key);
15727 if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
15728 Handle<Object> object_key =
15729 UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
15730 dictionary->SetEntry(entry, object_key, value);
15736 template<typename Derived, typename Shape, typename Key>
15737 int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
15738 PropertyAttributes filter) {
15739 int capacity = DerivedHashTable::Capacity();
15741 for (int i = 0; i < capacity; i++) {
15742 Object* k = DerivedHashTable::KeyAt(i);
15743 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15744 PropertyDetails details = DetailsAt(i);
15745 if (details.IsDeleted()) continue;
15746 PropertyAttributes attr = details.attributes();
15747 if ((attr & filter) == 0) result++;
15754 template<typename Derived, typename Shape, typename Key>
15755 int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
15756 return NumberOfElementsFilterAttributes(
15757 static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
15761 template<typename Derived, typename Shape, typename Key>
15762 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
15763 FixedArray* storage,
15764 PropertyAttributes filter,
15765 typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
15766 ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
15767 int capacity = DerivedHashTable::Capacity();
15769 for (int i = 0; i < capacity; i++) {
15770 Object* k = DerivedHashTable::KeyAt(i);
15771 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15772 PropertyDetails details = DetailsAt(i);
15773 if (details.IsDeleted()) continue;
15774 PropertyAttributes attr = details.attributes();
15775 if ((attr & filter) == 0) storage->set(index++, k);
15778 if (sort_mode == Dictionary::SORTED) {
15779 storage->SortPairs(storage, index);
15781 ASSERT(storage->length() >= index);
15785 struct EnumIndexComparator {
15786 explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
15787 bool operator() (Smi* a, Smi* b) {
15788 PropertyDetails da(dict->DetailsAt(a->value()));
15789 PropertyDetails db(dict->DetailsAt(b->value()));
15790 return da.dictionary_index() < db.dictionary_index();
15792 NameDictionary* dict;
15796 void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
15797 int length = storage->length();
15798 int capacity = Capacity();
15799 int properties = 0;
15800 for (int i = 0; i < capacity; i++) {
15801 Object* k = KeyAt(i);
15802 if (IsKey(k) && !k->IsSymbol()) {
15803 PropertyDetails details = DetailsAt(i);
15804 if (details.IsDeleted() || details.IsDontEnum()) continue;
15805 storage->set(properties, Smi::FromInt(i));
15807 if (properties == length) break;
15810 CHECK_EQ(length, properties);
15811 EnumIndexComparator cmp(this);
15812 Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
15813 std::sort(start, start + length, cmp);
15814 for (int i = 0; i < length; i++) {
15815 int index = Smi::cast(storage->get(i))->value();
15816 storage->set(i, KeyAt(index));
15821 template<typename Derived, typename Shape, typename Key>
15822 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
15823 FixedArray* storage,
15825 PropertyAttributes filter,
15826 typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
15827 ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
15828 int capacity = DerivedHashTable::Capacity();
15829 for (int i = 0; i < capacity; i++) {
15830 Object* k = DerivedHashTable::KeyAt(i);
15831 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15832 PropertyDetails details = DetailsAt(i);
15833 if (details.IsDeleted()) continue;
15834 PropertyAttributes attr = details.attributes();
15835 if ((attr & filter) == 0) storage->set(index++, k);
15838 if (sort_mode == Dictionary::SORTED) {
15839 storage->SortPairs(storage, index);
15841 ASSERT(storage->length() >= index);
15845 // Backwards lookup (slow).
15846 template<typename Derived, typename Shape, typename Key>
15847 Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
15848 int capacity = DerivedHashTable::Capacity();
15849 for (int i = 0; i < capacity; i++) {
15850 Object* k = DerivedHashTable::KeyAt(i);
15851 if (Dictionary::IsKey(k)) {
15852 Object* e = ValueAt(i);
15853 if (e->IsPropertyCell()) {
15854 e = PropertyCell::cast(e)->value();
15856 if (e == value) return k;
15859 Heap* heap = Dictionary::GetHeap();
15860 return heap->undefined_value();
15864 Object* ObjectHashTable::Lookup(Handle<Object> key) {
15865 DisallowHeapAllocation no_gc;
15866 ASSERT(IsKey(*key));
15868 // If the object does not have an identity hash, it was never used as a key.
15869 Object* hash = key->GetHash();
15870 if (hash->IsUndefined()) {
15871 return GetHeap()->the_hole_value();
15873 int entry = FindEntry(key);
15874 if (entry == kNotFound) return GetHeap()->the_hole_value();
15875 return get(EntryToIndex(entry) + 1);
15879 Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
15880 Handle<Object> key,
15881 Handle<Object> value) {
15882 ASSERT(table->IsKey(*key));
15883 ASSERT(!value->IsTheHole());
15885 Isolate* isolate = table->GetIsolate();
15887 // Make sure the key object has an identity hash code.
15888 Handle<Smi> hash = Object::GetOrCreateHash(isolate, key);
15890 int entry = table->FindEntry(key);
15892 // Key is already in table, just overwrite value.
15893 if (entry != kNotFound) {
15894 table->set(EntryToIndex(entry) + 1, *value);
15898 // Check whether the hash table should be extended.
15899 table = EnsureCapacity(table, 1, key);
15900 table->AddEntry(table->FindInsertionEntry(hash->value()),
15907 Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table,
15908 Handle<Object> key,
15909 bool* was_present) {
15910 ASSERT(table->IsKey(*key));
15912 Object* hash = key->GetHash();
15913 if (hash->IsUndefined()) {
15914 *was_present = false;
15918 int entry = table->FindEntry(key);
15919 if (entry == kNotFound) {
15920 *was_present = false;
15924 *was_present = true;
15925 table->RemoveEntry(entry);
15926 return Shrink(table, key);
15930 void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
15931 set(EntryToIndex(entry), key);
15932 set(EntryToIndex(entry) + 1, value);
15937 void ObjectHashTable::RemoveEntry(int entry) {
15938 set_the_hole(EntryToIndex(entry));
15939 set_the_hole(EntryToIndex(entry) + 1);
15944 Object* WeakHashTable::Lookup(Handle<Object> key) {
15945 DisallowHeapAllocation no_gc;
15946 ASSERT(IsKey(*key));
15947 int entry = FindEntry(key);
15948 if (entry == kNotFound) return GetHeap()->the_hole_value();
15949 return get(EntryToValueIndex(entry));
15953 Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
15954 Handle<Object> key,
15955 Handle<Object> value) {
15956 ASSERT(table->IsKey(*key));
15957 int entry = table->FindEntry(key);
15958 // Key is already in table, just overwrite value.
15959 if (entry != kNotFound) {
15960 // TODO(ulan): Skipping write barrier is a temporary solution to avoid
15961 // memory leaks. Remove this once we have special visitor for weak fixed
15963 table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
15967 // Check whether the hash table should be extended.
15968 table = EnsureCapacity(table, 1, key, TENURED);
15970 table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
15975 void WeakHashTable::AddEntry(int entry,
15976 Handle<Object> key,
15977 Handle<Object> value) {
15978 DisallowHeapAllocation no_allocation;
15979 // TODO(ulan): Skipping write barrier is a temporary solution to avoid
15980 // memory leaks. Remove this once we have special visitor for weak fixed
15982 set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
15983 set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
15988 template<class Derived, class Iterator, int entrysize>
15989 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
15990 Isolate* isolate, int capacity, PretenureFlag pretenure) {
15991 // Capacity must be a power of two, since we depend on being able
15992 // to divide and multiple by 2 (kLoadFactor) to derive capacity
15993 // from number of buckets. If we decide to change kLoadFactor
15994 // to something other than 2, capacity should be stored as another
15995 // field of this object.
15996 capacity = RoundUpToPowerOf2(Max(kMinCapacity, capacity));
15997 if (capacity > kMaxCapacity) {
15998 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
16000 int num_buckets = capacity / kLoadFactor;
16001 Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
16002 kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
16003 backing_store->set_map_no_write_barrier(
16004 isolate->heap()->ordered_hash_table_map());
16005 Handle<Derived> table = Handle<Derived>::cast(backing_store);
16006 for (int i = 0; i < num_buckets; ++i) {
16007 table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
16009 table->SetNumberOfBuckets(num_buckets);
16010 table->SetNumberOfElements(0);
16011 table->SetNumberOfDeletedElements(0);
16016 template<class Derived, class Iterator, int entrysize>
16017 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
16018 Handle<Derived> table) {
16019 ASSERT(!table->IsObsolete());
16021 int nof = table->NumberOfElements();
16022 int nod = table->NumberOfDeletedElements();
16023 int capacity = table->Capacity();
16024 if ((nof + nod) < capacity) return table;
16025 // Don't need to grow if we can simply clear out deleted entries instead.
16026 // Note that we can't compact in place, though, so we always allocate
16028 return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
16032 template<class Derived, class Iterator, int entrysize>
16033 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
16034 Handle<Derived> table) {
16035 ASSERT(!table->IsObsolete());
16037 int nof = table->NumberOfElements();
16038 int capacity = table->Capacity();
16039 if (nof >= (capacity >> 2)) return table;
16040 return Rehash(table, capacity / 2);
16044 template<class Derived, class Iterator, int entrysize>
16045 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
16046 Handle<Derived> table) {
16047 ASSERT(!table->IsObsolete());
16049 Handle<Derived> new_table =
16050 Allocate(table->GetIsolate(),
16052 table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
16054 table->SetNextTable(*new_table);
16055 table->SetNumberOfDeletedElements(-1);
16061 template<class Derived, class Iterator, int entrysize>
16062 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove(
16063 Handle<Derived> table, Handle<Object> key, bool* was_present) {
16064 int entry = table->FindEntry(key);
16065 if (entry == kNotFound) {
16066 *was_present = false;
16069 *was_present = true;
16070 table->RemoveEntry(entry);
16071 return Shrink(table);
16075 template<class Derived, class Iterator, int entrysize>
16076 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
16077 Handle<Derived> table, int new_capacity) {
16078 ASSERT(!table->IsObsolete());
16080 Handle<Derived> new_table =
16081 Allocate(table->GetIsolate(),
16083 table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
16084 int nof = table->NumberOfElements();
16085 int nod = table->NumberOfDeletedElements();
16086 int new_buckets = new_table->NumberOfBuckets();
16088 int removed_holes_index = 0;
16090 for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
16091 Object* key = table->KeyAt(old_entry);
16092 if (key->IsTheHole()) {
16093 table->SetRemovedIndexAt(removed_holes_index++, old_entry);
16097 Object* hash = key->GetHash();
16098 int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
16099 Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
16100 new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
16101 int new_index = new_table->EntryToIndex(new_entry);
16102 int old_index = table->EntryToIndex(old_entry);
16103 for (int i = 0; i < entrysize; ++i) {
16104 Object* value = table->get(old_index + i);
16105 new_table->set(new_index + i, value);
16107 new_table->set(new_index + kChainOffset, chain_entry);
16111 ASSERT_EQ(nod, removed_holes_index);
16113 new_table->SetNumberOfElements(nof);
16114 table->SetNextTable(*new_table);
16120 template<class Derived, class Iterator, int entrysize>
16121 int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
16122 Handle<Object> key) {
16123 ASSERT(!IsObsolete());
16125 DisallowHeapAllocation no_gc;
16126 ASSERT(!key->IsTheHole());
16127 Object* hash = key->GetHash();
16128 if (hash->IsUndefined()) return kNotFound;
16129 for (int entry = HashToEntry(Smi::cast(hash)->value());
16130 entry != kNotFound;
16131 entry = ChainAt(entry)) {
16132 Object* candidate = KeyAt(entry);
16133 if (candidate->SameValueZero(*key))
16140 template<class Derived, class Iterator, int entrysize>
16141 int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) {
16142 ASSERT(!IsObsolete());
16144 int entry = UsedCapacity();
16145 int bucket = HashToBucket(hash);
16146 int index = EntryToIndex(entry);
16147 Object* chain_entry = get(kHashTableStartIndex + bucket);
16148 set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
16149 set(index + kChainOffset, chain_entry);
16150 SetNumberOfElements(NumberOfElements() + 1);
16155 template<class Derived, class Iterator, int entrysize>
16156 void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
16157 ASSERT(!IsObsolete());
16159 int index = EntryToIndex(entry);
16160 for (int i = 0; i < entrysize; ++i) {
16161 set_the_hole(index + i);
16163 SetNumberOfElements(NumberOfElements() - 1);
16164 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
16168 template Handle<OrderedHashSet>
16169 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
16170 Isolate* isolate, int capacity, PretenureFlag pretenure);
16172 template Handle<OrderedHashSet>
16173 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
16174 Handle<OrderedHashSet> table);
16176 template Handle<OrderedHashSet>
16177 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
16178 Handle<OrderedHashSet> table);
16180 template Handle<OrderedHashSet>
16181 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
16182 Handle<OrderedHashSet> table);
16184 template Handle<OrderedHashSet>
16185 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Remove(
16186 Handle<OrderedHashSet> table, Handle<Object> key, bool* was_present);
16189 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
16190 Handle<Object> key);
16193 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);
16196 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);
16199 template Handle<OrderedHashMap>
16200 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
16201 Isolate* isolate, int capacity, PretenureFlag pretenure);
16203 template Handle<OrderedHashMap>
16204 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
16205 Handle<OrderedHashMap> table);
16207 template Handle<OrderedHashMap>
16208 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
16209 Handle<OrderedHashMap> table);
16211 template Handle<OrderedHashMap>
16212 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
16213 Handle<OrderedHashMap> table);
16215 template Handle<OrderedHashMap>
16216 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Remove(
16217 Handle<OrderedHashMap> table, Handle<Object> key, bool* was_present);
16220 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
16221 Handle<Object> key);
16224 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);
16227 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);
16230 bool OrderedHashSet::Contains(Handle<Object> key) {
16231 return FindEntry(key) != kNotFound;
16235 Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
16236 Handle<Object> key) {
16237 if (table->FindEntry(key) != kNotFound) return table;
16239 table = EnsureGrowable(table);
16241 Handle<Smi> hash = GetOrCreateHash(table->GetIsolate(), key);
16242 int index = table->AddEntry(hash->value());
16243 table->set(index, *key);
16248 Object* OrderedHashMap::Lookup(Handle<Object> key) {
16249 DisallowHeapAllocation no_gc;
16250 int entry = FindEntry(key);
16251 if (entry == kNotFound) return GetHeap()->the_hole_value();
16252 return ValueAt(entry);
16256 Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table,
16257 Handle<Object> key,
16258 Handle<Object> value) {
16259 ASSERT(!key->IsTheHole());
16261 int entry = table->FindEntry(key);
16263 if (entry != kNotFound) {
16264 table->set(table->EntryToIndex(entry) + kValueOffset, *value);
16268 table = EnsureGrowable(table);
16270 Handle<Smi> hash = GetOrCreateHash(table->GetIsolate(), key);
16271 int index = table->AddEntry(hash->value());
16272 table->set(index, *key);
16273 table->set(index + kValueOffset, *value);
16278 template<class Derived, class TableType>
16279 Handle<JSObject> OrderedHashTableIterator<Derived, TableType>::Next(
16280 Handle<Derived> iterator) {
16281 Isolate* isolate = iterator->GetIsolate();
16282 Factory* factory = isolate->factory();
16284 Handle<Object> maybe_table(iterator->table(), isolate);
16285 if (!maybe_table->IsUndefined()) {
16286 iterator->Transition();
16288 Handle<TableType> table(TableType::cast(iterator->table()), isolate);
16289 int index = Smi::cast(iterator->index())->value();
16290 int used_capacity = table->UsedCapacity();
16292 while (index < used_capacity && table->KeyAt(index)->IsTheHole()) {
16296 if (index < used_capacity) {
16297 int entry_index = table->EntryToIndex(index);
16298 Handle<Object> value =
16299 Derived::ValueForKind(iterator, entry_index);
16300 iterator->set_index(Smi::FromInt(index + 1));
16301 return factory->NewIteratorResultObject(value, false);
16304 iterator->set_table(iterator->GetHeap()->undefined_value());
16307 return factory->NewIteratorResultObject(factory->undefined_value(), true);
16311 template<class Derived, class TableType>
16312 void OrderedHashTableIterator<Derived, TableType>::Transition() {
16313 Isolate* isolate = GetIsolate();
16314 Handle<TableType> table(TableType::cast(this->table()), isolate);
16315 if (!table->IsObsolete()) return;
16317 int index = Smi::cast(this->index())->value();
16318 while (table->IsObsolete()) {
16319 Handle<TableType> next_table(table->NextTable(), isolate);
16322 int nod = table->NumberOfDeletedElements();
16324 // When we clear the table we set the number of deleted elements to -1.
16328 int old_index = index;
16329 for (int i = 0; i < nod; ++i) {
16330 int removed_index = table->RemovedIndexAt(i);
16331 if (removed_index >= old_index) break;
16337 table = next_table;
16341 set_index(Smi::FromInt(index));
16345 template Handle<JSObject>
16346 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next(
16347 Handle<JSSetIterator> iterator);
16350 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Transition();
16353 template Handle<JSObject>
16354 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
16355 Handle<JSMapIterator> iterator);
16358 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Transition();
16361 Handle<Object> JSSetIterator::ValueForKind(
16362 Handle<JSSetIterator> iterator, int entry_index) {
16363 int kind = iterator->kind()->value();
16364 // Set.prototype only has values and entries.
16365 ASSERT(kind == kKindValues || kind == kKindEntries);
16367 Isolate* isolate = iterator->GetIsolate();
16368 Factory* factory = isolate->factory();
16370 Handle<OrderedHashSet> table(
16371 OrderedHashSet::cast(iterator->table()), isolate);
16372 Handle<Object> value = Handle<Object>(table->get(entry_index), isolate);
16374 if (kind == kKindEntries) {
16375 Handle<FixedArray> array = factory->NewFixedArray(2);
16376 array->set(0, *value);
16377 array->set(1, *value);
16378 return factory->NewJSArrayWithElements(array);
16385 Handle<Object> JSMapIterator::ValueForKind(
16386 Handle<JSMapIterator> iterator, int entry_index) {
16387 int kind = iterator->kind()->value();
16388 ASSERT(kind == kKindKeys || kind == kKindValues || kind == kKindEntries);
16390 Isolate* isolate = iterator->GetIsolate();
16391 Factory* factory = isolate->factory();
16393 Handle<OrderedHashMap> table(
16394 OrderedHashMap::cast(iterator->table()), isolate);
16398 return Handle<Object>(table->get(entry_index), isolate);
16401 return Handle<Object>(table->get(entry_index + 1), isolate);
16403 case kKindEntries: {
16404 Handle<Object> key(table->get(entry_index), isolate);
16405 Handle<Object> value(table->get(entry_index + 1), isolate);
16406 Handle<FixedArray> array = factory->NewFixedArray(2);
16407 array->set(0, *key);
16408 array->set(1, *value);
16409 return factory->NewJSArrayWithElements(array);
16414 return factory->undefined_value();
16418 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
16419 DeclaredAccessorDescriptor* descriptor)
16420 : array_(descriptor->serialized_data()->GetDataStartAddress()),
16421 length_(descriptor->serialized_data()->length()),
16426 const DeclaredAccessorDescriptorData*
16427 DeclaredAccessorDescriptorIterator::Next() {
16428 ASSERT(offset_ < length_);
16429 uint8_t* ptr = &array_[offset_];
16430 ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
16431 const DeclaredAccessorDescriptorData* data =
16432 reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
16433 offset_ += sizeof(*data);
16434 ASSERT(offset_ <= length_);
16439 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
16441 const DeclaredAccessorDescriptorData& descriptor,
16442 Handle<DeclaredAccessorDescriptor> previous) {
16443 int previous_length =
16444 previous.is_null() ? 0 : previous->serialized_data()->length();
16445 int length = sizeof(descriptor) + previous_length;
16446 Handle<ByteArray> serialized_descriptor =
16447 isolate->factory()->NewByteArray(length);
16448 Handle<DeclaredAccessorDescriptor> value =
16449 isolate->factory()->NewDeclaredAccessorDescriptor();
16450 value->set_serialized_data(*serialized_descriptor);
16451 // Copy in the data.
16453 DisallowHeapAllocation no_allocation;
16454 uint8_t* array = serialized_descriptor->GetDataStartAddress();
16455 if (previous_length != 0) {
16456 uint8_t* previous_array =
16457 previous->serialized_data()->GetDataStartAddress();
16458 MemCopy(array, previous_array, previous_length);
16459 array += previous_length;
16461 ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
16462 DeclaredAccessorDescriptorData* data =
16463 reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
16464 *data = descriptor;
16470 // Check if there is a break point at this code position.
16471 bool DebugInfo::HasBreakPoint(int code_position) {
16472 // Get the break point info object for this code position.
16473 Object* break_point_info = GetBreakPointInfo(code_position);
16475 // If there is no break point info object or no break points in the break
16476 // point info object there is no break point at this code position.
16477 if (break_point_info->IsUndefined()) return false;
16478 return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
16482 // Get the break point info object for this code position.
16483 Object* DebugInfo::GetBreakPointInfo(int code_position) {
16484 // Find the index of the break point info object for this code position.
16485 int index = GetBreakPointInfoIndex(code_position);
16487 // Return the break point info object if any.
16488 if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
16489 return BreakPointInfo::cast(break_points()->get(index));
16493 // Clear a break point at the specified code position.
16494 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
16496 Handle<Object> break_point_object) {
16497 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16498 debug_info->GetIsolate());
16499 if (break_point_info->IsUndefined()) return;
16500 BreakPointInfo::ClearBreakPoint(
16501 Handle<BreakPointInfo>::cast(break_point_info),
16502 break_point_object);
16506 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
16508 int source_position,
16509 int statement_position,
16510 Handle<Object> break_point_object) {
16511 Isolate* isolate = debug_info->GetIsolate();
16512 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16514 if (!break_point_info->IsUndefined()) {
16515 BreakPointInfo::SetBreakPoint(
16516 Handle<BreakPointInfo>::cast(break_point_info),
16517 break_point_object);
16521 // Adding a new break point for a code position which did not have any
16522 // break points before. Try to find a free slot.
16523 int index = kNoBreakPointInfo;
16524 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16525 if (debug_info->break_points()->get(i)->IsUndefined()) {
16530 if (index == kNoBreakPointInfo) {
16531 // No free slot - extend break point info array.
16532 Handle<FixedArray> old_break_points =
16533 Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
16534 Handle<FixedArray> new_break_points =
16535 isolate->factory()->NewFixedArray(
16536 old_break_points->length() +
16537 DebugInfo::kEstimatedNofBreakPointsInFunction);
16539 debug_info->set_break_points(*new_break_points);
16540 for (int i = 0; i < old_break_points->length(); i++) {
16541 new_break_points->set(i, old_break_points->get(i));
16543 index = old_break_points->length();
16545 ASSERT(index != kNoBreakPointInfo);
16547 // Allocate new BreakPointInfo object and set the break point.
16548 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
16549 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
16550 new_break_point_info->set_code_position(Smi::FromInt(code_position));
16551 new_break_point_info->set_source_position(Smi::FromInt(source_position));
16552 new_break_point_info->
16553 set_statement_position(Smi::FromInt(statement_position));
16554 new_break_point_info->set_break_point_objects(
16555 isolate->heap()->undefined_value());
16556 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
16557 debug_info->break_points()->set(index, *new_break_point_info);
16561 // Get the break point objects for a code position.
16562 Object* DebugInfo::GetBreakPointObjects(int code_position) {
16563 Object* break_point_info = GetBreakPointInfo(code_position);
16564 if (break_point_info->IsUndefined()) {
16565 return GetHeap()->undefined_value();
16567 return BreakPointInfo::cast(break_point_info)->break_point_objects();
16571 // Get the total number of break points.
16572 int DebugInfo::GetBreakPointCount() {
16573 if (break_points()->IsUndefined()) return 0;
16575 for (int i = 0; i < break_points()->length(); i++) {
16576 if (!break_points()->get(i)->IsUndefined()) {
16577 BreakPointInfo* break_point_info =
16578 BreakPointInfo::cast(break_points()->get(i));
16579 count += break_point_info->GetBreakPointCount();
16586 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
16587 Handle<Object> break_point_object) {
16588 Heap* heap = debug_info->GetHeap();
16589 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
16590 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16591 if (!debug_info->break_points()->get(i)->IsUndefined()) {
16592 Handle<BreakPointInfo> break_point_info =
16593 Handle<BreakPointInfo>(BreakPointInfo::cast(
16594 debug_info->break_points()->get(i)));
16595 if (BreakPointInfo::HasBreakPointObject(break_point_info,
16596 break_point_object)) {
16597 return *break_point_info;
16601 return heap->undefined_value();
16605 // Find the index of the break point info object for the specified code
16607 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
16608 if (break_points()->IsUndefined()) return kNoBreakPointInfo;
16609 for (int i = 0; i < break_points()->length(); i++) {
16610 if (!break_points()->get(i)->IsUndefined()) {
16611 BreakPointInfo* break_point_info =
16612 BreakPointInfo::cast(break_points()->get(i));
16613 if (break_point_info->code_position()->value() == code_position) {
16618 return kNoBreakPointInfo;
16622 // Remove the specified break point object.
16623 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
16624 Handle<Object> break_point_object) {
16625 Isolate* isolate = break_point_info->GetIsolate();
16626 // If there are no break points just ignore.
16627 if (break_point_info->break_point_objects()->IsUndefined()) return;
16628 // If there is a single break point clear it if it is the same.
16629 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16630 if (break_point_info->break_point_objects() == *break_point_object) {
16631 break_point_info->set_break_point_objects(
16632 isolate->heap()->undefined_value());
16636 // If there are multiple break points shrink the array
16637 ASSERT(break_point_info->break_point_objects()->IsFixedArray());
16638 Handle<FixedArray> old_array =
16639 Handle<FixedArray>(
16640 FixedArray::cast(break_point_info->break_point_objects()));
16641 Handle<FixedArray> new_array =
16642 isolate->factory()->NewFixedArray(old_array->length() - 1);
16643 int found_count = 0;
16644 for (int i = 0; i < old_array->length(); i++) {
16645 if (old_array->get(i) == *break_point_object) {
16646 ASSERT(found_count == 0);
16649 new_array->set(i - found_count, old_array->get(i));
16652 // If the break point was found in the list change it.
16653 if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
16657 // Add the specified break point object.
16658 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
16659 Handle<Object> break_point_object) {
16660 Isolate* isolate = break_point_info->GetIsolate();
16662 // If there was no break point objects before just set it.
16663 if (break_point_info->break_point_objects()->IsUndefined()) {
16664 break_point_info->set_break_point_objects(*break_point_object);
16667 // If the break point object is the same as before just ignore.
16668 if (break_point_info->break_point_objects() == *break_point_object) return;
16669 // If there was one break point object before replace with array.
16670 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16671 Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
16672 array->set(0, break_point_info->break_point_objects());
16673 array->set(1, *break_point_object);
16674 break_point_info->set_break_point_objects(*array);
16677 // If there was more than one break point before extend array.
16678 Handle<FixedArray> old_array =
16679 Handle<FixedArray>(
16680 FixedArray::cast(break_point_info->break_point_objects()));
16681 Handle<FixedArray> new_array =
16682 isolate->factory()->NewFixedArray(old_array->length() + 1);
16683 for (int i = 0; i < old_array->length(); i++) {
16684 // If the break point was there before just ignore.
16685 if (old_array->get(i) == *break_point_object) return;
16686 new_array->set(i, old_array->get(i));
16688 // Add the new break point.
16689 new_array->set(old_array->length(), *break_point_object);
16690 break_point_info->set_break_point_objects(*new_array);
16694 bool BreakPointInfo::HasBreakPointObject(
16695 Handle<BreakPointInfo> break_point_info,
16696 Handle<Object> break_point_object) {
16698 if (break_point_info->break_point_objects()->IsUndefined()) return false;
16699 // Single break point.
16700 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16701 return break_point_info->break_point_objects() == *break_point_object;
16703 // Multiple break points.
16704 FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
16705 for (int i = 0; i < array->length(); i++) {
16706 if (array->get(i) == *break_point_object) {
16714 // Get the number of break points.
16715 int BreakPointInfo::GetBreakPointCount() {
16717 if (break_point_objects()->IsUndefined()) return 0;
16718 // Single break point.
16719 if (!break_point_objects()->IsFixedArray()) return 1;
16720 // Multiple break points.
16721 return FixedArray::cast(break_point_objects())->length();
16725 Object* JSDate::GetField(Object* object, Smi* index) {
16726 return JSDate::cast(object)->DoGetField(
16727 static_cast<FieldIndex>(index->value()));
16731 Object* JSDate::DoGetField(FieldIndex index) {
16732 ASSERT(index != kDateValue);
16734 DateCache* date_cache = GetIsolate()->date_cache();
16736 if (index < kFirstUncachedField) {
16737 Object* stamp = cache_stamp();
16738 if (stamp != date_cache->stamp() && stamp->IsSmi()) {
16739 // Since the stamp is not NaN, the value is also not NaN.
16740 int64_t local_time_ms =
16741 date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
16742 SetCachedFields(local_time_ms, date_cache);
16745 case kYear: return year();
16746 case kMonth: return month();
16747 case kDay: return day();
16748 case kWeekday: return weekday();
16749 case kHour: return hour();
16750 case kMinute: return min();
16751 case kSecond: return sec();
16752 default: UNREACHABLE();
16756 if (index >= kFirstUTCField) {
16757 return GetUTCField(index, value()->Number(), date_cache);
16760 double time = value()->Number();
16761 if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
16763 int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
16764 int days = DateCache::DaysFromTime(local_time_ms);
16766 if (index == kDays) return Smi::FromInt(days);
16768 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16769 if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
16770 ASSERT(index == kTimeInDay);
16771 return Smi::FromInt(time_in_day_ms);
16775 Object* JSDate::GetUTCField(FieldIndex index,
16777 DateCache* date_cache) {
16778 ASSERT(index >= kFirstUTCField);
16780 if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
16782 int64_t time_ms = static_cast<int64_t>(value);
16784 if (index == kTimezoneOffset) {
16785 return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
16788 int days = DateCache::DaysFromTime(time_ms);
16790 if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
16792 if (index <= kDayUTC) {
16793 int year, month, day;
16794 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16795 if (index == kYearUTC) return Smi::FromInt(year);
16796 if (index == kMonthUTC) return Smi::FromInt(month);
16797 ASSERT(index == kDayUTC);
16798 return Smi::FromInt(day);
16801 int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
16803 case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
16804 case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
16805 case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
16806 case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
16807 case kDaysUTC: return Smi::FromInt(days);
16808 case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
16809 default: UNREACHABLE();
16817 void JSDate::SetValue(Object* value, bool is_value_nan) {
16819 if (is_value_nan) {
16820 HeapNumber* nan = GetIsolate()->heap()->nan_value();
16821 set_cache_stamp(nan, SKIP_WRITE_BARRIER);
16822 set_year(nan, SKIP_WRITE_BARRIER);
16823 set_month(nan, SKIP_WRITE_BARRIER);
16824 set_day(nan, SKIP_WRITE_BARRIER);
16825 set_hour(nan, SKIP_WRITE_BARRIER);
16826 set_min(nan, SKIP_WRITE_BARRIER);
16827 set_sec(nan, SKIP_WRITE_BARRIER);
16828 set_weekday(nan, SKIP_WRITE_BARRIER);
16830 set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
16835 void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) {
16836 int days = DateCache::DaysFromTime(local_time_ms);
16837 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16838 int year, month, day;
16839 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16840 int weekday = date_cache->Weekday(days);
16841 int hour = time_in_day_ms / (60 * 60 * 1000);
16842 int min = (time_in_day_ms / (60 * 1000)) % 60;
16843 int sec = (time_in_day_ms / 1000) % 60;
16844 set_cache_stamp(date_cache->stamp());
16845 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
16846 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
16847 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
16848 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
16849 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
16850 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
16851 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
16855 void JSArrayBuffer::Neuter() {
16856 ASSERT(is_external());
16857 set_backing_store(NULL);
16858 set_byte_length(Smi::FromInt(0));
16862 void JSArrayBufferView::NeuterView() {
16863 set_byte_offset(Smi::FromInt(0));
16864 set_byte_length(Smi::FromInt(0));
16868 void JSDataView::Neuter() {
16873 void JSTypedArray::Neuter() {
16875 set_length(Smi::FromInt(0));
16876 set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
16880 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
16881 switch (elements_kind) {
16882 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
16883 case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
16885 TYPED_ARRAYS(TYPED_ARRAY_CASE)
16886 #undef TYPED_ARRAY_CASE
16890 return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
16895 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
16896 Handle<JSTypedArray> typed_array) {
16898 Handle<Map> map(typed_array->map());
16899 Isolate* isolate = typed_array->GetIsolate();
16901 ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind()));
16903 Handle<Map> new_map = Map::TransitionElementsTo(
16905 FixedToExternalElementsKind(map->elements_kind()));
16907 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
16908 Handle<FixedTypedArrayBase> fixed_typed_array(
16909 FixedTypedArrayBase::cast(typed_array->elements()));
16910 Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
16911 fixed_typed_array->DataSize(), false);
16912 memcpy(buffer->backing_store(),
16913 fixed_typed_array->DataPtr(),
16914 fixed_typed_array->DataSize());
16915 Handle<ExternalArray> new_elements =
16916 isolate->factory()->NewExternalArray(
16917 fixed_typed_array->length(), typed_array->type(),
16918 static_cast<uint8_t*>(buffer->backing_store()));
16920 buffer->set_weak_first_view(*typed_array);
16921 ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value());
16922 typed_array->set_buffer(*buffer);
16923 JSObject::SetMapAndElements(typed_array, new_map, new_elements);
16929 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
16930 Handle<Object> result(buffer(), GetIsolate());
16931 if (*result != Smi::FromInt(0)) {
16932 ASSERT(IsExternalArrayElementsKind(map()->elements_kind()));
16933 return Handle<JSArrayBuffer>::cast(result);
16935 Handle<JSTypedArray> self(this);
16936 return MaterializeArrayBuffer(self);
16940 HeapType* PropertyCell::type() {
16941 return static_cast<HeapType*>(type_raw());
16945 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
16946 ASSERT(IsPropertyCell());
16947 set_type_raw(type, ignored);
16951 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
16952 Handle<Object> value) {
16953 Isolate* isolate = cell->GetIsolate();
16954 Handle<HeapType> old_type(cell->type(), isolate);
16955 // TODO(2803): Do not track ConsString as constant because they cannot be
16956 // embedded into code.
16957 Handle<HeapType> new_type = value->IsConsString() || value->IsTheHole()
16958 ? HeapType::Any(isolate) : HeapType::Constant(value, isolate);
16960 if (new_type->Is(old_type)) {
16964 cell->dependent_code()->DeoptimizeDependentCodeGroup(
16965 isolate, DependentCode::kPropertyCellChangedGroup);
16967 if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
16971 return HeapType::Any(isolate);
16975 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
16976 Handle<Object> value) {
16977 cell->set_value(*value);
16978 if (!HeapType::Any()->Is(cell->type())) {
16979 Handle<HeapType> new_type = UpdatedType(cell, value);
16980 cell->set_type(*new_type);
16986 void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
16987 CompilationInfo* info) {
16988 Handle<DependentCode> codes =
16989 DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
16990 DependentCode::kPropertyCellChangedGroup,
16991 info->object_wrapper());
16992 if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
16993 info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
16994 cell, info->zone());
16998 const char* GetBailoutReason(BailoutReason reason) {
16999 ASSERT(reason < kLastErrorMessage);
17000 #define ERROR_MESSAGES_TEXTS(C, T) T,
17001 static const char* error_messages_[] = {
17002 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
17004 #undef ERROR_MESSAGES_TEXTS
17005 return error_messages_[reason];
17009 } } // namespace v8::internal