1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "allocation-site-scopes.h"
10 #include "arguments.h"
11 #include "bootstrapper.h"
13 #include "code-stubs.h"
14 #include "cpu-profiler.h"
16 #include "deoptimizer.h"
19 #include "execution.h"
20 #include "full-codegen.h"
22 #include "isolate-inl.h"
24 #include "objects-inl.h"
25 #include "objects-visiting-inl.h"
26 #include "macro-assembler.h"
27 #include "mark-compact.h"
28 #include "safepoint-table.h"
29 #include "string-search.h"
30 #include "string-stream.h"
33 #ifdef ENABLE_DISASSEMBLER
35 #include "disassembler.h"
// Picks the narrowest HeapType usable for tracking a field holding this
// value: None for a none-representation, Class(map) when field-type
// tracking is enabled and the value is a spec object with a stable map,
// otherwise Any.
// NOTE(review): this listing has elided lines (braces are unbalanced) --
// verify against the full source.
41 Handle<HeapType> Object::OptimalType(Isolate* isolate,
42 Representation representation) {
43 if (representation.IsNone()) return HeapType::None(isolate);
44 if (FLAG_track_field_types) {
45 if (representation.IsHeapObject() && IsHeapObject()) {
46 // We can track only JavaScript objects with stable maps.
47 Handle<Map> map(HeapObject::cast(this)->map(), isolate);
48 if (map->is_stable() &&
49 map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
50 map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
// Stable map: the class type stays valid until the map transitions.
51 return HeapType::Class(map, isolate);
// Fallback when no narrower type can be tracked.
55 return HeapType::Any(isolate);
// ES ToObject: wraps a primitive (number, SIMD value, boolean, string,
// symbol) in the corresponding JSValue built from the native context's
// wrapper constructor. JSReceivers are returned unchanged; undefined/null
// yield an empty MaybeHandle (caller must throw).
// NOTE(review): lines are elided here (e.g. the final "return result")
// -- verify against the full source.
59 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
60 Handle<Object> object,
61 Handle<Context> native_context) {
62 if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
63 Handle<JSFunction> constructor;
64 if (object->IsNumber()) {
65 constructor = handle(native_context->number_function(), isolate);
66 } else if (object->IsFloat32x4()) {
67 constructor = handle(native_context->float32x4_function(), isolate);
68 } else if (object->IsFloat64x2()) {
69 constructor = handle(native_context->float64x2_function(), isolate);
70 } else if (object->IsInt32x4()) {
71 constructor = handle(native_context->int32x4_function(), isolate);
72 } else if (object->IsBoolean()) {
73 constructor = handle(native_context->boolean_function(), isolate);
74 } else if (object->IsString()) {
75 constructor = handle(native_context->string_function(), isolate);
76 } else if (object->IsSymbol()) {
77 constructor = handle(native_context->symbol_function(), isolate);
// Undefined/null (and anything unhandled): signal failure to the caller.
79 return MaybeHandle<JSReceiver>();
// Allocate the wrapper object and store the primitive as its value.
81 Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
82 Handle<JSValue>::cast(result)->set_value(*object);
// ES ToBoolean: false for false/undefined/null/undetectable objects,
// Smi 0, empty strings, and NaN/0 heap numbers; true otherwise.
// NOTE(review): the trailing "return true" for remaining objects is
// elided in this listing.
87 bool Object::BooleanValue() {
88 if (IsBoolean()) return IsTrue();
89 if (IsSmi()) return Smi::cast(this)->value() != 0;
90 if (IsUndefined() || IsNull()) return false;
91 if (IsUndetectableObject()) return false; // Undetectable object is false.
92 if (IsString()) return String::cast(this)->length() != 0;
93 if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
// True if this object can be invoked as a function: a JSFunction (after
// unwrapping function-proxy call traps) or a heap object whose map has
// an instance call handler.
// NOTE(review): the declaration of "fun" (presumably "Object* fun =
// this;") is elided in this listing -- confirm against the full source.
98 bool Object::IsCallable() {
100 while (fun->IsJSFunctionProxy()) {
101 fun = JSFunctionProxy::cast(fun)->call_trap();
103 return fun->IsJSFunction() ||
104 (fun->IsHeapObject() &&
105 HeapObject::cast(fun)->map()->has_instance_call_handler());
// Looks up a named property starting from this object. Primitives are
// redirected to the instance prototype of their wrapper constructor in
// the current native context; JSReceivers look up on themselves.
// Must not allocate (LookupResult holds raw pointers).
// NOTE(review): several branch lines are elided in this listing (the
// JSReceiver arm and the IsNumber test) -- verify against full source.
109 void Object::Lookup(Handle<Name> name, LookupResult* result) {
110 DisallowHeapAllocation no_gc;
111 Object* holder = NULL;
112 if (IsJSReceiver()) {
115 Context* native_context = result->isolate()->context()->native_context();
117 holder = native_context->number_function()->instance_prototype();
118 } else if (IsFloat32x4()) {
119 holder = native_context->float32x4_function()->instance_prototype();
120 } else if (IsFloat64x2()) {
121 holder = native_context->float64x2_function()->instance_prototype();
122 } else if (IsInt32x4()) {
123 holder = native_context->int32x4_function()->instance_prototype();
124 } else if (IsString()) {
125 holder = native_context->string_function()->instance_prototype();
126 } else if (IsSymbol()) {
127 holder = native_context->symbol_function()->instance_prototype();
128 } else if (IsBoolean()) {
129 holder = native_context->boolean_function()->instance_prototype();
// Unexpected receiver kind: crash with diagnostic markers so the
// minidump identifies this call site.
131 result->isolate()->PushStackTraceAndDie(
132 0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
135 ASSERT(holder != NULL); // Cannot handle null or undefined.
136 JSReceiver::cast(holder)->Lookup(name, result);
// Convenience wrapper: performs the Lookup for `name` on `object` and
// then delegates to GetProperty, reporting the found attributes.
// NOTE(review): the final "return result" is elided in this listing.
140 MaybeHandle<Object> Object::GetPropertyWithReceiver(
141 Handle<Object> object,
142 Handle<Object> receiver,
144 PropertyAttributes* attributes) {
145 LookupResult lookup(name->GetIsolate());
146 object->Lookup(name, &lookup);
147 MaybeHandle<Object> result =
148 GetProperty(object, receiver, &lookup, name, attributes);
149 ASSERT(*attributes <= ABSENT);
// Extracts an int32 from this object: Smis convert directly; heap
// numbers convert only when the double round-trips through int32
// without loss. Returns whether the conversion succeeded.
// NOTE(review): the Smi guard, returns, and closing braces are elided
// in this listing.
154 bool Object::ToInt32(int32_t* value) {
156 *value = Smi::cast(this)->value();
159 if (IsHeapNumber()) {
160 double num = HeapNumber::cast(this)->value();
// FastI2D(FastD2I(num)) == num iff num is exactly representable as int32.
161 if (FastI2D(FastD2I(num)) == num) {
162 *value = FastD2I(num);
// Extracts a uint32: non-negative Smis convert directly; heap numbers
// convert only when non-negative and exactly representable as uint32.
// Returns whether the conversion succeeded.
// NOTE(review): guards/returns are elided in this listing -- verify.
170 bool Object::ToUint32(uint32_t* value) {
172 int num = Smi::cast(this)->value();
174 *value = static_cast<uint32_t>(num);
178 if (IsHeapNumber()) {
179 double num = HeapNumber::cast(this)->value();
// Round-trip check rejects fractional, negative, and out-of-range doubles.
180 if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
181 *value = FastD2UI(num);
// Object overload: non-heap objects can never match a template; heap
// objects defer to the map-based check below.
189 bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
190 if (!object->IsHeapObject()) return false;
191 return IsTemplateFor(HeapObject::cast(object)->map());
// Returns true if objects with `map` were created from this function
// template or one of its descendants, by walking the constructor's
// parent_template chain.
195 bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
196 // There is a constraint on the object; check.
197 if (!map->IsJSObjectMap()) return false;
198 // Fetch the constructor function of the object.
199 Object* cons_obj = map->constructor();
200 if (!cons_obj->IsJSFunction()) return false;
201 JSFunction* fun = JSFunction::cast(cons_obj);
202 // Iterate through the chain of inheriting function templates to
203 // see if the required one occurs.
204 for (Object* type = fun->shared()->function_data();
205 type->IsFunctionTemplateInfo();
206 type = FunctionTemplateInfo::cast(type)->parent_template()) {
207 if (type == this) return true;
209 // Didn't find the required type in the inheritance chain.
// Reinterpret-casts a raw pointer to To*, asserting (debug only) that
// the address is aligned for To. Used by the declared-accessor readers.
214 template<typename To>
215 static inline To* CheckedCast(void *from) {
216 uintptr_t temp = reinterpret_cast<uintptr_t>(from);
217 ASSERT(temp % sizeof(To) == 0);
218 return reinterpret_cast<To*>(temp);
// Reads an 8/16/32-bit value at `ptr` (per descriptor.size), masks both
// it and the compare value with the bitmask, and returns the boolean
// equality result. Unknown sizes yield undefined.
// NOTE(review): the parameter list, "ptr" declaration, case labels and
// breaks are elided in this listing -- verify against full source.
222 static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
225 uint32_t bitmask = descriptor.bitmask;
226 uint32_t compare_value = descriptor.compare_value;
228 switch (descriptor.size) {
230 value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
231 compare_value &= 0xff; // Restrict comparison to the width actually read.
235 value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
236 compare_value &= 0xffff;
240 value = *CheckedCast<uint32_t>(ptr);
244 return isolate->factory()->undefined_value();
246 return isolate->factory()->ToBoolean(
247 (bitmask & value) == (bitmask & compare_value));
// Pointer-compare variant: reads a pointer-sized word at `ptr` and
// returns whether it equals the descriptor's compare pointer.
251 static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
254 uintptr_t compare_value =
255 reinterpret_cast<uintptr_t>(descriptor.compare_value);
256 uintptr_t value = *CheckedCast<uintptr_t>(ptr);
257 return isolate->factory()->ToBoolean(compare_value == value);
// Reads a primitive of descriptor.data_type at `ptr` and boxes it as a
// V8 value. int8..int32 variants accumulate into int32_value and are
// boxed at the bottom; uint32/float/double/bool return directly.
// Allocation is re-enabled (AllowHeapAllocation) only where a number
// must be boxed, since the caller holds a DisallowHeapAllocation scope.
// NOTE(review): "break" statements and closing braces are elided in
// this listing -- verify against the full source.
261 static Handle<Object> GetPrimitiveValue(
262 const PrimitiveValueDescriptor& descriptor,
265 int32_t int32_value = 0;
266 switch (descriptor.data_type) {
267 case kDescriptorInt8Type:
268 int32_value = *CheckedCast<int8_t>(ptr);
270 case kDescriptorUint8Type:
271 int32_value = *CheckedCast<uint8_t>(ptr);
273 case kDescriptorInt16Type:
274 int32_value = *CheckedCast<int16_t>(ptr);
276 case kDescriptorUint16Type:
277 int32_value = *CheckedCast<uint16_t>(ptr);
279 case kDescriptorInt32Type:
280 int32_value = *CheckedCast<int32_t>(ptr);
282 case kDescriptorUint32Type: {
// uint32 may exceed Smi range, so box via NewNumberFromUint.
283 uint32_t value = *CheckedCast<uint32_t>(ptr);
284 AllowHeapAllocation allow_gc;
285 return isolate->factory()->NewNumberFromUint(value);
287 case kDescriptorBoolType: {
// A bool is a single bit at bool_offset within the addressed byte.
288 uint8_t byte = *CheckedCast<uint8_t>(ptr);
289 return isolate->factory()->ToBoolean(
290 byte & (0x1 << descriptor.bool_offset));
292 case kDescriptorFloatType: {
293 float value = *CheckedCast<float>(ptr);
294 AllowHeapAllocation allow_gc;
295 return isolate->factory()->NewNumber(value);
297 case kDescriptorDoubleType: {
298 double value = *CheckedCast<double>(ptr);
299 AllowHeapAllocation allow_gc;
300 return isolate->factory()->NewNumber(value);
// All integer cases funnel here and are boxed as an int32 number.
303 AllowHeapAllocation allow_gc;
304 return isolate->factory()->NewNumberFromInt(int32_value);
// Interprets a DeclaredAccessorDescriptor program against `receiver`:
// starting from the object's address, each descriptor op dereferences a
// pointer, shifts by a byte offset, or loads an internal field, until a
// terminal op (return-object, compare, primitive-value) produces the
// result. GC is disallowed because raw interior pointers are walked.
// NOTE(review): the iteration loop header, breaks, and some argument
// lists are elided in this listing -- verify against the full source.
308 static Handle<Object> GetDeclaredAccessorProperty(
309 Handle<Object> receiver,
310 Handle<DeclaredAccessorInfo> info,
312 DisallowHeapAllocation no_gc;
313 char* current = reinterpret_cast<char*>(*receiver);
314 DeclaredAccessorDescriptorIterator iterator(info->descriptor());
316 const DeclaredAccessorDescriptorData* data = iterator.Next();
317 switch (data->type) {
318 case kDescriptorReturnObject: {
// Terminal op: the current address points at an Object* to return.
319 ASSERT(iterator.Complete());
320 current = *CheckedCast<char*>(current);
321 return handle(*CheckedCast<Object*>(current), isolate);
323 case kDescriptorPointerDereference:
324 ASSERT(!iterator.Complete());
325 current = *reinterpret_cast<char**>(current);
327 case kDescriptorPointerShift:
328 ASSERT(!iterator.Complete());
329 current += data->pointer_shift_descriptor.byte_offset;
331 case kDescriptorObjectDereference: {
// Load an internal field (stored as a Smi-encoded pointer) and
// continue walking from it.
332 ASSERT(!iterator.Complete());
333 Object* object = CheckedCast<Object>(current);
334 int field = data->object_dereference_descriptor.internal_field;
335 Object* smi = JSObject::cast(object)->GetInternalField(field);
336 ASSERT(smi->IsSmi());
337 current = reinterpret_cast<char*>(smi);
340 case kDescriptorBitmaskCompare:
341 ASSERT(iterator.Complete());
342 return PerformCompare(data->bitmask_compare_descriptor,
345 case kDescriptorPointerCompare:
346 ASSERT(iterator.Complete());
347 return PerformCompare(data->pointer_compare_descriptor,
350 case kDescriptorPrimitiveValue:
351 ASSERT(iterator.Complete());
352 return GetPrimitiveValue(data->primitive_value_descriptor,
// Fallback (presumably unreachable) -- return undefined.
358 return isolate->factory()->undefined_value();
// Ensures the object's fast elements backing store is writable: if it
// is the shared copy-on-write array, clones it into a regular
// FixedArray and installs the copy. Returns the writable store.
362 Handle<FixedArray> JSObject::EnsureWritableFastElements(
363 Handle<JSObject> object) {
364 ASSERT(object->HasFastSmiOrObjectElements());
365 Isolate* isolate = object->GetIsolate();
366 Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
// Non-COW stores are already writable; nothing to do.
367 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
368 Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
369 elems, isolate->factory()->fixed_array_map());
370 object->set_elements(*writable_elems);
371 isolate->counters()->cow_arrays_converted()->Increment();
372 return writable_elems;
// Loads a property through an accessor `structure`: a declared accessor
// (descriptor program), an executable AccessorInfo (API getter
// callback), or an AccessorPair (__defineGetter__-style JS getter).
// Throws a TypeError for incompatible API receivers; symbols are not
// supported through the API and yield undefined.
// NOTE(review): several lines (else branches, argument tails, braces)
// are elided in this listing -- verify against the full source.
376 MaybeHandle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object,
377 Handle<Object> receiver,
378 Handle<Object> structure,
380 Isolate* isolate = name->GetIsolate();
381 ASSERT(!structure->IsForeign());
382 // api style callbacks.
383 if (structure->IsAccessorInfo()) {
384 Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure);
385 if (!accessor_info->IsCompatibleReceiver(*receiver)) {
386 Handle<Object> args[2] = { name, receiver };
387 Handle<Object> error =
388 isolate->factory()->NewTypeError("incompatible_method_receiver",
391 return isolate->Throw<Object>(error);
393 // TODO(rossberg): Handling symbols in the API requires changing the API,
394 // so we do not support it for now.
395 if (name->IsSymbol()) return isolate->factory()->undefined_value();
396 if (structure->IsDeclaredAccessorInfo()) {
397 return GetDeclaredAccessorProperty(
399 Handle<DeclaredAccessorInfo>::cast(structure),
403 Handle<ExecutableAccessorInfo> data =
404 Handle<ExecutableAccessorInfo>::cast(structure);
405 v8::AccessorGetterCallback call_fun =
406 v8::ToCData<v8::AccessorGetterCallback>(data->getter());
// No native getter registered: treated as an absent value.
407 if (call_fun == NULL) return isolate->factory()->undefined_value();
409 Handle<String> key = Handle<String>::cast(name);
410 LOG(isolate, ApiNamedPropertyAccess("load", *object, *name));
411 PropertyCallbackArguments args(isolate, data->data(), *receiver, *object);
412 v8::Handle<v8::Value> result =
413 args.Call(call_fun, v8::Utils::ToLocal(key));
// The embedder callback may have scheduled an exception.
414 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
415 if (result.IsEmpty()) {
416 return isolate->factory()->undefined_value();
418 Handle<Object> return_value = v8::Utils::OpenHandle(*result);
419 return_value->VerifyApiCallResultType();
420 // Rebox handle before return.
421 return handle(*return_value, isolate);
424 // __defineGetter__ callback
425 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
427 if (getter->IsSpecFunction()) {
428 // TODO(rossberg): nicer would be to cast to some JSCallable here...
429 return Object::GetPropertyWithDefinedGetter(
430 object, receiver, Handle<JSReceiver>::cast(getter));
432 // Getter is not a function.
433 return isolate->factory()->undefined_value();
// Loads a property from a JSProxy by invoking its "get" trap (falling
// back to the derived get trap). Symbols are not yet supported and
// yield undefined.
437 MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
438 Handle<Object> receiver,
440 Isolate* isolate = proxy->GetIsolate();
442 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
443 if (name->IsSymbol()) return isolate->factory()->undefined_value();
445 Handle<Object> args[] = { receiver, name };
447 proxy, "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
// Invokes a JavaScript-defined getter with `receiver` as this-value.
// Notifies the debugger first so "step into" can land in the getter.
451 MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
452 Handle<Object> object,
453 Handle<Object> receiver,
454 Handle<JSReceiver> getter) {
455 Isolate* isolate = getter->GetIsolate();
456 Debug* debug = isolate->debug();
457 // Handle stepping into a getter if step into is active.
458 // TODO(rossberg): should this apply to getters that are function proxies?
459 if (debug->StepInActive() && getter->IsJSFunction()) {
461 Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
// Call the getter with no arguments; the last flag requests that the
// call be treated as a property access for exception reporting.
464 return Execution::Call(isolate, getter, receiver, 0, NULL, true);
468 // Only deal with CALLBACKS and INTERCEPTOR
// Called when the embedder's access check denied a named load. Still
// honors ALL_CAN_READ accessors (searching the prototype chain) and
// real named properties behind interceptors; otherwise reports the
// failed access check and returns undefined.
// NOTE(review): case labels and closing braces are elided in this
// listing -- verify against the full source.
469 MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
470 Handle<JSObject> object,
471 Handle<Object> receiver,
472 LookupResult* result,
474 PropertyAttributes* attributes) {
475 Isolate* isolate = name->GetIsolate();
476 if (result->IsProperty()) {
477 switch (result->type()) {
479 // Only allow API accessors.
480 Handle<Object> callback_obj(result->GetCallbackObject(), isolate);
481 if (callback_obj->IsAccessorInfo()) {
482 if (!AccessorInfo::cast(*callback_obj)->all_can_read()) break;
483 *attributes = result->GetAttributes();
484 // Fall through to GetPropertyWithCallback.
485 } else if (callback_obj->IsAccessorPair()) {
486 if (!AccessorPair::cast(*callback_obj)->all_can_read()) break;
487 // Fall through to GetPropertyWithCallback.
491 Handle<JSObject> holder(result->holder(), isolate);
492 return GetPropertyWithCallback(holder, receiver, callback_obj, name);
497 // Search ALL_CAN_READ accessors in prototype chain.
498 LookupResult r(isolate);
499 result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
500 if (r.IsProperty()) {
// Recurse with the prototype-chain hit; it may be ALL_CAN_READ.
501 return GetPropertyWithFailedAccessCheck(
502 object, receiver, &r, name, attributes);
507 // If the object has an interceptor, try real named properties.
508 // No access check in GetPropertyAttributeWithInterceptor.
509 LookupResult r(isolate);
510 result->holder()->LookupRealNamedProperty(name, &r);
511 if (r.IsProperty()) {
512 return GetPropertyWithFailedAccessCheck(
513 object, receiver, &r, name, attributes);
522 // No accessible property found.
523 *attributes = ABSENT;
524 isolate->ReportFailedAccessCheck(object, v8::ACCESS_GET);
525 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
526 return isolate->factory()->undefined_value();
// Attribute-query counterpart of GetPropertyWithFailedAccessCheck:
// after a denied access check, still reports attributes for
// ALL_CAN_READ accessors and interceptor-backed real properties;
// otherwise reports the failed check.
// NOTE(review): case labels, the `name` parameter line, and the final
// ABSENT return are elided in this listing -- verify against source.
530 PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
531 Handle<JSObject> object,
532 LookupResult* result,
534 bool continue_search) {
535 if (result->IsProperty()) {
536 switch (result->type()) {
538 // Only allow API accessors.
539 Handle<Object> obj(result->GetCallbackObject(), object->GetIsolate());
540 if (obj->IsAccessorInfo()) {
541 Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(obj);
542 if (info->all_can_read()) {
543 return result->GetAttributes();
545 } else if (obj->IsAccessorPair()) {
546 Handle<AccessorPair> pair = Handle<AccessorPair>::cast(obj);
547 if (pair->all_can_read()) {
548 return result->GetAttributes();
557 if (!continue_search) break;
558 // Search ALL_CAN_READ accessors in prototype chain.
559 LookupResult r(object->GetIsolate());
560 result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
561 if (r.IsProperty()) {
562 return GetPropertyAttributeWithFailedAccessCheck(
563 object, &r, name, continue_search);
569 // If the object has an interceptor, try real named properties.
570 // No access check in GetPropertyAttributeWithInterceptor.
571 LookupResult r(object->GetIsolate());
572 if (continue_search) {
573 result->holder()->LookupRealNamedProperty(name, &r);
575 result->holder()->LocalLookupRealNamedProperty(name, &r);
577 if (!r.IsFound()) break;
578 return GetPropertyAttributeWithFailedAccessCheck(
579 object, &r, name, continue_search);
588 object->GetIsolate()->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
589 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// Raw-pointer variant: reads a dictionary-mode property value,
// unwrapping the PropertyCell for global objects. The result is never
// a cell.
594 Object* JSObject::GetNormalizedProperty(const LookupResult* result) {
595 ASSERT(!HasFastProperties());
596 Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
597 if (IsGlobalObject()) {
598 value = PropertyCell::cast(value)->value();
600 ASSERT(!value->IsPropertyCell() && !value->IsCell());
// Handle variant of GetNormalizedProperty: same dictionary read with
// PropertyCell unwrapping for globals, but GC-safe via handles.
605 Handle<Object> JSObject::GetNormalizedProperty(Handle<JSObject> object,
606 const LookupResult* result) {
607 ASSERT(!object->HasFastProperties());
608 Isolate* isolate = object->GetIsolate();
609 Handle<Object> value(object->property_dictionary()->ValueAt(
610 result->GetDictionaryEntry()), isolate);
611 if (object->IsGlobalObject()) {
612 value = Handle<Object>(Handle<PropertyCell>::cast(value)->value(), isolate);
614 ASSERT(!value->IsPropertyCell() && !value->IsCell());
// Writes the value of an existing dictionary-mode property found by a
// prior lookup. Globals store through the PropertyCell (updating the
// cell's inferred type); others write the dictionary slot directly.
619 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
620 const LookupResult* result,
621 Handle<Object> value) {
622 ASSERT(!object->HasFastProperties());
623 NameDictionary* property_dictionary = object->property_dictionary();
624 if (object->IsGlobalObject()) {
625 Handle<PropertyCell> cell(PropertyCell::cast(
626 property_dictionary->ValueAt(result->GetDictionaryEntry())));
627 PropertyCell::SetValueInferType(cell, value);
629 property_dictionary->ValueAtPut(result->GetDictionaryEntry(), *value);
// Adds or updates a named dictionary-mode property. New entries on
// globals are wrapped in a PropertyCell; existing entries preserve
// their enumeration index unless previously deleted. Non-unique names
// are internalized first so dictionary keys stay canonical.
// NOTE(review): the `name` parameter line and an early return are
// elided in this listing -- verify against the full source.
634 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
636 Handle<Object> value,
637 PropertyDetails details) {
638 ASSERT(!object->HasFastProperties());
639 Handle<NameDictionary> property_dictionary(object->property_dictionary());
641 if (!name->IsUniqueName()) {
642 name = object->GetIsolate()->factory()->InternalizeString(
643 Handle<String>::cast(name));
646 int entry = property_dictionary->FindEntry(name);
647 if (entry == NameDictionary::kNotFound) {
648 Handle<Object> store_value = value;
649 if (object->IsGlobalObject()) {
// Globals hold values behind PropertyCells so ICs can cache them.
650 store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
653 property_dictionary = NameDictionary::Add(
654 property_dictionary, name, store_value, details);
655 object->set_properties(*property_dictionary);
659 PropertyDetails original_details = property_dictionary->DetailsAt(entry);
660 int enumeration_index;
661 // Preserve the enumeration index unless the property was deleted.
662 if (original_details.IsDeleted()) {
663 enumeration_index = property_dictionary->NextEnumerationIndex();
664 property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
666 enumeration_index = original_details.dictionary_index();
667 ASSERT(enumeration_index > 0);
670 details = PropertyDetails(
671 details.attributes(), details.type(), enumeration_index);
673 if (object->IsGlobalObject()) {
674 Handle<PropertyCell> cell(
675 PropertyCell::cast(property_dictionary->ValueAt(entry)));
676 PropertyCell::SetValueInferType(cell, value);
677 // Please note we have to update the property details.
678 property_dictionary->DetailsAtPut(entry, details);
680 property_dictionary->SetEntry(entry, name, value, details);
// Deletes a dictionary-mode property. Globals cannot drop their
// PropertyCell (ICs may reference it), so the cell is set to the hole
// and the entry marked deleted; DONT_DELETE properties survive unless
// mode is FORCE_DELETION, which also swaps the map to invalidate ICs.
// Non-globals delete the dictionary entry and shrink the table.
// NOTE(review): the `name`/`mode` parameter lines and some returns are
// elided in this listing -- verify against the full source.
685 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
688 ASSERT(!object->HasFastProperties());
689 Isolate* isolate = object->GetIsolate();
690 Handle<NameDictionary> dictionary(object->property_dictionary());
691 int entry = dictionary->FindEntry(name);
692 if (entry != NameDictionary::kNotFound) {
693 // If we have a global object set the cell to the hole.
694 if (object->IsGlobalObject()) {
695 PropertyDetails details = dictionary->DetailsAt(entry);
696 if (details.IsDontDelete()) {
697 if (mode != FORCE_DELETION) return isolate->factory()->false_value();
698 // When forced to delete global properties, we have to make a
699 // map change to invalidate any ICs that think they can load
700 // from the DontDelete cell without checking if it contains
702 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
703 ASSERT(new_map->is_dictionary_map());
704 object->set_map(*new_map);
706 Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
707 Handle<Object> value = isolate->factory()->the_hole_value();
708 PropertyCell::SetValueInferType(cell, value);
709 dictionary->DetailsAtPut(entry, details.AsDeleted());
711 Handle<Object> deleted(
712 NameDictionary::DeleteProperty(dictionary, entry, mode));
713 if (*deleted == isolate->heap()->true_value()) {
// Reclaim space after a successful delete.
714 Handle<NameDictionary> new_properties =
715 NameDictionary::Shrink(dictionary, name);
716 object->set_properties(*new_properties);
721 return isolate->factory()->true_value();
// True if an API-created object may have been modified since creation:
// its map, element kind, or property storage no longer matches the
// pristine state produced by its API constructor.
// NOTE(review): early "return false" lines for the guard conditions
// appear to be elided in this listing.
725 bool JSObject::IsDirty() {
726 Object* cons_obj = map()->constructor();
727 if (!cons_obj->IsJSFunction())
729 JSFunction* fun = JSFunction::cast(cons_obj);
730 if (!fun->shared()->IsApiFunction())
732 // If the object is fully fast case and has the same map it was
733 // created with then no changes can have been made to it.
734 return map() != fun->initial_map()
735 || !HasFastObjectElements()
736 || !HasFastProperties();
// Core named-property load. Walks the prototype chain from `object` to
// the result holder performing embedder access checks, then dispatches
// on the lookup result type: dictionary field, fast field, constant,
// callback accessor, proxy handler, or interceptor. Reports the found
// attributes via *attributes (ABSENT when nothing was found).
// NOTE(review): case labels, loop conditions, and the final ternary's
// else-arm are elided in this listing -- verify against full source.
740 MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
741 Handle<Object> receiver,
742 LookupResult* result,
744 PropertyAttributes* attributes) {
745 Isolate* isolate = name->GetIsolate();
746 Factory* factory = isolate->factory();
748 // Make sure that the top context does not change when doing
749 // callbacks or interceptor calls.
750 AssertNoContextChange ncc(isolate);
752 // Traverse the prototype chain from the current object (this) to
753 // the holder and check for access rights. This avoids traversing the
754 // objects more than once in case of interceptors, because the
755 // holder will always be the interceptor holder and the search may
756 // only continue with a current object just after the interceptor
757 // holder in the prototype chain.
758 // Proxy handlers do not use the proxy's prototype, so we can skip this.
759 if (!result->IsHandler()) {
760 ASSERT(*object != object->GetPrototype(isolate));
761 Handle<Object> last = result->IsProperty()
762 ? Handle<Object>(result->holder(), isolate)
763 : Handle<Object>::cast(factory->null_value());
764 for (Handle<Object> current = object;
766 current = Handle<Object>(current->GetPrototype(isolate), isolate)) {
767 if (current->IsAccessCheckNeeded()) {
768 // Check if we're allowed to read from the current object. Note
769 // that even though we may not actually end up loading the named
770 // property from the current object, we still check that we have
772 Handle<JSObject> checked = Handle<JSObject>::cast(current);
773 if (!isolate->MayNamedAccess(checked, name, v8::ACCESS_GET)) {
774 return JSObject::GetPropertyWithFailedAccessCheck(
775 checked, receiver, result, name, attributes);
778 // Stop traversing the chain once we reach the last object in the
779 // chain; either the holder of the result or null in case of an
781 if (current.is_identical_to(last)) break;
785 if (!result->IsProperty()) {
786 *attributes = ABSENT;
787 return factory->undefined_value();
789 *attributes = result->GetAttributes();
791 Handle<Object> value;
792 switch (result->type()) {
// Dictionary-mode property on the holder.
794 value = JSObject::GetNormalizedProperty(
795 handle(result->holder(), isolate), result);
// Fast in-object / backing-store field.
799 value = JSObject::FastPropertyAt(handle(result->holder(), isolate),
800 result->representation(),
801 result->GetFieldIndex().field_index());
804 return handle(result->GetConstant(), isolate);
806 return JSObject::GetPropertyWithCallback(
807 handle(result->holder(), isolate),
809 handle(result->GetCallbackObject(), isolate),
812 return JSProxy::GetPropertyWithHandler(
813 handle(result->proxy(), isolate), receiver, name);
815 return JSObject::GetPropertyWithInterceptor(
816 handle(result->holder(), isolate), receiver, name, attributes);
// The hole marks an uninitialized read-only slot; surface undefined.
821 ASSERT(!value->IsTheHole() || result->IsReadOnly());
822 return value->IsTheHole() ? Handle<Object>::cast(factory->undefined_value())
// Indexed-property load. Iterates up the prototype chain; primitives
// are redirected to their wrapper prototypes, proxies dispatch to the
// handler, and JSObjects are handled inline (access check,
// interceptor, then elements accessor) for speed. Returns undefined
// when no holder provides the element.
// NOTE(review): the loop condition, `index` parameter line, and some
// argument tails are elided in this listing -- verify against source.
827 MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
828 Handle<Object> object,
829 Handle<Object> receiver,
831 Handle<Object> holder;
833 // Iterate up the prototype chain until an element is found or the null
834 // prototype is encountered.
835 for (holder = object;
837 holder = Handle<Object>(holder->GetPrototype(isolate), isolate)) {
838 if (!holder->IsJSObject()) {
839 Context* native_context = isolate->context()->native_context();
840 if (holder->IsNumber()) {
841 holder = Handle<Object>(
842 native_context->number_function()->instance_prototype(), isolate);
843 } else if (holder->IsFloat32x4()) {
844 holder = Handle<Object>(
845 native_context->float32x4_function()->instance_prototype(),
847 } else if (holder->IsFloat64x2()) {
848 holder = Handle<Object>(
849 native_context->float64x2_function()->instance_prototype(),
851 } else if (holder->IsInt32x4()) {
852 holder = Handle<Object>(
853 native_context->int32x4_function()->instance_prototype(), isolate);
854 } else if (holder->IsString()) {
855 holder = Handle<Object>(
856 native_context->string_function()->instance_prototype(), isolate);
857 } else if (holder->IsSymbol()) {
858 holder = Handle<Object>(
859 native_context->symbol_function()->instance_prototype(), isolate);
860 } else if (holder->IsBoolean()) {
861 holder = Handle<Object>(
862 native_context->boolean_function()->instance_prototype(), isolate);
863 } else if (holder->IsJSProxy()) {
864 return JSProxy::GetElementWithHandler(
865 Handle<JSProxy>::cast(holder), receiver, index);
867 // Undefined and null have no indexed properties.
868 ASSERT(holder->IsUndefined() || holder->IsNull());
869 return isolate->factory()->undefined_value();
873 // Inline the case for JSObjects. Doing so significantly improves the
874 // performance of fetching elements where checking the prototype chain is
876 Handle<JSObject> js_object = Handle<JSObject>::cast(holder);
878 // Check access rights if needed.
879 if (js_object->IsAccessCheckNeeded()) {
880 if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
881 isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
882 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
883 return isolate->factory()->undefined_value();
887 if (js_object->HasIndexedInterceptor()) {
888 return JSObject::GetElementWithInterceptor(js_object, receiver, index);
// Empty fixed array means no own elements; skip to the prototype.
891 if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
892 Handle<Object> result;
893 ASSIGN_RETURN_ON_EXCEPTION(
895 js_object->GetElementsAccessor()->Get(receiver, js_object, index),
// The hole means "absent here" -- continue up the chain.
897 if (!result->IsTheHole()) return result;
901 return isolate->factory()->undefined_value();
// Returns the [[Prototype]] of this value without allocating. Smis and
// heap numbers use the Number prototype; other primitives map to their
// wrapper constructor's instance prototype; JSReceivers read their map;
// everything else (undefined/null) yields the null value.
// NOTE(review): the Smi guard line is elided in this listing.
905 Object* Object::GetPrototype(Isolate* isolate) {
906 DisallowHeapAllocation no_alloc;
908 Context* context = isolate->context()->native_context();
909 return context->number_function()->instance_prototype();
912 HeapObject* heap_object = HeapObject::cast(this);
914 // The object is either a number, a string, a boolean,
915 // a real JS object, or a Harmony proxy.
916 if (heap_object->IsJSReceiver()) {
917 return heap_object->map()->prototype();
919 Context* context = isolate->context()->native_context();
921 if (heap_object->IsHeapNumber()) {
922 return context->number_function()->instance_prototype();
924 if (heap_object->IsFloat32x4()) {
925 return context->float32x4_function()->instance_prototype();
927 if (heap_object->IsFloat64x2()) {
928 return context->float64x2_function()->instance_prototype();
930 if (heap_object->IsInt32x4()) {
931 return context->int32x4_function()->instance_prototype();
933 if (heap_object->IsString()) {
934 return context->string_function()->instance_prototype();
936 if (heap_object->IsSymbol()) {
937 return context->symbol_function()->instance_prototype();
939 if (heap_object->IsBoolean()) {
940 return context->boolean_function()->instance_prototype();
942 return isolate->heap()->null_value();
// Handle-based convenience wrapper around the raw GetPrototype above.
947 Handle<Object> Object::GetPrototype(Isolate* isolate,
948 Handle<Object> object) {
949 return handle(object->GetPrototype(isolate), isolate);
// Map used to classify this value in maps-as-markers contexts: Smis
// are treated as heap numbers; heap objects report their real map.
953 Map* Object::GetMarkerMap(Isolate* isolate) {
954 if (IsSmi()) return isolate->heap()->heap_number_map();
955 return HeapObject::cast(this)->map();
// Returns this value's identity hash as a Smi: numbers hash their
// bit pattern, names use their string hash, oddballs hash their
// to_string, and JSReceivers use their (possibly absent) identity hash.
// NOTE(review): the IsNumber/IsName/IsOddball guard lines are elided
// in this listing -- verify against the full source.
959 Object* Object::GetHash() {
960 // The object is either a number, a name, an odd-ball,
961 // a real JS object, or a Harmony proxy.
963 uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
964 return Smi::FromInt(hash & Smi::kMaxValue);
967 uint32_t hash = Name::cast(this)->Hash();
968 return Smi::FromInt(hash);
971 uint32_t hash = Oddball::cast(this)->to_string()->Hash();
972 return Smi::FromInt(hash);
975 ASSERT(IsJSReceiver());
976 return JSReceiver::cast(this)->GetIdentityHash();
// Like GetHash, but creates (and stores) an identity hash for
// JSReceivers that do not yet have one.
// NOTE(review): the early return of a ready-made hash is elided in
// this listing.
980 Handle<Object> Object::GetOrCreateHash(Handle<Object> object,
982 Handle<Object> hash(object->GetHash(), isolate);
986 ASSERT(object->IsJSReceiver());
987 return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
// ES SameValue: identity, or numeric equality where NaN equals NaN and
// +0/-0 are distinguished (via the sign of 1/x), or string content
// equality. Other combinations are unequal.
// NOTE(review): the final "return false" is elided in this listing.
991 bool Object::SameValue(Object* other) {
992 if (other == this) return true;
994 // The object is either a number, a name, an odd-ball,
995 // a real JS object, or a Harmony proxy.
996 if (IsNumber() && other->IsNumber()) {
997 double this_value = Number();
998 double other_value = other->Number();
999 bool equal = this_value == other_value;
1000 // SameValue(NaN, NaN) is true.
1001 if (!equal) return std::isnan(this_value) && std::isnan(other_value);
1002 // SameValue(0.0, -0.0) is false.
1003 return (this_value != 0) || ((1 / this_value) == (1 / other_value));
1005 if (IsString() && other->IsString()) {
1006 return String::cast(this)->Equals(String::cast(other));
// Prints a short description of this object to `out` by rendering into
// a heap-backed StringStream and flushing it to the file.
1012 void Object::ShortPrint(FILE* out) {
1013 HeapStringAllocator allocator;
1014 StringStream accumulator(&allocator);
1015 ShortPrint(&accumulator);
1016 accumulator.OutputToFile(out);
// StringStream variant: Smis print their value, heap objects delegate
// to HeapObjectShortPrint.
// NOTE(review): the IsSmi guard/else lines are elided in this listing.
1020 void Object::ShortPrint(StringStream* accumulator) {
1022 Smi::cast(this)->SmiPrint(accumulator);
1024 HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
// Prints this Smi's integer value to the given file.
1029 void Smi::SmiPrint(FILE* out) {
1030 PrintF(out, "%d", value());
// Appends this Smi's integer value to the accumulator.
1034 void Smi::SmiPrint(StringStream* accumulator) {
1035 accumulator->Add("%d", value());
1039 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
1040 // English? Returns false for non-ASCII or words that don't start with
1041 // a capital letter. The a/an rule follows pronunciation in English.
1042 // We don't use the BBC's overcorrect "an historic occasion" though if
1043 // you speak a dialect you may well say "an 'istoric occasion".
// Decision is made from the first two characters only: vowels (except
// U followed by a capital, which sounds like "you") take "an", as do
// letters whose spoken names start with a vowel sound (F, H, M, N, R,
// S, X) when used as initialisms.
1044 static bool AnWord(String* str) {
1045 if (str->length() == 0) return false; // A nothing.
1046 int c0 = str->Get(0);
1047 int c1 = str->length() > 1 ? str->Get(1) : 0;
1050 return true; // An Umpire, but a UTF8String, a U.
1052 } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
1053 return true; // An Ape, an ABCBook.
1054 } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
1055 (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
1056 c0 == 'S' || c0 == 'X')) {
1057 return true; // An MP3File, an M.
// Flattens a ConsString into a fresh sequential string (one- or
// two-byte to match the representation), then rewires the cons so its
// first part is the flat result and its second is the empty string.
// Allocation in new space only keeps the caller's pretenure hint.
// NOTE(review): the else branch, result assignments, and final return
// are elided in this listing -- verify against the full source.
1063 Handle<String> String::SlowFlatten(Handle<ConsString> cons,
1064 PretenureFlag pretenure) {
1065 ASSERT(AllowHeapAllocation::IsAllowed());
1066 ASSERT(cons->second()->length() != 0);
1067 Isolate* isolate = cons->GetIsolate();
1068 int length = cons->length();
1069 PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure
1071 Handle<SeqString> result;
1072 if (cons->IsOneByteRepresentation()) {
1073 Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
1074 length, tenure).ToHandleChecked();
1075 DisallowHeapAllocation no_gc;
1076 WriteToFlat(*cons, flat->GetChars(), 0, length);
1079 Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString(
1080 length, tenure).ToHandleChecked();
1081 DisallowHeapAllocation no_gc;
1082 WriteToFlat(*cons, flat->GetChars(), 0, length);
// Short-circuit future flattens: the cons now points at the flat copy.
1085 cons->set_first(*result);
1086 cons->set_second(isolate->heap()->empty_string());
1087 ASSERT(result->IsFlat());
// Morphs this string in place into a two-byte external string whose
// characters live in the embedder-supplied |resource|. Returns false if the
// string is too small to be converted (see kShortSize check below).
1093 bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
1094 // Externalizing twice leaks the external resource, so it's
1095 // prohibited by the API.
1096 ASSERT(!this->IsExternalString());
1097 #ifdef ENABLE_SLOW_ASSERTS
1098 if (FLAG_enable_slow_asserts) {
1099 // Assert that the resource and the string are equivalent.
1100 ASSERT(static_cast<size_t>(this->length()) == resource->length());
1101 ScopedVector<uc16> smart_chars(this->length());
1102 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1103 ASSERT(memcmp(smart_chars.start(),
1105 resource->length() * sizeof(smart_chars[0])) == 0);
1108 Heap* heap = GetHeap();
1109 int size = this->Size(); // Byte size of the original string.
// Strings smaller than the short-external header cannot be morphed at all.
1110 if (size < ExternalString::kShortSize) {
1113 bool is_ascii = this->IsOneByteRepresentation();
1114 bool is_internalized = this->IsInternalizedString();
1116 // Morph the string to an external string by replacing the map and
1117 // reinitializing the fields. This won't work if
1118 // - the space the existing string occupies is too small for a regular
1120 // - the existing string is in old pointer space and the backing store of
1121 // the external string is not aligned. The GC cannot deal with a field
1122 // containing a possibly unaligned address to outside of V8's heap.
1123 // In either case we resort to a short external string instead, omitting
1124 // the field caching the address of the backing store. When we encounter
1125 // short external strings in generated code, we need to bailout to runtime.
1127 if (size < ExternalString::kSize ||
1128 heap->old_pointer_space()->Contains(this)) {
// Pick the short-external map variant matching internalized/one-byte-data.
1129 new_map = is_internalized
1132 short_external_internalized_string_with_one_byte_data_map()
1133 : heap->short_external_internalized_string_map())
1135 ? heap->short_external_string_with_one_byte_data_map()
1136 : heap->short_external_string_map());
1138 new_map = is_internalized
1140 ? heap->external_internalized_string_with_one_byte_data_map()
1141 : heap->external_internalized_string_map())
1143 ? heap->external_string_with_one_byte_data_map()
1144 : heap->external_string_map());
1147 // Byte size of the external String object.
1148 int new_size = this->SizeFromMap(new_map);
// The external form is smaller; fill the tail with a filler object so the
// heap stays iterable.
1149 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1151 // We are storing the new map using release store after creating a filler for
1152 // the left-over space to avoid races with the sweeper thread.
1153 this->synchronized_set_map(new_map);
1155 ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
1156 self->set_resource(resource);
1157 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1159 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// ASCII variant of MakeExternal: morphs this string in place into an
// external ASCII string backed by the embedder-supplied |resource|.
1164 bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
1165 #ifdef ENABLE_SLOW_ASSERTS
1166 if (FLAG_enable_slow_asserts) {
1167 // Assert that the resource and the string are equivalent.
1168 ASSERT(static_cast<size_t>(this->length()) == resource->length());
1169 if (this->IsTwoByteRepresentation()) {
// A two-byte string may still be externalized as ASCII if every code unit
// fits in one byte; verify that here.
1170 ScopedVector<uint16_t> smart_chars(this->length());
1171 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1172 ASSERT(String::IsOneByte(smart_chars.start(), this->length()));
1174 ScopedVector<char> smart_chars(this->length());
1175 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1176 ASSERT(memcmp(smart_chars.start(),
1178 resource->length() * sizeof(smart_chars[0])) == 0);
1181 Heap* heap = GetHeap();
1182 int size = this->Size(); // Byte size of the original string.
// Too small to hold even the short-external layout: refuse to convert.
1183 if (size < ExternalString::kShortSize) {
1186 bool is_internalized = this->IsInternalizedString();
1188 // Morph the string to an external string by replacing the map and
1189 // reinitializing the fields. This won't work if
1190 // - the space the existing string occupies is too small for a regular
1192 // - the existing string is in old pointer space and the backing store of
1193 // the external string is not aligned. The GC cannot deal with a field
1194 // containing a possibly unaligned address to outside of V8's heap.
1195 // In either case we resort to a short external string instead, omitting
1196 // the field caching the address of the backing store. When we encounter
1197 // short external strings in generated code, we need to bailout to runtime.
1199 if (size < ExternalString::kSize ||
1200 heap->old_pointer_space()->Contains(this)) {
1201 new_map = is_internalized
1202 ? heap->short_external_ascii_internalized_string_map()
1203 : heap->short_external_ascii_string_map();
1205 new_map = is_internalized
1206 ? heap->external_ascii_internalized_string_map()
1207 : heap->external_ascii_string_map();
1210 // Byte size of the external String object.
1211 int new_size = this->SizeFromMap(new_map);
// Fill the now-unused tail so the heap remains iterable.
1212 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1214 // We are storing the new map using release store after creating a filler for
1215 // the left-over space to avoid races with the sweeper thread.
1216 this->synchronized_set_map(new_map);
1218 ExternalAsciiString* self = ExternalAsciiString::cast(this);
1219 self->set_resource(resource);
1220 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1222 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// Appends an abbreviated, escaped rendering of this string to |accumulator|
// for debugging output. Long strings are truncated to kMaxShortPrintLength
// with a trailing "..."; control characters force backslash-escaped output.
1227 void String::StringShortPrint(StringStream* accumulator) {
1229 if (len > kMaxShortPrintLength) {
1230 accumulator->Add("<Very long string[%u]>", len);
1234 if (!LooksValid()) {
1235 accumulator->Add("<Invalid String>");
// Stream over the (possibly cons) string without flattening it.
1239 ConsStringIteratorOp op;
1240 StringCharacterStream stream(this, &op);
1242 bool truncated = false;
1243 if (len > kMaxShortPrintLength) {
1244 len = kMaxShortPrintLength;
// First pass: decide whether the prefix contains any non-printable ASCII.
1248 for (int i = 0; i < len; i++) {
1249 uint16_t c = stream.GetNext();
1251 if (c < 32 || c >= 127) {
// Printable-only case: emit the characters verbatim.
1257 accumulator->Add("<String[%u]: ", length());
1258 for (int i = 0; i < len; i++) {
1259 accumulator->Put(static_cast<char>(stream.GetNext()));
1261 accumulator->Put('>');
1263 // Backslash indicates that the string contains control
1264 // characters and that backslashes are therefore escaped.
1265 accumulator->Add("<String[%u]\\: ", length());
1266 for (int i = 0; i < len; i++) {
1267 uint16_t c = stream.GetNext();
1269 accumulator->Add("\\n");
1270 } else if (c == '\r') {
1271 accumulator->Add("\\r");
1272 } else if (c == '\\') {
1273 accumulator->Add("\\\\");
1274 } else if (c < 32 || c > 126) {
1275 accumulator->Add("\\x%02x", c);
1277 accumulator->Put(static_cast<char>(c));
// Ellipsis marks output that was cut off at kMaxShortPrintLength.
1281 accumulator->Put('.');
1282 accumulator->Put('.');
1283 accumulator->Put('.');
1285 accumulator->Put('>');
// Appends a one-line debug description of this JSObject to |accumulator|,
// specialized by instance type (arrays, functions, regexps, ...) with a
// generic constructor-name-based fallback for plain objects.
1291 void JSObject::JSObjectShortPrint(StringStream* accumulator) {
1292 switch (map()->instance_type()) {
1293 case JS_ARRAY_TYPE: {
1294 double length = JSArray::cast(this)->length()->IsUndefined()
1296 : JSArray::cast(this)->length()->Number();
1297 accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
1300 case JS_WEAK_MAP_TYPE: {
1301 accumulator->Add("<JS WeakMap>");
1304 case JS_WEAK_SET_TYPE: {
1305 accumulator->Add("<JS WeakSet>");
1308 case JS_REGEXP_TYPE: {
1309 accumulator->Add("<JS RegExp>");
1312 case JS_FUNCTION_TYPE: {
1313 JSFunction* function = JSFunction::cast(this);
1314 Object* fun_name = function->shared()->DebugName();
1315 bool printed = false;
1316 if (fun_name->IsString()) {
1317 String* str = String::cast(fun_name);
1318 if (str->length() > 0) {
1319 accumulator->Add("<JS Function ");
1320 accumulator->Put(str);
1325 accumulator->Add("<JS Function");
// Always include the SharedFunctionInfo pointer so functions can be
// correlated across debug dumps.
1327 accumulator->Add(" (SharedFunctionInfo %p)",
1328 reinterpret_cast<void*>(function->shared()));
1329 accumulator->Put('>');
1332 case JS_GENERATOR_OBJECT_TYPE: {
1333 accumulator->Add("<JS Generator>");
1336 case JS_MODULE_TYPE: {
1337 accumulator->Add("<JS Module>");
1340 // All other JSObjects are rather similar to each other (JSObject,
1341 // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
1343 Map* map_of_this = map();
1344 Heap* heap = GetHeap();
1345 Object* constructor = map_of_this->constructor();
1346 bool printed = false;
// Sanity-check pointers before dereferencing: this printer also runs on
// post-crash/corrupted heaps.
1347 if (constructor->IsHeapObject() &&
1348 !heap->Contains(HeapObject::cast(constructor))) {
1349 accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
1351 bool global_object = IsJSGlobalProxy();
1352 if (constructor->IsJSFunction()) {
1353 if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
1354 accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
1356 Object* constructor_name =
1357 JSFunction::cast(constructor)->shared()->name();
1358 if (constructor_name->IsString()) {
1359 String* str = String::cast(constructor_name);
1360 if (str->length() > 0) {
// AnWord picks "a"/"an" so the message reads naturally in English.
1361 bool vowel = AnWord(str);
1362 accumulator->Add("<%sa%s ",
1363 global_object ? "Global Object: " : "",
1365 accumulator->Put(str);
1366 accumulator->Add(" with %smap %p",
1367 map_of_this->is_deprecated() ? "deprecated " : "",
1375 accumulator->Add("<JS %sObject", global_object ? "Global " : "");
1379 accumulator->Add(" value = ");
1380 JSValue::cast(this)->value()->ShortPrint(accumulator);
1382 accumulator->Put('>');
// Traces an elements-kind transition (from_kind -> to_kind) for |object| to
// |file|, including the current top JS frame and both backing stores.
// No output is produced when the kind does not actually change.
1389 void JSObject::PrintElementsTransition(
1390 FILE* file, Handle<JSObject> object,
1391 ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
1392 ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
1393 if (from_kind != to_kind) {
1394 PrintF(file, "elements transition [");
1395 PrintElementsKind(file, from_kind);
1396 PrintF(file, " -> ");
1397 PrintElementsKind(file, to_kind);
1398 PrintF(file, "] in ");
1399 JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
1400 PrintF(file, " for ");
1401 object->ShortPrint(file);
1402 PrintF(file, " from ");
1403 from_elements->ShortPrint(file);
1404 PrintF(file, " to ");
1405 to_elements->ShortPrint(file);
// Traces a field generalization on this map to |file|: which descriptor was
// modified, the old/new representations and field types, the reason, and the
// current top JS frame.
1411 void Map::PrintGeneralization(FILE* file,
1416 bool constant_to_field,
1417 Representation old_representation,
1418 Representation new_representation,
1419 HeapType* old_field_type,
1420 HeapType* new_field_type) {
1421 PrintF(file, "[generalizing ");
1422 constructor_name()->PrintOn(file);
1424 Name* name = instance_descriptors()->GetKey(modify_index);
1425 if (name->IsString()) {
1426 String::cast(name)->PrintOn(file);
// Symbols have no printable name; show their address instead.
1428 PrintF(file, "{symbol %p}", static_cast<void*>(name));
1431 if (constant_to_field) {
1434 PrintF(file, "%s", old_representation.Mnemonic());
1436 old_field_type->TypePrint(file, HeapType::SEMANTIC_DIM);
1439 PrintF(file, "->%s", new_representation.Mnemonic());
1441 new_field_type->TypePrint(file, HeapType::SEMANTIC_DIM);
1444 if (strlen(reason) > 0) {
1445 PrintF(file, "%s", reason);
1447 PrintF(file, "+%i maps", descriptors - split);
1449 PrintF(file, ") [");
1450 JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
1451 PrintF(file, "]\n");
// Traces an instance migration from |original_map| to |new_map| to |file|,
// listing each own descriptor whose representation changed (old->new) or
// that was converted from a CONSTANT to a FIELD.
1455 void JSObject::PrintInstanceMigration(FILE* file,
1458 PrintF(file, "[migrating ");
1459 map()->constructor_name()->PrintOn(file);
1461 DescriptorArray* o = original_map->instance_descriptors();
1462 DescriptorArray* n = new_map->instance_descriptors();
1463 for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
1464 Representation o_r = o->GetDetails(i).representation();
1465 Representation n_r = n->GetDetails(i).representation();
1466 if (!o_r.Equals(n_r)) {
1467 String::cast(o->GetKey(i))->PrintOn(file);
1468 PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
1469 } else if (o->GetDetails(i).type() == CONSTANT &&
1470 n->GetDetails(i).type() == FIELD) {
1471 Name* name = o->GetKey(i);
1472 if (name->IsString()) {
1473 String::cast(name)->PrintOn(file);
// Symbols are identified by address since they have no printable name.
1475 PrintF(file, "{symbol %p}", static_cast<void*>(name));
// Appends a one-line debug description of an arbitrary heap object to
// |accumulator|, dispatching on instance type. Defensively validates that
// the object and its map actually lie inside the heap first, since this
// printer is also used for post-crash dumps.
1484 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
1485 Heap* heap = GetHeap();
1486 if (!heap->Contains(this)) {
1487 accumulator->Add("!!!INVALID POINTER!!!");
1490 if (!heap->Contains(map())) {
1491 accumulator->Add("!!!INVALID MAP!!!");
// Every entry is prefixed with the object's address.
1495 accumulator->Add("%p ", this);
1498 String::cast(this)->StringShortPrint(accumulator);
1502 JSObject::cast(this)->JSObjectShortPrint(accumulator);
1505 switch (map()->instance_type()) {
1507 accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind());
1509 case FIXED_ARRAY_TYPE:
1510 accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
1512 case FIXED_DOUBLE_ARRAY_TYPE:
1513 accumulator->Add("<FixedDoubleArray[%u]>",
1514 FixedDoubleArray::cast(this)->length());
1516 case BYTE_ARRAY_TYPE:
1517 accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
1519 case FREE_SPACE_TYPE:
1520 accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size());
// Macro expands one case per typed-array element type, covering both the
// external and the fixed (on-heap) variants.
1522 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \
1523 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1524 accumulator->Add("<External" #Type "Array[%u]>", \
1525 External##Type##Array::cast(this)->length()); \
1527 case FIXED_##TYPE##_ARRAY_TYPE: \
1528 accumulator->Add("<Fixed" #Type "Array[%u]>", \
1529 Fixed##Type##Array::cast(this)->length()); \
1532 TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
1533 #undef TYPED_ARRAY_SHORT_PRINT
1535 case SHARED_FUNCTION_INFO_TYPE: {
1536 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1537 SmartArrayPointer<char> debug_name =
1538 shared->DebugName()->ToCString();
1539 if (debug_name[0] != 0) {
1540 accumulator->Add("<SharedFunctionInfo %s>", debug_name.get());
1542 accumulator->Add("<SharedFunctionInfo>");
1546 case JS_MESSAGE_OBJECT_TYPE:
1547 accumulator->Add("<JSMessageObject>");
1549 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1551 accumulator->Put('<'); \
1552 accumulator->Add(#Name); \
1553 accumulator->Put('>'); \
1555 STRUCT_LIST(MAKE_STRUCT_CASE)
1556 #undef MAKE_STRUCT_CASE
1558 accumulator->Add("<Code>");
1560 case ODDBALL_TYPE: {
1562 accumulator->Add("<undefined>");
1563 else if (IsTheHole())
1564 accumulator->Add("<the hole>");
1566 accumulator->Add("<null>");
1568 accumulator->Add("<true>");
1570 accumulator->Add("<false>");
1572 accumulator->Add("<Odd Oddball>");
1576 Symbol* symbol = Symbol::cast(this);
1577 accumulator->Add("<Symbol: %d", symbol->Hash());
1578 if (!symbol->name()->IsUndefined()) {
1579 accumulator->Add(" ");
1580 String::cast(symbol->name())->StringShortPrint(accumulator);
1582 accumulator->Add(">");
1585 case HEAP_NUMBER_TYPE:
1586 accumulator->Add("<Number: ");
1587 HeapNumber::cast(this)->HeapNumberPrint(accumulator);
1588 accumulator->Put('>');
1590 case FLOAT32x4_TYPE:
1591 accumulator->Add("<Float32x4: ");
1592 Float32x4::cast(this)->Float32x4Print(accumulator);
1593 accumulator->Put('>');
1595 case FLOAT64x2_TYPE:
1596 accumulator->Add("<Float64x2: ");
1597 Float64x2::cast(this)->Float64x2Print(accumulator);
1598 accumulator->Put('>');
1601 accumulator->Add("<Int32x4: ");
1602 Int32x4::cast(this)->Int32x4Print(accumulator);
1603 accumulator->Put('>');
1606 accumulator->Add("<JSProxy>");
1608 case JS_FUNCTION_PROXY_TYPE:
1609 accumulator->Add("<JSFunctionProxy>");
1612 accumulator->Add("<Foreign>");
1615 accumulator->Add("Cell for ");
1616 Cell::cast(this)->value()->ShortPrint(accumulator);
1618 case PROPERTY_CELL_TYPE:
1619 accumulator->Add("PropertyCell for ");
1620 PropertyCell::cast(this)->value()->ShortPrint(accumulator);
// Fallback: any type not handled above prints its raw instance type.
1623 accumulator->Add("<Other heap object (%d)>", map()->instance_type());
// Visits all pointer fields of this object: first the map slot, then the
// type-specific body via IterateBody.
1629 void HeapObject::Iterate(ObjectVisitor* v) {
1631 IteratePointer(v, kMapOffset);
1632 // Handle object body
1634 IterateBody(m->instance_type(), SizeFromMap(m), v);
// Visits the pointer fields in the body of an object of the given |type| and
// |object_size| using the per-type BodyDescriptor. Called during GC, hence
// the reinterpret_casts below: the map pointer may be encoded, so <T>::cast
// (which reads the map) must be avoided.
1638 void HeapObject::IterateBody(InstanceType type, int object_size,
1640 // Avoiding <Type>::cast(this) because it accesses the map pointer field.
1641 // During GC, the map pointer field is encoded.
1642 if (type < FIRST_NONSTRING_TYPE) {
// Strings dispatch on their representation tag (seq/cons/sliced/external).
1643 switch (type & kStringRepresentationMask) {
1646 case kConsStringTag:
1647 ConsString::BodyDescriptor::IterateBody(this, v);
1649 case kSlicedStringTag:
1650 SlicedString::BodyDescriptor::IterateBody(this, v);
1652 case kExternalStringTag:
1653 if ((type & kStringEncodingMask) == kOneByteStringTag) {
1654 reinterpret_cast<ExternalAsciiString*>(this)->
1655 ExternalAsciiStringIterateBody(v);
1657 reinterpret_cast<ExternalTwoByteString*>(this)->
1658 ExternalTwoByteStringIterateBody(v);
1666 case FIXED_ARRAY_TYPE:
1667 FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
1669 case CONSTANT_POOL_ARRAY_TYPE:
1670 reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
1672 case FIXED_DOUBLE_ARRAY_TYPE:
// All ordinary JSObject-layout types share the generic JSObject body
// descriptor below.
1674 case JS_OBJECT_TYPE:
1675 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1676 case JS_GENERATOR_OBJECT_TYPE:
1677 case JS_MODULE_TYPE:
1681 case JS_ARRAY_BUFFER_TYPE:
1682 case JS_TYPED_ARRAY_TYPE:
1683 case JS_DATA_VIEW_TYPE:
1686 case JS_SET_ITERATOR_TYPE:
1687 case JS_MAP_ITERATOR_TYPE:
1688 case JS_WEAK_MAP_TYPE:
1689 case JS_WEAK_SET_TYPE:
1690 case JS_REGEXP_TYPE:
1691 case JS_GLOBAL_PROXY_TYPE:
1692 case JS_GLOBAL_OBJECT_TYPE:
1693 case JS_BUILTINS_OBJECT_TYPE:
1694 case JS_MESSAGE_OBJECT_TYPE:
1695 JSObject::BodyDescriptor::IterateBody(this, object_size, v);
1697 case JS_FUNCTION_TYPE:
1698 reinterpret_cast<JSFunction*>(this)
1699 ->JSFunctionIterateBody(object_size, v);
1702 Oddball::BodyDescriptor::IterateBody(this, v);
1705 JSProxy::BodyDescriptor::IterateBody(this, v);
1707 case JS_FUNCTION_PROXY_TYPE:
1708 JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
1711 reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
1714 Map::BodyDescriptor::IterateBody(this, v);
1717 reinterpret_cast<Code*>(this)->CodeIterateBody(v);
1720 Cell::BodyDescriptor::IterateBody(this, v);
1722 case PROPERTY_CELL_TYPE:
1723 PropertyCell::BodyDescriptor::IterateBody(this, v);
1726 Symbol::BodyDescriptor::IterateBody(this, v);
// Types below contain no heap pointers in their bodies; nothing to visit.
1729 case HEAP_NUMBER_TYPE:
1730 case FLOAT32x4_TYPE:
1731 case FLOAT64x2_TYPE:
1734 case BYTE_ARRAY_TYPE:
1735 case FREE_SPACE_TYPE:
1738 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
1739 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1740 case FIXED_##TYPE##_ARRAY_TYPE: \
1743 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1744 #undef TYPED_ARRAY_CASE
1746 case SHARED_FUNCTION_INFO_TYPE: {
1747 SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
1751 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1753 STRUCT_LIST(MAKE_STRUCT_CASE)
1754 #undef MAKE_STRUCT_CASE
// AllocationSite needs its own descriptor; other structs use the generic one.
1755 if (type == ALLOCATION_SITE_TYPE) {
1756 AllocationSite::BodyDescriptor::IterateBody(this, v);
1758 StructBodyDescriptor::IterateBody(this, object_size, v);
1762 PrintF("Unknown type: %d\n", type);
// ToBoolean for heap numbers: NaN, +0 and -0 are falsy, everything else is
// truthy. Inspects the raw IEEE-754 bits via a byte-order-specific union.
1768 bool HeapNumber::HeapNumberBooleanValue() {
1769 // NaN, +0, and -0 should return the false object
1770 #if __BYTE_ORDER == __LITTLE_ENDIAN
1771 union IeeeDoubleLittleEndianArchType u;
1772 #elif __BYTE_ORDER == __BIG_ENDIAN
1773 union IeeeDoubleBigEndianArchType u;
// Exponent of all ones (2047) marks NaN/Infinity in IEEE-754 doubles.
1776 if (u.bits.exp == 2047) {
1777 // Detect NaN for IEEE double precision floating point.
1778 if ((u.bits.man_low | u.bits.man_high) != 0) return false;
1780 if (u.bits.exp == 0) {
1781 // Detect +0, and -0 for IEEE double precision floating point.
1782 if ((u.bits.man_low | u.bits.man_high) == 0) return false;
// Prints this number with 16 significant digits to |out|.
1788 void HeapNumber::HeapNumberPrint(FILE* out) {
1789 PrintF(out, "%.16g", Number());
// StringStream overload: formats into a fixed-size stack buffer first to
// avoid any allocation (see comment below).
1793 void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
1794 // The Windows version of vsnprintf can allocate when printing a %g string
1795 // into a buffer that may not be big enough. We don't want random memory
1796 // allocation when producing post-crash stack traces, so we print into a
1797 // buffer that is plenty big enough for any floating point number, then
1798 // print that using vsnprintf (which may truncate but never allocate if
1799 // there is no more space in the buffer).
1800 EmbeddedVector<char, 100> buffer;
1801 OS::SNPrintF(buffer, "%.16g", Number());
1802 accumulator->Add("%s", buffer.start());
// Prints the four lanes (x, y, z, w) of this Float32x4 to |out|.
1806 void Float32x4::Float32x4Print(FILE* out) {
1807 PrintF(out, "%.16g %.16g %.16g %.16g", x(), y(), z(), w());
// StringStream overload: formats the four lanes into a fixed-size stack
// buffer to avoid allocation (see comment below).
1811 void Float32x4::Float32x4Print(StringStream* accumulator) {
1812 // The Windows version of vsnprintf can allocate when printing a %g string
1813 // into a buffer that may not be big enough. We don't want random memory
1814 // allocation when producing post-crash stack traces, so we print into a
1815 // buffer that is plenty big enough for any floating point number, then
1816 // print that using vsnprintf (which may truncate but never allocate if
1817 // there is no more space in the buffer).
1818 EmbeddedVector<char, 100> buffer;
1819 OS::SNPrintF(buffer, "%.16g %.16g %.16g %.16g", x(), y(), z(), w());
1820 accumulator->Add("%s", buffer.start());
// Prints the four integer lanes (x, y, z, w) of this Int32x4 to |out|.
1824 void Int32x4::Int32x4Print(FILE* out) {
1825 PrintF(out, "%u %u %u %u", x(), y(), z(), w());
// Prints the two lanes (x, y) of this Float64x2 to |out|.
1829 void Float64x2::Float64x2Print(FILE* out) {
1830 PrintF(out, "%.16g %.16g", x(), y());
// StringStream overload: formats the two lanes into a fixed-size stack
// buffer to avoid allocation (see comment below).
1834 void Float64x2::Float64x2Print(StringStream* accumulator) {
1835 // The Windows version of vsnprintf can allocate when printing a %g string
1836 // into a buffer that may not be big enough. We don't want random memory
1837 // allocation when producing post-crash stack traces, so we print into a
1838 // buffer that is plenty big enough for any floating point number, then
1839 // print that using vsnprintf (which may truncate but never allocate if
1840 // there is no more space in the buffer).
1841 EmbeddedVector<char, 100> buffer;
1842 OS::SNPrintF(buffer, "%.16g %.16g", x(), y());
1843 accumulator->Add("%s", buffer.start());
// StringStream overload: formats the four integer lanes into a fixed-size
// stack buffer to avoid allocation (see comment below).
1847 void Int32x4::Int32x4Print(StringStream* accumulator) {
1848 // The Windows version of vsnprintf can allocate when printing a %g string
1849 // into a buffer that may not be big enough. We don't want random memory
1850 // allocation when producing post-crash stack traces, so we print into a
1851 // buffer that is plenty big enough for any floating point number, then
1852 // print that using vsnprintf (which may truncate but never allocate if
1853 // there is no more space in the buffer).
1854 EmbeddedVector<char, 100> buffer;
1855 OS::SNPrintF(buffer, "%u %u %u %u", x(), y(), z(), w());
1856 accumulator->Add("%s", buffer.start());
1860 String* JSReceiver::class_name() {
1861 if (IsJSFunction() && IsJSFunctionProxy()) {
1862 return GetHeap()->function_class_string();
1864 if (map()->constructor()->IsJSFunction()) {
1865 JSFunction* constructor = JSFunction::cast(map()->constructor());
1866 return String::cast(constructor->shared()->instance_class_name());
1868 // If the constructor is not present, return "Object".
1869 return GetHeap()->Object_string();
// Returns a best-effort constructor name for objects with this map: the
// constructor function's name, its inferred name, the prototype chain's
// constructor name, or "Object" as the final fallback.
1873 String* Map::constructor_name() {
1874 if (constructor()->IsJSFunction()) {
1875 JSFunction* constructor = JSFunction::cast(this->constructor());
1876 String* name = String::cast(constructor->shared()->name());
1877 if (name->length() > 0) return name;
// Fall back to the name the parser inferred (e.g. from "var f = ...").
1878 String* inferred_name = constructor->shared()->inferred_name();
1879 if (inferred_name->length() > 0) return inferred_name;
1880 Object* proto = prototype();
1881 if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
1883 // TODO(rossberg): what about proxies?
1884 // If the constructor is not present, return "Object".
1885 return GetHeap()->Object_string();
// Delegates to the map's constructor-name lookup.
1889 String* JSReceiver::constructor_name() {
1890 return map()->constructor_name();
// Returns a copy of |map| extended with a new in-object/backing-store field
// named |name|. Returns an empty MaybeHandle when the descriptor array is
// full or the name is not cacheable, signalling the caller to normalize.
1894 MaybeHandle<Map> Map::CopyWithField(Handle<Map> map,
1896 Handle<HeapType> type,
1897 PropertyAttributes attributes,
1898 Representation representation,
1899 TransitionFlag flag) {
1900 ASSERT(DescriptorArray::kNotFound ==
1901 map->instance_descriptors()->Search(
1902 *name, map->NumberOfOwnDescriptors()));
1904 // Ensure the descriptor array does not get too big.
1905 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1906 return MaybeHandle<Map>();
1909 // Normalize the object if the name is an actual name (not the
1910 // hidden strings) and is not a real identifier.
1911 // Normalize the object if it will have too many fast properties.
1912 Isolate* isolate = map->GetIsolate();
1913 if (!name->IsCacheable(isolate)) return MaybeHandle<Map>();
1915 // Compute the new index for new field.
1916 int index = map->NextFreePropertyIndex();
// Context extension objects get untyped tagged fields; they are accessed
// generically and should not constrain representation or field type.
1918 if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
1919 representation = Representation::Tagged();
1920 type = HeapType::Any(isolate);
1923 FieldDescriptor new_field_desc(name, index, type, attributes, representation);
1924 Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag);
// Replenish the unused-fields counter when the in-object slack runs out.
1925 int unused_property_fields = new_map->unused_property_fields() - 1;
1926 if (unused_property_fields < 0) {
1927 unused_property_fields += JSObject::kFieldsAdded;
1929 new_map->set_unused_property_fields(unused_property_fields);
// Returns a copy of |map| extended with a (name -> constant) descriptor.
// Returns an empty MaybeHandle when the descriptor array is already full.
1934 MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map,
1936 Handle<Object> constant,
1937 PropertyAttributes attributes,
1938 TransitionFlag flag) {
1939 // Ensure the descriptor array does not get too big.
1940 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1941 return MaybeHandle<Map>();
1944 // Allocate new instance descriptors with (name, constant) added.
1945 ConstantDescriptor new_constant_desc(name, constant, attributes);
1946 return Map::CopyAddDescriptor(map, &new_constant_desc, flag);
// Adds a property to a fast-mode object by creating a new map: functions are
// stored as map-embedded constants, other values as fields. Falls back to
// normalizing the object (dictionary mode) when no suitable map copy can be
// made.
1950 void JSObject::AddFastProperty(Handle<JSObject> object,
1952 Handle<Object> value,
1953 PropertyAttributes attributes,
1954 StoreFromKeyed store_mode,
1955 ValueType value_type,
1956 TransitionFlag flag) {
1957 ASSERT(!object->IsJSGlobalProxy());
1959 MaybeHandle<Map> maybe_map;
1960 if (value->IsJSFunction()) {
1961 maybe_map = Map::CopyWithConstant(
1962 handle(object->map()), name, value, attributes, flag);
1963 } else if (!object->TooManyFastProperties(store_mode)) {
1964 Isolate* isolate = object->GetIsolate();
1965 Representation representation = value->OptimalRepresentation(value_type);
1966 maybe_map = Map::CopyWithField(
1967 handle(object->map(), isolate), name,
1968 value->OptimalType(isolate, representation),
1969 attributes, representation, flag);
// Map copy failed (full descriptors / uncacheable name / too many fast
// properties): switch the object to dictionary-backed properties.
1972 Handle<Map> new_map;
1973 if (!maybe_map.ToHandle(&new_map)) {
1974 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
1978 JSObject::MigrateToNewProperty(object, new_map, value);
// Adds a property to a dictionary-mode object. Global objects store values
// boxed in PropertyCells (reusing an orphaned cell when one exists for the
// name); other objects store the value directly in the name dictionary.
1982 void JSObject::AddSlowProperty(Handle<JSObject> object,
1984 Handle<Object> value,
1985 PropertyAttributes attributes) {
1986 ASSERT(!object->HasFastProperties());
1987 Isolate* isolate = object->GetIsolate();
1988 Handle<NameDictionary> dict(object->property_dictionary());
1989 if (object->IsGlobalObject()) {
1990 // In case name is an orphaned property reuse the cell.
1991 int entry = dict->FindEntry(name);
1992 if (entry != NameDictionary::kNotFound) {
1993 Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
1994 PropertyCell::SetValueInferType(cell, value);
1995 // Assign an enumeration index to the property and update
1996 // SetNextEnumerationIndex.
1997 int index = dict->NextEnumerationIndex();
1998 PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
1999 dict->SetNextEnumerationIndex(index + 1);
2000 dict->SetEntry(entry, name, cell, details);
// No existing cell: allocate a fresh PropertyCell for the value.
2003 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
2004 PropertyCell::SetValueInferType(cell, value);
2007 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
2008 Handle<NameDictionary> result =
2009 NameDictionary::Add(dict, name, value, details);
// Add may reallocate the dictionary; re-attach it if it moved.
2010 if (*dict != *result) object->set_properties(*result);
// Adds a brand-new property to |object| (the property must not already
// exist). Internalizes the name, enforces the extensibility check (throwing
// a TypeError in strict mode when the object is not extensible), dispatches
// to the fast or slow add path, and fires an Object.observe "add" record
// for observed objects.
2014 MaybeHandle<Object> JSObject::AddProperty(
2015 Handle<JSObject> object,
2017 Handle<Object> value,
2018 PropertyAttributes attributes,
2019 StrictMode strict_mode,
2020 JSReceiver::StoreFromKeyed store_mode,
2021 ExtensibilityCheck extensibility_check,
2022 ValueType value_type,
2024 TransitionFlag transition_flag) {
2025 ASSERT(!object->IsJSGlobalProxy());
2026 Isolate* isolate = object->GetIsolate();
2028 if (!name->IsUniqueName()) {
2029 name = isolate->factory()->InternalizeString(
2030 Handle<String>::cast(name));
2033 if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
2034 !object->map()->is_extensible()) {
// Sloppy mode silently ignores the store; strict mode throws.
2035 if (strict_mode == SLOPPY) {
2038 Handle<Object> args[1] = { name };
2039 Handle<Object> error = isolate->factory()->NewTypeError(
2040 "object_not_extensible", HandleVector(args, ARRAY_SIZE(args)));
2041 return isolate->Throw<Object>(error);
2045 if (object->HasFastProperties()) {
2046 AddFastProperty(object, name, value, attributes, store_mode,
2047 value_type, transition_flag);
// Re-check: AddFastProperty may have normalized the object, in which case
// the property still needs to be added through the slow path.
2050 if (!object->HasFastProperties()) {
2051 AddSlowProperty(object, name, value, attributes);
2054 if (object->map()->is_observed() &&
2055 *name != isolate->heap()->hidden_string()) {
2056 Handle<Object> old_value = isolate->factory()->the_hole_value();
2057 EnqueueChangeRecord(object, "add", name, old_value);
// Returns the native context in which this object was created, derived from
// its constructor's context (or, for functions, their own context).
2064 Context* JSObject::GetCreationContext() {
2065 Object* constructor = this->map()->constructor();
2066 JSFunction* function;
2067 if (!constructor->IsJSFunction()) {
2068 // Functions have null as a constructor,
2069 // but any JSFunction knows its context immediately.
2070 function = JSFunction::cast(this);
2072 function = JSFunction::cast(constructor);
2075 return function->context()->native_context();
// Queues an Object.observe change record of kind |type_str| for |object| by
// invoking the isolate's observers-notify-change JS function.
2079 void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
2080 const char* type_str,
2082 Handle<Object> old_value) {
2083 Isolate* isolate = object->GetIsolate();
2084 HandleScope scope(isolate);
2085 Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
// Observers see the global receiver (proxy), not the internal global object.
2086 if (object->IsJSGlobalObject()) {
2087 object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
2089 Handle<Object> args[] = { type, object, name, old_value };
// Trailing args are trimmed: no name -> 2 args, hole old_value -> 3 args.
2090 int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
2092 Execution::Call(isolate,
2093 Handle<JSFunction>(isolate->observers_notify_change()),
2094 isolate->factory()->undefined_value(),
2095 argc, args).Assert();
// Sets a property after interceptors have already run: looks up the real
// local property (or a map transition), handles the prototype chain's
// setters, and otherwise adds a brand-new property.
2099 MaybeHandle<Object> JSObject::SetPropertyPostInterceptor(
2100 Handle<JSObject> object,
2102 Handle<Object> value,
2103 PropertyAttributes attributes,
2104 StrictMode strict_mode) {
2105 // Check local property, ignore interceptor.
2106 Isolate* isolate = object->GetIsolate();
2107 LookupResult result(isolate);
2108 object->LocalLookupRealNamedProperty(name, &result);
2109 if (!result.IsFound()) {
2110 object->map()->LookupTransition(*object, *name, &result);
2112 if (result.IsFound()) {
2113 // An existing property or a map transition was found. Use set property to
2114 // handle all these cases.
2115 return SetPropertyForResult(object, &result, name, value, attributes,
2116 strict_mode, MAY_BE_STORE_FROM_KEYED);
// Give accessors/proxies on the prototype chain a chance to handle the
// store; |done| reports whether they did.
2119 Handle<Object> result_object;
2120 ASSIGN_RETURN_ON_EXCEPTION(
2121 isolate, result_object,
2122 SetPropertyViaPrototypes(
2123 object, name, value, attributes, strict_mode, &done),
2125 if (done) return result_object;
2126 // Add a new real property.
2127 return AddProperty(object, name, value, attributes, strict_mode);
// Replaces the value/attributes of an existing dictionary-mode property,
// preserving its enumeration index so for-in order is unchanged.
2131 static void ReplaceSlowProperty(Handle<JSObject> object,
2133 Handle<Object> value,
2134 PropertyAttributes attributes) {
2135 NameDictionary* dictionary = object->property_dictionary();
2136 int old_index = dictionary->FindEntry(name);
2137 int new_enumeration_index = 0; // 0 means "Use the next available index."
2138 if (old_index != -1) {
2139 // All calls to ReplaceSlowProperty have had all transitions removed.
2140 new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
2143 PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
2144 JSObject::SetNormalizedProperty(object, name, value, new_details);
2148 const char* Representation::Mnemonic() const {
2150 case kNone: return "v";
2151 case kTagged: return "t";
2152 case kSmi: return "s";
2153 case kDouble: return "d";
2154 case kFloat32x4: return "float32x4";
2155 case kFloat64x2: return "float64x2";
2156 case kInt32x4: return "int32x44";
2157 case kInteger32: return "i";
2158 case kHeapObject: return "h";
2159 case kExternal: return "x";
2167 static void ZapEndOfFixedArray(Address new_end, int to_trim) {
2168 // If we are doing a big trim in old space then we zap the space.
2169 Object** zap = reinterpret_cast<Object**>(new_end);
2170 zap++; // Header of filler must be at least one word so skip that.
2171 for (int i = 1; i < to_trim; i++) {
2172 *zap++ = Smi::FromInt(0);
// Shrinks |elms| in place by |to_trim| elements from the end: optionally
// zaps the freed words, installs a filler object over them, and then
// publishes the new length with a release store so the concurrent sweeper
// never observes an inconsistent array.
2177 template<Heap::InvocationMode mode>
2178 static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
2179 ASSERT(elms->map() != heap->fixed_cow_array_map());
2180 // For now this trick is only applied to fixed arrays in new and paged space.
2181 ASSERT(!heap->lo_space()->Contains(elms));
2183 const int len = elms->length();
2185 ASSERT(to_trim < len);
2187 Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
// Zap unless called from GC with garbage-zapping disabled.
2189 if (mode != Heap::FROM_GC || Heap::ShouldZapGarbage()) {
2190 ZapEndOfFixedArray(new_end, to_trim);
2193 int size_delta = to_trim * kPointerSize;
2195 // Technically in new space this write might be omitted (except for
2196 // debug mode which iterates through the heap), but to play safer
2198 heap->CreateFillerObjectAt(new_end, size_delta);
2200 // We are storing the new length using release store after creating a filler
2201 // for the left-over space to avoid races with the sweeper thread.
2202 elms->synchronized_set_length(len - to_trim);
2204 heap->AdjustLiveBytes(elms->address(), -size_delta, mode);
2206 // The array may not be moved during GC,
2207 // and size has to be adjusted nevertheless.
2208 HeapProfiler* profiler = heap->isolate()->heap_profiler();
2209 if (profiler->is_tracking_allocations()) {
2210 profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
// Decides whether migrating an instance from this map to |target| requires
// rewriting the property backing store (fields added, smi descriptors
// replaced by double descriptors, or in-object area shrunk), as opposed to
// simply swapping in the new map pointer.
2215 bool Map::InstancesNeedRewriting(Map* target,
2216 int target_number_of_fields,
2217 int target_inobject,
2218 int target_unused) {
2219 // If fields were added (or removed), rewrite the instance.
2220 int number_of_fields = NumberOfFields();
2221 ASSERT(target_number_of_fields >= number_of_fields);
2222 if (target_number_of_fields != number_of_fields) return true;
2224 // If smi descriptors were replaced by double descriptors, rewrite.
2225 DescriptorArray* old_desc = instance_descriptors();
2226 DescriptorArray* new_desc = target->instance_descriptors();
2227 int limit = NumberOfOwnDescriptors();
2228 for (int i = 0; i < limit; i++) {
// A smi/tagged field becoming a double needs a HeapNumber box, hence
// a rewrite of the instance.
2229 if (new_desc->GetDetails(i).representation().IsDouble() &&
2230 !old_desc->GetDetails(i).representation().IsDouble()) {
2235 // If no fields were added, and no inobject properties were removed, setting
2236 // the map is sufficient.
2237 if (target_inobject == inobject_properties()) return false;
2238 // In-object slack tracking may have reduced the object size of the new map.
2239 // In that case, succeed if all existing fields were inobject, and they still
2240 // fit within the new inobject size.
2241 ASSERT(target_inobject < inobject_properties());
2242 if (target_number_of_fields <= target_inobject) {
2243 ASSERT(target_number_of_fields + target_unused == target_inobject);
2246 // Otherwise, properties will need to be moved to the backing store.
// Records |transitioned_map| as the elements-kind transition of |map| by
// inserting it into |map|'s transition array keyed by the dedicated
// elements-transition symbol.
2251 Handle<TransitionArray> Map::SetElementsTransitionMap(
2252 Handle<Map> map, Handle<Map> transitioned_map) {
2253 Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
2255 map->GetIsolate()->factory()->elements_transition_symbol(),
2258 map->set_transitions(*transitions);
2263 // To migrate an instance to a map:
2264 // - First check whether the instance needs to be rewritten. If not, simply
2266 // - Otherwise, allocate a fixed array large enough to hold all fields, in
2267 // addition to unused space.
2268 // - Copy all existing properties in, in the following order: backing store
2269 // properties, unused fields, inobject properties.
2270 // - If all allocation succeeded, commit the state atomically:
2271 // * Copy inobject properties from the backing store back into the object.
2272 // * Trim the difference in instance size of the object. This also cleanly
2273 // frees inobject properties that moved to the backing store.
2274 // * If there are properties left in the backing store, trim off the space used
2275 // to temporarily store the inobject properties.
2276 // * If there are properties left in the backing store, install the backing
// Migrates |object| to |new_map|; see the algorithm description in the
// comment block above. Fast path: if no rewriting is needed, just swap in
// the new map. Otherwise all property values are staged in a temporary
// FixedArray before being committed atomically.
2278 void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
2279 Isolate* isolate = object->GetIsolate();
2280 Handle<Map> old_map(object->map());
2281 int number_of_fields = new_map->NumberOfFields();
2282 int inobject = new_map->inobject_properties();
2283 int unused = new_map->unused_property_fields();
2285 // Nothing to do if no functions were converted to fields and no smis were
2286 // converted to doubles.
2287 if (!old_map->InstancesNeedRewriting(
2288 *new_map, number_of_fields, inobject, unused)) {
2289 // Writing the new map here does not require synchronization since it does
2290 // not change the actual object size.
2291 object->synchronized_set_map(*new_map);
// Staging array layout: backing-store properties first, then the
// in-object properties at the end (offset by |external|).
2295 int total_size = number_of_fields + unused;
2296 int external = total_size - inobject;
2297 Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
2299 Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
2300 Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
2301 int old_nof = old_map->NumberOfOwnDescriptors();
2302 int new_nof = new_map->NumberOfOwnDescriptors();
2304 // This method only supports generalizing instances to at least the same
2305 // number of properties.
2306 ASSERT(old_nof <= new_nof);
// Copy pre-existing property values (possibly boxing smis that became
// doubles) into the staging array.
2308 for (int i = 0; i < old_nof; i++) {
2309 PropertyDetails details = new_descriptors->GetDetails(i);
2310 if (details.type() != FIELD) continue;
2311 PropertyDetails old_details = old_descriptors->GetDetails(i);
2312 if (old_details.type() == CALLBACKS) {
2313 ASSERT(details.representation().IsTagged());
2316 ASSERT(old_details.type() == CONSTANT ||
2317 old_details.type() == FIELD);
2318 Object* raw_value = old_details.type() == CONSTANT
2319 ? old_descriptors->GetValue(i)
2320 : object->RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
2321 Handle<Object> value(raw_value, isolate);
2322 if (!old_details.representation().IsDouble() &&
2323 details.representation().IsDouble()) {
2324 if (old_details.representation().IsNone()) {
2325 value = handle(Smi::FromInt(0), isolate);
2327 value = Object::NewStorageFor(isolate, value, details.representation());
2329 ASSERT(!(details.representation().IsDouble() && value->IsSmi()));
2330 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2331 if (target_index < 0) target_index += total_size;
2332 array->set(target_index, *value);
// Initialize any fields that only exist in the new map.
2335 for (int i = old_nof; i < new_nof; i++) {
2336 PropertyDetails details = new_descriptors->GetDetails(i);
2337 if (details.type() != FIELD) continue;
2338 Handle<Object> value;
2339 if (details.representation().IsDouble()) {
2340 value = isolate->factory()->NewHeapNumber(0);
2342 value = isolate->factory()->uninitialized_value();
2344 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2345 if (target_index < 0) target_index += total_size;
2346 array->set(target_index, *value);
2349 // From here on we cannot fail and we shouldn't GC anymore.
2350 DisallowHeapAllocation no_allocation;
2352 // Copy (real) inobject properties. If necessary, stop at number_of_fields to
2353 // avoid overwriting |one_pointer_filler_map|.
2354 int limit = Min(inobject, number_of_fields);
2355 for (int i = 0; i < limit; i++) {
2356 object->FastPropertyAtPut(i, array->get(external + i));
2359 // Create filler object past the new instance size.
2360 int new_instance_size = new_map->instance_size();
2361 int instance_size_delta = old_map->instance_size() - new_instance_size;
2362 ASSERT(instance_size_delta >= 0);
2363 Address address = object->address() + new_instance_size;
2365 // The trimming is performed on a newly allocated object, which is on a
2366 // freshly allocated page or on an already swept page. Hence, the sweeper
2367 // thread can not get confused with the filler creation. No synchronization
2369 isolate->heap()->CreateFillerObjectAt(address, instance_size_delta);
2371 // If there are properties in the new backing store, trim it to the correct
2372 // size and install the backing store into the object.
2374 RightTrimFixedArray<Heap::FROM_MUTATOR>(isolate->heap(), *array, inobject);
2375 object->set_properties(*array);
2378 // The trimming is performed on a newly allocated object, which is on a
2379 // freshly allocated page or on an already swept page. Hence, the sweeper
2380 // thread can not get confused with the filler creation. No synchronization
2382 object->set_map(*new_map);
// Generalizes the field at |modify_index| on |object|'s map via
// Map::GeneralizeRepresentation and, if the map actually changed, migrates
// the instance to the new map.
2386 void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
2388 Representation new_representation,
2389 Handle<HeapType> new_field_type,
2390 StoreMode store_mode) {
2391 Handle<Map> new_map = Map::GeneralizeRepresentation(
2392 handle(object->map()), modify_index, new_representation,
2393 new_field_type, store_mode);
// Fast exit: generalization was already reflected in the current map.
2394 if (object->map() == *new_map) return;
2395 return MigrateToMap(object, new_map);
2399 int Map::NumberOfFields() {
2400 DescriptorArray* descriptors = instance_descriptors();
2402 for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
2403 if (descriptors->GetDetails(i).type() == FIELD) result++;
// Copies |map| while degrading every descriptor to the most general state:
// Tagged representation and field type Any. If |store_mode| is FORCE_FIELD,
// the descriptor at |modify_index| is additionally converted to a FIELD
// descriptor. |reason| only feeds --trace-generalization output.
2409 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2411 StoreMode store_mode,
2412 PropertyAttributes attributes,
2413 const char* reason) {
2414 Isolate* isolate = map->GetIsolate();
2415 Handle<Map> new_map = Copy(map);
2417 DescriptorArray* descriptors = new_map->instance_descriptors();
2418 int length = descriptors->number_of_descriptors();
2419 for (int i = 0; i < length; i++) {
2420 descriptors->SetRepresentation(i, Representation::Tagged());
2421 if (descriptors->GetDetails(i).type() == FIELD) {
2422 descriptors->SetValue(i, HeapType::Any());
2426 // Unless the instance is being migrated, ensure that modify_index is a field.
2427 PropertyDetails details = descriptors->GetDetails(modify_index);
2428 if (store_mode == FORCE_FIELD && details.type() != FIELD) {
2429 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
2430 new_map->NumberOfFields(),
2432 Representation::Tagged());
2433 descriptors->Replace(modify_index, &d);
// Converting to a field consumes one property slot; replenish the
// unused pool in kFieldsAdded-sized chunks when it runs out.
2434 int unused_property_fields = new_map->unused_property_fields() - 1;
2435 if (unused_property_fields < 0) {
2436 unused_property_fields += JSObject::kFieldsAdded;
2438 new_map->set_unused_property_fields(unused_property_fields);
2441 if (FLAG_trace_generalization) {
2442 HeapType* field_type = (details.type() == FIELD)
2443 ? map->instance_descriptors()->GetFieldType(modify_index)
2445 map->PrintGeneralization(stdout, reason, modify_index,
2446 new_map->NumberOfOwnDescriptors(),
2447 new_map->NumberOfOwnDescriptors(),
2448 details.type() == CONSTANT && store_mode == FORCE_FIELD,
2449 details.representation(), Representation::Tagged(),
2450 field_type, HeapType::Any());
// Recursively deprecates this map and every map reachable through its
// transition array, deoptimizing code that depended on the transition
// group. Already-deprecated subtrees are skipped.
2456 void Map::DeprecateTransitionTree() {
2457 if (is_deprecated()) return;
2458 if (HasTransitionArray()) {
2459 TransitionArray* transitions = this->transitions();
2460 for (int i = 0; i < transitions->number_of_transitions(); i++) {
2461 transitions->GetTarget(i)->DeprecateTransitionTree();
2465 dependent_code()->DeoptimizeDependentCodeGroup(
2466 GetIsolate(), DependentCode::kTransitionGroup);
2467 NotifyLeafMapLayoutChange();
2471 // Invalidates a transition target at |key|, and installs |new_descriptors| over
2472 // the current instance_descriptors to ensure proper sharing of descriptor
2474 void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
// Deprecate the whole subtree hanging off the transition keyed by |key|.
2475 if (HasTransitionArray()) {
2476 TransitionArray* transitions = this->transitions();
2477 int transition = transitions->Search(key);
2478 if (transition != TransitionArray::kNotFound) {
2479 transitions->GetTarget(transition)->DeprecateTransitionTree();
2483 // Don't overwrite the empty descriptor array.
2484 if (NumberOfOwnDescriptors() == 0) return;
// Install |new_descriptors| on this map and on every ancestor that shares
// the same descriptor array, so sharing stays consistent.
2486 DescriptorArray* to_replace = instance_descriptors();
2487 Map* current = this;
2488 GetHeap()->incremental_marking()->RecordWrites(to_replace);
2489 while (current->instance_descriptors() == to_replace) {
// Invalidate the cached enum length; the descriptor layout changed.
2490 current->SetEnumLength(kInvalidEnumCacheSentinel);
2491 current->set_instance_descriptors(new_descriptors);
2492 Object* next = current->GetBackPointer();
2493 if (next->IsUndefined()) break;
2494 current = Map::cast(next);
2497 set_owns_descriptors(false);
// Follows the back-pointer chain upward until a map with an undefined back
// pointer — the root of the transition tree — is reached.
2501 Map* Map::FindRootMap() {
2504 Object* back = result->GetBackPointer();
2505 if (back->IsUndefined()) return result;
2506 result = Map::cast(back);
// Starting from this map (which must be a transition-tree root), walks
// transitions keyed by the names in |descriptors| from index |verbatim|
// onward, stopping at the first map whose descriptor at that index no
// longer matches (type, attributes, representation, or field type/value).
2511 Map* Map::FindLastMatchMap(int verbatim,
2513 DescriptorArray* descriptors) {
2514 DisallowHeapAllocation no_allocation;
2516 // This can only be called on roots of transition trees.
2517 ASSERT(GetBackPointer()->IsUndefined());
2519 Map* current = this;
2521 for (int i = verbatim; i < length; i++) {
2522 if (!current->HasTransitionArray()) break;
2523 Name* name = descriptors->GetKey(i);
2524 TransitionArray* transitions = current->transitions();
2525 int transition = transitions->Search(name);
2526 if (transition == TransitionArray::kNotFound) break;
2528 Map* next = transitions->GetTarget(transition);
2529 DescriptorArray* next_descriptors = next->instance_descriptors();
// The transition only "matches" if every observable aspect of the
// descriptor agrees with the requested layout.
2531 PropertyDetails details = descriptors->GetDetails(i);
2532 PropertyDetails next_details = next_descriptors->GetDetails(i);
2533 if (details.type() != next_details.type()) break;
2534 if (details.attributes() != next_details.attributes()) break;
2535 if (!details.representation().Equals(next_details.representation())) break;
2536 if (next_details.type() == FIELD) {
2537 if (!descriptors->GetFieldType(i)->NowIs(
2538 next_descriptors->GetFieldType(i))) break;
2540 if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break;
// Walks the back-pointer chain to locate the map that introduced the FIELD
// descriptor at |descriptor| (the first ancestor that still owns at least
// |descriptor| + 1 own descriptors).
2549 Map* Map::FindFieldOwner(int descriptor) {
2550 DisallowHeapAllocation no_allocation;
2551 ASSERT_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type());
2554 Object* back = result->GetBackPointer();
2555 if (back->IsUndefined()) break;
2556 Map* parent = Map::cast(back);
2557 if (parent->NumberOfOwnDescriptors() <= descriptor) break;
2564 void Map::UpdateDescriptor(int descriptor_number, Descriptor* desc) {
2565 DisallowHeapAllocation no_allocation;
2566 if (HasTransitionArray()) {
2567 TransitionArray* transitions = this->transitions();
2568 for (int i = 0; i < transitions->number_of_transitions(); ++i) {
2569 transitions->GetTarget(i)->UpdateDescriptor(descriptor_number, desc);
2572 instance_descriptors()->Replace(descriptor_number, desc);;
// Computes the most general field type covering both |type1| and |type2|:
// returns one input if it already subsumes the other, unions stable class
// types while the result stays small (<= kMaxClassesPerFieldType), and
// otherwise gives up and returns Any.
2577 Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1,
2578 Handle<HeapType> type2,
2580 static const int kMaxClassesPerFieldType = 5;
2581 if (type1->NowIs(type2)) return type2;
2582 if (type2->NowIs(type1)) return type1;
2583 if (type1->NowStable() && type2->NowStable()) {
2584 Handle<HeapType> type = HeapType::Union(type1, type2, isolate);
2585 if (type->NumClasses() <= kMaxClassesPerFieldType) {
2586 ASSERT(type->NowStable());
2587 ASSERT(type1->NowIs(type));
2588 ASSERT(type2->NowIs(type));
2592 return HeapType::Any(isolate);
// Generalizes the field type of the descriptor at |modify_index| on |map|.
// The update is applied on the map that owns the field's descriptor array
// (so all maps sharing it see the change), and optimized code depending on
// the old field type is deoptimized.
2597 void Map::GeneralizeFieldType(Handle<Map> map,
2599 Handle<HeapType> new_field_type) {
2600 Isolate* isolate = map->GetIsolate();
2602 // Check if we actually need to generalize the field type at all.
2603 Handle<HeapType> old_field_type(
2604 map->instance_descriptors()->GetFieldType(modify_index), isolate);
2605 if (new_field_type->NowIs(old_field_type)) {
2606 ASSERT(Map::GeneralizeFieldType(old_field_type,
2608 isolate)->NowIs(old_field_type));
2612 // Determine the field owner.
2613 Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
2614 Handle<DescriptorArray> descriptors(
2615 field_owner->instance_descriptors(), isolate);
2616 ASSERT_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
2618 // Determine the generalized new field type.
2619 new_field_type = Map::GeneralizeFieldType(
2620 old_field_type, new_field_type, isolate);
// Rebuild the descriptor with the widened type, install it on the owner,
// and invalidate code that specialized on the old field type.
2622 PropertyDetails details = descriptors->GetDetails(modify_index);
2623 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
2624 descriptors->GetFieldIndex(modify_index),
2626 details.attributes(),
2627 details.representation());
2628 field_owner->UpdateDescriptor(modify_index, &d);
2629 field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
2630 isolate, DependentCode::kFieldTypeGroup);
2632 if (FLAG_trace_generalization) {
2633 map->PrintGeneralization(
2634 stdout, "field type generalization",
2635 modify_index, map->NumberOfOwnDescriptors(),
2636 map->NumberOfOwnDescriptors(), false,
2637 details.representation(), details.representation(),
2638 *old_field_type, *new_field_type);
2643 // Generalize the representation of the descriptor at |modify_index|.
2644 // This method rewrites the transition tree to reflect the new change. To avoid
2645 // high degrees of polymorphism, and to stabilize quickly, on every rewrite
2646 // the new type is deduced by merging the current type with any potential new
2647 // (partial) version of the type in the transition tree.
2648 // To do this, on each rewrite:
2649 // - Search the root of the transition tree using FindRootMap.
2650 // - Find |target_map|, the newest matching version of this map using the keys
2651 // in the |old_map|'s descriptor array to walk the transition tree.
2652 // - Merge/generalize the descriptor array of the |old_map| and |target_map|.
2653 // - Generalize the |modify_index| descriptor using |new_representation| and
2654 // |new_field_type|.
2655 // - Walk the tree again starting from the root towards |target_map|. Stop at
2656 // |split_map|, the first map whose descriptor array does not match the merged
2657 // descriptor array.
2658 // - If |target_map| == |split_map|, |target_map| is in the expected state.
2660 // - Otherwise, invalidate the outdated transition target from |target_map|, and
2661 // replace its transition tree with a new branch for the updated descriptors.
// See the algorithm description in the comment block above. Returns the map
// the caller should migrate instances to.
2662 Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
2664 Representation new_representation,
2665 Handle<HeapType> new_field_type,
2666 StoreMode store_mode) {
2667 Isolate* isolate = old_map->GetIsolate();
2669 Handle<DescriptorArray> old_descriptors(
2670 old_map->instance_descriptors(), isolate);
2671 int old_nof = old_map->NumberOfOwnDescriptors();
2672 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2673 Representation old_representation = old_details.representation();
2675 // It's fine to transition from None to anything but double without any
2676 // modification to the object, because the default uninitialized value for
2677 // representation None can be overwritten by both smi and tagged values.
2678 // Doubles, however, would require a box allocation.
2679 if (old_representation.IsNone() &&
2680 !new_representation.IsNone() &&
2681 !new_representation.IsDouble()) {
2682 ASSERT(old_details.type() == FIELD);
2683 ASSERT(old_descriptors->GetFieldType(modify_index)->NowIs(
2685 if (FLAG_trace_generalization) {
2686 old_map->PrintGeneralization(
2687 stdout, "uninitialized field",
2688 modify_index, old_map->NumberOfOwnDescriptors(),
2689 old_map->NumberOfOwnDescriptors(), false,
2690 old_representation, new_representation,
2691 old_descriptors->GetFieldType(modify_index), *new_field_type);
// In-place update: patch the existing descriptor array directly.
2693 old_descriptors->SetRepresentation(modify_index, new_representation);
2694 old_descriptors->SetValue(modify_index, *new_field_type);
2698 // Check the state of the root map.
2699 Handle<Map> root_map(old_map->FindRootMap(), isolate);
2700 if (!old_map->EquivalentToForTransition(*root_map)) {
2701 return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
2702 old_details.attributes(), "not equivalent");
2704 int root_nof = root_map->NumberOfOwnDescriptors();
// Modifying a descriptor owned by the root itself cannot be expressed in
// the transition tree; fall back to full generalization.
2705 if (modify_index < root_nof) {
2706 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2707 if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) ||
2708 (old_details.type() == FIELD &&
2709 (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) ||
2710 !new_representation.fits_into(old_details.representation())))) {
2711 return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
2712 old_details.attributes(), "root modification");
// Walk transitions from the root to find the newest compatible version
// of this map, generalizing field types along the way.
2716 Handle<Map> target_map = root_map;
2717 for (int i = root_nof; i < old_nof; ++i) {
2718 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2719 if (j == TransitionArray::kNotFound) break;
2720 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2721 Handle<DescriptorArray> tmp_descriptors = handle(
2722 tmp_map->instance_descriptors(), isolate);
2724 // Check if target map is incompatible.
2725 PropertyDetails old_details = old_descriptors->GetDetails(i);
2726 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2727 PropertyType old_type = old_details.type();
2728 PropertyType tmp_type = tmp_details.type();
2729 if (tmp_details.attributes() != old_details.attributes() ||
2730 ((tmp_type == CALLBACKS || old_type == CALLBACKS) &&
2731 (tmp_type != old_type ||
2732 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2733 return CopyGeneralizeAllRepresentations(
2734 old_map, modify_index, store_mode,
2735 old_details.attributes(), "incompatible");
2737 Representation old_representation = old_details.representation();
2738 Representation tmp_representation = tmp_details.representation();
2739 if (!old_representation.fits_into(tmp_representation) ||
2740 (!new_representation.fits_into(tmp_representation) &&
2741 modify_index == i)) {
2744 if (tmp_type == FIELD) {
2745 // Generalize the field type as necessary.
2746 Handle<HeapType> old_field_type = (old_type == FIELD)
2747 ? handle(old_descriptors->GetFieldType(i), isolate)
2748 : old_descriptors->GetValue(i)->OptimalType(
2749 isolate, tmp_representation);
2750 if (modify_index == i) {
2751 old_field_type = GeneralizeFieldType(
2752 new_field_type, old_field_type, isolate);
2754 GeneralizeFieldType(tmp_map, i, old_field_type);
2755 } else if (tmp_type == CONSTANT) {
2756 if (old_type != CONSTANT ||
2757 old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) {
2761 ASSERT_EQ(tmp_type, old_type);
2762 ASSERT_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i));
2764 target_map = tmp_map;
2767 // Directly change the map if the target map is more general.
2768 Handle<DescriptorArray> target_descriptors(
2769 target_map->instance_descriptors(), isolate);
2770 int target_nof = target_map->NumberOfOwnDescriptors();
2771 if (target_nof == old_nof &&
2772 (store_mode != FORCE_FIELD ||
2773 target_descriptors->GetDetails(modify_index).type() == FIELD)) {
2774 ASSERT(modify_index < target_nof);
2775 ASSERT(new_representation.fits_into(
2776 target_descriptors->GetDetails(modify_index).representation()));
2777 ASSERT(target_descriptors->GetDetails(modify_index).type() != FIELD ||
2778 new_field_type->NowIs(
2779 target_descriptors->GetFieldType(modify_index)));
2783 // Find the last compatible target map in the transition tree.
2784 for (int i = target_nof; i < old_nof; ++i) {
2785 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2786 if (j == TransitionArray::kNotFound) break;
2787 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2788 Handle<DescriptorArray> tmp_descriptors(
2789 tmp_map->instance_descriptors(), isolate);
2791 // Check if target map is compatible.
2792 PropertyDetails old_details = old_descriptors->GetDetails(i);
2793 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2794 if (tmp_details.attributes() != old_details.attributes() ||
2795 ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) &&
2796 (tmp_details.type() != old_details.type() ||
2797 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2798 return CopyGeneralizeAllRepresentations(
2799 old_map, modify_index, store_mode,
2800 old_details.attributes(), "incompatible");
2802 target_map = tmp_map;
2804 target_nof = target_map->NumberOfOwnDescriptors();
2805 target_descriptors = handle(target_map->instance_descriptors(), isolate);
2807 // Allocate a new descriptor array large enough to hold the required
2808 // descriptors, with minimally the exact same size as the old descriptor
2810 int new_slack = Max(
2811 old_nof, old_descriptors->number_of_descriptors()) - old_nof;
2812 Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
2813 isolate, old_nof, new_slack);
2814 ASSERT(new_descriptors->length() > target_descriptors->length() ||
2815 new_descriptors->NumberOfSlackDescriptors() > 0 ||
2816 new_descriptors->number_of_descriptors() ==
2817 old_descriptors->number_of_descriptors());
2818 ASSERT(new_descriptors->number_of_descriptors() == old_nof);
// 0 -> |root_nof|: root descriptors are copied verbatim.
2821 int current_offset = 0;
2822 for (int i = 0; i < root_nof; ++i) {
2823 PropertyDetails old_details = old_descriptors->GetDetails(i);
2824 if (old_details.type() == FIELD) current_offset++;
2825 Descriptor d(handle(old_descriptors->GetKey(i), isolate),
2826 handle(old_descriptors->GetValue(i), isolate),
2828 new_descriptors->Set(i, &d);
2831 // |root_nof| -> |target_nof|
2832 for (int i = root_nof; i < target_nof; ++i) {
2833 Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
2834 PropertyDetails old_details = old_descriptors->GetDetails(i);
2835 PropertyDetails target_details = target_descriptors->GetDetails(i);
// Merge representations of old and target (and the requested new
// representation at |modify_index|).
2836 target_details = target_details.CopyWithRepresentation(
2837 old_details.representation().generalize(
2838 target_details.representation()));
2839 if (modify_index == i) {
2840 target_details = target_details.CopyWithRepresentation(
2841 new_representation.generalize(target_details.representation()));
2843 if (old_details.type() == FIELD ||
2844 target_details.type() == FIELD ||
2845 (modify_index == i && store_mode == FORCE_FIELD) ||
2846 (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
2847 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2848 ? handle(old_descriptors->GetFieldType(i), isolate)
2849 : old_descriptors->GetValue(i)->OptimalType(
2850 isolate, target_details.representation());
2851 Handle<HeapType> target_field_type = (target_details.type() == FIELD)
2852 ? handle(target_descriptors->GetFieldType(i), isolate)
2853 : target_descriptors->GetValue(i)->OptimalType(
2854 isolate, target_details.representation());
2855 target_field_type = GeneralizeFieldType(
2856 target_field_type, old_field_type, isolate);
2857 if (modify_index == i) {
2858 target_field_type = GeneralizeFieldType(
2859 target_field_type, new_field_type, isolate);
2861 FieldDescriptor d(target_key,
2864 target_details.attributes(),
2865 target_details.representation());
2866 new_descriptors->Set(i, &d);
2868 ASSERT_NE(FIELD, target_details.type());
2869 Descriptor d(target_key,
2870 handle(target_descriptors->GetValue(i), isolate),
2872 new_descriptors->Set(i, &d);
2876 // |target_nof| -> |old_nof|
2877 for (int i = target_nof; i < old_nof; ++i) {
2878 PropertyDetails old_details = old_descriptors->GetDetails(i);
2879 Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
2880 if (modify_index == i) {
2881 old_details = old_details.CopyWithRepresentation(
2882 new_representation.generalize(old_details.representation()));
2884 if (old_details.type() == FIELD) {
2885 Handle<HeapType> old_field_type(
2886 old_descriptors->GetFieldType(i), isolate);
2887 if (modify_index == i) {
2888 old_field_type = GeneralizeFieldType(
2889 old_field_type, new_field_type, isolate);
2891 FieldDescriptor d(old_key,
2894 old_details.attributes(),
2895 old_details.representation());
2896 new_descriptors->Set(i, &d);
2898 ASSERT(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
2899 if (modify_index == i && store_mode == FORCE_FIELD) {
2900 FieldDescriptor d(old_key,
2902 GeneralizeFieldType(
2903 old_descriptors->GetValue(i)->OptimalType(
2904 isolate, old_details.representation()),
2905 new_field_type, isolate),
2906 old_details.attributes(),
2907 old_details.representation());
2908 new_descriptors->Set(i, &d);
2910 ASSERT_NE(FIELD, old_details.type());
2911 Descriptor d(old_key,
2912 handle(old_descriptors->GetValue(i), isolate),
2914 new_descriptors->Set(i, &d);
2919 new_descriptors->Sort();
2921 ASSERT(store_mode != FORCE_FIELD ||
2922 new_descriptors->GetDetails(modify_index).type() == FIELD);
// Locate the split point: the deepest existing map still matching the
// merged descriptors; everything below it is deprecated and rebuilt.
2924 Handle<Map> split_map(root_map->FindLastMatchMap(
2925 root_nof, old_nof, *new_descriptors), isolate);
2926 int split_nof = split_map->NumberOfOwnDescriptors();
2927 ASSERT_NE(old_nof, split_nof);
2929 split_map->DeprecateTarget(
2930 old_descriptors->GetKey(split_nof), *new_descriptors);
2932 if (FLAG_trace_generalization) {
2933 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2934 PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
2935 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2936 ? handle(old_descriptors->GetFieldType(modify_index), isolate)
2937 : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
2939 Handle<HeapType> new_field_type = (new_details.type() == FIELD)
2940 ? handle(new_descriptors->GetFieldType(modify_index), isolate)
2941 : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
2943 old_map->PrintGeneralization(
2944 stdout, "", modify_index, split_nof, old_nof,
2945 old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
2946 old_details.representation(), new_details.representation(),
2947 *old_field_type, *new_field_type);
2950 // Add missing transitions.
2951 Handle<Map> new_map = split_map;
2952 for (int i = split_nof; i < old_nof; ++i) {
2953 new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
2955 new_map->set_owns_descriptors(true);
2960 // Generalize the representation of all FIELD descriptors.
2961 Handle<Map> Map::GeneralizeAllFieldRepresentations(
2963 Handle<DescriptorArray> descriptors(map->instance_descriptors());
2964 for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
2965 if (descriptors->GetDetails(i).type() == FIELD) {
// Widen each field to the most general state: Tagged / Any.
2966 map = GeneralizeRepresentation(map, i, Representation::Tagged(),
2967 HeapType::Any(map->GetIsolate()),
// Best-effort migrates deprecated maps found along |map|'s prototype chain
// (so lookups below see up-to-date prototypes), then resolves |map| itself
// to its current non-deprecated equivalent.
2976 MaybeHandle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
2977 Handle<Map> proto_map(map);
2978 while (proto_map->prototype()->IsJSObject()) {
2979 Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
2980 proto_map = Handle<Map>(holder->map());
2981 if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
// Migration changed the holder's map; re-read it before continuing.
2982 proto_map = Handle<Map>(holder->map());
2985 return CurrentMapForDeprecatedInternal(map);
// Without allocating, searches the transition tree for a non-deprecated map
// equivalent to |old_map| (same keys, compatible details and field types).
// Returns an empty MaybeHandle if no such map exists, in which case the
// caller must take the slow generalization path.
2990 MaybeHandle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> old_map) {
2991 DisallowHeapAllocation no_allocation;
2992 DisallowDeoptimization no_deoptimization(old_map->GetIsolate());
2994 if (!old_map->is_deprecated()) return old_map;
2996 // Check the state of the root map.
2997 Map* root_map = old_map->FindRootMap();
2998 if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
2999 int root_nof = root_map->NumberOfOwnDescriptors();
3001 int old_nof = old_map->NumberOfOwnDescriptors();
3002 DescriptorArray* old_descriptors = old_map->instance_descriptors();
// Re-walk the transition tree from the root using the old map's keys,
// bailing out at the first missing or incompatible transition.
3004 Map* new_map = root_map;
3005 for (int i = root_nof; i < old_nof; ++i) {
3006 int j = new_map->SearchTransition(old_descriptors->GetKey(i));
3007 if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
3008 new_map = new_map->GetTransition(j);
3009 DescriptorArray* new_descriptors = new_map->instance_descriptors();
3011 PropertyDetails new_details = new_descriptors->GetDetails(i);
3012 PropertyDetails old_details = old_descriptors->GetDetails(i);
3013 if (old_details.attributes() != new_details.attributes() ||
3014 !old_details.representation().fits_into(new_details.representation())) {
3015 return MaybeHandle<Map>();
3017 PropertyType new_type = new_details.type();
3018 PropertyType old_type = old_details.type();
3019 Object* new_value = new_descriptors->GetValue(i);
3020 Object* old_value = old_descriptors->GetValue(i);
// Field-type compatibility depends on the old descriptor kind: FIELD
// types must be subsumed, CONSTANT values must be contained.
3023 if ((old_type == FIELD &&
3024 !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
3025 (old_type == CONSTANT &&
3026 !HeapType::cast(new_value)->NowContains(old_value)) ||
3027 (old_type == CALLBACKS &&
3028 !HeapType::Any()->Is(HeapType::cast(new_value)))) {
3029 return MaybeHandle<Map>();
3035 if (old_type != new_type || old_value != new_value) {
3036 return MaybeHandle<Map>();
3047 if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
3048 return handle(new_map);
// Stores a named property through the object's named-property interceptor,
// if one defines a setter; otherwise (or when the interceptor declines by
// returning an empty result) falls through to SetPropertyPostInterceptor.
// Symbols are not supported by the interceptor API and are returned as-is.
// NOTE(review): excerpt is truncated (the `name` parameter line and some
// closing braces are not visible); verify against the full source.
3052 MaybeHandle<Object> JSObject::SetPropertyWithInterceptor(
3053 Handle<JSObject> object,
3055 Handle<Object> value,
3056 PropertyAttributes attributes,
3057 StrictMode strict_mode) {
3058 // TODO(rossberg): Support symbols in the API.
3059 if (name->IsSymbol()) return value;
3060 Isolate* isolate = object->GetIsolate();
3061 Handle<String> name_string = Handle<String>::cast(name);
3062 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
3063 if (!interceptor->setter()->IsUndefined()) {
3065 ApiNamedPropertyAccess("interceptor-named-set", *object, *name));
3066 PropertyCallbackArguments args(
3067 isolate, interceptor->data(), *object, *object);
3068 v8::NamedPropertySetterCallback setter =
3069 v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
// Never expose the hole to the embedder; substitute undefined.
3070 Handle<Object> value_unhole = value->IsTheHole()
3071 ? Handle<Object>(isolate->factory()->undefined_value()) : value;
3072 v8::Handle<v8::Value> result = args.Call(setter,
3073 v8::Utils::ToLocal(name_string),
3074 v8::Utils::ToLocal(value_unhole));
3075 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the store.
3076 if (!result.IsEmpty()) return value;
3078 return SetPropertyPostInterceptor(
3079 object, name, value, attributes, strict_mode);
// Generic named-property store entry point: performs a local lookup
// (including interceptors, third argument true), falls back to a map
// transition lookup if nothing is found, then dispatches to the
// LookupResult-based SetProperty overload.
3083 MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
3085 Handle<Object> value,
3086 PropertyAttributes attributes,
3087 StrictMode strict_mode,
3088 StoreFromKeyed store_mode) {
3089 LookupResult result(object->GetIsolate());
3090 object->LocalLookup(name, &result, true);
3091 if (!result.IsFound()) {
3092 object->map()->LookupTransition(JSObject::cast(*object), *name, &result);
3094 return SetProperty(object, &result, name, value, attributes, strict_mode,
// Stores `value` through an accessor `structure`, which may be an
// ExecutableAccessorInfo (API-style callback) or an AccessorPair
// (JavaScript getter/setter pair). A missing setter on an AccessorPair is
// a TypeError in strict mode and a silent no-op (returning `value`) in
// sloppy mode. DeclaredAccessorInfo is not yet handled (see TODO below).
3099 MaybeHandle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object,
3100 Handle<Object> structure,
3102 Handle<Object> value,
3103 Handle<JSObject> holder,
3104 StrictMode strict_mode) {
3105 Isolate* isolate = object->GetIsolate();
3107 // We should never get here to initialize a const with the hole
3108 // value since a const declaration would conflict with the setter.
3109 ASSERT(!value->IsTheHole());
3110 ASSERT(!structure->IsForeign());
3111 if (structure->IsExecutableAccessorInfo()) {
3112 // api style callbacks
3113 ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure);
// Guard against a receiver the accessor was not declared for.
3114 if (!data->IsCompatibleReceiver(*object)) {
3115 Handle<Object> args[2] = { name, object };
3116 Handle<Object> error =
3117 isolate->factory()->NewTypeError("incompatible_method_receiver",
3120 return isolate->Throw<Object>(error);
3122 // TODO(rossberg): Support symbols in the API.
3123 if (name->IsSymbol()) return value;
3124 Object* call_obj = data->setter();
3125 v8::AccessorSetterCallback call_fun =
3126 v8::ToCData<v8::AccessorSetterCallback>(call_obj);
// No setter callback: treat the store as a successful no-op.
3127 if (call_fun == NULL) return value;
3128 Handle<String> key = Handle<String>::cast(name);
3129 LOG(isolate, ApiNamedPropertyAccess("store", *object, *name));
3130 PropertyCallbackArguments args(isolate, data->data(), *object, *holder);
3132 v8::Utils::ToLocal(key),
3133 v8::Utils::ToLocal(value));
3134 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
3138 if (structure->IsAccessorPair()) {
3139 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
3140 if (setter->IsSpecFunction()) {
3141 // TODO(rossberg): nicer would be to cast to some JSCallable here...
3142 return SetPropertyWithDefinedSetter(
3143 object, Handle<JSReceiver>::cast(setter), value);
3145 if (strict_mode == SLOPPY) return value;
3146 Handle<Object> args[2] = { name, holder };
3147 Handle<Object> error =
3148 isolate->factory()->NewTypeError("no_setter_in_callback",
3149 HandleVector(args, 2));
3150 return isolate->Throw<Object>(error);
3154 // TODO(dcarney): Handle correctly.
3155 if (structure->IsDeclaredAccessorInfo()) {
3160 return MaybeHandle<Object>();
// Invokes a JavaScript-defined setter with `object` as the receiver and
// `value` as the single argument. Cooperates with the debugger: if
// step-into is active and the setter is a JSFunction, the debugger is
// told to step into it.
3164 MaybeHandle<Object> JSReceiver::SetPropertyWithDefinedSetter(
3165 Handle<JSReceiver> object,
3166 Handle<JSReceiver> setter,
3167 Handle<Object> value) {
3168 Isolate* isolate = object->GetIsolate();
3170 Debug* debug = isolate->debug();
3171 // Handle stepping into a setter if step into is active.
3172 // TODO(rossberg): should this apply to getters that are function proxies?
3173 if (debug->StepInActive() && setter->IsJSFunction()) {
3174 debug->HandleStepIn(
3175 Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
3178 Handle<Object> argv[] = { value };
3179 RETURN_ON_EXCEPTION(
3181 Execution::Call(isolate, setter, object, ARRAY_SIZE(argv), argv),
// Walks the prototype chain looking for an indexed CALLBACKS property (or a
// proxy) that should intercept a store to `index`. Returns the result of
// the callback/proxy store when one is found, or the hole value to signal
// that no prototype handled the store. Only dictionary-elements prototypes
// can carry indexed callbacks, so others are skipped.
3187 MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
3188 Handle<JSObject> object,
3190 Handle<Object> value,
3192 StrictMode strict_mode) {
3193 Isolate *isolate = object->GetIsolate();
3194 for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
3196 proto = handle(proto->GetPrototype(isolate), isolate)) {
3197 if (proto->IsJSProxy()) {
3198 return JSProxy::SetPropertyViaPrototypesWithHandler(
3199 Handle<JSProxy>::cast(proto),
// Proxies have no element traps; element index is stringified to a name.
3201 isolate->factory()->Uint32ToString(index), // name
3207 Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
3208 if (!js_proto->HasDictionaryElements()) {
3211 Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
3212 int entry = dictionary->FindEntry(index);
3213 if (entry != SeededNumberDictionary::kNotFound) {
3214 PropertyDetails details = dictionary->DetailsAt(entry);
3215 if (details.type() == CALLBACKS) {
3217 Handle<Object> structure(dictionary->ValueAt(entry), isolate);
3218 return SetElementWithCallback(object, structure, index, value, js_proto,
// The hole signals to the caller that no prototype intercepted the store.
3224 return isolate->factory()->the_hole_value();
// Called when no local property was found: checks the prototype chain for
// an accessor willing to handle the store, a proxy, or a read-only
// property. Sets *done when the store has been fully handled (including
// the read-only failure case); returns the hole when the caller should
// continue with a normal local store.
3228 MaybeHandle<Object> JSObject::SetPropertyViaPrototypes(
3229 Handle<JSObject> object,
3231 Handle<Object> value,
3232 PropertyAttributes attributes,
3233 StrictMode strict_mode,
3235 Isolate* isolate = object->GetIsolate();
3238 // We could not find a local property so let's check whether there is an
3239 // accessor that wants to handle the property, or whether the property is
3240 // read-only on the prototype chain.
3241 LookupResult result(isolate);
3242 object->LookupRealNamedPropertyInPrototypes(name, &result);
3243 if (result.IsFound()) {
3244 switch (result.type()) {
3248 *done = result.IsReadOnly();
3251 PropertyAttributes attr = GetPropertyAttributeWithInterceptor(
3252 handle(result.holder()), object, name, true);
3253 *done = !!(attr & READ_ONLY);
3258 Handle<Object> callback_object(result.GetCallbackObject(), isolate);
3259 return SetPropertyWithCallback(object, callback_object, name, value,
3260 handle(result.holder()), strict_mode);
3263 Handle<JSProxy> proxy(result.proxy());
3264 return JSProxy::SetPropertyViaPrototypesWithHandler(
3265 proxy, object, name, value, attributes, strict_mode, done);
3273 // If we get here with *done true, we have encountered a read-only property.
3275 if (strict_mode == SLOPPY) return value;
3276 Handle<Object> args[] = { name, object };
3277 Handle<Object> error = isolate->factory()->NewTypeError(
3278 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3279 return isolate->Throw<Object>(error);
// The hole tells the caller to proceed with the local store.
3281 return isolate->factory()->the_hole_value();
// Ensures the map's descriptor array has at least `slack` free slots,
// copying the descriptors into a larger array if needed and installing the
// copy on every map along the back-pointer chain that shares the array.
3285 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3286 // Only supports adding slack to owned descriptors.
3287 ASSERT(map->owns_descriptors());
3289 Handle<DescriptorArray> descriptors(map->instance_descriptors());
3290 int old_size = map->NumberOfOwnDescriptors();
// Fast path: enough slack already present.
3291 if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3293 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
3294 descriptors, old_size, slack);
3296 if (old_size == 0) {
3297 map->set_instance_descriptors(*new_descriptors);
3301 // If the source descriptors had an enum cache we copy it. This ensures
3302 // that the maps to which we push the new descriptor array back can rely
3303 // on a cache always being available once it is set. If the map has more
3304 // enumerated descriptors than available in the original cache, the cache
3305 // will be lazily replaced by the extended cache when needed.
3306 if (descriptors->HasEnumCache()) {
3307 new_descriptors->CopyEnumCacheFrom(*descriptors);
3310 // Replace descriptors by new_descriptors in all maps that share it.
// Notify incremental marking that the old array's slots are being rewritten.
3311 map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);
3314 for (Object* current = map->GetBackPointer();
3315 !current->IsUndefined();
3316 current = walk_map->GetBackPointer()) {
3317 walk_map = Map::cast(current);
// Stop at the first ancestor that no longer shares the array.
3318 if (walk_map->instance_descriptors() != *descriptors) break;
3319 walk_map->set_instance_descriptors(*new_descriptors);
3322 map->set_instance_descriptors(*new_descriptors);
// Appends the AccessorInfo entries from `callbacks` into `array` (a
// DescriptorArray or FixedArray, selected by the appender policy T),
// skipping names already present. Returns the new count of valid
// descriptors. NOTE(review): the `template <class T>` line is not visible
// in this excerpt; verify against the full source.
3327 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3328 Handle<typename T::Array> array,
3329 int valid_descriptors) {
3330 int nof_callbacks = callbacks->length();
3332 Isolate* isolate = array->GetIsolate();
3333 // Ensure the keys are unique names before writing them into the
3334 // instance descriptor. Since it may cause a GC, it has to be done before we
3335 // temporarily put the heap in an invalid state while appending descriptors.
3336 for (int i = 0; i < nof_callbacks; ++i) {
3337 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3338 if (entry->name()->IsUniqueName()) continue;
3339 Handle<String> key =
3340 isolate->factory()->InternalizeString(
3341 Handle<String>(String::cast(entry->name())));
3342 entry->set_name(*key);
3345 // Fill in new callback descriptors. Process the callbacks from
3346 // back to front so that the last callback with a given name takes
3347 // precedence over previously added callbacks with that name.
3348 for (int i = nof_callbacks - 1; i >= 0; i--) {
3349 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3350 Handle<Name> key(Name::cast(entry->name()));
3351 // Check if a descriptor with this name already exists before writing.
3352 if (!T::Contains(key, entry, valid_descriptors, array)) {
3353 T::Insert(key, entry, valid_descriptors, array);
3354 valid_descriptors++;
3358 return valid_descriptors;
// Appender policy for AppendUniqueCallbacks that targets a DescriptorArray:
// Contains() searches existing descriptors by name; Insert() appends a
// CALLBACKS descriptor carrying the AccessorInfo and its attributes.
3361 struct DescriptorArrayAppender {
3362 typedef DescriptorArray Array;
3363 static bool Contains(Handle<Name> key,
3364 Handle<AccessorInfo> entry,
3365 int valid_descriptors,
3366 Handle<DescriptorArray> array) {
3367 DisallowHeapAllocation no_gc;
3368 return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
3370 static void Insert(Handle<Name> key,
3371 Handle<AccessorInfo> entry,
3372 int valid_descriptors,
3373 Handle<DescriptorArray> array) {
3374 DisallowHeapAllocation no_gc;
3375 CallbacksDescriptor desc(key, entry, entry->property_attributes());
3376 array->Append(&desc);
// Appender policy for AppendUniqueCallbacks that targets a plain FixedArray
// of AccessorInfo objects: Contains() does a linear name scan over the
// valid prefix; Insert() stores the entry at the next free slot.
3381 struct FixedArrayAppender {
3382 typedef FixedArray Array;
3383 static bool Contains(Handle<Name> key,
3384 Handle<AccessorInfo> entry,
3385 int valid_descriptors,
3386 Handle<FixedArray> array) {
3387 for (int i = 0; i < valid_descriptors; i++) {
3388 if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
3392 static void Insert(Handle<Name> key,
3393 Handle<AccessorInfo> entry,
3394 int valid_descriptors,
3395 Handle<FixedArray> array) {
3396 DisallowHeapAllocation no_gc;
3397 array->set(valid_descriptors, *entry);
// Appends the callbacks in `descriptors` (a NeanderArray of AccessorInfo)
// to the map's own descriptor array, deduplicating by name, and updates the
// map's own-descriptor count. The array must already have enough slack
// (see the ASSERT and Map::EnsureDescriptorSlack).
3402 void Map::AppendCallbackDescriptors(Handle<Map> map,
3403 Handle<Object> descriptors) {
3404 int nof = map->NumberOfOwnDescriptors();
3405 Handle<DescriptorArray> array(map->instance_descriptors());
3406 NeanderArray callbacks(descriptors);
3407 ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length());
3408 nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3409 map->SetNumberOfOwnDescriptors(nof);
// Appends the callbacks in `descriptors` into the FixedArray `array`,
// skipping entries whose name is already present among the first
// `valid_descriptors` elements; returns the updated valid count.
3413 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3414 Handle<FixedArray> array,
3415 int valid_descriptors) {
3416 NeanderArray callbacks(descriptors);
3417 ASSERT(array->length() >= callbacks.length() + valid_descriptors);
3418 return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
// Returns true if `map` is present (by handle identity) in `maps`,
// tolerating null entries in the list.
3424 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3425 ASSERT(!map.is_null());
3426 for (int i = 0; i < maps->length(); ++i) {
3427 if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
// Wraps a raw pointer in a Handle, mapping NULL to the null handle.
// NOTE(review): the `template <class T>` line is not visible in this
// excerpt; verify against the full source.
3434 static Handle<T> MaybeNull(T* p) {
3435 if (p == NULL) return Handle<T>::null();
3436 return Handle<T>(p);
// Finds the most general elements-kind transition of this map that is
// present in `candidates`, following the fast-elements-kind lattice.
// Packedness is respected: once a holey kind is chosen, packed candidates
// no longer qualify. Returns the null handle when no candidate matches.
3440 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3441 ElementsKind kind = elements_kind();
3442 Handle<Map> transitioned_map = Handle<Map>::null();
3443 Handle<Map> current_map(this);
3444 bool packed = IsFastPackedElementsKind(kind);
3445 if (IsTransitionableFastElementsKind(kind)) {
3446 while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3447 kind = GetNextMoreGeneralFastElementsKind(kind, false);
3448 Handle<Map> maybe_transitioned_map =
3449 MaybeNull(current_map->LookupElementsTransitionMap(kind));
// Stop at the first missing link in the transition chain.
3450 if (maybe_transitioned_map.is_null()) break;
3451 if (ContainsMap(candidates, maybe_transitioned_map) &&
3452 (packed || !IsFastPackedElementsKind(kind))) {
3453 transitioned_map = maybe_transitioned_map;
3454 if (!IsFastPackedElementsKind(kind)) packed = false;
3456 current_map = maybe_transitioned_map;
3459 return transitioned_map;
// Walks the map's elements-kind transition chain toward `to_kind` and
// returns the closest map reached — either the exact target or the last
// map on the chain before a transition is missing. A trailing transition
// to DICTIONARY_ELEMENTS is also followed when it matches `to_kind`.
3463 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3464 Map* current_map = map;
3466 IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3468 : TERMINAL_FAST_ELEMENTS_KIND;
3470 // Support for legacy API.
3471 if (IsExternalArrayElementsKind(to_kind) &&
3472 !IsFixedTypedArrayElementsKind(map->elements_kind())) {
3476 ElementsKind kind = map->elements_kind();
3477 while (kind != target_kind) {
3478 kind = GetNextTransitionElementsKind(kind);
3479 if (!current_map->HasElementsTransition()) return current_map;
3480 current_map = current_map->elements_transition_map();
// to_kind beyond the fast system (dictionary): peek one transition further.
3483 if (to_kind != kind && current_map->HasElementsTransition()) {
3484 ASSERT(to_kind == DICTIONARY_ELEMENTS);
3485 Map* next_map = current_map->elements_transition_map();
3486 if (next_map->elements_kind() == to_kind) return next_map;
3489 ASSERT(current_map->elements_kind() == target_kind);
// Returns the existing transition map for `to_kind`, or (per the truncated
// tail, presumably NULL) if the exact kind has no transition yet.
3494 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3495 Map* to_map = FindClosestElementsTransition(this, to_kind);
3496 if (to_map->elements_kind() == to_kind) return to_map;
// Returns whether this map is the map of the initial Array.prototype or
// the initial Object.prototype (used to detect stores that could affect
// array prototype chain optimizations).
3501 bool Map::IsMapInArrayPrototypeChain() {
3502 Isolate* isolate = GetIsolate();
3503 if (isolate->initial_array_prototype()->map() == this) {
3507 if (isolate->initial_object_prototype()->map() == this) {
// Creates (with INSERT_TRANSITION) every missing elements-kind transition
// between the map's current kind and `to_kind`, stepping through the
// transition lattice, and returns the final map with kind `to_kind`.
3515 static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
3516 ElementsKind to_kind) {
3517 ASSERT(IsTransitionElementsKind(map->elements_kind()));
3519 Handle<Map> current_map = map;
3521 ElementsKind kind = map->elements_kind();
3522 while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3523 kind = GetNextTransitionElementsKind(kind);
3524 current_map = Map::CopyAsElementsKind(
3525 current_map, kind, INSERT_TRANSITION);
3528 // In case we are exiting the fast elements kind system, just add the map in
3530 if (kind != to_kind) {
3531 current_map = Map::CopyAsElementsKind(
3532 current_map, to_kind, INSERT_TRANSITION);
3535 ASSERT(current_map->elements_kind() == to_kind);
// Transitions `map` to elements kind `to_kind`. Fast path: when `map` is
// the native context's canonical JSArray map for its kind, the target map
// is read directly out of the cached js_array_maps table. Otherwise falls
// through to the slow path.
3540 Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
3541 ElementsKind to_kind) {
3542 ElementsKind from_kind = map->elements_kind();
3543 if (from_kind == to_kind) return map;
3545 Isolate* isolate = map->GetIsolate();
3546 Context* native_context = isolate->context()->native_context();
3547 Object* maybe_array_maps = native_context->js_array_maps();
3548 if (maybe_array_maps->IsFixedArray()) {
3549 DisallowHeapAllocation no_gc;
3550 FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
3551 if (array_maps->get(from_kind) == *map) {
3552 Object* maybe_transitioned_map = array_maps->get(to_kind);
3553 if (maybe_transitioned_map->IsMap()) {
3554 return handle(Map::cast(maybe_transitioned_map));
3559 return TransitionElementsToSlow(map, to_kind);
// Slow path of TransitionElementsTo: decides whether the elements-kind
// change may be recorded as a real transition (only for non-shared maps
// moving in ascending generality within the fast system) or must be a
// detached copy (OMIT_TRANSITION).
3563 Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
3564 ElementsKind to_kind) {
3565 ElementsKind from_kind = map->elements_kind();
3567 if (from_kind == to_kind) {
3571 bool allow_store_transition =
3572 // Only remember the map transition if there is not an already existing
3573 // non-matching element transition.
3574 !map->IsUndefined() && !map->is_shared() &&
3575 IsTransitionElementsKind(from_kind);
3577 // Only store fast element maps in ascending generality.
3578 if (IsFastElementsKind(to_kind)) {
3579 allow_store_transition &=
3580 IsTransitionableFastElementsKind(from_kind) &&
3581 IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3584 if (!allow_store_transition) {
3585 return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
3588 return Map::AsElementsKind(map, to_kind);
// Returns a map with elements kind `kind`, reusing the closest existing
// transition and creating any missing transitions beyond it.
3593 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3594 Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));
3596 if (closest_map->elements_kind() == kind) {
3600 return AddMissingElementsTransitions(closest_map, kind);
// Convenience wrapper: computes the transitioned map for changing this
// object's elements kind to `to_kind`.
3604 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3605 ElementsKind to_kind) {
3606 Handle<Map> map(object->map());
3607 return Map::TransitionElementsTo(map, to_kind);
// Looks up a real (non-interceptor) named own property. Global proxies
// forward to their hidden global object. Fast-properties objects consult
// the descriptor array; dictionary-mode objects consult the property
// dictionary, unwrapping PropertyCells for global objects. Caching is
// disallowed for uninitialized (hole-valued) constants.
3611 void JSObject::LocalLookupRealNamedProperty(Handle<Name> name,
3612 LookupResult* result) {
3613 DisallowHeapAllocation no_gc;
3614 if (IsJSGlobalProxy()) {
3615 Object* proto = GetPrototype();
3616 if (proto->IsNull()) return result->NotFound();
3617 ASSERT(proto->IsJSGlobalObject());
3618 return JSObject::cast(proto)->LocalLookupRealNamedProperty(name, result);
3621 if (HasFastProperties()) {
3622 map()->LookupDescriptor(this, *name, result);
3623 // A property or a map transition was found. We return all of these result
3624 // types because LocalLookupRealNamedProperty is used when setting
3625 // properties where map transitions are handled.
3626 ASSERT(!result->IsFound() ||
3627 (result->holder() == this && result->IsFastPropertyType()));
3628 // Disallow caching for uninitialized constants. These can only
3630 if (result->IsField() &&
3631 result->IsReadOnly() &&
3632 RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
3633 result->DisallowCaching();
// Dictionary-mode (slow) properties path.
3638 int entry = property_dictionary()->FindEntry(name);
3639 if (entry != NameDictionary::kNotFound) {
3640 Object* value = property_dictionary()->ValueAt(entry);
3641 if (IsGlobalObject()) {
3642 PropertyDetails d = property_dictionary()->DetailsAt(entry);
3643 if (d.IsDeleted()) {
// Global object properties are stored in PropertyCells; unwrap.
3647 value = PropertyCell::cast(value)->value();
3649 // Make sure to disallow caching for uninitialized constants
3650 // found in the dictionary-mode objects.
3651 if (value->IsTheHole()) result->DisallowCaching();
3652 result->DictionaryResult(this, entry);
// Looks up a real named property locally first, then falls back to the
// prototype chain if nothing is found on the object itself.
3660 void JSObject::LookupRealNamedProperty(Handle<Name> name,
3661 LookupResult* result) {
3662 DisallowHeapAllocation no_gc;
3663 LocalLookupRealNamedProperty(name, result);
3664 if (result->IsFound()) return;
3666 LookupRealNamedPropertyInPrototypes(name, result);
// Walks the prototype chain looking up a real named property on each
// prototype. A proxy on the chain yields a handler result; otherwise the
// first prototype on which the property is found wins.
3670 void JSObject::LookupRealNamedPropertyInPrototypes(Handle<Name> name,
3671 LookupResult* result) {
3672 DisallowHeapAllocation no_gc;
3673 Isolate* isolate = GetIsolate();
3674 Heap* heap = isolate->heap();
3675 for (Object* pt = GetPrototype();
3676 pt != heap->null_value();
3677 pt = pt->GetPrototype(isolate)) {
3678 if (pt->IsJSProxy()) {
3679 return result->HandlerResult(JSProxy::cast(pt));
3681 JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
// Real lookups never produce interceptor results.
3682 ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
3683 if (result->IsFound()) return;
3689 // We only need to deal with CALLBACKS and INTERCEPTORS
// Store path taken after an access check on `object` failed. The only
// stores still permitted are through accessors explicitly marked as
// ALL_CAN_WRITE; anything else reports a failed access check to the
// embedder. If nothing was found locally (and check_prototype is set),
// the lookup is retried over real named properties on the chain.
3690 MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
3691 Handle<JSObject> object,
3692 LookupResult* result,
3694 Handle<Object> value,
3695 bool check_prototype,
3696 StrictMode strict_mode) {
3697 if (check_prototype && !result->IsProperty()) {
3698 object->LookupRealNamedPropertyInPrototypes(name, result);
3701 if (result->IsProperty()) {
3702 if (!result->IsReadOnly()) {
3703 switch (result->type()) {
3705 Object* obj = result->GetCallbackObject();
3706 if (obj->IsAccessorInfo()) {
3707 Handle<AccessorInfo> info(AccessorInfo::cast(obj));
3708 if (info->all_can_write()) {
3709 return SetPropertyWithCallback(object,
3713 handle(result->holder()),
3716 } else if (obj->IsAccessorPair()) {
3717 Handle<AccessorPair> pair(AccessorPair::cast(obj));
// This is a *write* path, so the pair must be marked ALL_CAN_WRITE,
// mirroring the AccessorInfo branch above. The previous code checked
// all_can_read() here, which would let a read-only-exempt accessor
// bypass the failed access check for stores.
3718 if (pair->all_can_write()) {
3719 return SetPropertyWithCallback(object,
3723 handle(result->holder()),
3730 // Retry over real named properties. Note that the only properties that
3731 // can be set this way are callbacks marked as ALL_CAN_WRITE on the chain.
3732 LookupResult r(object->GetIsolate());
3733 object->LookupRealNamedProperty(name, &r);
3734 if (r.IsProperty()) {
3735 return SetPropertyWithFailedAccessCheck(object,
// No writable-by-anyone accessor found: report the failed access check.
3751 Isolate* isolate = object->GetIsolate();
3752 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
3753 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Dispatches a resolved store: proxies go through their handler trap,
// everything else goes through the JSObject store machinery.
3758 MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
3759 LookupResult* result,
3761 Handle<Object> value,
3762 PropertyAttributes attributes,
3763 StrictMode strict_mode,
3764 StoreFromKeyed store_mode) {
3765 if (result->IsHandler()) {
3766 return JSProxy::SetPropertyWithHandler(handle(result->proxy()),
3767 object, key, value, attributes, strict_mode);
3769 return JSObject::SetPropertyForResult(Handle<JSObject>::cast(object),
3770 result, key, value, attributes, strict_mode, store_mode);
// Implements [[HasProperty]] for proxies by calling the "has" trap (with
// the derived fallback) and coercing the trap result to a boolean.
// Symbols are not yet supported and report false.
3775 bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) {
3776 Isolate* isolate = proxy->GetIsolate();
3778 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3779 if (name->IsSymbol()) return false;
3781 Handle<Object> args[] = { name };
3782 Handle<Object> result;
3783 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3787 isolate->derived_has_trap(),
3792 return result->BooleanValue();
// Implements [[Put]] for proxies: invokes the "set" trap (with the derived
// fallback) passing receiver, name, and value. Symbols are returned
// unchanged since the proxy API does not support them yet.
3796 MaybeHandle<Object> JSProxy::SetPropertyWithHandler(
3797 Handle<JSProxy> proxy,
3798 Handle<JSReceiver> receiver,
3800 Handle<Object> value,
3801 PropertyAttributes attributes,
3802 StrictMode strict_mode) {
3803 Isolate* isolate = proxy->GetIsolate();
3805 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3806 if (name->IsSymbol()) return value;
3808 Handle<Object> args[] = { receiver, name, value };
3809 RETURN_ON_EXCEPTION(
3813 isolate->derived_set_trap(),
// Handles a store that reached a proxy on the prototype chain. Fetches the
// property descriptor via the "getPropertyDescriptor" trap, normalizes it
// with to_complete_property_descriptor, and then emulates [[CanPut]]:
// - undefined descriptor: not handled here (*done = false, return hole);
// - non-configurable descriptors are rejected with a TypeError;
// - data descriptors: writable -> continue (hole), non-writable -> strict
//   error or sloppy no-op;
// - accessor descriptors: call the setter if present, else strict error or
//   sloppy no-op.
// *done is true whenever this function fully handled the store.
3822 MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3823 Handle<JSProxy> proxy,
3824 Handle<JSReceiver> receiver,
3826 Handle<Object> value,
3827 PropertyAttributes attributes,
3828 StrictMode strict_mode,
3830 Isolate* isolate = proxy->GetIsolate();
3831 Handle<Object> handler(proxy->handler(), isolate); // Trap might morph proxy.
3833 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3834 if (name->IsSymbol()) {
3836 return isolate->factory()->the_hole_value();
3839 *done = true; // except where redefined...
3840 Handle<Object> args[] = { name };
3841 Handle<Object> result;
3842 ASSIGN_RETURN_ON_EXCEPTION(
3845 "getPropertyDescriptor",
3851 if (result->IsUndefined()) {
3853 return isolate->factory()->the_hole_value();
3856 // Emulate [[GetProperty]] semantics for proxies.
3857 Handle<Object> argv[] = { result };
3858 Handle<Object> desc;
3859 ASSIGN_RETURN_ON_EXCEPTION(
3861 Execution::Call(isolate,
3862 isolate->to_complete_property_descriptor(),
3868 // [[GetProperty]] requires to check that all properties are configurable.
3869 Handle<String> configurable_name =
3870 isolate->factory()->InternalizeOneByteString(
3871 STATIC_ASCII_VECTOR("configurable_"));
3872 Handle<Object> configurable =
3873 Object::GetProperty(desc, configurable_name).ToHandleChecked();
3874 ASSERT(configurable->IsBoolean());
3875 if (configurable->IsFalse()) {
3876 Handle<String> trap =
3877 isolate->factory()->InternalizeOneByteString(
3878 STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3879 Handle<Object> args[] = { handler, trap, name };
3880 Handle<Object> error = isolate->factory()->NewTypeError(
3881 "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3882 return isolate->Throw<Object>(error);
3884 ASSERT(configurable->IsTrue());
3886 // Check for DataDescriptor.
3887 Handle<String> hasWritable_name =
3888 isolate->factory()->InternalizeOneByteString(
3889 STATIC_ASCII_VECTOR("hasWritable_"));
3890 Handle<Object> hasWritable =
3891 Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
3892 ASSERT(hasWritable->IsBoolean());
3893 if (hasWritable->IsTrue()) {
3894 Handle<String> writable_name =
3895 isolate->factory()->InternalizeOneByteString(
3896 STATIC_ASCII_VECTOR("writable_"));
3897 Handle<Object> writable =
3898 Object::GetProperty(desc, writable_name).ToHandleChecked();
3899 ASSERT(writable->IsBoolean());
// Writable data property: the caller should perform the actual store.
3900 *done = writable->IsFalse();
3901 if (!*done) return isolate->factory()->the_hole_value();
3902 if (strict_mode == SLOPPY) return value;
3903 Handle<Object> args[] = { name, receiver };
3904 Handle<Object> error = isolate->factory()->NewTypeError(
3905 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3906 return isolate->Throw<Object>(error);
3909 // We have an AccessorDescriptor.
3910 Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
3911 STATIC_ASCII_VECTOR("set_"));
3912 Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
3913 if (!setter->IsUndefined()) {
3914 // TODO(rossberg): nicer would be to cast to some JSCallable here...
3915 return SetPropertyWithDefinedSetter(
3916 receiver, Handle<JSReceiver>::cast(setter), value);
3919 if (strict_mode == SLOPPY) return value;
3920 Handle<Object> args2[] = { name, proxy };
3921 Handle<Object> error = isolate->factory()->NewTypeError(
3922 "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
3923 return isolate->Throw<Object>(error);
// Implements [[Delete]] for proxies via the "delete" trap. A falsy trap
// result under STRICT_DELETION raises a TypeError; otherwise the boolean
// trap result is returned. Symbols report false (unsupported).
3927 MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
3928 Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3929 Isolate* isolate = proxy->GetIsolate();
3931 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3932 if (name->IsSymbol()) return isolate->factory()->false_value();
3934 Handle<Object> args[] = { name };
3935 Handle<Object> result;
3936 ASSIGN_RETURN_ON_EXCEPTION(
3945 bool result_bool = result->BooleanValue();
3946 if (mode == STRICT_DELETION && !result_bool) {
3947 Handle<Object> handler(proxy->handler(), isolate);
3948 Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3949 STATIC_ASCII_VECTOR("delete"));
3950 Handle<Object> args[] = { handler, trap_name };
3951 Handle<Object> error = isolate->factory()->NewTypeError(
3952 "handler_failed", HandleVector(args, ARRAY_SIZE(args)));
3953 return isolate->Throw<Object>(error);
3955 return isolate->factory()->ToBoolean(result_bool);
// Element variant of [[Delete]] for proxies: stringifies the index and
// delegates to the named-property handler path.
3959 MaybeHandle<Object> JSProxy::DeleteElementWithHandler(
3960 Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3961 Isolate* isolate = proxy->GetIsolate();
3962 Handle<String> name = isolate->factory()->Uint32ToString(index);
3963 return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
// Computes PropertyAttributes for a proxy property by calling the
// "getPropertyDescriptor" trap, normalizing the result, and translating
// the descriptor fields (enumerable_/configurable_/writable_/set_) into
// DONT_ENUM / DONT_DELETE / READ_ONLY bits. An accessor with a setter
// counts as writable. Non-configurable results from the trap are a
// TypeError. Returns ABSENT for symbols and undefined descriptors; NONE
// is used as the error sentinel by the ASSIGN_RETURN macros.
3967 PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
3968 Handle<JSProxy> proxy,
3969 Handle<JSReceiver> receiver,
3970 Handle<Name> name) {
3971 Isolate* isolate = proxy->GetIsolate();
3972 HandleScope scope(isolate);
3974 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3975 if (name->IsSymbol()) return ABSENT;
3977 Handle<Object> args[] = { name };
3978 Handle<Object> result;
3979 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3981 proxy->CallTrap(proxy,
3982 "getPropertyDescriptor",
3988 if (result->IsUndefined()) return ABSENT;
3990 Handle<Object> argv[] = { result };
3991 Handle<Object> desc;
3992 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3994 Execution::Call(isolate,
3995 isolate->to_complete_property_descriptor(),
4001 // Convert result to PropertyAttributes.
4002 Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
4003 STATIC_ASCII_VECTOR("enumerable_"));
4004 Handle<Object> enumerable;
4005 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
4006 isolate, enumerable, Object::GetProperty(desc, enum_n), NONE);
4007 Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
4008 STATIC_ASCII_VECTOR("configurable_"));
4009 Handle<Object> configurable;
4010 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
4011 isolate, configurable, Object::GetProperty(desc, conf_n), NONE);
4012 Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
4013 STATIC_ASCII_VECTOR("writable_"));
4014 Handle<Object> writable;
4015 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
4016 isolate, writable, Object::GetProperty(desc, writ_n), NONE);
// An accessor descriptor with a setter is effectively writable.
4017 if (!writable->BooleanValue()) {
4018 Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
4019 STATIC_ASCII_VECTOR("set_"));
4020 Handle<Object> setter;
4021 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
4022 isolate, setter, Object::GetProperty(desc, set_n), NONE);
4023 writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
4026 if (configurable->IsFalse()) {
4027 Handle<Object> handler(proxy->handler(), isolate);
4028 Handle<String> trap = isolate->factory()->InternalizeOneByteString(
4029 STATIC_ASCII_VECTOR("getPropertyDescriptor"));
4030 Handle<Object> args[] = { handler, trap, name };
4031 Handle<Object> error = isolate->factory()->NewTypeError(
4032 "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
4033 isolate->Throw(*error);
4037 int attributes = NONE;
4038 if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
4039 if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
4040 if (!writable->BooleanValue()) attributes |= READ_ONLY;
4041 return static_cast<PropertyAttributes>(attributes);
// Element variant: stringifies the index and delegates to the named
// attribute computation.
4045 PropertyAttributes JSProxy::GetElementAttributeWithHandler(
4046 Handle<JSProxy> proxy,
4047 Handle<JSReceiver> receiver,
4049 Isolate* isolate = proxy->GetIsolate();
4050 Handle<String> name = isolate->factory()->Uint32ToString(index);
4051 return GetPropertyAttributeWithHandler(proxy, receiver, name);
// "Fixes" a proxy: converts it in place into a plain JSFunction or
// JSObject (via the factory Become* helpers), preserving its identity
// hash across the conversion if one had been assigned.
4055 void JSProxy::Fix(Handle<JSProxy> proxy) {
4056 Isolate* isolate = proxy->GetIsolate();
4058 // Save identity hash.
4059 Handle<Object> hash(proxy->GetIdentityHash(), isolate);
4061 if (proxy->IsJSFunctionProxy()) {
4062 isolate->factory()->BecomeJSFunction(proxy);
4063 // Code will be set on the JavaScript side.
4065 isolate->factory()->BecomeJSObject(proxy);
4067 ASSERT(proxy->IsJSObject());
4069 // Inherit identity, if it was present.
4070 if (hash->IsSmi()) {
4071 JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
4072 Handle<Smi>::cast(hash));
// Looks up trap `name` on the proxy's handler and invokes it with `argv`.
// If the trap is undefined, falls back to `derived` (a derived trap), or
// throws a TypeError ("handler_trap_missing") when no fallback exists.
// NOTE(review): parameter lines 4078/4080 (`const char* name`, `int argc`?)
// are missing from this dump — confirm the full signature upstream.
4077 MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
4079 Handle<Object> derived,
4081 Handle<Object> argv[]) {
4082 Isolate* isolate = proxy->GetIsolate();
4083 Handle<Object> handler(proxy->handler(), isolate);
4085 Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
4086 Handle<Object> trap;
4087 ASSIGN_RETURN_ON_EXCEPTION(
4089 Object::GetPropertyOrElement(handler, trap_name),
4092 if (trap->IsUndefined()) {
4093 if (derived.is_null()) {
4094 Handle<Object> args[] = { handler, trap_name };
4095 Handle<Object> error = isolate->factory()->NewTypeError(
4096 "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
4097 return isolate->Throw<Object>(error);
4099 trap = Handle<Object>(derived);
4102 return Execution::Call(isolate, trap, handler, argc, argv);
// Prepares `object`'s elements backing store so it can take on `map`:
// transitions or normalizes the elements kind as needed, then migrates the
// object to the (possibly adjusted) map. In-object property counts must match.
// NOTE(review): lines 4114-4115/4118/4120/4122 (else-branches/braces) are
// missing from this dump — verify the branch structure before modifying.
4106 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
4107 ASSERT(object->map()->inobject_properties() == map->inobject_properties());
4108 ElementsKind obj_kind = object->map()->elements_kind();
4109 ElementsKind map_kind = map->elements_kind();
4110 if (map_kind != obj_kind) {
4111 ElementsKind to_kind = map_kind;
4112 if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
4113 IsDictionaryElementsKind(obj_kind)) {
4116 if (IsDictionaryElementsKind(to_kind)) {
4117 NormalizeElements(object);
4119 TransitionElementsKind(object, to_kind);
4121 map = Map::AsElementsKind(map, to_kind);
4123 JSObject::MigrateToMap(object, map);
// Migrates a (deprecated-map) object to the most general existing map
// transition by generalizing field 0 to Representation::None/HeapType::None,
// then marks the resulting map as a migration target for future instances.
4127 void JSObject::MigrateInstance(Handle<JSObject> object) {
4128 // Converting any field to the most specific type will cause the
4129 // GeneralizeFieldRepresentation algorithm to create the most general existing
4130 // transition that matches the object. This achieves what is needed.
4131 Handle<Map> original_map(object->map());
4132 GeneralizeFieldRepresentation(
4133 object, 0, Representation::None(),
4134 HeapType::None(object->GetIsolate()),
4136 object->map()->set_migration_target(true);
4137 if (FLAG_trace_migration) {
4138 object->PrintInstanceMigration(stdout, *original_map, object->map());
// Best-effort migration: looks up the current (non-deprecated) map for the
// object's deprecated map and migrates to it if found. Deoptimization is
// disallowed for the duration. NOTE(review): the early-return body for the
// failed lookup (lines 4150-4151) and the final `return` are missing from
// this dump — presumably `return false;` / `return true;`; confirm upstream.
4144 bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
4145 Isolate* isolate = object->GetIsolate();
4146 DisallowDeoptimization no_deoptimization(isolate);
4147 Handle<Map> original_map(object->map(), isolate);
4148 Handle<Map> new_map;
4149 if (!Map::CurrentMapForDeprecatedInternal(original_map).ToHandle(&new_map)) {
4152 JSObject::MigrateToMap(object, new_map);
4153 if (FLAG_trace_migration) {
4154 object->PrintInstanceMigration(stdout, *original_map, object->map());
// Stores `value` by following a map transition found by `lookup`. Falls back
// to AddProperty (normalizing or copying the map) when the transition target
// is a CALLBACKS property or the attributes differ; otherwise generalizes the
// target field's representation/type if the value does not fit, migrates the
// object onto the transition map, and writes the value.
// NOTE(review): the `Handle<Name> name` parameter line (4163) and the final
// `return`/braces are missing from this dump.
4160 MaybeHandle<Object> JSObject::SetPropertyUsingTransition(
4161 Handle<JSObject> object,
4162 LookupResult* lookup,
4164 Handle<Object> value,
4165 PropertyAttributes attributes) {
4166 Handle<Map> transition_map(lookup->GetTransitionTarget());
4167 int descriptor = transition_map->LastAdded();
4169 Handle<DescriptorArray> descriptors(transition_map->instance_descriptors());
4170 PropertyDetails details = descriptors->GetDetails(descriptor);
4172 if (details.type() == CALLBACKS || attributes != details.attributes()) {
4173 // AddProperty will either normalize the object, or create a new fast copy
4174 // of the map. If we get a fast copy of the map, all field representations
4175 // will be tagged since the transition is omitted.
4176 return JSObject::AddProperty(
4177 object, name, value, attributes, SLOPPY,
4178 JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED,
4179 JSReceiver::OMIT_EXTENSIBILITY_CHECK,
4180 JSObject::FORCE_TAGGED, FORCE_FIELD, OMIT_TRANSITION);
4183 // Keep the target CONSTANT if the same value is stored.
4184 // TODO(verwaest): Also support keeping the placeholder
4185 // (value->IsUninitialized) as constant.
4186 if (!lookup->CanHoldValue(value)) {
4187 Representation field_representation = value->OptimalRepresentation();
4188 Handle<HeapType> field_type = value->OptimalType(
4189 lookup->isolate(), field_representation);
4190 transition_map = Map::GeneralizeRepresentation(
4191 transition_map, descriptor,
4192 field_representation, field_type, FORCE_FIELD);
4195 JSObject::MigrateToNewProperty(object, transition_map, value);
// Migrates `object` onto `map` and, if the newly added descriptor is a FIELD,
// writes `value` into that field. NOTE(review): the `Handle<Map> map`
// parameter line (4201) is missing from this dump.
4200 void JSObject::MigrateToNewProperty(Handle<JSObject> object,
4202 Handle<Object> value) {
4203 JSObject::MigrateToMap(object, map);
4204 if (map->GetLastDescriptorDetails().type() != FIELD) return;
4205 object->WriteToField(map->LastAdded(), *value);
// Writes `value` into the property slot for `descriptor`. For double-
// represented fields the value is stored into the existing HeapNumber box
// (mutating it in place); uninitialized sentinels are left untouched. No GC
// may occur here (raw Object* is held across the write).
// NOTE(review): the `} else {` before the tagged-store path (line ~4223) is
// missing from this dump.
4209 void JSObject::WriteToField(int descriptor, Object* value) {
4210 DisallowHeapAllocation no_gc;
4212 DescriptorArray* desc = map()->instance_descriptors();
4213 PropertyDetails details = desc->GetDetails(descriptor);
4215 ASSERT(details.type() == FIELD);
4217 int field_index = desc->GetFieldIndex(descriptor);
4218 if (details.representation().IsDouble()) {
4219 // Nothing more to be done.
4220 if (value->IsUninitialized()) return;
4221 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(field_index));
4222 box->set_value(value->Number());
4224 FastPropertyAtPut(field_index, value);
// File-local helper: stores `value` into the field found by `lookup`,
// first generalizing the field's representation/type (or converting a
// CONSTANT descriptor to a field) when the value does not fit as-is.
4229 static void SetPropertyToField(LookupResult* lookup,
4230 Handle<Object> value) {
4231 if (lookup->type() == CONSTANT || !lookup->CanHoldValue(value)) {
4232 Representation field_representation = value->OptimalRepresentation();
4233 Handle<HeapType> field_type = value->OptimalType(
4234 lookup->isolate(), field_representation);
4235 JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()),
4236 lookup->GetDescriptorIndex(),
4237 field_representation, field_type,
4240 lookup->holder()->WriteToField(lookup->GetDescriptorIndex(), *value);
// File-local helper: reconfigures an existing own property to hold `value`
// with `attributes`. Normalizes first when the object has too many fast
// properties; on the slow path replaces the dictionary entry; on the fast
// path generalizes the field (same attributes) or copies the map with
// all-general representations (attribute mismatch) before writing the field.
// NOTE(review): the `Handle<Name> name` parameter line (4245), early return
// after the slow-path store, and several closing braces are missing here.
4244 static void ConvertAndSetLocalProperty(LookupResult* lookup,
4246 Handle<Object> value,
4247 PropertyAttributes attributes) {
4248 Handle<JSObject> object(lookup->holder());
4249 if (object->TooManyFastProperties()) {
4250 JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
4253 if (!object->HasFastProperties()) {
4254 ReplaceSlowProperty(object, name, value, attributes);
4258 int descriptor_index = lookup->GetDescriptorIndex();
4259 if (lookup->GetAttributes() == attributes) {
4260 JSObject::GeneralizeFieldRepresentation(
4261 object, descriptor_index, Representation::Tagged(),
4262 HeapType::Any(lookup->isolate()), FORCE_FIELD);
4264 Handle<Map> old_map(object->map());
4265 Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map,
4266 descriptor_index, FORCE_FIELD, attributes, "attributes mismatch");
4267 JSObject::MigrateToMap(object, new_map);
4270 object->WriteToField(descriptor_index, *value);
// File-local helper: stores `value` with `attributes`, taking the plain
// field-store path when attributes already match (skipping uninitialized
// sentinels), otherwise converting/reconfiguring the property.
// NOTE(review): `Handle<Name> name` parameter line (4275) and the `} else {`
// between the two paths are missing from this dump.
4274 static void SetPropertyToFieldWithAttributes(LookupResult* lookup,
4276 Handle<Object> value,
4277 PropertyAttributes attributes) {
4278 if (lookup->GetAttributes() == attributes) {
4279 if (value->IsUninitialized()) return;
4280 SetPropertyToField(lookup, value);
4282 ConvertAndSetLocalProperty(lookup, name, value, attributes);
// The main property-store driver: handles access checks, global-proxy
// forwarding, prototype-chain setters, read-only errors (strict mode),
// transitions, and dispatches on the lookup type (NORMAL/FIELD/CONSTANT/
// CALLBACKS/INTERCEPTOR), finally emitting Object.observe change records.
// NOTE(review): this dump omits many structural lines (the `Handle<Name>
// name` parameter, `case` labels in the switch, closing braces, the final
// `return`); treat the visible text as a partial view of the original.
4287 MaybeHandle<Object> JSObject::SetPropertyForResult(
4288 Handle<JSObject> object,
4289 LookupResult* lookup,
4291 Handle<Object> value,
4292 PropertyAttributes attributes,
4293 StrictMode strict_mode,
4294 StoreFromKeyed store_mode) {
4295 Isolate* isolate = object->GetIsolate();
4297 // Make sure that the top context does not change when doing callbacks or
4298 // interceptor calls.
4299 AssertNoContextChange ncc(isolate);
4301 // Optimization for 2-byte strings often used as keys in a decompression
4302 // dictionary. We internalize these short keys to avoid constantly
4303 // reallocating them.
4304 if (name->IsString() && !name->IsInternalizedString() &&
4305 Handle<String>::cast(name)->length() <= 2) {
4306 name = isolate->factory()->InternalizeString(Handle<String>::cast(name));
4309 // Check access rights if needed.
4310 if (object->IsAccessCheckNeeded()) {
4311 if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
4312 return SetPropertyWithFailedAccessCheck(object, lookup, name, value,
4317 if (object->IsJSGlobalProxy()) {
4318 Handle<Object> proto(object->GetPrototype(), isolate);
4319 if (proto->IsNull()) return value;
4320 ASSERT(proto->IsJSGlobalObject());
4321 return SetPropertyForResult(Handle<JSObject>::cast(proto),
4322 lookup, name, value, attributes, strict_mode, store_mode);
4325 ASSERT(!lookup->IsFound() || lookup->holder() == *object ||
4326 lookup->holder()->map()->is_hidden_prototype());
4328 if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) {
4330 Handle<Object> result_object;
4331 ASSIGN_RETURN_ON_EXCEPTION(
4332 isolate, result_object,
4333 SetPropertyViaPrototypes(
4334 object, name, value, attributes, strict_mode, &done),
4336 if (done) return result_object;
4339 if (!lookup->IsFound()) {
4340 // Neither properties nor transitions found.
4342 object, name, value, attributes, strict_mode, store_mode);
4345 if (lookup->IsProperty() && lookup->IsReadOnly()) {
4346 if (strict_mode == STRICT) {
4347 Handle<Object> args[] = { name, object };
4348 Handle<Object> error = isolate->factory()->NewTypeError(
4349 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
4350 return isolate->Throw<Object>(error);
4356 Handle<Object> old_value = isolate->factory()->the_hole_value();
4357 bool is_observed = object->map()->is_observed() &&
4358 *name != isolate->heap()->hidden_string();
4359 if (is_observed && lookup->IsDataProperty()) {
4360 old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
4363 // This is a real property that is not read-only, or it is a
4364 // transition or null descriptor and there are no setters in the prototypes.
4365 MaybeHandle<Object> maybe_result = value;
4366 if (lookup->IsTransition()) {
4367 maybe_result = SetPropertyUsingTransition(handle(lookup->holder()), lookup,
4368 name, value, attributes);
4370 switch (lookup->type()) {
4372 SetNormalizedProperty(handle(lookup->holder()), lookup, value);
4375 SetPropertyToField(lookup, value);
4378 // Only replace the constant if necessary.
4379 if (*value == lookup->GetConstant()) return value;
4380 SetPropertyToField(lookup, value);
4383 Handle<Object> callback_object(lookup->GetCallbackObject(), isolate);
4384 return SetPropertyWithCallback(object, callback_object, name, value,
4385 handle(lookup->holder()), strict_mode);
4388 maybe_result = SetPropertyWithInterceptor(
4389 handle(lookup->holder()), name, value, attributes, strict_mode);
4397 Handle<Object> result;
4398 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
4401 if (lookup->IsTransition()) {
4402 EnqueueChangeRecord(object, "add", name, old_value);
4404 LookupResult new_lookup(isolate);
4405 object->LocalLookup(name, &new_lookup, true);
4406 if (new_lookup.IsDataProperty()) {
4407 Handle<Object> new_value =
4408 Object::GetPropertyOrElement(object, name).ToHandleChecked();
4409 if (!new_value->SameValue(*old_value)) {
4410 EnqueueChangeRecord(object, "update", name, old_value);
4420 // Set a real local property, even if it is READ_ONLY. If the property is not
4421 // present, add it with attributes NONE. This code is an exact clone of
4422 // SetProperty, with the check for IsReadOnly and the check for a
4423 // callback setter removed. The two lines looking up the LookupResult
4424 // result are also added. If one of the functions is changed, the other
4426 // Note that this method cannot be used to set the prototype of a function
4427 // because ConvertDescriptorToField() which is called in "case CALLBACKS:"
4428 // doesn't handle function prototypes correctly.
// NOTE(review): this dump omits structural lines (the `Handle<Name> name`
// and `StoreMode mode` parameters, `case` labels, braces, the trailing
// `return`); the sentence at 4424 is also cut mid-thought by a missing line.
4429 MaybeHandle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
4430 Handle<JSObject> object,
4432 Handle<Object> value,
4433 PropertyAttributes attributes,
4434 ValueType value_type,
4436 ExtensibilityCheck extensibility_check,
4437 StoreFromKeyed store_from_keyed) {
4438 Isolate* isolate = object->GetIsolate();
4440 // Make sure that the top context does not change when doing callbacks or
4441 // interceptor calls.
4442 AssertNoContextChange ncc(isolate);
4444 LookupResult lookup(isolate);
4445 object->LocalLookup(name, &lookup, true);
4446 if (!lookup.IsFound()) {
4447 object->map()->LookupTransition(*object, *name, &lookup);
4450 // Check access rights if needed.
4451 if (object->IsAccessCheckNeeded()) {
4452 if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
4453 return SetPropertyWithFailedAccessCheck(object, &lookup, name, value,
4458 if (object->IsJSGlobalProxy()) {
4459 Handle<Object> proto(object->GetPrototype(), isolate);
4460 if (proto->IsNull()) return value;
4461 ASSERT(proto->IsJSGlobalObject());
4462 return SetLocalPropertyIgnoreAttributes(Handle<JSObject>::cast(proto),
4463 name, value, attributes, value_type, mode, extensibility_check);
4466 if (lookup.IsInterceptor() ||
4467 (lookup.IsDescriptorOrDictionary() && lookup.type() == CALLBACKS)) {
4468 object->LocalLookupRealNamedProperty(name, &lookup);
4471 // Check for accessor in prototype chain removed here in clone.
4472 if (!lookup.IsFound()) {
4473 object->map()->LookupTransition(*object, *name, &lookup);
4474 TransitionFlag flag = lookup.IsFound()
4475 ? OMIT_TRANSITION : INSERT_TRANSITION;
4476 // Neither properties nor transitions found.
4477 return AddProperty(object, name, value, attributes, SLOPPY,
4478 store_from_keyed, extensibility_check, value_type, mode, flag);
4481 Handle<Object> old_value = isolate->factory()->the_hole_value();
4482 PropertyAttributes old_attributes = ABSENT;
4483 bool is_observed = object->map()->is_observed() &&
4484 *name != isolate->heap()->hidden_string();
4485 if (is_observed && lookup.IsProperty()) {
4486 if (lookup.IsDataProperty()) {
4487 old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
4489 old_attributes = lookup.GetAttributes();
4492 // Check of IsReadOnly removed from here in clone.
4493 if (lookup.IsTransition()) {
4494 Handle<Object> result;
4495 ASSIGN_RETURN_ON_EXCEPTION(
4497 SetPropertyUsingTransition(
4498 handle(lookup.holder()), &lookup, name, value, attributes),
4501 switch (lookup.type()) {
4503 ReplaceSlowProperty(object, name, value, attributes);
4506 SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4509 // Only replace the constant if necessary.
4510 if (lookup.GetAttributes() != attributes ||
4511 *value != lookup.GetConstant()) {
4512 SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4516 ConvertAndSetLocalProperty(&lookup, name, value, attributes);
4526 if (lookup.IsTransition()) {
4527 EnqueueChangeRecord(object, "add", name, old_value);
4528 } else if (old_value->IsTheHole()) {
4529 EnqueueChangeRecord(object, "reconfigure", name, old_value);
4531 LookupResult new_lookup(isolate);
4532 object->LocalLookup(name, &new_lookup, true);
4533 bool value_changed = false;
4534 if (new_lookup.IsDataProperty()) {
4535 Handle<Object> new_value =
4536 Object::GetPropertyOrElement(object, name).ToHandleChecked();
4537 value_changed = !old_value->SameValue(*new_value);
4539 if (new_lookup.GetAttributes() != old_attributes) {
4540 if (!value_changed) old_value = isolate->factory()->the_hole_value();
4541 EnqueueChangeRecord(object, "reconfigure", name, old_value);
4542 } else if (value_changed) {
4543 EnqueueChangeRecord(object, "update", name, old_value);
// Attribute lookup that deliberately bypasses interceptors: checks the own
// "real" named property first, then (optionally) continues up the prototype
// chain. NOTE(review): the `Handle<Name> name` parameter line (4555) and the
// trailing `return ABSENT;`/braces are missing from this dump.
4552 PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
4553 Handle<JSObject> object,
4554 Handle<JSObject> receiver,
4556 bool continue_search) {
4557 // Check local property, ignore interceptor.
4558 Isolate* isolate = object->GetIsolate();
4559 LookupResult result(isolate);
4560 object->LocalLookupRealNamedProperty(name, &result);
4561 if (result.IsFound()) return result.GetAttributes();
4563 if (continue_search) {
4564 // Continue searching via the prototype chain.
4565 Handle<Object> proto(object->GetPrototype(), isolate);
4566 if (!proto->IsNull()) {
4567 return JSReceiver::GetPropertyAttributeWithReceiver(
4568 Handle<JSObject>::cast(proto), receiver, name);
// Attribute lookup through the named-property interceptor: asks the query
// callback for exact attributes; failing that, a successful getter callback
// implies the property exists (reported as DONT_ENUM); otherwise falls back
// to the post-interceptor path. Symbols are not supported by the API and
// report ABSENT. NOTE(review): `Handle<Name> name` parameter line (4578) and
// some LOG(...) wrapper lines around 4596/4607 are missing from this dump.
4575 PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
4576 Handle<JSObject> object,
4577 Handle<JSObject> receiver,
4579 bool continue_search) {
4580 // TODO(rossberg): Support symbols in the API.
4581 if (name->IsSymbol()) return ABSENT;
4583 Isolate* isolate = object->GetIsolate();
4584 HandleScope scope(isolate);
4586 // Make sure that the top context does not change when doing
4587 // callbacks or interceptor calls.
4588 AssertNoContextChange ncc(isolate);
4590 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
4591 PropertyCallbackArguments args(
4592 isolate, interceptor->data(), *receiver, *object);
4593 if (!interceptor->query()->IsUndefined()) {
4594 v8::NamedPropertyQueryCallback query =
4595 v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
4597 ApiNamedPropertyAccess("interceptor-named-has", *object, *name));
4598 v8::Handle<v8::Integer> result =
4599 args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
4600 if (!result.IsEmpty()) {
4601 ASSERT(result->IsInt32());
4602 return static_cast<PropertyAttributes>(result->Int32Value());
4604 } else if (!interceptor->getter()->IsUndefined()) {
4605 v8::NamedPropertyGetterCallback getter =
4606 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
4608 ApiNamedPropertyAccess("interceptor-named-get-has", *object, *name));
4609 v8::Handle<v8::Value> result =
4610 args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
4611 if (!result.IsEmpty()) return DONT_ENUM;
4613 return GetPropertyAttributePostInterceptor(
4614 object, receiver, name, continue_search);
// Public entry point for attribute lookup on a receiver: routes array-index
// keys to the element path, otherwise performs a full prototype-chain Lookup
// and delegates to GetPropertyAttributeForResult.
// NOTE(review): the `Handle<Name> key` parameter and the `uint32_t index;`
// declaration (lines 4621-4622) are missing from this dump.
4618 PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver(
4619 Handle<JSReceiver> object,
4620 Handle<JSReceiver> receiver,
4623 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
4624 return JSObject::GetElementAttributeWithReceiver(
4625 Handle<JSObject>::cast(object), receiver, index, true);
4628 LookupResult lookup(object->GetIsolate());
4629 object->Lookup(key, &lookup);
4630 return GetPropertyAttributeForResult(object, receiver, &lookup, key, true);
// Converts a completed LookupResult into PropertyAttributes, handling access
// checks, proxies (handler trap path), interceptors, and the ordinary
// descriptor types. NOTE(review): the `Handle<Name> name` parameter, the
// remaining `case` labels, and the final `return ABSENT;` are missing from
// this dump.
4634 PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
4635 Handle<JSReceiver> object,
4636 Handle<JSReceiver> receiver,
4637 LookupResult* lookup,
4639 bool continue_search) {
4640 // Check access rights if needed.
4641 if (object->IsAccessCheckNeeded()) {
4642 Heap* heap = object->GetHeap();
4643 Handle<JSObject> obj = Handle<JSObject>::cast(object);
4644 if (!heap->isolate()->MayNamedAccess(obj, name, v8::ACCESS_HAS)) {
4645 return JSObject::GetPropertyAttributeWithFailedAccessCheck(
4646 obj, lookup, name, continue_search);
4649 if (lookup->IsFound()) {
4650 switch (lookup->type()) {
4651 case NORMAL: // fall through
4655 return lookup->GetAttributes();
4657 return JSProxy::GetPropertyAttributeWithHandler(
4658 handle(lookup->proxy()), receiver, name);
4661 return JSObject::GetPropertyAttributeWithInterceptor(
4662 handle(lookup->holder()),
4663 Handle<JSObject>::cast(receiver),
// Own-property (non-prototype-walking) attribute lookup: array-index names
// go through the element path; otherwise LocalLookup + attribute conversion
// with continue_search=false. NOTE(review): the `uint32_t index;`
// declaration (line 4677) is missing from this dump.
4674 PropertyAttributes JSReceiver::GetLocalPropertyAttribute(
4675 Handle<JSReceiver> object, Handle<Name> name) {
4676 // Check whether the name is an array index.
4678 if (object->IsJSObject() && name->AsArrayIndex(&index)) {
4679 return GetLocalElementAttribute(object, index);
4682 LookupResult lookup(object->GetIsolate());
4683 object->LocalLookup(name, &lookup, true);
4684 return GetPropertyAttributeForResult(object, object, &lookup, name, false);
// Element attribute lookup: performs the indexed access check (reporting a
// failed check and, per the TODO, possibly missing a scheduled-exception
// check), forwards global proxies to their global object, routes through the
// indexed interceptor when present (except during bootstrapping), and
// otherwise uses the interceptor-free path.
// NOTE(review): the `uint32_t index` parameter line (4691) and the ABSENT
// return after the failed access check are missing from this dump.
4688 PropertyAttributes JSObject::GetElementAttributeWithReceiver(
4689 Handle<JSObject> object,
4690 Handle<JSReceiver> receiver,
4692 bool continue_search) {
4693 Isolate* isolate = object->GetIsolate();
4695 // Check access rights if needed.
4696 if (object->IsAccessCheckNeeded()) {
4697 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
4698 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
4699 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
4704 if (object->IsJSGlobalProxy()) {
4705 Handle<Object> proto(object->GetPrototype(), isolate);
4706 if (proto->IsNull()) return ABSENT;
4707 ASSERT(proto->IsJSGlobalObject());
4708 return JSObject::GetElementAttributeWithReceiver(
4709 Handle<JSObject>::cast(proto), receiver, index, continue_search);
4712 // Check for lookup interceptor except when bootstrapping.
4713 if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4714 return JSObject::GetElementAttributeWithInterceptor(
4715 object, receiver, index, continue_search);
4718 return GetElementAttributeWithoutInterceptor(
4719 object, receiver, index, continue_search);
// Element attribute lookup through the indexed interceptor: the query
// callback yields exact attributes; a successful getter callback implies
// existence (reported as NONE); otherwise fall back to the interceptor-free
// path. NOTE(review): the `uint32_t index` parameter line (4726) and LOG(...)
// wrapper lines around 4741/4749 are missing from this dump.
4723 PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
4724 Handle<JSObject> object,
4725 Handle<JSReceiver> receiver,
4727 bool continue_search) {
4728 Isolate* isolate = object->GetIsolate();
4729 HandleScope scope(isolate);
4731 // Make sure that the top context does not change when doing
4732 // callbacks or interceptor calls.
4733 AssertNoContextChange ncc(isolate);
4735 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4736 PropertyCallbackArguments args(
4737 isolate, interceptor->data(), *receiver, *object);
4738 if (!interceptor->query()->IsUndefined()) {
4739 v8::IndexedPropertyQueryCallback query =
4740 v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
4742 ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4743 v8::Handle<v8::Integer> result = args.Call(query, index);
4744 if (!result.IsEmpty())
4745 return static_cast<PropertyAttributes>(result->Int32Value());
4746 } else if (!interceptor->getter()->IsUndefined()) {
4747 v8::IndexedPropertyGetterCallback getter =
4748 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4750 ApiIndexedPropertyAccess(
4751 "interceptor-indexed-get-has", *object, index));
4752 v8::Handle<v8::Value> result = args.Call(getter, index);
4753 if (!result.IsEmpty()) return NONE;
4756 return GetElementAttributeWithoutInterceptor(
4757 object, receiver, index, continue_search);
// Interceptor-free element attribute lookup: asks the elements accessor for
// own-element attributes, special-cases indexed characters of String
// wrappers (READ_ONLY|DONT_DELETE), and otherwise walks the prototype chain
// (simulating [[GetOwnProperty]] on proxies) when continue_search is set.
// NOTE(review): the `uint32_t index` parameter line (4764) and the closing
// brace are missing from this dump.
4761 PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
4762 Handle<JSObject> object,
4763 Handle<JSReceiver> receiver,
4765 bool continue_search) {
4766 PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4767 receiver, object, index);
4768 if (attr != ABSENT) return attr;
4770 // Handle [] on String objects.
4771 if (object->IsStringObjectWithCharacterAt(index)) {
4772 return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
4775 if (!continue_search) return ABSENT;
4777 Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
4778 if (proto->IsJSProxy()) {
4779 // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4780 return JSProxy::GetElementAttributeWithHandler(
4781 Handle<JSProxy>::cast(proto), receiver, index);
4783 if (proto->IsNull()) return ABSENT;
4784 return GetElementAttributeWithReceiver(
4785 Handle<JSObject>::cast(proto), receiver, index, true);
// Allocates a fresh normalized-map cache as a tenured FixedArray of kEntries
// slots and reinterprets it as a NormalizedMapCache.
4789 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
4790 Handle<FixedArray> array(
4791 isolate->factory()->NewFixedArray(kEntries, TENURED));
4792 return Handle<NormalizedMapCache>::cast(array);
// Cache probe: returns the cached normalized map for `fast_map` (keyed by
// GetIndex) only when the slot holds a Map equivalent under the given
// normalization mode; otherwise an empty MaybeHandle (cache miss).
4796 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
4797 PropertyNormalizationMode mode) {
4798 DisallowHeapAllocation no_gc;
4799 Object* value = FixedArray::get(GetIndex(fast_map));
4800 if (!value->IsMap() ||
4801 !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
4802 return MaybeHandle<Map>();
4804 return handle(Map::cast(value));
// Stores `normalized_map` (which must be a dictionary map) in the cache slot
// computed from `fast_map`, overwriting any previous entry.
4808 void NormalizedMapCache::Set(Handle<Map> fast_map,
4809 Handle<Map> normalized_map) {
4810 DisallowHeapAllocation no_gc;
4811 ASSERT(normalized_map->is_dictionary_map());
4812 FixedArray::set(GetIndex(fast_map), *normalized_map);
// Clears every cache entry. NOTE(review): the loop body (lines 4819+,
// presumably `set_undefined(i);`) is missing from this dump.
4816 void NormalizedMapCache::Clear() {
4817 int entries = length();
4818 for (int i = 0; i != entries; i++) {
// Records `code` under `name` in the code cache of the object's map.
// NOTE(review): the `Handle<Name> name` parameter line (4825) is missing
// from this dump.
4824 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4826 Handle<Code> code) {
4827 Handle<Map> map(object->map());
4828 Map::UpdateCodeCache(map, name, code);
// Converts a fast-properties object to dictionary (slow) mode: builds a
// NameDictionary from the map's own descriptors (CONSTANT/FIELD/CALLBACKS
// entries, enumeration order preserved via i+1 indices), shrinks the object
// in the heap with a filler for the freed in-object space, then installs the
// normalized map (release-store, to cooperate with the sweeper) and the
// dictionary. NOTE(review): `case` labels in the descriptor switch and
// several closing braces are missing from this dump.
4832 void JSObject::NormalizeProperties(Handle<JSObject> object,
4833 PropertyNormalizationMode mode,
4834 int expected_additional_properties) {
4835 if (!object->HasFastProperties()) return;
4837 // The global object is always normalized.
4838 ASSERT(!object->IsGlobalObject());
4839 // JSGlobalProxy must never be normalized
4840 ASSERT(!object->IsJSGlobalProxy());
4842 Isolate* isolate = object->GetIsolate();
4843 HandleScope scope(isolate);
4844 Handle<Map> map(object->map());
4845 Handle<Map> new_map = Map::Normalize(map, mode);
4847 // Allocate new content.
4848 int real_size = map->NumberOfOwnDescriptors();
4849 int property_count = real_size;
4850 if (expected_additional_properties > 0) {
4851 property_count += expected_additional_properties;
4853 property_count += 2; // Make space for two more properties.
4855 Handle<NameDictionary> dictionary =
4856 NameDictionary::New(isolate, property_count);
4858 Handle<DescriptorArray> descs(map->instance_descriptors());
4859 for (int i = 0; i < real_size; i++) {
4860 PropertyDetails details = descs->GetDetails(i);
4861 switch (details.type()) {
4863 Handle<Name> key(descs->GetKey(i));
4864 Handle<Object> value(descs->GetConstant(i), isolate);
4865 PropertyDetails d = PropertyDetails(
4866 details.attributes(), NORMAL, i + 1);
4867 dictionary = NameDictionary::Add(dictionary, key, value, d);
4871 Handle<Name> key(descs->GetKey(i));
4872 Handle<Object> value(
4873 object->RawFastPropertyAt(descs->GetFieldIndex(i)), isolate);
4875 PropertyDetails(details.attributes(), NORMAL, i + 1);
4876 dictionary = NameDictionary::Add(dictionary, key, value, d);
4880 Handle<Name> key(descs->GetKey(i));
4881 Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4882 PropertyDetails d = PropertyDetails(
4883 details.attributes(), CALLBACKS, i + 1);
4884 dictionary = NameDictionary::Add(dictionary, key, value, d);
4897 // Copy the next enumeration index from instance descriptor.
4898 dictionary->SetNextEnumerationIndex(real_size + 1);
4900 // From here on we cannot fail and we shouldn't GC anymore.
4901 DisallowHeapAllocation no_allocation;
4903 // Resize the object in the heap if necessary.
4904 int new_instance_size = new_map->instance_size();
4905 int instance_size_delta = map->instance_size() - new_instance_size;
4906 ASSERT(instance_size_delta >= 0);
4907 Heap* heap = isolate->heap();
4908 heap->CreateFillerObjectAt(object->address() + new_instance_size,
4909 instance_size_delta);
4910 heap->AdjustLiveBytes(object->address(),
4911 -instance_size_delta,
4912 Heap::FROM_MUTATOR);
4914 // We are storing the new map using release store after creating a filler for
4915 // the left-over space to avoid races with the sweeper thread.
4916 object->synchronized_set_map(*new_map);
4918 object->set_properties(*dictionary);
4920 isolate->counters()->props_to_dictionary()->Increment();
4923 if (FLAG_trace_normalization) {
4924 PrintF("Object properties have been normalized:\n");
// Converts a dictionary-mode object back to fast properties: counts the
// dictionary entries (JSFunctions become CONSTANT descriptors, other NORMAL
// values become tagged FIELDs, CALLBACKS stay callbacks), allocates a new
// map plus a fields array for out-of-object properties, fills descriptors in
// enumeration order, sorts them, and installs the new map/properties.
// NOTE(review): several structural lines (the "many properties" comment,
// `} else {` branches, `Handle<Name> key;` declaration, field-descriptor
// index argument, and closing braces) are missing from this dump.
4931 void JSObject::TransformToFastProperties(Handle<JSObject> object,
4932 int unused_property_fields) {
4933 if (object->HasFastProperties()) return;
4934 ASSERT(!object->IsGlobalObject());
4935 Isolate* isolate = object->GetIsolate();
4936 Factory* factory = isolate->factory();
4937 Handle<NameDictionary> dictionary(object->property_dictionary());
4939 // Make sure we preserve dictionary representation if there are too many
4941 int number_of_elements = dictionary->NumberOfElements();
4942 if (number_of_elements > kMaxNumberOfDescriptors) return;
4944 if (number_of_elements != dictionary->NextEnumerationIndex()) {
4945 NameDictionary::DoGenerateNewEnumerationIndices(dictionary);
4948 int instance_descriptor_length = 0;
4949 int number_of_fields = 0;
4951 // Compute the length of the instance descriptor.
4952 int capacity = dictionary->Capacity();
4953 for (int i = 0; i < capacity; i++) {
4954 Object* k = dictionary->KeyAt(i);
4955 if (dictionary->IsKey(k)) {
4956 Object* value = dictionary->ValueAt(i);
4957 PropertyType type = dictionary->DetailsAt(i).type();
4958 ASSERT(type != FIELD);
4959 instance_descriptor_length++;
4960 if (type == NORMAL && !value->IsJSFunction()) {
4961 number_of_fields += 1;
4966 int inobject_props = object->map()->inobject_properties();
4968 // Allocate new map.
4969 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
4970 new_map->set_dictionary_map(false);
4972 if (instance_descriptor_length == 0) {
4973 DisallowHeapAllocation no_gc;
4974 ASSERT_LE(unused_property_fields, inobject_props);
4975 // Transform the object.
4976 new_map->set_unused_property_fields(inobject_props);
4977 object->set_map(*new_map);
4978 object->set_properties(isolate->heap()->empty_fixed_array());
4979 // Check that it really works.
4980 ASSERT(object->HasFastProperties());
4984 // Allocate the instance descriptor.
4985 Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
4986 isolate, instance_descriptor_length);
4988 int number_of_allocated_fields =
4989 number_of_fields + unused_property_fields - inobject_props;
4990 if (number_of_allocated_fields < 0) {
4991 // There is enough inobject space for all fields (including unused).
4992 number_of_allocated_fields = 0;
4993 unused_property_fields = inobject_props - number_of_fields;
4996 // Allocate the fixed array for the fields.
4997 Handle<FixedArray> fields = factory->NewFixedArray(
4998 number_of_allocated_fields);
5000 // Fill in the instance descriptor and the fields.
5001 int current_offset = 0;
5002 for (int i = 0; i < capacity; i++) {
5003 Object* k = dictionary->KeyAt(i);
5004 if (dictionary->IsKey(k)) {
5005 Object* value = dictionary->ValueAt(i);
5007 if (k->IsSymbol()) {
5008 key = handle(Symbol::cast(k));
5010 // Ensure the key is a unique name before writing into the
5011 // instance descriptor.
5012 key = factory->InternalizeString(handle(String::cast(k)));
5015 PropertyDetails details = dictionary->DetailsAt(i);
5016 int enumeration_index = details.dictionary_index();
5017 PropertyType type = details.type();
5019 if (value->IsJSFunction()) {
5020 ConstantDescriptor d(key,
5021 handle(value, isolate),
5022 details.attributes());
5023 descriptors->Set(enumeration_index - 1, &d);
5024 } else if (type == NORMAL) {
5025 if (current_offset < inobject_props) {
5026 object->InObjectPropertyAtPut(current_offset,
5028 UPDATE_WRITE_BARRIER);
5030 int offset = current_offset - inobject_props;
5031 fields->set(offset, value);
5033 FieldDescriptor d(key,
5035 details.attributes(),
5036 // TODO(verwaest): value->OptimalRepresentation();
5037 Representation::Tagged());
5038 descriptors->Set(enumeration_index - 1, &d);
5039 } else if (type == CALLBACKS) {
5040 CallbacksDescriptor d(key,
5041 handle(value, isolate),
5042 details.attributes());
5043 descriptors->Set(enumeration_index - 1, &d);
5049 ASSERT(current_offset == number_of_fields);
5051 descriptors->Sort();
5053 DisallowHeapAllocation no_gc;
5054 new_map->InitializeDescriptors(*descriptors);
5055 new_map->set_unused_property_fields(unused_property_fields);
5057 // Transform the object.
5058 object->set_map(*new_map);
5060 object->set_properties(*fields);
5061 ASSERT(object->IsJSObject());
5063 // Check that it really works.
5064 ASSERT(object->HasFastProperties());
// Resets the object's elements to an empty backing store. Observed objects
// must keep dictionary elements (an empty SeededNumberDictionary, stored in
// slot 1 for sloppy-arguments objects); otherwise the object transitions to
// the initial fast elements kind with that map's canonical empty elements.
// NOTE(review): the `return;`/closing braces after the observed branch and
// the `} else {` inside it are missing from this dump.
5068 void JSObject::ResetElements(Handle<JSObject> object) {
5069 if (object->map()->is_observed()) {
5070 // Maintain invariant that observed elements are always in dictionary mode.
5071 Isolate* isolate = object->GetIsolate();
5072 Factory* factory = isolate->factory();
5073 Handle<SeededNumberDictionary> dictionary =
5074 SeededNumberDictionary::New(isolate, 0);
5075 if (object->map() == *factory->sloppy_arguments_elements_map()) {
5076 FixedArray::cast(object->elements())->set(1, *dictionary);
5078 object->set_elements(*dictionary);
5083 ElementsKind elements_kind = GetInitialFastElementsKind();
5084 if (!FLAG_smi_only_arrays) {
5085 elements_kind = FastSmiToObjectElementsKind(elements_kind);
5087 Handle<Map> map = JSObject::GetElementsTransitionMap(object, elements_kind);
5088 DisallowHeapAllocation no_gc;
5089 Handle<FixedArrayBase> elements(map->GetInitialElements());
5090 JSObject::SetMapAndElements(object, map, elements);
// Copies every non-hole element of a fast (Smi/object/double) backing store
// into |dictionary| and returns the (possibly reallocated) dictionary.
// Double elements are boxed as freshly allocated HeapNumbers.
// NOTE(review): |length| (the element count to copy) appears to be a
// parameter declared on a line not visible here — confirm.
5094 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
5095 Handle<FixedArrayBase> array,
5097 Handle<SeededNumberDictionary> dictionary) {
5098 Isolate* isolate = array->GetIsolate();
5099 Factory* factory = isolate->factory();
5100 bool has_double_elements = array->IsFixedDoubleArray();
5101 for (int i = 0; i < length; i++) {
5102 Handle<Object> value;
5103 if (has_double_elements) {
5104 Handle<FixedDoubleArray> double_array =
5105 Handle<FixedDoubleArray>::cast(array);
5106 if (double_array->is_the_hole(i)) {
5107 value = factory->the_hole_value();
5109 value = factory->NewHeapNumber(double_array->get_scalar(i));
5112 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
// Holes represent absent elements and are not entered into the dictionary.
5114 if (!value->IsTheHole()) {
5115 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
5117 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
// Converts the object's elements to dictionary (slow) mode and returns the
// dictionary.  If the elements are already a dictionary it is returned as-is.
// Not valid for external-array or fixed-typed-array elements.
5124 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
5125 Handle<JSObject> object) {
5126 ASSERT(!object->HasExternalArrayElements() &&
5127 !object->HasFixedTypedArrayElements());
5128 Isolate* isolate = object->GetIsolate();
5130 // Find the backing store.
5131 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
5133 (array->map() == isolate->heap()->sloppy_arguments_elements_map());
// For sloppy-arguments objects the real store lives in slot 1 of the
// parameter map.
5135 array = handle(FixedArrayBase::cast(
5136 Handle<FixedArray>::cast(array)->get(1)));
5138 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
5140 ASSERT(object->HasFastSmiOrObjectElements() ||
5141 object->HasFastDoubleElements() ||
5142 object->HasFastArgumentsElements());
5143 // Compute the effective length and allocate a new backing store.
// JSArrays use their length property; other objects use the store capacity.
5144 int length = object->IsJSArray()
5145 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5147 int old_capacity = 0;
5148 int used_elements = 0;
// Size the dictionary by the number of used slots, not raw capacity.
5149 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
5150 Handle<SeededNumberDictionary> dictionary =
5151 SeededNumberDictionary::New(isolate, used_elements);
5153 dictionary = CopyFastElementsToDictionary(array, length, dictionary);
5155 // Switch to using the dictionary as the backing storage for elements.
5157 FixedArray::cast(object->elements())->set(1, *dictionary);
5159 // Set the new map first to satify the elements type assert in
5161 Handle<Map> new_map =
5162 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
5164 JSObject::MigrateToMap(object, new_map);
5165 object->set_elements(*dictionary);
5168 isolate->counters()->elements_to_dictionary()->Increment();
5171 if (FLAG_trace_normalization) {
5172 PrintF("Object elements have been normalized:\n");
5177 ASSERT(object->HasDictionaryElements() ||
5178 object->HasDictionaryArgumentsElements());
// Produces a random, non-zero identity hash that fits in a Smi.  Retries on
// a zero draw (up to 30 attempts, per the loop condition below) and finally
// falls back to 1, since 0 is reserved to mean "no hash".
5183 Smi* JSReceiver::GenerateIdentityHash() {
5184 Isolate* isolate = GetIsolate();
5189 // Generate a random 32-bit hash value but limit range to fit
5191 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
5193 } while (hash_value == 0 && attempts < 30);
5194 hash_value = hash_value != 0 ? hash_value : 1; // never return 0
5196 return Smi::FromInt(hash_value);
// Stores |hash| as the object's identity hash, kept as a hidden property
// keyed by the reserved identity-hash string.
5200 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
5201 Isolate* isolate = object->GetIsolate();
5202 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// Returns the object's identity hash (a Smi) if one has been assigned, or
// undefined when no hash is stored yet.  Does not allocate.
5206 Object* JSObject::GetIdentityHash() {
5207 DisallowHeapAllocation no_gc;
5208 Isolate* isolate = GetIsolate();
5209 Object* stored_value =
5210 GetHiddenProperty(isolate->factory()->identity_hash_string());
// Anything other than a Smi (e.g. the-hole for "absent") maps to undefined.
5211 return stored_value->IsSmi()
5213 : isolate->heap()->undefined_value();
// Returns the existing identity hash, or generates and stores a fresh one.
// For a detached global proxy the hash cannot be stored; Smi 0 is returned
// as a sentinel in that case.
5217 Handle<Object> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
5218 Handle<Object> hash(object->GetIdentityHash(), object->GetIsolate());
5222 Isolate* isolate = object->GetIsolate();
5224 hash = handle(object->GenerateIdentityHash(), isolate);
5225 Handle<Object> result = SetHiddenProperty(object,
5226 isolate->factory()->identity_hash_string(), hash);
5228 if (result->IsUndefined()) {
5229 // Trying to get hash of detached proxy.
5230 return handle(Smi::FromInt(0), isolate);
// Proxies store their identity hash directly in a dedicated field.
5237 Object* JSProxy::GetIdentityHash() {
5238 return this->hash();
// Returns the proxy's identity hash, lazily generating one and caching it
// in the proxy's hash field on first use.
5242 Handle<Object> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
5243 Isolate* isolate = proxy->GetIsolate();
5245 Handle<Object> hash(proxy->GetIdentityHash(), isolate);
5249 hash = handle(proxy->GenerateIdentityHash(), isolate);
5250 proxy->set_hash(*hash);
// Looks up a hidden (engine-internal, not script-visible) property by |key|.
// Returns the-hole when the property is absent.  Global proxies delegate to
// their prototype (the real global object).  Does not allocate.
5255 Object* JSObject::GetHiddenProperty(Handle<Name> key) {
5256 DisallowHeapAllocation no_gc;
5257 ASSERT(key->IsUniqueName());
5258 if (IsJSGlobalProxy()) {
5259 // For a proxy, use the prototype as target object.
5260 Object* proxy_parent = GetPrototype();
5261 // If the proxy is detached, return undefined.
5262 if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
5263 ASSERT(proxy_parent->IsJSGlobalObject());
5264 return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
5266 ASSERT(!IsJSGlobalProxy());
5267 Object* inline_value = GetHiddenPropertiesHashTable();
// Fast path: when only the identity hash exists it is stored inline as a
// Smi instead of in a hash table.
5269 if (inline_value->IsSmi()) {
5270 // Handle inline-stored identity hash.
5271 if (*key == GetHeap()->identity_hash_string()) {
5272 return inline_value;
5274 return GetHeap()->the_hole_value();
// Undefined means no hidden-property storage has been created yet.
5278 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
5280 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
5281 Object* entry = hashtable->Lookup(key);
// Stores a hidden property |key| -> |value|.  Global proxies delegate to
// their prototype; a detached proxy yields undefined (failure).  The identity
// hash gets an inline fast path; everything else goes through the hidden
// ObjectHashTable, which is (re)attached to the object if Put reallocates it.
5286 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
5288 Handle<Object> value) {
5289 Isolate* isolate = object->GetIsolate();
5291 ASSERT(key->IsUniqueName());
5292 if (object->IsJSGlobalProxy()) {
5293 // For a proxy, use the prototype as target object.
5294 Handle<Object> proxy_parent(object->GetPrototype(), isolate);
5295 // If the proxy is detached, return undefined.
5296 if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
5297 ASSERT(proxy_parent->IsJSGlobalObject());
5298 return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
5300 ASSERT(!object->IsJSGlobalProxy());
5302 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
5304 // If there is no backing store yet, store the identity hash inline.
5305 if (value->IsSmi() &&
5306 *key == *isolate->factory()->identity_hash_string() &&
5307 (inline_value->IsUndefined() || inline_value->IsSmi())) {
5308 return JSObject::SetHiddenPropertiesHashTable(object, value);
5311 Handle<ObjectHashTable> hashtable =
5312 GetOrCreateHiddenPropertiesHashtable(object);
5314 // If it was found, check if the key is already in the dictionary.
5315 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
5317 if (*new_table != *hashtable) {
5318 // If adding the key expanded the dictionary (i.e., Add returned a new
5319 // dictionary), store it back to the object.
5320 SetHiddenPropertiesHashTable(object, new_table);
5323 // Return this to mark success.
// Removes the hidden property |key|.  A no-op when no hidden-property table
// exists (undefined) or only the inline identity hash is stored (Smi).
// Deletion is implemented by overwriting the entry with the-hole.
5328 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
5329 Isolate* isolate = object->GetIsolate();
5330 ASSERT(key->IsUniqueName());
5332 if (object->IsJSGlobalProxy()) {
// Delegate to the real global object; detached proxies have nothing to do.
5333 Handle<Object> proto(object->GetPrototype(), isolate);
5334 if (proto->IsNull()) return;
5335 ASSERT(proto->IsJSGlobalObject());
5336 return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
5339 Object* inline_value = object->GetHiddenPropertiesHashTable();
5341 // We never delete (inline-stored) identity hashes.
5342 ASSERT(*key != *isolate->factory()->identity_hash_string());
5343 if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
5345 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
5346 ObjectHashTable::Put(hashtable, key, isolate->factory()->the_hole_value());
// True when the object carries the reserved hidden-properties slot, i.e. the
// hidden string resolves to a real (non-absent) property on the object.
5350 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
5351 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
5352 return GetPropertyAttributePostInterceptor(
5353 object, object, hidden, false) != ABSENT;
// Returns the raw value stored under the hidden string: either an
// ObjectHashTable, an inline identity-hash Smi, or undefined when no hidden
// storage exists.  Must not be called on a global proxy.
5357 Object* JSObject::GetHiddenPropertiesHashTable() {
5358 ASSERT(!IsJSGlobalProxy());
5359 if (HasFastProperties()) {
5360 // If the object has fast properties, check whether the first slot
5361 // in the descriptor array matches the hidden string. Since the
5362 // hidden strings hash code is zero (and no other name has hash
5363 // code zero) it will always occupy the first entry if present.
5364 DescriptorArray* descriptors = this->map()->instance_descriptors();
5365 if (descriptors->number_of_descriptors() > 0) {
5366 int sorted_index = descriptors->GetSortedKeyIndex(0);
5367 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
5368 sorted_index < map()->NumberOfOwnDescriptors()) {
// The hidden slot is always a plain field with a tagged representation.
5369 ASSERT(descriptors->GetType(sorted_index) == FIELD);
5370 ASSERT(descriptors->GetDetails(sorted_index).representation().
5371 IsCompatibleForLoad(Representation::Tagged()));
5372 return this->RawFastPropertyAt(
5373 descriptors->GetFieldIndex(sorted_index));
5375 return GetHeap()->undefined_value();
5378 return GetHeap()->undefined_value();
// Slow-properties path: look the hidden string up in the dictionary.
5381 Isolate* isolate = GetIsolate();
5382 LookupResult result(isolate);
5383 LocalLookupRealNamedProperty(isolate->factory()->hidden_string(), &result);
5384 if (result.IsFound()) {
5385 ASSERT(result.IsNormal());
5386 ASSERT(result.holder() == this);
5387 Object* value = GetNormalizedProperty(&result);
5388 if (!value->IsTheHole()) return value;
5390 return GetHeap()->undefined_value();
// Returns the hidden-properties ObjectHashTable, creating it on demand.
// An inline-stored identity hash (Smi) is migrated into the new table.
// The table is attached under the hidden string, skipping interceptors and
// extensibility checks since this storage is engine-internal.
5394 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
5395 Handle<JSObject> object) {
5396 Isolate* isolate = object->GetIsolate();
5398 static const int kInitialCapacity = 4;
5399 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
5400 if (inline_value->IsHashTable()) {
5401 return Handle<ObjectHashTable>::cast(inline_value);
5404 Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
5405 isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);
5407 if (inline_value->IsSmi()) {
5408 // We were storing the identity hash inline and now allocated an actual
5409 // dictionary. Put the identity hash into the new dictionary.
5410 hashtable = ObjectHashTable::Put(hashtable,
5411 isolate->factory()->identity_hash_string(),
5415 JSObject::SetLocalPropertyIgnoreAttributes(
5417 isolate->factory()->hidden_string(),
5420 OPTIMAL_REPRESENTATION,
5422 OMIT_EXTENSIBILITY_CHECK).Assert();
// Stores |value| (either the hidden ObjectHashTable or an inline identity
// hash Smi) under the hidden string.  Fast-properties objects write directly
// into the existing field when the hidden slot is already present; otherwise
// the property is (re)added, ignoring attributes and extensibility.
5428 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
5429 Handle<Object> value) {
5430 ASSERT(!object->IsJSGlobalProxy());
5432 Isolate* isolate = object->GetIsolate();
5434 // We can store the identity hash inline iff there is no backing store
5435 // for hidden properties yet.
5436 ASSERT(JSObject::HasHiddenProperties(object) != value->IsSmi());
5437 if (object->HasFastProperties()) {
5438 // If the object has fast properties, check whether the first slot
5439 // in the descriptor array matches the hidden string. Since the
5440 // hidden strings hash code is zero (and no other name has hash
5441 // code zero) it will always occupy the first entry if present.
5442 DescriptorArray* descriptors = object->map()->instance_descriptors();
5443 if (descriptors->number_of_descriptors() > 0) {
5444 int sorted_index = descriptors->GetSortedKeyIndex(0);
5445 if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
5446 && sorted_index < object->map()->NumberOfOwnDescriptors()) {
5447 object->WriteToField(sorted_index, *value);
5453 SetLocalPropertyIgnoreAttributes(object,
5454 isolate->factory()->hidden_string(),
5457 OPTIMAL_REPRESENTATION,
5459 OMIT_EXTENSIBILITY_CHECK).Assert();
// Deletes a local property bypassing any interceptor.  Absent properties
// delete trivially (true).  The object is normalized first so the entry can
// be removed from dictionary-mode properties.
5464 Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
5467 // Check local property, ignore interceptor.
5468 Isolate* isolate = object->GetIsolate();
5469 LookupResult result(isolate);
5470 object->LocalLookupRealNamedProperty(name, &result);
5471 if (!result.IsFound()) return isolate->factory()->true_value();
5473 // Normalize object if needed.
5474 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5476 return DeleteNormalizedProperty(object, name, mode);
// Runs the named-property deleter interceptor for |name|.  If the callback
// produces a result it is reboxed and returned; otherwise deletion falls
// through to DeletePropertyPostInterceptor.  Symbols bypass the API entirely.
5480 MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
5481 Handle<JSObject> object, Handle<Name> name) {
5482 Isolate* isolate = object->GetIsolate();
5484 // TODO(rossberg): Support symbols in the API.
5485 if (name->IsSymbol()) return isolate->factory()->false_value();
5487 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
5488 if (!interceptor->deleter()->IsUndefined()) {
5489 v8::NamedPropertyDeleterCallback deleter =
5490 v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
5492 ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
5493 PropertyCallbackArguments args(
5494 isolate, interceptor->data(), *object, *object);
5495 v8::Handle<v8::Boolean> result =
5496 args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
// The embedder callback may have scheduled an exception; propagate it.
5497 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5498 if (!result.IsEmpty()) {
5499 ASSERT(result->IsBoolean());
5500 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5501 result_internal->VerifyApiCallResultType();
5502 // Rebox CustomArguments::kReturnValueOffset before returning.
5503 return handle(*result_internal, isolate);
5506 Handle<Object> result =
5507 DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
// Runs the indexed-property deleter interceptor for |index|.  A non-empty
// callback result is reboxed and returned; otherwise deletion is handed to
// the elements accessor.  Returns false when no deleter is installed.
5512 MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
5513 Handle<JSObject> object,
5515 Isolate* isolate = object->GetIsolate();
5516 Factory* factory = isolate->factory();
5518 // Make sure that the top context does not change when doing
5519 // callbacks or interceptor calls.
5520 AssertNoContextChange ncc(isolate);
5522 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
5523 if (interceptor->deleter()->IsUndefined()) return factory->false_value();
5524 v8::IndexedPropertyDeleterCallback deleter =
5525 v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
5527 ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
5528 PropertyCallbackArguments args(
5529 isolate, interceptor->data(), *object, *object);
5530 v8::Handle<v8::Boolean> result = args.Call(deleter, index);
5531 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5532 if (!result.IsEmpty()) {
5533 ASSERT(result->IsBoolean());
5534 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5535 result_internal->VerifyApiCallResultType();
5536 // Rebox CustomArguments::kReturnValueOffset before returning.
5537 return handle(*result_internal, isolate);
// Interceptor declined: perform the actual deletion via the accessor.
5539 MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
5540 object, index, NORMAL_DELETION);
5541 return delete_result;
// Implements [[Delete]] for an indexed property: access checks, string
// character pseudo-elements (non-configurable; throws in strict mode),
// global-proxy delegation, interceptors, and Object.observe change records.
5545 MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
5548 Isolate* isolate = object->GetIsolate();
5549 Factory* factory = isolate->factory();
5551 // Check access rights if needed.
5552 if (object->IsAccessCheckNeeded() &&
5553 !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
5554 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
5555 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5556 return factory->false_value();
// String wrapper characters behave like non-configurable elements.
5559 if (object->IsStringObjectWithCharacterAt(index)) {
5560 if (mode == STRICT_DELETION) {
5561 // Deleting a non-configurable property in strict mode.
5562 Handle<Object> name = factory->NewNumberFromUint(index);
5563 Handle<Object> args[2] = { name, object };
5564 Handle<Object> error =
5565 factory->NewTypeError("strict_delete_property",
5566 HandleVector(args, 2));
5567 isolate->Throw(*error);
// Empty handle signals the pending exception to the caller.
5568 return Handle<Object>();
5570 return factory->false_value();
5573 if (object->IsJSGlobalProxy()) {
5574 Handle<Object> proto(object->GetPrototype(), isolate);
5575 if (proto->IsNull()) return factory->false_value();
5576 ASSERT(proto->IsJSGlobalObject());
5577 return DeleteElement(Handle<JSObject>::cast(proto), index, mode);
// For observed objects capture the old value before deleting so the change
// record can report it; accessor elements report the-hole instead.
5580 Handle<Object> old_value;
5581 bool should_enqueue_change_record = false;
5582 if (object->map()->is_observed()) {
5583 should_enqueue_change_record = HasLocalElement(object, index);
5584 if (should_enqueue_change_record) {
5585 if (!GetLocalElementAccessorPair(object, index).is_null()) {
5586 old_value = Handle<Object>::cast(factory->the_hole_value());
5588 old_value = Object::GetElement(
5589 isolate, object, index).ToHandleChecked();
5594 // Skip interceptor if forcing deletion.
5595 MaybeHandle<Object> maybe_result;
5596 if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
5597 maybe_result = DeleteElementWithInterceptor(object, index);
5599 maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
5601 Handle<Object> result;
5602 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
// Only notify observers if the element actually disappeared.
5604 if (should_enqueue_change_record && !HasLocalElement(object, index)) {
5605 Handle<String> name = factory->Uint32ToString(index);
5606 EnqueueChangeRecord(object, "delete", name, old_value);
// Implements [[Delete]] (ECMA-262 3rd ed. 8.6.2.5) for a named property:
// access checks, global-proxy delegation, array-index dispatch to
// DeleteElement, DONT_DELETE enforcement (throws in strict mode),
// interceptors, and Object.observe change records.
5613 MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
5616 Isolate* isolate = object->GetIsolate();
5617 // ECMA-262, 3rd, 8.6.2.5
5618 ASSERT(name->IsName());
5620 // Check access rights if needed.
5621 if (object->IsAccessCheckNeeded() &&
5622 !isolate->MayNamedAccess(object, name, v8::ACCESS_DELETE)) {
5623 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
5624 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5625 return isolate->factory()->false_value();
5628 if (object->IsJSGlobalProxy()) {
5629 Object* proto = object->GetPrototype();
5630 if (proto->IsNull()) return isolate->factory()->false_value();
5631 ASSERT(proto->IsJSGlobalObject());
5632 return JSGlobalObject::DeleteProperty(
5633 handle(JSGlobalObject::cast(proto)), name, mode);
// Names that parse as array indices are element deletions.
5637 if (name->AsArrayIndex(&index)) {
5638 return DeleteElement(object, index, mode);
5641 LookupResult lookup(isolate);
5642 object->LocalLookup(name, &lookup, true);
5643 if (!lookup.IsFound()) return isolate->factory()->true_value();
5644 // Ignore attributes if forcing a deletion.
5645 if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
5646 if (mode == STRICT_DELETION) {
5647 // Deleting a non-configurable property in strict mode.
5648 Handle<Object> args[2] = { name, object };
5649 Handle<Object> error = isolate->factory()->NewTypeError(
5650 "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
5651 isolate->Throw(*error);
// Empty handle signals the pending exception to the caller.
5652 return Handle<Object>();
5654 return isolate->factory()->false_value();
// Capture the old value for observers; hidden-property writes are never
// observable.
5657 Handle<Object> old_value = isolate->factory()->the_hole_value();
5658 bool is_observed = object->map()->is_observed() &&
5659 *name != isolate->heap()->hidden_string();
5660 if (is_observed && lookup.IsDataProperty()) {
5661 old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
5663 Handle<Object> result;
5665 // Check for interceptor.
5666 if (lookup.IsInterceptor()) {
5667 // Skip interceptor if forcing a deletion.
5668 if (mode == FORCE_DELETION) {
5669 result = DeletePropertyPostInterceptor(object, name, mode);
5671 ASSIGN_RETURN_ON_EXCEPTION(
5673 DeletePropertyWithInterceptor(object, name),
5677 // Normalize object if needed.
5678 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5679 // Make sure the properties are normalized before removing the entry.
5680 result = DeleteNormalizedProperty(object, name, mode);
// Only notify observers if the property actually disappeared.
5683 if (is_observed && !HasLocalProperty(object, name)) {
5684 EnqueueChangeRecord(object, "delete", name, old_value);
// Dispatches element deletion: proxies go through their handler trap,
// ordinary objects through JSObject::DeleteElement.
5691 MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5694 if (object->IsJSProxy()) {
5695 return JSProxy::DeleteElementWithHandler(
5696 Handle<JSProxy>::cast(object), index, mode);
5698 return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
// Dispatches named-property deletion: proxies go through their handler trap,
// ordinary objects through JSObject::DeleteProperty.
5702 MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5705 if (object->IsJSProxy()) {
5706 return JSProxy::DeletePropertyWithHandler(
5707 Handle<JSProxy>::cast(object), name, mode);
5709 return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
// Returns true when |object| occurs in the given elements store.  Fast
// object-kind stores are scanned linearly (up to the JSArray length, if
// any); dictionary stores use a reverse value lookup.
5713 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5716 ASSERT(IsFastObjectElementsKind(kind) ||
5717 kind == DICTIONARY_ELEMENTS);
5718 if (IsFastObjectElementsKind(kind)) {
5719 int length = IsJSArray()
5720 ? Smi::cast(JSArray::cast(this)->length())->value()
5721 : elements->length();
5722 for (int i = 0; i < length; ++i) {
5723 Object* element = elements->get(i);
5724 if (!element->IsTheHole() && element == object) return true;
// Dictionary case: SlowReverseLookup returns undefined when absent.
5728 SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5729 if (!key->IsUndefined()) return true;
5735 // Check whether this object references another object.
// Scans the constructor, prototype, named properties, elements of every
// kind, and — for functions — the context (including nested arguments
// objects and the context extension).  Used by the debugger; no allocation.
5736 bool JSObject::ReferencesObject(Object* obj) {
5737 Map* map_of_this = map();
5738 Heap* heap = GetHeap();
5739 DisallowHeapAllocation no_allocation;
5741 // Is the object the constructor for this object?
5742 if (map_of_this->constructor() == obj) {
5746 // Is the object the prototype for this object?
5747 if (map_of_this->prototype() == obj) {
5751 // Check if the object is among the named properties.
5752 Object* key = SlowReverseLookup(obj);
5753 if (!key->IsUndefined()) {
5757 // Check if the object is among the indexed properties.
5758 ElementsKind kind = GetElementsKind();
5760 // Raw pixels and external arrays do not reference other
5762 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5763 case EXTERNAL_##TYPE##_ELEMENTS: \
5764 case TYPE##_ELEMENTS: \
5767 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5768 #undef TYPED_ARRAY_CASE
5770 case FAST_DOUBLE_ELEMENTS:
5771 case FAST_HOLEY_DOUBLE_ELEMENTS:
5773 case FAST_SMI_ELEMENTS:
5774 case FAST_HOLEY_SMI_ELEMENTS:
5777 case FAST_HOLEY_ELEMENTS:
5778 case DICTIONARY_ELEMENTS: {
5779 FixedArray* elements = FixedArray::cast(this->elements());
5780 if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5783 case SLOPPY_ARGUMENTS_ELEMENTS: {
5784 FixedArray* parameter_map = FixedArray::cast(elements());
5785 // Check the mapped parameters.
// Slots 0 and 1 of the parameter map hold the context and the arguments
// store, so mapped parameters start at index 2.
5786 int length = parameter_map->length();
5787 for (int i = 2; i < length; ++i) {
5788 Object* value = parameter_map->get(i);
5789 if (!value->IsTheHole() && value == obj) return true;
5791 // Check the arguments.
5792 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5793 kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5794 FAST_HOLEY_ELEMENTS;
5795 if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5800 // For functions check the context.
5801 if (IsJSFunction()) {
5802 // Get the constructor function for arguments array.
5803 JSObject* arguments_boilerplate =
5804 heap->isolate()->context()->native_context()->
5805 sloppy_arguments_boilerplate();
5806 JSFunction* arguments_function =
5807 JSFunction::cast(arguments_boilerplate->map()->constructor());
5809 // Get the context and don't check if it is the native context.
5810 JSFunction* f = JSFunction::cast(this);
5811 Context* context = f->context();
5812 if (context->IsNativeContext()) {
5816 // Check the non-special context slots.
5817 for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5818 // Only check JS objects.
5819 if (context->get(i)->IsJSObject()) {
5820 JSObject* ctxobj = JSObject::cast(context->get(i));
5821 // If it is an arguments array check the content.
// Arguments objects are identified by sharing the boilerplate's
// constructor function.
5822 if (ctxobj->map()->constructor() == arguments_function) {
5823 if (ctxobj->ReferencesObject(obj)) {
5826 } else if (ctxobj == obj) {
5832 // Check the context extension (if any) if it can have references.
5833 if (context->has_extension() && !context->IsCatchContext()) {
5834 // With harmony scoping, a JSFunction may have a global context.
5835 // TODO(mvstanton): walk into the ScopeInfo.
5836 if (FLAG_harmony_scoping && context->IsGlobalContext()) {
5840 return JSObject::cast(context->extension())->ReferencesObject(obj);
5844 // No references to object.
// Implements Object.preventExtensions: after access checks and global-proxy
// delegation, normalizes elements to (slow-only) dictionary mode, then
// transitions to a fresh non-extensible map.  Fails with a TypeError for
// external-array / typed-array backed objects.
5849 MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
5850 Isolate* isolate = object->GetIsolate();
// Already non-extensible: nothing to do.
5852 if (!object->map()->is_extensible()) return object;
5854 if (object->IsAccessCheckNeeded() &&
5855 !isolate->MayNamedAccess(
5856 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5857 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5858 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5859 return isolate->factory()->false_value();
5862 if (object->IsJSGlobalProxy()) {
5863 Handle<Object> proto(object->GetPrototype(), isolate);
5864 if (proto->IsNull()) return object;
5865 ASSERT(proto->IsJSGlobalObject());
5866 return PreventExtensions(Handle<JSObject>::cast(proto));
5869 // It's not possible to seal objects with external array elements
5870 if (object->HasExternalArrayElements() ||
5871 object->HasFixedTypedArrayElements()) {
5872 Handle<Object> error =
5873 isolate->factory()->NewTypeError(
5874 "cant_prevent_ext_external_array_elements",
5875 HandleVector(&object, 1));
5876 return isolate->Throw<Object>(error);
5879 // If there are fast elements we normalize.
5880 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
5881 ASSERT(object->HasDictionaryElements() ||
5882 object->HasDictionaryArgumentsElements());
5884 // Make sure that we never go back to fast case.
5885 dictionary->set_requires_slow_elements();
5887 // Do a map transition, other objects with this map may still
5889 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5890 Handle<Map> new_map = Map::Copy(handle(object->map()));
5892 new_map->set_is_extensible(false);
5893 JSObject::MigrateToMap(object, new_map);
5894 ASSERT(!object->map()->is_extensible());
// Object.observe: report the preventExtensions as a change record.
5896 if (object->map()->is_observed()) {
5897 EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
5898 isolate->factory()->the_hole_value());
// Marks every entry of a property/element dictionary frozen: DONT_DELETE on
// all, plus READ_ONLY on everything except accessor-pair callbacks (getters
// and setters must not be flagged read-only).
5904 template<typename Dictionary>
5905 static void FreezeDictionary(Dictionary* dictionary) {
5906 int capacity = dictionary->Capacity();
5907 for (int i = 0; i < capacity; i++) {
5908 Object* k = dictionary->KeyAt(i);
5909 if (dictionary->IsKey(k)) {
5910 PropertyDetails details = dictionary->DetailsAt(i);
5911 int attrs = DONT_DELETE;
5912 // READ_ONLY is an invalid attribute for JS setters/getters.
5913 if (details.type() == CALLBACKS) {
5914 Object* v = dictionary->ValueAt(i);
// Global-object values are wrapped in PropertyCells; unwrap first.
5915 if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
5916 if (!v->IsAccessorPair()) attrs |= READ_ONLY;
5920 details = details.CopyAddAttributes(
5921 static_cast<PropertyAttributes>(attrs));
5922 dictionary->DetailsAtPut(i, details);
// Implements Object.freeze: moves elements to a slow-only dictionary,
// transitions to a frozen non-extensible map (reusing an existing frozen
// transition or copying one when possible, otherwise normalizing properties
// and freezing the dictionary), then freezes all element entries.
5928 MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
5929 // Freezing sloppy arguments should be handled elsewhere.
5930 ASSERT(!object->HasSloppyArgumentsElements());
5931 ASSERT(!object->map()->is_observed());
// Already frozen: nothing to do.
5933 if (object->map()->is_frozen()) return object;
5935 Isolate* isolate = object->GetIsolate();
5936 if (object->IsAccessCheckNeeded() &&
5937 !isolate->MayNamedAccess(
5938 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5939 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5940 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5941 return isolate->factory()->false_value();
5944 if (object->IsJSGlobalProxy()) {
5945 Handle<Object> proto(object->GetPrototype(), isolate);
5946 if (proto->IsNull()) return object;
5947 ASSERT(proto->IsJSGlobalObject());
5948 return Freeze(Handle<JSObject>::cast(proto));
5951 // It's not possible to freeze objects with external array elements
5952 if (object->HasExternalArrayElements() ||
5953 object->HasFixedTypedArrayElements()) {
5954 Handle<Object> error =
5955 isolate->factory()->NewTypeError(
5956 "cant_prevent_ext_external_array_elements",
5957 HandleVector(&object, 1));
5958 return isolate->Throw<Object>(error);
5961 Handle<SeededNumberDictionary> new_element_dictionary;
5962 if (!object->elements()->IsDictionary()) {
5963 int length = object->IsJSArray()
5964 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5965 : object->elements()->length();
5969 object->GetElementsCapacityAndUsage(&capacity, &used);
5970 new_element_dictionary = SeededNumberDictionary::New(isolate, used);
5972 // Move elements to a dictionary; avoid calling NormalizeElements to avoid
5973 // unnecessary transitions.
5974 new_element_dictionary = CopyFastElementsToDictionary(
5975 handle(object->elements()), length, new_element_dictionary);
5977 // No existing elements, use a pre-allocated empty backing store
5978 new_element_dictionary =
5979 isolate->factory()->empty_slow_element_dictionary();
// Prefer an existing "frozen" map transition so frozen objects share maps.
5983 Handle<Map> old_map(object->map(), isolate);
5984 int transition_index = old_map->SearchTransition(
5985 isolate->heap()->frozen_symbol());
5986 if (transition_index != TransitionArray::kNotFound) {
5987 Handle<Map> transition_map(old_map->GetTransition(transition_index));
5988 ASSERT(transition_map->has_dictionary_elements());
5989 ASSERT(transition_map->is_frozen());
5990 ASSERT(!transition_map->is_extensible());
5991 JSObject::MigrateToMap(object, transition_map);
5992 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5993 // Create a new descriptor array with fully-frozen properties
5994 Handle<Map> new_map = Map::CopyForFreeze(old_map);
5995 JSObject::MigrateToMap(object, new_map);
5997 // Slow path: need to normalize properties for safety
5998 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
6000 // Create a new map, since other objects with this map may be extensible.
6001 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
6002 Handle<Map> new_map = Map::Copy(handle(object->map()));
6004 new_map->set_is_extensible(false);
6005 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
6006 JSObject::MigrateToMap(object, new_map);
6008 // Freeze dictionary-mode properties
6009 FreezeDictionary(object->property_dictionary());
6012 ASSERT(object->map()->has_dictionary_elements());
6013 if (!new_element_dictionary.is_null()) {
6014 object->set_elements(*new_element_dictionary);
// The shared empty dictionary must never be mutated, so only mark and
// freeze a dictionary the object actually owns.
6017 if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
6018 SeededNumberDictionary* dictionary = object->element_dictionary();
6019 // Make sure we never go back to the fast case
6020 dictionary->set_requires_slow_elements();
6021 // Freeze all elements in the dictionary
6022 FreezeDictionary(dictionary);
// Transitions the object to an observed map (Object.observe).  Reuses an
// existing "observed" map transition when present, otherwise copies the map
// (via CopyForObserved when a transition can be recorded).
6029 void JSObject::SetObserved(Handle<JSObject> object) {
6030 Isolate* isolate = object->GetIsolate();
6031 Handle<Map> new_map;
6032 Handle<Map> old_map(object->map(), isolate);
6033 ASSERT(!old_map->is_observed());
6034 int transition_index = old_map->SearchTransition(
6035 isolate->heap()->observed_symbol());
6036 if (transition_index != TransitionArray::kNotFound) {
6037 new_map = handle(old_map->GetTransition(transition_index), isolate);
6038 ASSERT(new_map->is_observed());
6039 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
6040 new_map = Map::CopyForObserved(old_map);
// Fallback: plain map copy with the observed bit set, no transition.
6042 new_map = Map::Copy(old_map);
6043 new_map->set_is_observed();
6045 JSObject::MigrateToMap(object, new_map);
// Reads a fast (in-object or properties-array) field and wraps the raw value
// in storage appropriate for |representation| (e.g. boxing unboxed doubles).
6049 Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
6050 Representation representation,
6052 Isolate* isolate = object->GetIsolate();
6053 Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
6054 return Object::NewStorageFor(isolate, raw_value, representation);
// Recursive deep-walk helper used for deep-copying object literals (and for
// a non-copying verification walk — see |copying_|).  ContextObject supplies
// the AllocationSite bookkeeping (scope enter/exit, isolate access).
6058 template<class ContextObject>
6059 class JSObjectWalkVisitor {
6061 JSObjectWalkVisitor(ContextObject* site_context, bool copying,
6062 JSObject::DeepCopyHints hints)
6063 : site_context_(site_context),
// Recursively walks |object| and, when copying, returns the deep copy.
6067 MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);
// Walks one JSObject-valued element or property inside a fresh
// allocation-site scope, so nested literals get their own sites.
6070 MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
6071 Handle<JSObject> object,
6072 Handle<JSObject> value) {
6073 Handle<AllocationSite> current_site = site_context()->EnterNewScope();
6074 MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
6075 site_context()->ExitScope(current_site, value);
6076 return copy_of_value;
6079 inline ContextObject* site_context() { return site_context_; }
6080 inline Isolate* isolate() { return site_context()->isolate(); }
6082 inline bool copying() const { return copying_; }
6085 ContextObject* site_context_;
6086 const bool copying_;
6087 const JSObject::DeepCopyHints hints_;
// Core of DeepWalk/DeepCopy: recursively visits (and, when |copying|, clones)
// an object literal boilerplate — fast properties, slow properties, then
// elements.  Bails out with an empty MaybeHandle on stack overflow.
// NOTE(review): this extract is missing many source lines (braces, else arms,
// switch header, case labels); code left byte-identical.
6091 template <class ContextObject>
6092 MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
6093 Handle<JSObject> object) {
6094 Isolate* isolate = this->isolate();
6095 bool copying = this->copying();
6096 bool shallow = hints_ == JSObject::kObjectIsShallowArray;
// Recursion depth is bounded only by the JS stack limit, checked explicitly.
6099 StackLimitCheck check(isolate);
6101 if (check.HasOverflowed()) {
6102 isolate->StackOverflow();
6103 return MaybeHandle<JSObject>();
// Bring the instance up to date before inspecting its map.
6107 if (object->map()->is_deprecated()) {
6108 JSObject::MigrateInstance(object);
6111 Handle<JSObject> copy;
6113 Handle<AllocationSite> site_to_pass;
6114 if (site_context()->ShouldCreateMemento(object)) {
6115 site_to_pass = site_context()->current();
6117 copy = isolate->factory()->CopyJSObjectWithAllocationSite(
6118 object, site_to_pass);
// When not copying, the "copy" must alias the walked object.
6123 ASSERT(copying || copy.is_identical_to(object));
6125 ElementsKind kind = copy->GetElementsKind();
6126 if (copying && IsFastSmiOrObjectElementsKind(kind) &&
6127 FixedArray::cast(copy->elements())->map() ==
6128 isolate->heap()->fixed_cow_array_map()) {
6129 isolate->counters()->cow_arrays_created_runtime()->Increment();
6133 HandleScope scope(isolate);
6135 // Deep copy local properties.
6136 if (copy->HasFastProperties()) {
// Fast-properties path: iterate own descriptors, recursing into FIELD
// values that are JSObjects and reboxing per the field representation.
6137 Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
6138 int limit = copy->map()->NumberOfOwnDescriptors();
6139 for (int i = 0; i < limit; i++) {
6140 PropertyDetails details = descriptors->GetDetails(i);
6141 if (details.type() != FIELD) continue;
6142 int index = descriptors->GetFieldIndex(i);
6143 Handle<Object> value(object->RawFastPropertyAt(index), isolate);
6144 if (value->IsJSObject()) {
6145 ASSIGN_RETURN_ON_EXCEPTION(
6147 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
6150 Representation representation = details.representation();
6151 value = Object::NewStorageFor(isolate, value, representation);
6154 copy->FastPropertyAtPut(index, *value);
// Slow-properties path (else branch in full source): enumerate local
// property names and recurse through the generic Get/SetProperty API.
6158 Handle<FixedArray> names =
6159 isolate->factory()->NewFixedArray(copy->NumberOfLocalProperties());
6160 copy->GetLocalPropertyNames(*names, 0);
6161 for (int i = 0; i < names->length(); i++) {
6162 ASSERT(names->get(i)->IsString());
6163 Handle<String> key_string(String::cast(names->get(i)));
6164 PropertyAttributes attributes =
6165 JSReceiver::GetLocalPropertyAttribute(copy, key_string);
6166 // Only deep copy fields from the object literal expression.
6167 // In particular, don't try to copy the length attribute of
6169 if (attributes != NONE) continue;
6170 Handle<Object> value =
6171 Object::GetProperty(copy, key_string).ToHandleChecked();
6172 if (value->IsJSObject()) {
6173 Handle<JSObject> result;
6174 ASSIGN_RETURN_ON_EXCEPTION(
6176 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
6179 // Creating object copy for literals. No strict mode needed.
6180 JSObject::SetProperty(
6181 copy, key_string, result, NONE, SLOPPY).Assert();
6187 // Deep copy local elements.
6188 // Pixel elements cannot be created using an object literal.
6189 ASSERT(!copy->HasExternalArrayElements());
6191 case FAST_SMI_ELEMENTS:
6193 case FAST_HOLEY_SMI_ELEMENTS:
6194 case FAST_HOLEY_ELEMENTS: {
6195 Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
6196 if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
// COW backing store: literals with COW elements hold no JSObjects,
// so there is nothing to recurse into (debug-checked below).
6198 for (int i = 0; i < elements->length(); i++) {
6199 ASSERT(!elements->get(i)->IsJSObject());
6203 for (int i = 0; i < elements->length(); i++) {
6204 Handle<Object> value(elements->get(i), isolate);
6205 ASSERT(value->IsSmi() ||
6206 value->IsTheHole() ||
6207 (IsFastObjectElementsKind(copy->GetElementsKind())));
6208 if (value->IsJSObject()) {
6209 Handle<JSObject> result;
6210 ASSIGN_RETURN_ON_EXCEPTION(
6212 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
6215 elements->set(i, *result);
6222 case DICTIONARY_ELEMENTS: {
// Dictionary elements: scan every slot, skipping non-key entries.
6223 Handle<SeededNumberDictionary> element_dictionary(
6224 copy->element_dictionary());
6225 int capacity = element_dictionary->Capacity();
6226 for (int i = 0; i < capacity; i++) {
6227 Object* k = element_dictionary->KeyAt(i);
6228 if (element_dictionary->IsKey(k)) {
6229 Handle<Object> value(element_dictionary->ValueAt(i), isolate);
6230 if (value->IsJSObject()) {
6231 Handle<JSObject> result;
6232 ASSIGN_RETURN_ON_EXCEPTION(
6234 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
6237 element_dictionary->ValueAtPut(i, *result);
6244 case SLOPPY_ARGUMENTS_ELEMENTS:
6249 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6250 case EXTERNAL_##TYPE##_ELEMENTS: \
6251 case TYPE##_ELEMENTS: \
6253 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6254 #undef TYPED_ARRAY_CASE
6256 case FAST_DOUBLE_ELEMENTS:
6257 case FAST_HOLEY_DOUBLE_ELEMENTS:
6258 // No contained objects, nothing to do.
// Walks a literal boilerplate without copying it, creating AllocationSites as
// a side effect of the walk (copying == false, so StructureWalk must return
// the original object — see the assert).
// NOTE(review): the trailing "return result;" line is missing from this
// extract; code left byte-identical.
6267 MaybeHandle<JSObject> JSObject::DeepWalk(
6268 Handle<JSObject> object,
6269 AllocationSiteCreationContext* site_context) {
6270 JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
6272 MaybeHandle<JSObject> result = v.StructureWalk(object);
6273 Handle<JSObject> for_assert;
6274 ASSERT(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
// Produces a deep copy of a literal boilerplate, consuming recorded
// AllocationSites (copying == true, so the result must be a fresh object —
// see the assert).
// NOTE(review): the trailing "return copy;" line is missing from this
// extract; code left byte-identical.
6279 MaybeHandle<JSObject> JSObject::DeepCopy(
6280 Handle<JSObject> object,
6281 AllocationSiteUsageContext* site_context,
6282 DeepCopyHints hints) {
6283 JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
6284 MaybeHandle<JSObject> copy = v.StructureWalk(object);
6285 Handle<JSObject> for_assert;
6286 ASSERT(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
// Returns the value of a real named data property (normal, field or constant),
// or undefined when the lookup fails or finds a non-data entry.
// NOTE(review): the |key| parameter line, the switch case labels (NORMAL /
// FIELD / CONSTANT presumably) and the closing return are missing from this
// extract; code left byte-identical.
6291 Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
6293 Isolate* isolate = object->GetIsolate();
6294 LookupResult lookup(isolate);
6296 DisallowHeapAllocation no_allocation;
6297 object->LookupRealNamedProperty(key, &lookup);
6299 Handle<Object> result = isolate->factory()->undefined_value();
6300 if (lookup.IsFound() && !lookup.IsTransition()) {
6301 switch (lookup.type()) {
6303 result = GetNormalizedProperty(
6304 Handle<JSObject>(lookup.holder(), isolate), &lookup);
6307 result = FastPropertyAt(Handle<JSObject>(lookup.holder(), isolate),
6308 lookup.representation(),
6309 lookup.GetFieldIndex().field_index());
6312 result = Handle<Object>(lookup.GetConstant(), isolate);
6326 // Tests for the fast common case for property enumeration:
6327 // - This object and all prototypes has an enum cache (which means that
6328 // it is no proxy, has no interceptors and needs no access checks).
6329 // - This object has no elements.
6330 // - No prototype has enumerable properties/elements.
// NOTE(review): the "return true;" tail is missing from this extract; code
// left byte-identical.
6331 bool JSReceiver::IsSimpleEnum() {
6332 Heap* heap = GetHeap();
// Walk the prototype chain; any non-JSObject (proxy etc.) disqualifies.
6333 for (Object* o = this;
6334 o != heap->null_value();
6335 o = JSObject::cast(o)->GetPrototype()) {
6336 if (!o->IsJSObject()) return false;
6337 JSObject* curr = JSObject::cast(o);
// EnumLength == kInvalidEnumCacheSentinel means no valid enum cache.
6338 int enum_length = curr->map()->EnumLength();
6339 if (enum_length == kInvalidEnumCacheSentinel) return false;
6340 if (curr->IsAccessCheckNeeded()) return false;
6341 ASSERT(!curr->HasNamedInterceptor());
6342 ASSERT(!curr->HasIndexedInterceptor());
6343 if (curr->NumberOfEnumElements() > 0) return false;
// Prototypes (curr != this) must contribute no enumerable properties.
6344 if (curr != this && enum_length != 0) return false;
// Returns whether |key| should be filtered out of an enumeration under
// |filter| (SYMBOLIC drops all symbols, PRIVATE_SYMBOL drops private symbols,
// STRING drops non-symbol keys).
// NOTE(review): the "return true;" / "return false;" bodies of each branch
// are missing from this extract; code left byte-identical.
6350 static bool FilterKey(Object* key, PropertyAttributes filter) {
6351 if ((filter & SYMBOLIC) && key->IsSymbol()) {
6355 if ((filter & PRIVATE_SYMBOL) &&
6356 key->IsSymbol() && Symbol::cast(key)->is_private()) {
6360 if ((filter & STRING) && !key->IsSymbol()) {
// Counts descriptors (all, or own only, per |which|) whose attributes and key
// pass |filter| / FilterKey.
// NOTE(review): the result accumulator declaration, increment and final
// return are missing from this extract; code left byte-identical.
6368 int Map::NumberOfDescribedProperties(DescriptorFlag which,
6369 PropertyAttributes filter) {
6371 DescriptorArray* descs = instance_descriptors();
6372 int limit = which == ALL_DESCRIPTORS
6373 ? descs->number_of_descriptors()
6374 : NumberOfOwnDescriptors();
6375 for (int i = 0; i < limit; i++) {
6376 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
6377 !FilterKey(descs->GetKey(i), filter)) {
// Returns the first unused field index: one past the maximum field index used
// by any own FIELD descriptor.
// NOTE(review): the "int max_index = ...;" initialization line is missing
// from this extract; code left byte-identical.
6385 int Map::NextFreePropertyIndex() {
6387 int number_of_own_descriptors = NumberOfOwnDescriptors();
6388 DescriptorArray* descs = instance_descriptors();
6389 for (int i = 0; i < number_of_own_descriptors; i++) {
6390 if (descs->GetType(i) == FIELD) {
6391 int current_index = descs->GetFieldIndex(i);
6392 if (current_index > max_index) max_index = current_index;
6395 return max_index + 1;
// Looks up |name| on this receiver only (no prototype walk, except through
// the global proxy and, when requested, hidden prototypes).  Fills |result|;
// proxies and interceptors short-circuit with special result states.
// NOTE(review): several lines (e.g. the IsJSProxy() guard and some returns)
// are missing from this extract; code left byte-identical.
6399 void JSReceiver::LocalLookup(
6400 Handle<Name> name, LookupResult* result, bool search_hidden_prototypes) {
6401 DisallowHeapAllocation no_gc;
6402 ASSERT(name->IsName());
// The global proxy forwards the lookup to the actual global object.
6404 if (IsJSGlobalProxy()) {
6405 Object* proto = GetPrototype();
6406 if (proto->IsNull()) return result->NotFound();
6407 ASSERT(proto->IsJSGlobalObject());
6408 return JSReceiver::cast(proto)->LocalLookup(
6409 name, result, search_hidden_prototypes);
6413 result->HandlerResult(JSProxy::cast(this));
6417 // Do not use inline caching if the object is a non-global object
6418 // that requires access checks.
6419 if (IsAccessCheckNeeded()) {
6420 result->DisallowCaching();
6423 JSObject* js_object = JSObject::cast(this);
6425 // Check for lookup interceptor except when bootstrapping.
6426 if (js_object->HasNamedInterceptor() &&
6427 !GetIsolate()->bootstrapper()->IsActive()) {
6428 result->InterceptorResult(js_object);
6432 js_object->LocalLookupRealNamedProperty(name, result);
6433 if (result->IsFound() || !search_hidden_prototypes) return;
// Continue into hidden prototypes only (API-created prototype chains).
6435 Object* proto = js_object->GetPrototype();
6436 if (!proto->IsJSReceiver()) return;
6437 JSReceiver* receiver = JSReceiver::cast(proto);
6438 if (receiver->map()->is_hidden_prototype()) {
6439 receiver->LocalLookup(name, result, search_hidden_prototypes);
// Full prototype-chain lookup per ES5 8.6.2.4: LocalLookup on each object in
// turn until found or the chain ends at null.
// NOTE(review): the trailing "result->NotFound();" line is missing from this
// extract; code left byte-identical.
6444 void JSReceiver::Lookup(Handle<Name> name, LookupResult* result) {
6445 DisallowHeapAllocation no_gc;
6446 // Ecma-262 3rd 8.6.2.4
6447 Handle<Object> null_value = GetIsolate()->factory()->null_value();
6448 for (Object* current = this;
6449 current != *null_value;
6450 current = JSObject::cast(current)->GetPrototype()) {
6451 JSReceiver::cast(current)->LocalLookup(name, result, false);
6452 if (result->IsFound()) return;
6458 // Search object and its prototype chain for callback properties.
// Stops at the first PropertyCallbacks result, or when the chain ends or
// leaves JSObject territory.
// NOTE(review): the trailing "result->NotFound();" line is missing from this
// extract; code left byte-identical.
6459 void JSObject::LookupCallbackProperty(Handle<Name> name, LookupResult* result) {
6460 DisallowHeapAllocation no_gc;
6461 Handle<Object> null_value = GetIsolate()->factory()->null_value();
6462 for (Object* current = this;
6463 current != *null_value && current->IsJSObject();
6464 current = JSObject::cast(current)->GetPrototype()) {
6465 JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
6466 if (result->IsPropertyCallbacks()) return;
// Debug helper: true iff every element of |array| is a String or Number
// (the only valid shapes for enumeration keys).  Used in ASSERTs by GetKeys.
// NOTE(review): the trailing "return true;" line is missing from this
// extract; code left byte-identical.
6472 static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
6473 int len = array->length();
6474 for (int i = 0; i < len; i++) {
6475 Object* e = array->get(i);
6476 if (!(e->IsString() || e->IsNumber())) return false;
// Returns |array| truncated to |length|: the array itself when already that
// size, otherwise a fresh FixedArray holding the first |length| entries.
// NOTE(review): the trailing "return new_array;" line is missing from this
// extract; code left byte-identical.
6482 static Handle<FixedArray> ReduceFixedArrayTo(
6483 Handle<FixedArray> array, int length) {
6484 ASSERT(array->length() >= length);
6485 if (array->length() == length) return array;
6487 Handle<FixedArray> new_array =
6488 array->GetIsolate()->factory()->NewFixedArray(length);
6489 for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
// Collects the enumerable own property keys of |object|, using (and, when
// |cache_result|, populating) the descriptor-array enum cache for fast-mode
// objects; dictionary-mode objects are handled via CopyEnumKeysTo.
// NOTE(review): this extract is missing a number of source lines (closing
// braces, the slow-path return); code left byte-identical.
6494 static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
6495 bool cache_result) {
6496 Isolate* isolate = object->GetIsolate();
6497 if (object->HasFastProperties()) {
6498 int own_property_count = object->map()->EnumLength();
6499 // If the enum length of the given map is set to kInvalidEnumCache, this
6500 // means that the map itself has never used the present enum cache. The
6501 // first step to using the cache is to set the enum length of the map by
6502 // counting the number of own descriptors that are not DONT_ENUM or
6504 if (own_property_count == kInvalidEnumCacheSentinel) {
6505 own_property_count = object->map()->NumberOfDescribedProperties(
6506 OWN_DESCRIPTORS, DONT_SHOW);
6508 ASSERT(own_property_count == object->map()->NumberOfDescribedProperties(
6509 OWN_DESCRIPTORS, DONT_SHOW));
6512 if (object->map()->instance_descriptors()->HasEnumCache()) {
6513 DescriptorArray* desc = object->map()->instance_descriptors();
6514 Handle<FixedArray> keys(desc->GetEnumCache(), isolate);
6516 // In case the number of properties required in the enum are actually
6517 // present, we can reuse the enum cache. Otherwise, this means that the
6518 // enum cache was generated for a previous (smaller) version of the
6519 // Descriptor Array. In that case we regenerate the enum cache.
6520 if (own_property_count <= keys->length()) {
6521 if (cache_result) object->map()->SetEnumLength(own_property_count);
6522 isolate->counters()->enum_cache_hits()->Increment();
6523 return ReduceFixedArrayTo(keys, own_property_count);
6527 Handle<Map> map(object->map());
// No descriptors at all: trivially the empty key set.
6529 if (map->instance_descriptors()->IsEmpty()) {
6530 isolate->counters()->enum_cache_hits()->Increment();
6531 if (cache_result) map->SetEnumLength(0);
6532 return isolate->factory()->empty_fixed_array();
6535 isolate->counters()->enum_cache_misses()->Increment();
// Cache miss: rebuild keys and, in parallel, field indices for the cache.
6537 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
6538 own_property_count);
6539 Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
6540 own_property_count);
6542 Handle<DescriptorArray> descs =
6543 Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
6545 int size = map->NumberOfOwnDescriptors();
6548 for (int i = 0; i < size; i++) {
6549 PropertyDetails details = descs->GetDetails(i);
6550 Object* key = descs->GetKey(i);
6551 if (!(details.IsDontEnum() || key->IsSymbol())) {
6552 storage->set(index, key);
6553 if (!indices.is_null()) {
// Indices are only valid for FIELD properties; any other type
// invalidates the whole indices array.
6554 if (details.type() != FIELD) {
6555 indices = Handle<FixedArray>();
6557 int field_index = descs->GetFieldIndex(i);
// Encode out-of-object fields as negative, then shift; the low bit
// (set below for doubles) tags the representation.
6558 if (field_index >= map->inobject_properties()) {
6559 field_index = -(field_index - map->inobject_properties() + 1);
6561 field_index = field_index << 1;
6562 if (details.representation().IsDouble()) {
6565 indices->set(index, Smi::FromInt(field_index));
6571 ASSERT(index == storage->length());
// Install the freshly built cache on the descriptor array.
6573 Handle<FixedArray> bridge_storage =
6574 isolate->factory()->NewFixedArray(
6575 DescriptorArray::kEnumCacheBridgeLength);
6576 DescriptorArray* desc = object->map()->instance_descriptors();
6577 desc->SetEnumCache(*bridge_storage,
6579 indices.is_null() ? Object::cast(Smi::FromInt(0))
6580 : Object::cast(*indices));
6582 object->map()->SetEnumLength(own_property_count);
// Dictionary-mode path (else branch in full source).
6586 Handle<NameDictionary> dictionary(object->property_dictionary());
6587 int length = dictionary->NumberOfEnumElements();
6589 return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
6591 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
6592 dictionary->CopyEnumKeysTo(*storage);
// Collects enumeration keys for |object| (and its prototype chain unless
// |type| == LOCAL_ONLY): proxy trap results, element keys, indexed/named
// interceptor keys, and cached enum property keys, unioned in order.
// NOTE(review): this extract is missing many source lines (continue/return
// statements, the final "return content;"); code left byte-identical.
6598 MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
6599 KeyCollectionType type) {
6600 USE(ContainsOnlyValidKeys);
6601 Isolate* isolate = object->GetIsolate();
6602 Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
// The arguments boilerplate/function are used below to detect arguments
// objects, whose keys must never be cached.
6603 Handle<JSObject> arguments_boilerplate = Handle<JSObject>(
6604 isolate->context()->native_context()->sloppy_arguments_boilerplate(),
6606 Handle<JSFunction> arguments_function = Handle<JSFunction>(
6607 JSFunction::cast(arguments_boilerplate->map()->constructor()),
6610 // Only collect keys if access is permitted.
6611 for (Handle<Object> p = object;
6612 *p != isolate->heap()->null_value();
6613 p = Handle<Object>(p->GetPrototype(isolate), isolate)) {
6614 if (p->IsJSProxy()) {
// Proxies delegate to the JS-level proxy_enumerate trap.
6615 Handle<JSProxy> proxy(JSProxy::cast(*p), isolate);
6616 Handle<Object> args[] = { proxy };
6617 Handle<Object> names;
6618 ASSIGN_RETURN_ON_EXCEPTION(
6620 Execution::Call(isolate,
6621 isolate->proxy_enumerate(),
6626 ASSIGN_RETURN_ON_EXCEPTION(
6628 FixedArray::AddKeysFromArrayLike(
6629 content, Handle<JSObject>::cast(names)),
6634 Handle<JSObject> current(JSObject::cast(*p), isolate);
6636 // Check access rights if required.
6637 if (current->IsAccessCheckNeeded() &&
6638 !isolate->MayNamedAccess(
6639 current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
6640 isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
6641 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
6645 // Compute the element keys.
6646 Handle<FixedArray> element_keys =
6647 isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
6648 current->GetEnumElementKeys(*element_keys);
6649 ASSIGN_RETURN_ON_EXCEPTION(
6651 FixedArray::UnionOfKeys(content, element_keys),
6653 ASSERT(ContainsOnlyValidKeys(content));
6655 // Add the element keys from the interceptor.
6656 if (current->HasIndexedInterceptor()) {
6657 Handle<JSObject> result;
6658 if (JSObject::GetKeysForIndexedInterceptor(
6659 current, object).ToHandle(&result)) {
6660 ASSIGN_RETURN_ON_EXCEPTION(
6662 FixedArray::AddKeysFromArrayLike(content, result),
6665 ASSERT(ContainsOnlyValidKeys(content));
6668 // We can cache the computed property keys if access checks are
6669 // not needed and no interceptors are involved.
6671 // We do not use the cache if the object has elements and
6672 // therefore it does not make sense to cache the property names
6673 // for arguments objects. Arguments objects will always have
6675 // Wrapped strings have elements, but don't have an elements
6676 // array or dictionary. So the fast inline test for whether to
6677 // use the cache says yes, so we should not create a cache.
6678 bool cache_enum_keys =
6679 ((current->map()->constructor() != *arguments_function) &&
6680 !current->IsJSValue() &&
6681 !current->IsAccessCheckNeeded() &&
6682 !current->HasNamedInterceptor() &&
6683 !current->HasIndexedInterceptor());
6684 // Compute the property keys and cache them if possible.
6685 ASSIGN_RETURN_ON_EXCEPTION(
6687 FixedArray::UnionOfKeys(
6688 content, GetEnumPropertyKeys(current, cache_enum_keys)),
6690 ASSERT(ContainsOnlyValidKeys(content));
6692 // Add the property keys from the interceptor.
6693 if (current->HasNamedInterceptor()) {
6694 Handle<JSObject> result;
6695 if (JSObject::GetKeysForNamedInterceptor(
6696 current, object).ToHandle(&result)) {
6697 ASSIGN_RETURN_ON_EXCEPTION(
6699 FixedArray::AddKeysFromArrayLike(content, result),
6702 ASSERT(ContainsOnlyValidKeys(content));
6705 // If we only want local properties we bail out after the first
6707 if (type == LOCAL_ONLY) break;
6713 // Try to update an accessor in an elements dictionary. Return true if the
6714 // update succeeded, and false otherwise.
// NOTE(review): parameter lines (index, getter, setter) and the return
// statements are missing from this extract; code left byte-identical.
6715 static bool UpdateGetterSetterInDictionary(
6716 SeededNumberDictionary* dictionary,
6720 PropertyAttributes attributes) {
6721 int entry = dictionary->FindEntry(index);
6722 if (entry != SeededNumberDictionary::kNotFound) {
6723 Object* result = dictionary->ValueAt(entry);
6724 PropertyDetails details = dictionary->DetailsAt(entry);
// Only an existing CALLBACKS entry holding an AccessorPair can be reused.
6725 if (details.type() == CALLBACKS && result->IsAccessorPair()) {
6726 ASSERT(!details.IsDontDelete());
6727 if (details.attributes() != attributes) {
6728 dictionary->DetailsAtPut(
6730 PropertyDetails(attributes, CALLBACKS, index));
6732 AccessorPair::cast(result)->SetComponents(getter, setter);
// Installs a getter/setter pair on element |index|.  Tries to update an
// existing accessor pair in a dictionary backing store first; otherwise
// allocates a fresh AccessorPair and routes through SetElementCallback.
// NOTE(review): several lines (the |index| parameter, returns inside cases,
// UpdateGetterSetterInDictionary argument lines) are missing from this
// extract; code left byte-identical.
6740 void JSObject::DefineElementAccessor(Handle<JSObject> object,
6742 Handle<Object> getter,
6743 Handle<Object> setter,
6744 PropertyAttributes attributes,
6745 v8::AccessControl access_control) {
6746 switch (object->GetElementsKind()) {
6747 case FAST_SMI_ELEMENTS:
6749 case FAST_DOUBLE_ELEMENTS:
6750 case FAST_HOLEY_SMI_ELEMENTS:
6751 case FAST_HOLEY_ELEMENTS:
6752 case FAST_HOLEY_DOUBLE_ELEMENTS:
6755 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6756 case EXTERNAL_##TYPE##_ELEMENTS: \
6757 case TYPE##_ELEMENTS: \
6759 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6760 #undef TYPED_ARRAY_CASE
6761 // Ignore getters and setters on pixel and external array elements.
6764 case DICTIONARY_ELEMENTS:
6765 if (UpdateGetterSetterInDictionary(object->element_dictionary(),
6773 case SLOPPY_ARGUMENTS_ELEMENTS: {
6774 // Ascertain whether we have read-only properties or an existing
6775 // getter/setter pair in an arguments elements dictionary backing
6777 FixedArray* parameter_map = FixedArray::cast(object->elements());
6778 uint32_t length = parameter_map->length();
// Slots 0/1 of a parameter map are context and arguments store, so
// mapped entries start at index + 2.
6780 index < (length - 2) ? parameter_map->get(index + 2) : NULL;
6781 if (probe == NULL || probe->IsTheHole()) {
6782 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
6783 if (arguments->IsDictionary()) {
6784 SeededNumberDictionary* dictionary =
6785 SeededNumberDictionary::cast(arguments);
6786 if (UpdateGetterSetterInDictionary(dictionary,
// Fall-through path: no reusable accessor pair — create a new one.
6799 Isolate* isolate = object->GetIsolate();
6800 Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
6801 accessors->SetComponents(*getter, *setter);
6802 accessors->set_access_flags(access_control);
6804 SetElementCallback(object, index, accessors, attributes);
// Returns an AccessorPair for |name|: a copy of the existing pair when the
// property is already a callbacks entry holding one, otherwise a fresh pair.
// NOTE(review): closing braces are missing from this extract; code left
// byte-identical.
6808 Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object,
6809 Handle<Name> name) {
6810 Isolate* isolate = object->GetIsolate();
6811 LookupResult result(isolate);
6812 object->LocalLookupRealNamedProperty(name, &result);
6813 if (result.IsPropertyCallbacks()) {
6814 // Note that the result can actually have IsDontDelete() == true when we
6815 // e.g. have to fall back to the slow case while adding a setter after
6816 // successfully reusing a map transition for a getter. Nevertheless, this is
6817 // OK, because the assertion only holds for the whole addition of both
6818 // accessors, not for the addition of each part. See first comment in
6819 // DefinePropertyAccessor below.
6820 Object* obj = result.GetCallbackObject();
6821 if (obj->IsAccessorPair()) {
// Copy rather than mutate in place: the existing pair may be shared.
6822 return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate));
6825 return isolate->factory()->NewAccessorPair();
// Installs a getter/setter pair on named property |name|.  Attempts the fast
// path (map transition via DefineFastAccessor) first; otherwise builds an
// AccessorPair and goes through the slow SetPropertyCallback path.
// NOTE(review): the |name| parameter line and a closing brace are missing
// from this extract; code left byte-identical.
6829 void JSObject::DefinePropertyAccessor(Handle<JSObject> object,
6831 Handle<Object> getter,
6832 Handle<Object> setter,
6833 PropertyAttributes attributes,
6834 v8::AccessControl access_control) {
6835 // We could assert that the property is configurable here, but we would need
6836 // to do a lookup, which seems to be a bit of overkill.
6837 bool only_attribute_changes = getter->IsNull() && setter->IsNull();
6838 if (object->HasFastProperties() && !only_attribute_changes &&
6839 access_control == v8::DEFAULT &&
6840 (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors)) {
// Each component is installed independently; a failed getter install
// makes the setter attempt pointless (setterOk short-circuits on
// !getterOk so only one slow fallback is taken).
6841 bool getterOk = getter->IsNull() ||
6842 DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes);
6843 bool setterOk = !getterOk || setter->IsNull() ||
6844 DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes);
6845 if (getterOk && setterOk) return;
6848 Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name);
6849 accessors->SetComponents(*getter, *setter);
6850 accessors->set_access_flags(access_control);
6852 SetPropertyCallback(object, name, accessors, attributes);
// Returns whether a callback (accessor) may be installed for |name|:
// false when an existing API accessor anywhere on the prototype chain has
// prohibits_overwriting set.
// NOTE(review): the trailing "return true;" and closing braces are missing
// from this extract; code left byte-identical.
6856 bool JSObject::CanSetCallback(Handle<JSObject> object, Handle<Name> name) {
6857 Isolate* isolate = object->GetIsolate();
6858 ASSERT(!object->IsAccessCheckNeeded() ||
6859 isolate->MayNamedAccess(object, name, v8::ACCESS_SET));
6861 // Check if there is an API defined callback object which prohibits
6862 // callback overwriting in this object or its prototype chain.
6863 // This mechanism is needed for instance in a browser setting, where
6864 // certain accessors such as window.location should not be allowed
6865 // to be overwritten because allowing overwriting could potentially
6866 // cause security problems.
6867 LookupResult callback_result(isolate);
6868 object->LookupCallbackProperty(name, &callback_result);
6869 if (callback_result.IsFound()) {
6870 Object* callback_obj = callback_result.GetCallbackObject();
6871 if (callback_obj->IsAccessorInfo()) {
6872 return !AccessorInfo::cast(callback_obj)->prohibits_overwriting();
6874 if (callback_obj->IsAccessorPair()) {
6875 return !AccessorPair::cast(callback_obj)->prohibits_overwriting();
// Returns whether dictionary elements occur only in this map's prototype
// chain (not on the receiver itself).  Proxies in the chain are treated
// conservatively.
// NOTE(review): the return statements inside the branches and the closing
// "return false;" are missing from this extract; code left byte-identical.
6882 bool Map::DictionaryElementsInPrototypeChainOnly() {
6883 Heap* heap = GetHeap();
// Receiver itself has dictionary elements: condition cannot hold.
6885 if (IsDictionaryElementsKind(elements_kind())) {
6889 for (Object* prototype = this->prototype();
6890 prototype != heap->null_value();
6891 prototype = prototype->GetPrototype(GetIsolate())) {
6892 if (prototype->IsJSProxy()) {
6893 // Be conservative, don't walk into proxies.
6897 if (IsDictionaryElementsKind(
6898 JSObject::cast(prototype)->map()->elements_kind())) {
// Installs |structure| (an accessor) as a CALLBACKS entry for element |index|,
// normalizing elements to dictionary mode first and clearing keyed-store ICs
// when the elements kind changed.
// NOTE(review): the |uint32_t index| parameter line and some closing braces
// are missing from this extract; code left byte-identical.
6907 void JSObject::SetElementCallback(Handle<JSObject> object,
6909 Handle<Object> structure,
6910 PropertyAttributes attributes) {
6911 Heap* heap = object->GetHeap();
6912 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6914 // Normalize elements to make this operation simple.
6915 bool had_dictionary_elements = object->HasDictionaryElements();
6916 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
6917 ASSERT(object->HasDictionaryElements() ||
6918 object->HasDictionaryArgumentsElements());
6919 // Update the dictionary with the new CALLBACKS property.
6920 dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
// Accessor elements must never be converted back to a fast backing store.
6922 dictionary->set_requires_slow_elements();
6924 // Update the dictionary backing store on the object.
6925 if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
6926 // Also delete any parameter alias.
6928 // TODO(kmillikin): when deleting the last parameter alias we could
6929 // switch to a direct backing store without the parameter map. This
6930 // would allow GC of the context.
6931 FixedArray* parameter_map = FixedArray::cast(object->elements());
6932 if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
6933 parameter_map->set(index + 2, heap->the_hole_value());
6935 parameter_map->set(1, *dictionary);
6937 object->set_elements(*dictionary);
6939 if (!had_dictionary_elements) {
6940 // KeyedStoreICs (at least the non-generic ones) need a reset.
6941 heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Installs |structure| (an accessor) as a CALLBACKS entry for named property
// |name|, normalizing the object to dictionary properties first.
// NOTE(review): the |name| parameter line and closing braces are missing
// from this extract; code left byte-identical.
6947 void JSObject::SetPropertyCallback(Handle<JSObject> object,
6949 Handle<Object> structure,
6950 PropertyAttributes attributes) {
6951 // Normalize object to make this operation simple.
6952 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
6954 // For the global object allocate a new map to invalidate the global inline
6955 // caches which have a global property cell reference directly in the code.
6956 if (object->IsGlobalObject()) {
6957 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
6958 ASSERT(new_map->is_dictionary_map());
6959 object->set_map(*new_map);
6961 // When running crankshaft, changing the map is not enough. We
6962 // need to deoptimize all functions that rely on this global
6964 Deoptimizer::DeoptimizeGlobalObject(*object);
6967 // Update the dictionary with the new CALLBACKS property.
6968 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6969 SetNormalizedProperty(object, name, structure, details);
// Public entry point for defining an accessor property: performs access
// checks, forwards through the global proxy, records the old value when the
// object is observed, dispatches to DefineElementAccessor or
// DefinePropertyAccessor, and enqueues an Object.observe change record.
// NOTE(review): this extract is missing several lines (the |name| parameter,
// proxy-forwarding arguments, the uint32_t index declaration, if/else
// structure around the observation bookkeeping); code left byte-identical.
6973 void JSObject::DefineAccessor(Handle<JSObject> object,
6975 Handle<Object> getter,
6976 Handle<Object> setter,
6977 PropertyAttributes attributes,
6978 v8::AccessControl access_control) {
6979 Isolate* isolate = object->GetIsolate();
6980 // Check access rights if needed.
6981 if (object->IsAccessCheckNeeded() &&
6982 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6983 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6984 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// The global proxy delegates to the actual global object.
6988 if (object->IsJSGlobalProxy()) {
6989 Handle<Object> proto(object->GetPrototype(), isolate);
6990 if (proto->IsNull()) return;
6991 ASSERT(proto->IsJSGlobalObject());
6992 DefineAccessor(Handle<JSObject>::cast(proto),
7001 // Make sure that the top context does not change when doing callbacks or
7002 // interceptor calls.
7003 AssertNoContextChange ncc(isolate);
7005 // Try to flatten before operating on the string.
7006 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
7008 if (!JSObject::CanSetCallback(object, name)) return;
7011 bool is_element = name->AsArrayIndex(&index);
// For observed objects, capture the pre-existing data value (if any) so a
// "reconfigure" change record can report the old value.
7013 Handle<Object> old_value = isolate->factory()->the_hole_value();
7014 bool is_observed = object->map()->is_observed() &&
7015 *name != isolate->heap()->hidden_string();
7016 bool preexists = false;
7019 preexists = HasLocalElement(object, index);
7020 if (preexists && GetLocalElementAccessorPair(object, index).is_null()) {
7022 Object::GetElement(isolate, object, index).ToHandleChecked();
7025 LookupResult lookup(isolate);
7026 object->LocalLookup(name, &lookup, true);
7027 preexists = lookup.IsProperty();
7028 if (preexists && lookup.IsDataProperty()) {
7030 Object::GetPropertyOrElement(object, name).ToHandleChecked();
7036 DefineElementAccessor(
7037 object, index, getter, setter, attributes, access_control);
7039 DefinePropertyAccessor(
7040 object, name, getter, setter, attributes, access_control);
7044 const char* type = preexists ? "reconfigure" : "add";
7045 EnqueueChangeRecord(object, type, name, old_value);
// Tries to reuse an existing map transition for an accessor definition:
// succeeds (migrating |self| to |transitioned_map|) only when the target
// descriptor is a CALLBACKS AccessorPair holding the very same accessor
// with the same attributes.
// NOTE(review): the "return true;"/"return false;" tails are missing from
// this extract; code left byte-identical.
7050 static bool TryAccessorTransition(Handle<JSObject> self,
7051 Handle<Map> transitioned_map,
7052 int target_descriptor,
7053 AccessorComponent component,
7054 Handle<Object> accessor,
7055 PropertyAttributes attributes) {
7056 DescriptorArray* descs = transitioned_map->instance_descriptors();
7057 PropertyDetails details = descs->GetDetails(target_descriptor);
7059 // If the transition target was not callbacks, fall back to the slow case.
7060 if (details.type() != CALLBACKS) return false;
7061 Object* descriptor = descs->GetCallbacksObject(target_descriptor);
7062 if (!descriptor->IsAccessorPair()) return false;
7064 Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
7065 PropertyAttributes target_attributes = details.attributes();
7067 // Reuse transition if adding same accessor with same attributes.
7068 if (target_accessor == *accessor && target_attributes == attributes) {
7069 JSObject::MigrateToMap(self, transitioned_map);
7073 // If either not the same accessor, or not the same attributes, fall back to
// Fast path for installing a single accessor component on a fast-mode object:
// returns true on success (property already matches, a map transition was
// reused, or a new transition/descriptor was inserted), false to make the
// caller fall back to the slow path.
// NOTE(review): this extract is missing a number of lines (the |name|
// parameter, several returns and if-structure around the two
// LookupTransition call sites); code left byte-identical.
7079 bool JSObject::DefineFastAccessor(Handle<JSObject> object,
7081 AccessorComponent component,
7082 Handle<Object> accessor,
7083 PropertyAttributes attributes) {
7084 ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
7085 Isolate* isolate = object->GetIsolate();
7086 LookupResult result(isolate);
7087 object->LocalLookup(name, &result);
// An existing non-callbacks property cannot take the fast accessor path.
7089 if (result.IsFound() && !result.IsPropertyCallbacks()) {
7093 // Return success if the same accessor with the same attributes already exist.
7094 AccessorPair* source_accessors = NULL;
7095 if (result.IsPropertyCallbacks()) {
7096 Object* callback_value = result.GetCallbackObject();
7097 if (callback_value->IsAccessorPair()) {
7098 source_accessors = AccessorPair::cast(callback_value);
7099 Object* entry = source_accessors->get(component);
7100 if (entry == *accessor && result.GetAttributes() == attributes) {
7107 int descriptor_number = result.GetDescriptorIndex();
7109 object->map()->LookupTransition(*object, *name, &result);
7111 if (result.IsFound()) {
7112 Handle<Map> target(result.GetTransitionTarget());
7113 ASSERT(target->NumberOfOwnDescriptors() ==
7114 object->map()->NumberOfOwnDescriptors());
7115 // This works since descriptors are sorted in order of addition.
7116 ASSERT(object->map()->instance_descriptors()->
7117 GetKey(descriptor_number) == *name);
7118 return TryAccessorTransition(object, target, descriptor_number,
7119 component, accessor, attributes);
7122 // If not, lookup a transition.
7123 object->map()->LookupTransition(*object, *name, &result);
7125 // If there is a transition, try to follow it.
7126 if (result.IsFound()) {
7127 Handle<Map> target(result.GetTransitionTarget());
// A transition always targets the last-added descriptor.
7128 int descriptor_number = target->LastAdded();
7129 ASSERT(Name::Equals(name,
7130 handle(target->instance_descriptors()->GetKey(descriptor_number))));
7131 return TryAccessorTransition(object, target, descriptor_number,
7132 component, accessor, attributes);
7136 // If there is no transition yet, add a transition to the a new accessor pair
7137 // containing the accessor. Allocate a new pair if there were no source
7138 // accessors. Otherwise, copy the pair and modify the accessor.
7139 Handle<AccessorPair> accessors = source_accessors != NULL
7140 ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
7141 : isolate->factory()->NewAccessorPair();
7142 accessors->set(component, *accessor);
7144 CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
7145 Handle<Map> new_map = Map::CopyInsertDescriptor(
7146 handle(object->map()), &new_accessors_desc, INSERT_TRANSITION);
7148 JSObject::MigrateToMap(object, new_map);
// Installs an API accessor described by |info| on |object|, for either a
// named property or an element (array index), after performing access checks
// and forwarding through global proxies.  Returns undefined when the accessor
// cannot be installed (failed access check, non-configurable property, typed
// array element, etc.); may return an exception via MaybeHandle.
7153 MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
7154                                           Handle<AccessorInfo> info) {
7155   Isolate* isolate = object->GetIsolate();
7156   Factory* factory = isolate->factory();
7157   Handle<Name> name(Name::cast(info->name()));
7159   // Check access rights if needed.
7160   if (object->IsAccessCheckNeeded() &&
7161       !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
7162     isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
7163     RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
7164     return factory->undefined_value();
     // Global proxies delegate to the hidden global object behind them.
7167   if (object->IsJSGlobalProxy()) {
7168     Handle<Object> proto(object->GetPrototype(), isolate);
7169     if (proto->IsNull()) return object;
7170     ASSERT(proto->IsJSGlobalObject());
7171     return SetAccessor(Handle<JSObject>::cast(proto), info);
7174   // Make sure that the top context does not change when doing callbacks or
7175   // interceptor calls.
7176   AssertNoContextChange ncc(isolate);
7178   // Try to flatten before operating on the string.
7179   if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
7181   if (!JSObject::CanSetCallback(object, name)) {
7182     return factory->undefined_value();
     // Names that parse as array indices are handled via the elements path.
7186   bool is_element = name->AsArrayIndex(&index);
     // Accessors on JSArray elements are not supported.
7189     if (object->IsJSArray()) return factory->undefined_value();
7191     // Accessors overwrite previous callbacks (cf. with getters/setters).
7192     switch (object->GetElementsKind()) {
7193       case FAST_SMI_ELEMENTS:
7195       case FAST_DOUBLE_ELEMENTS:
7196       case FAST_HOLEY_SMI_ELEMENTS:
7197       case FAST_HOLEY_ELEMENTS:
7198       case FAST_HOLEY_DOUBLE_ELEMENTS:
7201 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
7202       case EXTERNAL_##TYPE##_ELEMENTS:                                         \
7203       case TYPE##_ELEMENTS:                                                    \
7205       TYPED_ARRAYS(TYPED_ARRAY_CASE)
7206 #undef TYPED_ARRAY_CASE
7207         // Ignore getters and setters on pixel and external array
7209         return factory->undefined_value();
7211       case DICTIONARY_ELEMENTS:
7213       case SLOPPY_ARGUMENTS_ELEMENTS:
7218     SetElementCallback(object, index, info, info->property_attributes());
7221     LookupResult result(isolate);
7222     object->LocalLookup(name, &result, true);
7223     // ES5 forbids turning a property into an accessor if it's not
7224     // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
7225     if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
7226       return factory->undefined_value();
7229     SetPropertyCallback(object, name, info, info->property_attributes());
// Retrieves the getter or setter (selected by |component|) installed for
// |name| on |object|, walking the prototype chain.  Handles both element
// (array-index) accessors stored in dictionary elements and named accessors
// stored as property callbacks.  Returns undefined when no matching accessor
// is found or when the access check fails.
7236 MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
7238                                           AccessorComponent component) {
7239   Isolate* isolate = object->GetIsolate();
7241   // Make sure that the top context does not change when doing callbacks or
7242   // interceptor calls.
7243   AssertNoContextChange ncc(isolate);
7245   // Check access rights if needed.
7246   if (object->IsAccessCheckNeeded() &&
7247       !isolate->MayNamedAccess(object, name, v8::ACCESS_HAS)) {
7248     isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
7249     RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
7250     return isolate->factory()->undefined_value();
7253   // Make the lookup and include prototypes.
7255   if (name->AsArrayIndex(&index)) {
     // Element accessor: scan each object on the prototype chain that keeps
     // its elements in a dictionary for a CALLBACKS entry at |index|.
7256     for (Handle<Object> obj = object;
7258          obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
7259       if (obj->IsJSObject() && JSObject::cast(*obj)->HasDictionaryElements()) {
7260         JSObject* js_object = JSObject::cast(*obj);
7261         SeededNumberDictionary* dictionary = js_object->element_dictionary();
7262         int entry = dictionary->FindEntry(index);
7263         if (entry != SeededNumberDictionary::kNotFound) {
7264           Object* element = dictionary->ValueAt(entry);
7265           if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
7266               element->IsAccessorPair()) {
7267             return handle(AccessorPair::cast(element)->GetComponent(component),
     // Named accessor: walk the prototype chain looking for a callbacks-typed
     // property holding an AccessorPair.
7274     for (Handle<Object> obj = object;
7276          obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
7277       LookupResult result(isolate);
7278       JSReceiver::cast(*obj)->LocalLookup(name, &result);
7279       if (result.IsFound()) {
7280         if (result.IsReadOnly()) return isolate->factory()->undefined_value();
7281         if (result.IsPropertyCallbacks()) {
7282           Object* obj = result.GetCallbackObject();
7283           if (obj->IsAccessorPair()) {
7284             return handle(AccessorPair::cast(obj)->GetComponent(component),
7291   return isolate->factory()->undefined_value();
// Linear reverse lookup: returns the name of the first own property whose
// stored value matches |value|, or undefined when none does.  For fast
// properties it scans the descriptor array (FIELD and CONSTANT entries);
// doubles are compared numerically since heap numbers are not identical
// objects.  Dictionary-mode objects delegate to the property dictionary.
7295 Object* JSObject::SlowReverseLookup(Object* value) {
7296   if (HasFastProperties()) {
7297     int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
7298     DescriptorArray* descs = map()->instance_descriptors();
7299     for (int i = 0; i < number_of_own_descriptors; i++) {
7300       if (descs->GetType(i) == FIELD) {
7301         Object* property = RawFastPropertyAt(descs->GetFieldIndex(i));
7302         if (descs->GetDetails(i).representation().IsDouble()) {
7303           ASSERT(property->IsHeapNumber());
           // Compare by numeric value, not object identity, for unboxed doubles.
7304           if (value->IsNumber() && property->Number() == value->Number()) {
7305             return descs->GetKey(i);
7307         } else if (property == value) {
7308           return descs->GetKey(i);
7310       } else if (descs->GetType(i) == CONSTANT) {
7311         if (descs->GetConstant(i) == value) {
7312           return descs->GetKey(i);
7316     return GetHeap()->undefined_value();
7318   return property_dictionary()->SlowReverseLookup(value);
// Allocates a fresh Map with the given instance size that copies the basic
// fields of |map| (prototype, constructor, bit fields) but starts with no own
// descriptors, owns its (empty) descriptor array, an invalidated enum cache,
// and the deprecated bit cleared.  Shared base for all Copy* variants below.
7323 Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
7324   Handle<Map> result = map->GetIsolate()->factory()->NewMap(
7325       map->instance_type(), instance_size);
7326   result->set_prototype(map->prototype());
7327   result->set_constructor(map->constructor());
7328   result->set_bit_field(map->bit_field());
7329   result->set_bit_field2(map->bit_field2());
7330   int new_bit_field3 = map->bit_field3();
7331   new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
7332   new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
7333   new_bit_field3 = EnumLengthBits::update(new_bit_field3,
7334                                           kInvalidEnumCacheSentinel);
7335   new_bit_field3 = Deprecated::update(new_bit_field3, false);
     // Only fast-mode copies start out as stable; dictionary maps keep their
     // stability bit untouched.
7336   if (!map->is_dictionary_map()) {
7337     new_bit_field3 = IsUnstable::update(new_bit_field3, false);
7339   result->set_bit_field3(new_bit_field3);
// Returns the normalized (dictionary-mode) map corresponding to |fast_map|,
// serving it from the per-native-context NormalizedMapCache when possible and
// populating the cache on a miss.  Under slow asserts, a cached hit is
// verified bit-by-bit against a freshly built normalized map (modulo the code
// cache and dependent code fields, which may legitimately differ).
7344 Handle<Map> Map::Normalize(Handle<Map> fast_map,
7345                            PropertyNormalizationMode mode) {
7346   ASSERT(!fast_map->is_dictionary_map());
7348   Isolate* isolate = fast_map->GetIsolate();
7349   Handle<NormalizedMapCache> cache(
7350       isolate->context()->native_context()->normalized_map_cache());
7352   Handle<Map> new_map;
7353   if (cache->Get(fast_map, mode).ToHandle(&new_map)) {
7355     if (FLAG_verify_heap) {
7356       new_map->SharedMapVerify();
7359 #ifdef ENABLE_SLOW_ASSERTS
7360     if (FLAG_enable_slow_asserts) {
7361       // The cached map should match newly created normalized map bit-by-bit,
7362       // except for the code cache, which can contain some ics which can be
7363       // applied to the shared map.
7364       Handle<Map> fresh = Map::CopyNormalized(
7365           fast_map, mode, SHARED_NORMALIZED_MAP);
       // Compare everything up to the code cache field ...
7367       ASSERT(memcmp(fresh->address(),
7369                     Map::kCodeCacheOffset) == 0);
7370       STATIC_ASSERT(Map::kDependentCodeOffset ==
7371                     Map::kCodeCacheOffset + kPointerSize);
       // ... and everything after the dependent code field.
7372       int offset = Map::kDependentCodeOffset + kPointerSize;
7373       ASSERT(memcmp(fresh->address() + offset,
7374                     new_map->address() + offset,
7375                     Map::kSize - offset) == 0);
     // Cache miss: build the normalized map and remember it.
7379     new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
7380     cache->Set(fast_map, new_map);
7381     isolate->counters()->normalized_maps()->Increment();
   // Normalizing invalidates assumptions made about the fast map's layout.
7383   fast_map->NotifyLeafMapLayoutChange();
// Builds a dictionary-mode copy of |map|.  With CLEAR_INOBJECT_PROPERTIES the
// in-object property slots are dropped from the instance size; otherwise the
// in-object property count is preserved.  |sharing| controls whether the
// result is flagged as a shared normalized map.
7388 Handle<Map> Map::CopyNormalized(Handle<Map> map,
7389                                 PropertyNormalizationMode mode,
7390                                 NormalizedMapSharingMode sharing) {
7391   int new_instance_size = map->instance_size();
7392   if (mode == CLEAR_INOBJECT_PROPERTIES) {
7393     new_instance_size -= map->inobject_properties() * kPointerSize;
7396   Handle<Map> result = RawCopy(map, new_instance_size);
7398   if (mode != CLEAR_INOBJECT_PROPERTIES) {
7399     result->set_inobject_properties(map->inobject_properties());
7402   result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
7403   result->set_dictionary_map(true);
7404   result->set_migration_target(false);
7407   if (FLAG_verify_heap && result->is_shared()) {
7408     result->SharedMapVerify();
// Copies |map| without any own descriptors: field-layout bookkeeping is
// carried over, but the code cache is cleared and the copy is unshared.
// Callers are expected to install descriptors on the result themselves.
7416 Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
7417   Handle<Map> result = RawCopy(map, map->instance_size());
7419   // Please note instance_type and instance_size are set when allocated.
7420   result->set_inobject_properties(map->inobject_properties());
7421   result->set_unused_property_fields(map->unused_property_fields());
7423   result->set_pre_allocated_property_fields(
7424       map->pre_allocated_property_fields());
7425   result->set_is_shared(false);
7426   result->ClearCodeCache(map->GetHeap());
   // Copying the layout invalidates leaf-map assumptions on the source map.
7427   map->NotifyLeafMapLayoutChange();
// Fast path for adding one descriptor: appends |descriptor| to the descriptor
// array shared with |map| and creates a simple transition from |map| to the
// new map.  Only valid when |map| owns its descriptors (see the ASSERT), so
// the array can be appended to in place; ownership then moves to the result.
7432 Handle<Map> Map::ShareDescriptor(Handle<Map> map,
7433                                  Handle<DescriptorArray> descriptors,
7434                                  Descriptor* descriptor) {
7435   // Sanity check. This path is only to be taken if the map owns its descriptor
7436   // array, implying that its NumberOfOwnDescriptors equals the number of
7437   // descriptors in the descriptor array.
7438   ASSERT(map->NumberOfOwnDescriptors() ==
7439          map->instance_descriptors()->number_of_descriptors());
7441   Handle<Map> result = CopyDropDescriptors(map);
7442   Handle<Name> name = descriptor->GetKey();
7443   Handle<TransitionArray> transitions =
7444       TransitionArray::CopyInsert(map, name, result, SIMPLE_TRANSITION);
7446   // Ensure there's space for the new descriptor in the shared descriptor array.
7447   if (descriptors->NumberOfSlackDescriptors() == 0) {
7448     int old_size = descriptors->number_of_descriptors();
7449     if (old_size == 0) {
7450       descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
       // Grow by 50% (at least 1); EnsureDescriptorSlack reallocates the
       // array on the map, so re-fetch the handle afterwards.
7452       EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
7453       descriptors = handle(map->instance_descriptors());
7457   // Commit the state atomically.
7458   DisallowHeapAllocation no_gc;
7460   descriptors->Append(descriptor);
7461   result->SetBackPointer(*map);
7462   result->InitializeDescriptors(*descriptors);
7464   ASSERT(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
7466   map->set_transitions(*transitions);
   // The new map now owns the (shared) descriptor array.
7467   map->set_owns_descriptors(false);
// Copies |map| and installs |descriptors| wholesale on the copy.  When an
// INSERT_TRANSITION is requested and |map| can still record transitions, the
// copy is linked in as a transition target under |maybe_name|; otherwise the
// descriptors are generalized (tagged representation, Any field type) since
// the free-floating map cannot rely on transition-tree invariants.
7473 Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
7474                                         Handle<DescriptorArray> descriptors,
7475                                         TransitionFlag flag,
7476                                         MaybeHandle<Name> maybe_name,
7477                                         SimpleTransitionFlag simple_flag) {
7478   ASSERT(descriptors->IsSortedNoDuplicates());
7480   Handle<Map> result = CopyDropDescriptors(map);
7481   result->InitializeDescriptors(*descriptors);
7483   if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
       // A name is mandatory when inserting a transition.
7485     CHECK(maybe_name.ToHandle(&name));
7486     Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
7487         map, name, result, simple_flag);
7488     map->set_transitions(*transitions);
7489     result->SetBackPointer(*map);
     // No transition recorded: drop representation/field-type precision.
7491     int length = descriptors->number_of_descriptors();
7492     for (int i = 0; i < length; i++) {
7493       descriptors->SetRepresentation(i, Representation::Tagged());
7494       if (descriptors->GetDetails(i).type() == FIELD) {
7495         descriptors->SetValue(i, HeapType::Any());
7504 // Since this method is used to rewrite an existing transition tree, it can
7505 // always insert transitions without checking.
// Copies |map|, installs |descriptors| limited to new_descriptor + 1 own
// entries, adjusts unused property fields when the new descriptor is a FIELD
// (re-growing by kFieldsAdded on underflow), and wires the copy in as a
// simple transition target of |map|.
7506 Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
7508                                         Handle<DescriptorArray> descriptors) {
7509   ASSERT(descriptors->IsSortedNoDuplicates());
7511   Handle<Map> result = CopyDropDescriptors(map);
7513   result->InitializeDescriptors(*descriptors);
7514   result->SetNumberOfOwnDescriptors(new_descriptor + 1);
7516   int unused_property_fields = map->unused_property_fields();
7517   if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
7518     unused_property_fields = map->unused_property_fields() - 1;
7519     if (unused_property_fields < 0) {
       // Ran out of pre-reserved slots: account for the next extension chunk.
7520       unused_property_fields += JSObject::kFieldsAdded;
7524   result->set_unused_property_fields(unused_property_fields);
   // The shared descriptor array stays owned by the transition target chain.
7525   result->set_owns_descriptors(false);
7527   Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
7528   Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
7529       map, name, result, SIMPLE_TRANSITION);
7531   map->set_transitions(*transitions);
7532   result->SetBackPointer(*map);
// Copies |map| with a different elements kind.  With INSERT_TRANSITION and a
// descriptor-owning source map, the descriptors are shared with the new map
// (ownership transferred); otherwise a full copy forces a separate
// descriptor array.  The elements transition is recorded when allowed.
7538 Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind,
7539                                     TransitionFlag flag) {
7540   if (flag == INSERT_TRANSITION) {
     // Only one elements transition per map is allowed, except that a
     // dictionary/external-array transition may coexist with another one of
     // the same category.
7541     ASSERT(!map->HasElementsTransition() ||
7542            ((map->elements_transition_map()->elements_kind() ==
7543              DICTIONARY_ELEMENTS ||
7544              IsExternalArrayElementsKind(
7545                  map->elements_transition_map()->elements_kind())) &&
7546             (kind == DICTIONARY_ELEMENTS ||
7547              IsExternalArrayElementsKind(kind))));
7548     ASSERT(!IsFastElementsKind(kind) ||
7549            IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
7550     ASSERT(kind != map->elements_kind());
7553   bool insert_transition =
7554       flag == INSERT_TRANSITION && !map->HasElementsTransition();
7556   if (insert_transition && map->owns_descriptors()) {
7557     // In case the map owned its own descriptors, share the descriptors and
7558     // transfer ownership to the new map.
7559     Handle<Map> new_map = CopyDropDescriptors(map);
7561     SetElementsTransitionMap(map, new_map);
7563     new_map->set_elements_kind(kind);
7564     new_map->InitializeDescriptors(map->instance_descriptors());
7565     new_map->SetBackPointer(*map);
7566     map->set_owns_descriptors(false);
7570   // In case the map did not own its own descriptors, a split is forced by
7571   // copying the map; creating a new descriptor array cell.
7572   // Create a new free-floating map only if we are not allowed to store it.
7573   Handle<Map> new_map = Copy(map);
7575   new_map->set_elements_kind(kind);
7577   if (insert_transition) {
7578     SetElementsTransitionMap(map, new_map);
7579     new_map->SetBackPointer(*map);
// Creates the "observed" variant of |map| (Object.observe support), linked in
// as a FULL_TRANSITION under the observed symbol.  Descriptor ownership is
// transferred when the source map owned its descriptors, mirroring
// CopyAsElementsKind above.
7586 Handle<Map> Map::CopyForObserved(Handle<Map> map) {
7587   ASSERT(!map->is_observed());
7589   Isolate* isolate = map->GetIsolate();
7591   // In case the map owned its own descriptors, share the descriptors and
7592   // transfer ownership to the new map.
7593   Handle<Map> new_map;
7594   if (map->owns_descriptors()) {
7595     new_map = CopyDropDescriptors(map);
7597     new_map = Copy(map);
7600   Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
7601       map, isolate->factory()->observed_symbol(), new_map, FULL_TRANSITION);
7603   map->set_transitions(*transitions);
7605   new_map->set_is_observed();
7607   if (map->owns_descriptors()) {
7608     new_map->InitializeDescriptors(map->instance_descriptors());
7609     map->set_owns_descriptors(false);
7612   new_map->SetBackPointer(*map);
// Generic map copy: duplicates the own descriptors into a fresh descriptor
// array and installs them on a copy of |map| without recording a transition,
// yielding a free-floating map.
7617 Handle<Map> Map::Copy(Handle<Map> map) {
7618   Handle<DescriptorArray> descriptors(map->instance_descriptors());
7619   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
7620   Handle<DescriptorArray> new_descriptors =
7621       DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors);
7622   return CopyReplaceDescriptors(
7623       map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>());
// Copies |constructor|'s initial map and enlarges it by
// |extra_inobject_properties| in-object property slots, clamping the request
// so the instance size never exceeds JSObject::kMaxInstanceSize.
7627 Handle<Map> Map::Create(Handle<JSFunction> constructor,
7628                         int extra_inobject_properties) {
7629   Handle<Map> copy = Copy(handle(constructor->initial_map()));
7631   // Check that we do not overflow the instance size when adding the
7632   // extra inobject properties.
7633   int instance_size_delta = extra_inobject_properties * kPointerSize;
7634   int max_instance_size_delta =
7635       JSObject::kMaxInstanceSize - copy->instance_size();
7636   int max_extra_properties = max_instance_size_delta >> kPointerSizeLog2;
7638   // If the instance size overflows, we allocate as many properties as we can as
7639   // inobject properties.
7640   if (extra_inobject_properties > max_extra_properties) {
7641     instance_size_delta = max_instance_size_delta;
7642     extra_inobject_properties = max_extra_properties;
7645   // Adjust the map with the extra inobject properties.
7646   int inobject_properties =
7647       copy->inobject_properties() + extra_inobject_properties;
7648   copy->set_inobject_properties(inobject_properties);
7649   copy->set_unused_property_fields(inobject_properties);
7650   copy->set_instance_size(copy->instance_size() + instance_size_delta);
   // Instance size changed, so the GC visitor id must be recomputed.
7651   copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
// Builds the frozen variant of |map| for Object.freeze: all own descriptors
// get the FROZEN attributes, the map becomes non-extensible, elements go to
// dictionary mode, and the result is linked in under the frozen symbol.
7656 Handle<Map> Map::CopyForFreeze(Handle<Map> map) {
7657   int num_descriptors = map->NumberOfOwnDescriptors();
7658   Isolate* isolate = map->GetIsolate();
7659   Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
7660       handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN);
7661   Handle<Map> new_map = Map::CopyReplaceDescriptors(
7662       map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol());
7664   new_map->set_is_extensible(false);
7665   new_map->set_elements_kind(DICTIONARY_ELEMENTS);
// Adds a brand-new descriptor to a copy of |map|.  Prefers the cheap
// ShareDescriptor path (in-place append on the shared descriptor array) when
// a transition may be inserted and |map| owns its descriptors; otherwise the
// descriptor array is copied with one slack slot and appended to.
7670 Handle<Map> Map::CopyAddDescriptor(Handle<Map> map,
7671                                    Descriptor* descriptor,
7672                                    TransitionFlag flag) {
7673   Handle<DescriptorArray> descriptors(map->instance_descriptors());
7675   // Ensure the key is unique.
7676   descriptor->KeyToUniqueName();
7678   if (flag == INSERT_TRANSITION &&
7679       map->owns_descriptors() &&
7680       map->CanHaveMoreTransitions()) {
7681     return ShareDescriptor(map, descriptors, descriptor);
7684   Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7685       descriptors, map->NumberOfOwnDescriptors(), 1);
7686   new_descriptors->Append(descriptor);
7688   return CopyReplaceDescriptors(
7689       map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION);
// Inserts |descriptor| into a copy of |map|: replaces an existing descriptor
// with the same key when present, otherwise appends a new one.
7693 Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map,
7694                                       Descriptor* descriptor,
7695                                       TransitionFlag flag) {
7696   Handle<DescriptorArray> old_descriptors(map->instance_descriptors());
7698   // Ensure the key is unique.
7699   descriptor->KeyToUniqueName();
7701   // We replace the key if it is already present.
7702   int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
7703   if (index != DescriptorArray::kNotFound) {
7704     return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
7706   return CopyAddDescriptor(map, descriptor, flag);
// Convenience wrapper: copies the first |enumeration_index| descriptors
// without changing attributes (NONE added).
7710 Handle<DescriptorArray> DescriptorArray::CopyUpTo(
7711     Handle<DescriptorArray> desc,
7712     int enumeration_index,
7714   return DescriptorArray::CopyUpToAddAttributes(
7715       desc, enumeration_index, NONE, slack);
// Copies the first |enumeration_index| descriptors of |desc| into a new
// array with |slack| extra capacity, OR-ing |attributes| into each copied
// entry.  READ_ONLY is never applied to accessor-pair callbacks, since it is
// an invalid attribute for JS getters/setters.  Returns the shared empty
// descriptor array when the result would be empty.
7719 Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
7720     Handle<DescriptorArray> desc,
7721     int enumeration_index,
7722     PropertyAttributes attributes,
7724   if (enumeration_index + slack == 0) {
7725     return desc->GetIsolate()->factory()->empty_descriptor_array();
7728   int size = enumeration_index;
7730   Handle<DescriptorArray> descriptors =
7731       DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
7732   DescriptorArray::WhitenessWitness witness(*descriptors);
7734   if (attributes != NONE) {
     // Slow path: rebuild each descriptor with the merged attributes.
7735     for (int i = 0; i < size; ++i) {
7736       Object* value = desc->GetValue(i);
7737       PropertyDetails details = desc->GetDetails(i);
7738       int mask = DONT_DELETE | DONT_ENUM;
7739       // READ_ONLY is an invalid attribute for JS setters/getters.
7740       if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
7743       details = details.CopyAddAttributes(
7744           static_cast<PropertyAttributes>(attributes & mask));
7745       Descriptor inner_desc(handle(desc->GetKey(i)),
7746                             handle(value, desc->GetIsolate()),
7748       descriptors->Set(i, &inner_desc, witness);
     // Fast path: raw copy of each entry.
7751     for (int i = 0; i < size; ++i) {
7752       descriptors->CopyFrom(i, *desc, witness);
   // A partial copy may lose the source's sort invariant; restore it.
7756   if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();
// Replaces the descriptor at |insertion_index| (which must carry the same
// key) in a copy of |map|'s descriptors.  The transition is "simple" only
// when the replaced descriptor is the last one added.
7762 Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
7763                                        Handle<DescriptorArray> descriptors,
7764                                        Descriptor* descriptor,
7765                                        int insertion_index,
7766                                        TransitionFlag flag) {
7767   // Ensure the key is unique.
7768   descriptor->KeyToUniqueName();
7770   Handle<Name> key = descriptor->GetKey();
7771   ASSERT(*key == descriptors->GetKey(insertion_index));
7773   Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7774       descriptors, map->NumberOfOwnDescriptors());
7776   new_descriptors->Replace(insertion_index, descriptor);
7778   SimpleTransitionFlag simple_flag =
7779       (insertion_index == descriptors->number_of_descriptors() - 1)
7782   return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
// Stores |code| under |name| in |map|'s code cache, lazily allocating the
// CodeCache struct on first use (an empty cache is represented by a
// FixedArray placeholder).
7786 void Map::UpdateCodeCache(Handle<Map> map,
7788                           Handle<Code> code) {
7789   Isolate* isolate = map->GetIsolate();
7790   HandleScope scope(isolate);
7791   // Allocate the code cache if not present.
7792   if (map->code_cache()->IsFixedArray()) {
7793     Handle<Object> result = isolate->factory()->NewCodeCache();
7794     map->set_code_cache(*result);
7797   // Update the code cache.
7798   Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
7799   CodeCache::Update(code_cache, name, code);
// Looks up |name| with |flags| in this map's code cache.  A FixedArray in the
// code_cache slot means no cache has been allocated yet (see UpdateCodeCache
// above), so undefined is returned in that case.
7803 Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
7804   // Do a lookup if a code cache exists.
7805   if (!code_cache()->IsFixedArray()) {
7806     return CodeCache::cast(code_cache())->Lookup(name, flags);
7808   return GetHeap()->undefined_value();
// Returns the internal index of (name, code) in this map's code cache, for
// use with RemoveFromCodeCache; only meaningful when a cache exists.
7813 int Map::IndexInCodeCache(Object* name, Code* code) {
7814   // Get the internal index if a code cache exists.
7815   if (!code_cache()->IsFixedArray()) {
7816     return CodeCache::cast(code_cache())->GetIndex(name, code);
// Removes the cache entry previously located via IndexInCodeCache.  The
// cache must still be allocated (no GC may run between the two calls).
7822 void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
7823   // No GC is supposed to happen between a call to IndexInCodeCache and
7824   // RemoveFromCodeCache so the code cache must be there.
7825   ASSERT(!code_cache()->IsFixedArray());
7826   CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
7830 // An iterator over all map transitions in an descriptor array, reusing the
7831 // constructor field of the map while it is running. Negative values in
7832 // the constructor field indicate an active map transition iteration. The
7833 // original constructor is restored after iterating over all entries.
7834 class IntrusiveMapTransitionIterator {
     // Iteration state lives in the map's constructor slot: a negative Smi
     // -(index + 1) while iterating, the saved constructor object otherwise.
7836   IntrusiveMapTransitionIterator(
7837       Map* map, TransitionArray* transition_array, Object* constructor)
7839         transition_array_(transition_array),
7840         constructor_(constructor) { }
     // Marks the iteration as started (index 0, encoded as Smi -1) unless it
     // is already running.
7842   void StartIfNotStarted() {
7843     ASSERT(!(*IteratorField())->IsSmi() || IsIterating());
7844     if (!(*IteratorField())->IsSmi()) {
7845       ASSERT(*IteratorField() == constructor_);
7846       *IteratorField() = Smi::FromInt(-1);
     // True while the constructor slot holds a negative Smi marker.
7850   bool IsIterating() {
7851     return (*IteratorField())->IsSmi() &&
7852            Smi::cast(*IteratorField())->value() < 0;
     // Returns the next transition target, or restores the constructor and
     // signals exhaustion once all transitions have been visited.
7856     ASSERT(IsIterating());
7857     int value = Smi::cast(*IteratorField())->value();
7858     int index = -value - 1;
7859     int number_of_transitions = transition_array_->number_of_transitions();
7860     while (index < number_of_transitions) {
7861       *IteratorField() = Smi::FromInt(value - 1);
7862       return transition_array_->GetTarget(index);
     // Done: put the original constructor back.
7865     *IteratorField() = constructor_;
     // Address of the reused constructor slot inside the map.
7870   Object** IteratorField() {
7871     return HeapObject::RawField(map_, Map::kConstructorOffset);
7875   TransitionArray* transition_array_;
7876   Object* constructor_;
7880 // An iterator over all prototype transitions, reusing the constructor field
7881 // of the map while it is running. Positive values in the constructor field
7882 // indicate an active prototype transition iteration. The original constructor
7883 // is restored after iterating over all entries.
7884 class IntrusivePrototypeTransitionIterator {
7886   IntrusivePrototypeTransitionIterator(
7887       Map* map, HeapObject* proto_trans, Object* constructor)
7888       : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }
     // Marks the iteration as started (index 0) unless already running.
7890   void StartIfNotStarted() {
7891     if (!(*IteratorField())->IsSmi()) {
7892       ASSERT(*IteratorField() == constructor_);
7893       *IteratorField() = Smi::FromInt(0);
     // True while the constructor slot holds a non-negative Smi marker.
7897   bool IsIterating() {
7898     return (*IteratorField())->IsSmi() &&
7899            Smi::cast(*IteratorField())->value() >= 0;
     // Returns the next prototype-transition map, or restores the
     // constructor once exhausted.
7903     ASSERT(IsIterating());
7904     int transitionNumber = Smi::cast(*IteratorField())->value();
7905     if (transitionNumber < NumberOfTransitions()) {
7906       *IteratorField() = Smi::FromInt(transitionNumber + 1);
7907       return GetTransition(transitionNumber);
7909     *IteratorField() = constructor_;
     // Address of the reused constructor slot inside the map.
7914   Object** IteratorField() {
7915     return HeapObject::RawField(map_, Map::kConstructorOffset);
     // Entry count stored in the prototype-transitions FixedArray header.
7918   int NumberOfTransitions() {
7919     FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7920     Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
7921     return Smi::cast(num)->value();
     // Map stored at the given entry of the prototype-transitions array.
7924   Map* GetTransition(int transitionNumber) {
7925     FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7926     return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
     // Flat array index of entry |transitionNumber|'s map slot.
7929   int IndexFor(int transitionNumber) {
7930     return Map::kProtoTransitionHeaderSize +
7931         Map::kProtoTransitionMapOffset +
7932         transitionNumber * Map::kProtoTransitionElementsPerEntry;
7936   HeapObject* proto_trans_;
7937   Object* constructor_;
7941 // To traverse the transition tree iteratively, we have to store two kinds of
7942 // information in a map: The parent map in the traversal and which children of a
7943 // node have already been visited. To do this without additional memory, we
7944 // temporarily reuse two fields with known values:
7946 //  (1) The map of the map temporarily holds the parent, and is restored to the
7947 //      meta map afterwards.
7949 //  (2) The info which children have already been visited depends on which part
7950 //      of the map we currently iterate. We use the constructor field of the
7951 //      map to store the current index. We can do that because the constructor
7952 //      is the same for all involved maps.
7954 //      (a) If we currently follow normal map transitions, we temporarily store
7955 //          the current index in the constructor field, and restore it to the
7956 //          original constructor afterwards. Note that a single descriptor can
7957 //          have 0, 1, or 2 transitions.
7959 //      (b) If we currently follow prototype transitions, we temporarily store
7960 //          the current index in the constructor field, and restore it to the
7961 //          original constructor afterwards.
7963 // Note that the child iterator is just a concatenation of two iterators: One
7964 // iterating over map transitions and one iterating over prototype transisitons.
7965 class TraversableMap : public Map {
7967   // Record the parent in the traversal within this map. Note that this destroys
   // the map's real map pointer; GetAndResetParent must run before the map is
   // used normally again.
7969   void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }
7971   // Reset the current map's map, returning the parent previously stored in it.
7972   TraversableMap* GetAndResetParent() {
7973     TraversableMap* old_parent = static_cast<TraversableMap*>(map());
7974     set_map_no_write_barrier(GetHeap()->meta_map());
7978   // If we have an unvisited child map, return that one and advance. If we have
7979   // none, return NULL and restore the overwritten constructor field.
7980   TraversableMap* ChildIteratorNext(Object* constructor) {
7981     if (!HasTransitionArray()) return NULL;
7983     TransitionArray* transition_array = transitions();
     // First exhaust prototype transitions ...
7984     if (transition_array->HasPrototypeTransitions()) {
7985       HeapObject* proto_transitions =
7986           transition_array->GetPrototypeTransitions();
7987       IntrusivePrototypeTransitionIterator proto_iterator(this,
7990       proto_iterator.StartIfNotStarted();
7991       if (proto_iterator.IsIterating()) {
7992         Map* next = proto_iterator.Next();
7993         if (next != NULL) return static_cast<TraversableMap*>(next);
     // ... then regular map transitions.
7997     IntrusiveMapTransitionIterator transition_iterator(this,
8000     transition_iterator.StartIfNotStarted();
8001     if (transition_iterator.IsIterating()) {
8002       Map* next = transition_iterator.Next();
8003       if (next != NULL) return static_cast<TraversableMap*>(next);
8011 // Traverse the transition tree in postorder without using the C++ stack by
8012 // doing pointer reversal.
// Invokes |callback(map, data)| on every map reachable from this one through
// map and prototype transitions, children before parents.  GC must not run:
// map headers and constructor fields are temporarily clobbered for traversal
// state (see TraversableMap above).
8013 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
8014   // Make sure that we do not allocate in the callback.
8015   DisallowHeapAllocation no_allocation;
8017   TraversableMap* current = static_cast<TraversableMap*>(this);
8018   // Get the root constructor here to restore it later when finished iterating
   // (all involved maps share the same constructor).
8020   Object* root_constructor = constructor();
8022     TraversableMap* child = current->ChildIteratorNext(root_constructor);
8023     if (child != NULL) {
       // Descend: remember the parent via pointer reversal.
8024       child->SetParent(current);
     // Ascend: visit the node after all its children.
8027       TraversableMap* parent = current->GetAndResetParent();
8028       callback(current, data);
8029       if (current == this) break;
// Adds (name, code) to this code cache, dispatching NORMAL-type stubs to the
// hash-table cache (allocated lazily) and everything else to the linear
// default cache.
8036 void CodeCache::Update(
8037     Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
8038   // The number of monomorphic stubs for normal load/store/call IC's can grow to
8039   // a large number and therefore they need to go into a hash table. They are
8040   // used to load global properties from cells.
8041   if (code->type() == Code::NORMAL) {
8042     // Make sure that a hash table is allocated for the normal load code cache.
8043     if (code_cache->normal_type_cache()->IsUndefined()) {
8044       Handle<Object> result =
8045           CodeCacheHashTable::New(code_cache->GetIsolate(),
8046                                   CodeCacheHashTable::kInitialSize);
8047       code_cache->set_normal_type_cache(*result);
8049     UpdateNormalTypeCache(code_cache, name, code);
8051     ASSERT(code_cache->default_cache()->IsFixedArray());
8052     UpdateDefaultCache(code_cache, name, code);
// Inserts (name, code) into the linear default cache: replaces an existing
// entry with the same name and (type-stripped) flags, otherwise reuses the
// first deleted slot, a free (undefined) slot, or grows the backing
// FixedArray by ~50% rounded to a whole number of entries.
8057 void CodeCache::UpdateDefaultCache(
8058     Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
8059   // When updating the default code cache we disregard the type encoded in the
8060   // flags. This allows call constant stubs to overwrite call field
8062   Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());
8064   // First check whether we can update existing code cache without
8066   Handle<FixedArray> cache = handle(code_cache->default_cache());
8067   int length = cache->length();
8069     DisallowHeapAllocation no_alloc;
8070     int deleted_index = -1;
8071     for (int i = 0; i < length; i += kCodeCacheEntrySize) {
8072       Object* key = cache->get(i);
       // Null marks a deleted entry; remember the first one for reuse.
8073       if (key->IsNull()) {
8074         if (deleted_index < 0) deleted_index = i;
       // Undefined marks the end of the used region: insert here (or in the
       // earlier deleted slot, if any).
8077       if (key->IsUndefined()) {
8078         if (deleted_index >= 0) i = deleted_index;
8079         cache->set(i + kCodeCacheEntryNameOffset, *name);
8080         cache->set(i + kCodeCacheEntryCodeOffset, *code);
8083       if (name->Equals(Name::cast(key))) {
8085             Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
         // Same name and same type-stripped flags: overwrite in place.
8086         if (Code::RemoveTypeFromFlags(found) == flags) {
8087           cache->set(i + kCodeCacheEntryCodeOffset, *code);
8093     // Reached the end of the code cache. If there were deleted
8094     // elements, reuse the space for the first of them.
8095     if (deleted_index >= 0) {
8096       cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
8097       cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);
8102   // Extend the code cache with some new entries (at least one). Must be a
8103   // multiple of the entry size.
8104   int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
8105   new_length = new_length - new_length % kCodeCacheEntrySize;
8106   ASSERT((new_length % kCodeCacheEntrySize) == 0);
8107   cache = FixedArray::CopySize(cache, new_length);
8109   // Add the (name, code) pair to the new cache.
8110   cache->set(length + kCodeCacheEntryNameOffset, *name);
8111   cache->set(length + kCodeCacheEntryCodeOffset, *code);
8112   code_cache->set_default_cache(*cache);
8116 void CodeCache::UpdateNormalTypeCache(
8117 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
8118 // Adding a new entry can cause a new cache to be allocated.
8119 Handle<CodeCacheHashTable> cache(
8120 CodeCacheHashTable::cast(code_cache->normal_type_cache()));
8121 Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
8122 code_cache->set_normal_type_cache(*new_cache);
8126 Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
8127 Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
8128 if (result->IsCode()) {
8129 if (Code::cast(result)->flags() == flags) return result;
8130 return GetHeap()->undefined_value();
8132 return LookupNormalTypeCache(name, flags);
8136 Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
8137 FixedArray* cache = default_cache();
8138 int length = cache->length();
8139 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
8140 Object* key = cache->get(i + kCodeCacheEntryNameOffset);
8141 // Skip deleted elements.
8142 if (key->IsNull()) continue;
8143 if (key->IsUndefined()) return key;
8144 if (name->Equals(Name::cast(key))) {
8145 Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
8146 if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
8151 return GetHeap()->undefined_value();
8155 Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
8156 if (!normal_type_cache()->IsUndefined()) {
8157 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
8158 return cache->Lookup(name, flags);
8160 return GetHeap()->undefined_value();
8165 int CodeCache::GetIndex(Object* name, Code* code) {
8166 if (code->type() == Code::NORMAL) {
8167 if (normal_type_cache()->IsUndefined()) return -1;
8168 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
8169 return cache->GetIndex(Name::cast(name), code->flags());
8172 FixedArray* array = default_cache();
8173 int len = array->length();
8174 for (int i = 0; i < len; i += kCodeCacheEntrySize) {
8175 if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
8181 void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
8182 if (code->type() == Code::NORMAL) {
8183 ASSERT(!normal_type_cache()->IsUndefined());
8184 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
8185 ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
8186 cache->RemoveByIndex(index);
8188 FixedArray* array = default_cache();
8189 ASSERT(array->length() >= index && array->get(index)->IsCode());
8190 // Use null instead of undefined for deleted elements to distinguish
8191 // deleted elements from unused elements. This distinction is used
8192 // when looking up in the cache and when updating the cache.
8193 ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
8194 array->set_null(index - 1); // Name.
8195 array->set_null(index); // Code.
8200 // The key in the code cache hash table consists of the property name and the
8201 // code object. The actual match is on the name and the code flags. If a key
8202 // is created using the flags and not a code object it can only be used for
8203 // lookup not to create a new entry.
8204 class CodeCacheHashTableKey : public HashTableKey {
8206 CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
8207 : name_(name), flags_(flags), code_() { }
8209 CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
8210 : name_(name), flags_(code->flags()), code_(code) { }
8212 bool IsMatch(Object* other) V8_OVERRIDE {
8213 if (!other->IsFixedArray()) return false;
8214 FixedArray* pair = FixedArray::cast(other);
8215 Name* name = Name::cast(pair->get(0));
8216 Code::Flags flags = Code::cast(pair->get(1))->flags();
8217 if (flags != flags_) {
8220 return name_->Equals(name);
8223 static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
8224 return name->Hash() ^ flags;
8227 uint32_t Hash() V8_OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }
8229 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
8230 FixedArray* pair = FixedArray::cast(obj);
8231 Name* name = Name::cast(pair->get(0));
8232 Code* code = Code::cast(pair->get(1));
8233 return NameFlagsHashHelper(name, code->flags());
8236 MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
8237 Handle<Code> code = code_.ToHandleChecked();
8238 Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
8239 pair->set(0, *name_);
8240 pair->set(1, *code);
8247 // TODO(jkummerow): We should be able to get by without this.
8248 MaybeHandle<Code> code_;
8252 Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
8253 DisallowHeapAllocation no_alloc;
8254 CodeCacheHashTableKey key(handle(name), flags);
8255 int entry = FindEntry(&key);
8256 if (entry == kNotFound) return GetHeap()->undefined_value();
8257 return get(EntryToIndex(entry) + 1);
8261 Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
8262 Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
8263 CodeCacheHashTableKey key(name, code);
8265 Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);
8267 int entry = new_cache->FindInsertionEntry(key.Hash());
8268 Handle<Object> k = key.AsHandle(cache->GetIsolate());
8270 new_cache->set(EntryToIndex(entry), *k);
8271 new_cache->set(EntryToIndex(entry) + 1, *code);
8272 new_cache->ElementAdded();
8277 int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
8278 DisallowHeapAllocation no_alloc;
8279 CodeCacheHashTableKey key(handle(name), flags);
8280 int entry = FindEntry(&key);
8281 return (entry == kNotFound) ? -1 : entry;
8285 void CodeCacheHashTable::RemoveByIndex(int index) {
8287 Heap* heap = GetHeap();
8288 set(EntryToIndex(index), heap->the_hole_value());
8289 set(EntryToIndex(index) + 1, heap->the_hole_value());
8294 void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
8295 MapHandleList* maps,
8297 Handle<Code> code) {
8298 Isolate* isolate = code_cache->GetIsolate();
8299 if (code_cache->cache()->IsUndefined()) {
8300 Handle<PolymorphicCodeCacheHashTable> result =
8301 PolymorphicCodeCacheHashTable::New(
8303 PolymorphicCodeCacheHashTable::kInitialSize);
8304 code_cache->set_cache(*result);
8306 // This entry shouldn't be contained in the cache yet.
8307 ASSERT(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
8308 ->Lookup(maps, flags)->IsUndefined());
8310 Handle<PolymorphicCodeCacheHashTable> hash_table =
8311 handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
8312 Handle<PolymorphicCodeCacheHashTable> new_cache =
8313 PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
8314 code_cache->set_cache(*new_cache);
8318 Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
8319 Code::Flags flags) {
8320 if (!cache()->IsUndefined()) {
8321 PolymorphicCodeCacheHashTable* hash_table =
8322 PolymorphicCodeCacheHashTable::cast(cache());
8323 return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
8325 return GetIsolate()->factory()->undefined_value();
8330 // Despite their name, object of this class are not stored in the actual
8331 // hash table; instead they're temporarily used for lookups. It is therefore
8332 // safe to have a weak (non-owning) pointer to a MapList as a member field.
8333 class PolymorphicCodeCacheHashTableKey : public HashTableKey {
8335 // Callers must ensure that |maps| outlives the newly constructed object.
8336 PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
8338 code_flags_(code_flags) {}
8340 bool IsMatch(Object* other) V8_OVERRIDE {
8341 MapHandleList other_maps(kDefaultListAllocationSize);
8343 FromObject(other, &other_flags, &other_maps);
8344 if (code_flags_ != other_flags) return false;
8345 if (maps_->length() != other_maps.length()) return false;
8346 // Compare just the hashes first because it's faster.
8347 int this_hash = MapsHashHelper(maps_, code_flags_);
8348 int other_hash = MapsHashHelper(&other_maps, other_flags);
8349 if (this_hash != other_hash) return false;
8351 // Full comparison: for each map in maps_, look for an equivalent map in
8352 // other_maps. This implementation is slow, but probably good enough for
8353 // now because the lists are short (<= 4 elements currently).
8354 for (int i = 0; i < maps_->length(); ++i) {
8355 bool match_found = false;
8356 for (int j = 0; j < other_maps.length(); ++j) {
8357 if (*(maps_->at(i)) == *(other_maps.at(j))) {
8362 if (!match_found) return false;
8367 static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
8368 uint32_t hash = code_flags;
8369 for (int i = 0; i < maps->length(); ++i) {
8370 hash ^= maps->at(i)->Hash();
8375 uint32_t Hash() V8_OVERRIDE {
8376 return MapsHashHelper(maps_, code_flags_);
8379 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
8380 MapHandleList other_maps(kDefaultListAllocationSize);
8382 FromObject(obj, &other_flags, &other_maps);
8383 return MapsHashHelper(&other_maps, other_flags);
8386 MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
8387 // The maps in |maps_| must be copied to a newly allocated FixedArray,
8388 // both because the referenced MapList is short-lived, and because C++
8389 // objects can't be stored in the heap anyway.
8390 Handle<FixedArray> list =
8391 isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
8392 list->set(0, Smi::FromInt(code_flags_));
8393 for (int i = 0; i < maps_->length(); ++i) {
8394 list->set(i + 1, *maps_->at(i));
8400 static MapHandleList* FromObject(Object* obj,
8402 MapHandleList* maps) {
8403 FixedArray* list = FixedArray::cast(obj);
8405 *code_flags = Smi::cast(list->get(0))->value();
8406 for (int i = 1; i < list->length(); ++i) {
8407 maps->Add(Handle<Map>(Map::cast(list->get(i))));
8412 MapHandleList* maps_; // weak.
8414 static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
8418 Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
8420 DisallowHeapAllocation no_alloc;
8421 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
8422 int entry = FindEntry(&key);
8423 if (entry == kNotFound) return GetHeap()->undefined_value();
8424 return get(EntryToIndex(entry) + 1);
8428 Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
8429 Handle<PolymorphicCodeCacheHashTable> hash_table,
8430 MapHandleList* maps,
8432 Handle<Code> code) {
8433 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
8434 Handle<PolymorphicCodeCacheHashTable> cache =
8435 EnsureCapacity(hash_table, 1, &key);
8436 int entry = cache->FindInsertionEntry(key.Hash());
8438 Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());
8439 cache->set(EntryToIndex(entry), *obj);
8440 cache->set(EntryToIndex(entry) + 1, *code);
8441 cache->ElementAdded();
8446 void FixedArray::Shrink(int new_length) {
8447 ASSERT(0 <= new_length && new_length <= length());
8448 if (new_length < length()) {
8449 RightTrimFixedArray<Heap::FROM_MUTATOR>(
8450 GetHeap(), this, length() - new_length);
8455 MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
8456 Handle<FixedArray> content,
8457 Handle<JSObject> array) {
8458 ASSERT(array->IsJSArray() || array->HasSloppyArgumentsElements());
8459 ElementsAccessor* accessor = array->GetElementsAccessor();
8460 Handle<FixedArray> result;
8461 ASSIGN_RETURN_ON_EXCEPTION(
8462 array->GetIsolate(), result,
8463 accessor->AddElementsToFixedArray(array, array, content),
8466 #ifdef ENABLE_SLOW_ASSERTS
8467 if (FLAG_enable_slow_asserts) {
8468 DisallowHeapAllocation no_allocation;
8469 for (int i = 0; i < result->length(); i++) {
8470 Object* current = result->get(i);
8471 ASSERT(current->IsNumber() || current->IsName());
8479 MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
8480 Handle<FixedArray> second) {
8481 ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
8482 Handle<FixedArray> result;
8483 ASSIGN_RETURN_ON_EXCEPTION(
8484 first->GetIsolate(), result,
8485 accessor->AddElementsToFixedArray(
8486 Handle<Object>::null(), // receiver
8487 Handle<JSObject>::null(), // holder
8489 Handle<FixedArrayBase>::cast(second)),
8492 #ifdef ENABLE_SLOW_ASSERTS
8493 if (FLAG_enable_slow_asserts) {
8494 DisallowHeapAllocation no_allocation;
8495 for (int i = 0; i < result->length(); i++) {
8496 Object* current = result->get(i);
8497 ASSERT(current->IsNumber() || current->IsName());
8505 Handle<FixedArray> FixedArray::CopySize(
8506 Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
8507 Isolate* isolate = array->GetIsolate();
8508 if (new_length == 0) return isolate->factory()->empty_fixed_array();
8509 Handle<FixedArray> result =
8510 isolate->factory()->NewFixedArray(new_length, pretenure);
8512 DisallowHeapAllocation no_gc;
8513 int len = array->length();
8514 if (new_length < len) len = new_length;
8515 // We are taking the map from the old fixed array so the map is sure to
8516 // be an immortal immutable object.
8517 result->set_map_no_write_barrier(array->map());
8518 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
8519 for (int i = 0; i < len; i++) {
8520 result->set(i, array->get(i), mode);
8526 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
8527 DisallowHeapAllocation no_gc;
8528 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
8529 for (int index = 0; index < len; index++) {
8530 dest->set(dest_pos+index, get(pos+index), mode);
8536 bool FixedArray::IsEqualTo(FixedArray* other) {
8537 if (length() != other->length()) return false;
8538 for (int i = 0 ; i < length(); ++i) {
8539 if (get(i) != other->get(i)) return false;
8546 Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
8547 int number_of_descriptors,
8549 ASSERT(0 <= number_of_descriptors);
8550 Factory* factory = isolate->factory();
8551 // Do not use DescriptorArray::cast on incomplete object.
8552 int size = number_of_descriptors + slack;
8553 if (size == 0) return factory->empty_descriptor_array();
8554 // Allocate the array of keys.
8555 Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));
8557 result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
8558 result->set(kEnumCacheIndex, Smi::FromInt(0));
8559 return Handle<DescriptorArray>::cast(result);
8563 void DescriptorArray::ClearEnumCache() {
8564 set(kEnumCacheIndex, Smi::FromInt(0));
8568 void DescriptorArray::Replace(int index, Descriptor* descriptor) {
8569 descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
8570 Set(index, descriptor);
8574 void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
8575 FixedArray* new_cache,
8576 Object* new_index_cache) {
8577 ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
8578 ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
8580 ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
8581 FixedArray::cast(bridge_storage)->
8582 set(kEnumCacheBridgeCacheIndex, new_cache);
8583 FixedArray::cast(bridge_storage)->
8584 set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
8585 set(kEnumCacheIndex, bridge_storage);
8589 void DescriptorArray::CopyFrom(int index,
8590 DescriptorArray* src,
8591 const WhitenessWitness& witness) {
8592 Object* value = src->GetValue(index);
8593 PropertyDetails details = src->GetDetails(index);
8594 Descriptor desc(handle(src->GetKey(index)),
8595 handle(value, src->GetIsolate()),
8597 Set(index, &desc, witness);
8601 // We need the whiteness witness since sort will reshuffle the entries in the
8602 // descriptor array. If the descriptor array were to be black, the shuffling
8603 // would move a slot that was already recorded as pointing into an evacuation
8604 // candidate. This would result in missing updates upon evacuation.
8605 void DescriptorArray::Sort() {
8606 // In-place heap sort.
8607 int len = number_of_descriptors();
8608 // Reset sorting since the descriptor array might contain invalid pointers.
8609 for (int i = 0; i < len; ++i) SetSortedKey(i, i);
8610 // Bottom-up max-heap construction.
8611 // Index of the last node with children
8612 const int max_parent_index = (len / 2) - 1;
8613 for (int i = max_parent_index; i >= 0; --i) {
8614 int parent_index = i;
8615 const uint32_t parent_hash = GetSortedKey(i)->Hash();
8616 while (parent_index <= max_parent_index) {
8617 int child_index = 2 * parent_index + 1;
8618 uint32_t child_hash = GetSortedKey(child_index)->Hash();
8619 if (child_index + 1 < len) {
8620 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8621 if (right_child_hash > child_hash) {
8623 child_hash = right_child_hash;
8626 if (child_hash <= parent_hash) break;
8627 SwapSortedKeys(parent_index, child_index);
8628 // Now element at child_index could be < its children.
8629 parent_index = child_index; // parent_hash remains correct.
8633 // Extract elements and create sorted array.
8634 for (int i = len - 1; i > 0; --i) {
8635 // Put max element at the back of the array.
8636 SwapSortedKeys(0, i);
8637 // Shift down the new top element.
8638 int parent_index = 0;
8639 const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
8640 const int max_parent_index = (i / 2) - 1;
8641 while (parent_index <= max_parent_index) {
8642 int child_index = parent_index * 2 + 1;
8643 uint32_t child_hash = GetSortedKey(child_index)->Hash();
8644 if (child_index + 1 < i) {
8645 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8646 if (right_child_hash > child_hash) {
8648 child_hash = right_child_hash;
8651 if (child_hash <= parent_hash) break;
8652 SwapSortedKeys(parent_index, child_index);
8653 parent_index = child_index;
8656 ASSERT(IsSortedNoDuplicates());
8660 Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
8661 Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
8662 copy->set_getter(pair->getter());
8663 copy->set_setter(pair->setter());
8668 Object* AccessorPair::GetComponent(AccessorComponent component) {
8669 Object* accessor = get(component);
8670 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
8674 Handle<DeoptimizationInputData> DeoptimizationInputData::New(
8676 int deopt_entry_count,
8677 PretenureFlag pretenure) {
8678 ASSERT(deopt_entry_count > 0);
8679 return Handle<DeoptimizationInputData>::cast(
8680 isolate->factory()->NewFixedArray(
8681 LengthFor(deopt_entry_count), pretenure));
8685 Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
8687 int number_of_deopt_points,
8688 PretenureFlag pretenure) {
8689 Handle<FixedArray> result;
8690 if (number_of_deopt_points == 0) {
8691 result = isolate->factory()->empty_fixed_array();
8693 result = isolate->factory()->NewFixedArray(
8694 LengthOfFixedArray(number_of_deopt_points), pretenure);
8696 return Handle<DeoptimizationOutputData>::cast(result);
8701 bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
8702 if (IsEmpty()) return other->IsEmpty();
8703 if (other->IsEmpty()) return false;
8704 if (length() != other->length()) return false;
8705 for (int i = 0; i < length(); ++i) {
8706 if (get(i) != other->get(i)) return false;
8713 static bool IsIdentifier(UnicodeCache* cache, Name* name) {
8714 // Checks whether the buffer contains an identifier (no escape).
8715 if (!name->IsString()) return false;
8716 String* string = String::cast(name);
8717 if (string->length() == 0) return true;
8718 ConsStringIteratorOp op;
8719 StringCharacterStream stream(string, &op);
8720 if (!cache->IsIdentifierStart(stream.GetNext())) {
8723 while (stream.HasMore()) {
8724 if (!cache->IsIdentifierPart(stream.GetNext())) {
8732 bool Name::IsCacheable(Isolate* isolate) {
8733 return IsSymbol() || IsIdentifier(isolate->unicode_cache(), this);
8737 bool String::LooksValid() {
8738 if (!GetIsolate()->heap()->Contains(this)) return false;
8743 String::FlatContent String::GetFlatContent() {
8744 ASSERT(!AllowHeapAllocation::IsAllowed());
8745 int length = this->length();
8746 StringShape shape(this);
8747 String* string = this;
8749 if (shape.representation_tag() == kConsStringTag) {
8750 ConsString* cons = ConsString::cast(string);
8751 if (cons->second()->length() != 0) {
8752 return FlatContent();
8754 string = cons->first();
8755 shape = StringShape(string);
8757 if (shape.representation_tag() == kSlicedStringTag) {
8758 SlicedString* slice = SlicedString::cast(string);
8759 offset = slice->offset();
8760 string = slice->parent();
8761 shape = StringShape(string);
8762 ASSERT(shape.representation_tag() != kConsStringTag &&
8763 shape.representation_tag() != kSlicedStringTag);
8765 if (shape.encoding_tag() == kOneByteStringTag) {
8766 const uint8_t* start;
8767 if (shape.representation_tag() == kSeqStringTag) {
8768 start = SeqOneByteString::cast(string)->GetChars();
8770 start = ExternalAsciiString::cast(string)->GetChars();
8772 return FlatContent(start + offset, length);
8774 ASSERT(shape.encoding_tag() == kTwoByteStringTag);
8776 if (shape.representation_tag() == kSeqStringTag) {
8777 start = SeqTwoByteString::cast(string)->GetChars();
8779 start = ExternalTwoByteString::cast(string)->GetChars();
8781 return FlatContent(start + offset, length);
8786 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8787 RobustnessFlag robust_flag,
8790 int* length_return) {
8791 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8792 return SmartArrayPointer<char>(NULL);
8794 Heap* heap = GetHeap();
8796 // Negative length means the to the end of the string.
8797 if (length < 0) length = kMaxInt - offset;
8799 // Compute the size of the UTF-8 string. Start at the specified offset.
8800 Access<ConsStringIteratorOp> op(
8801 heap->isolate()->objects_string_iterator());
8802 StringCharacterStream stream(this, op.value(), offset);
8803 int character_position = offset;
8805 int last = unibrow::Utf16::kNoPreviousCharacter;
8806 while (stream.HasMore() && character_position++ < offset + length) {
8807 uint16_t character = stream.GetNext();
8808 utf8_bytes += unibrow::Utf8::Length(character, last);
8812 if (length_return) {
8813 *length_return = utf8_bytes;
8816 char* result = NewArray<char>(utf8_bytes + 1);
8818 // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
8819 stream.Reset(this, offset);
8820 character_position = offset;
8821 int utf8_byte_position = 0;
8822 last = unibrow::Utf16::kNoPreviousCharacter;
8823 while (stream.HasMore() && character_position++ < offset + length) {
8824 uint16_t character = stream.GetNext();
8825 if (allow_nulls == DISALLOW_NULLS && character == 0) {
8828 utf8_byte_position +=
8829 unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
8832 result[utf8_byte_position] = 0;
8833 return SmartArrayPointer<char>(result);
8837 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8838 RobustnessFlag robust_flag,
8839 int* length_return) {
8840 return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
8844 const uc16* String::GetTwoByteData(unsigned start) {
8845 ASSERT(!IsOneByteRepresentationUnderneath());
8846 switch (StringShape(this).representation_tag()) {
8848 return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8849 case kExternalStringTag:
8850 return ExternalTwoByteString::cast(this)->
8851 ExternalTwoByteStringGetData(start);
8852 case kSlicedStringTag: {
8853 SlicedString* slice = SlicedString::cast(this);
8854 return slice->parent()->GetTwoByteData(start + slice->offset());
8856 case kConsStringTag:
8865 SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
8866 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8867 return SmartArrayPointer<uc16>();
8869 Heap* heap = GetHeap();
8871 Access<ConsStringIteratorOp> op(
8872 heap->isolate()->objects_string_iterator());
8873 StringCharacterStream stream(this, op.value());
8875 uc16* result = NewArray<uc16>(length() + 1);
8878 while (stream.HasMore()) {
8879 uint16_t character = stream.GetNext();
8880 result[i++] = character;
8883 return SmartArrayPointer<uc16>(result);
8887 const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
8888 return reinterpret_cast<uc16*>(
8889 reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
8893 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8894 Relocatable* current = isolate->relocatable_top();
8895 while (current != NULL) {
8896 current->PostGarbageCollection();
8897 current = current->prev_;
8902 // Reserve space for statics needing saving and restoring.
8903 int Relocatable::ArchiveSpacePerThread() {
8904 return sizeof(Relocatable*); // NOLINT
8908 // Archive statics that are thread local.
8909 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8910 *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8911 isolate->set_relocatable_top(NULL);
8912 return to + ArchiveSpacePerThread();
8916 // Restore statics that are thread local.
8917 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8918 isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8919 return from + ArchiveSpacePerThread();
8923 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8924 Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8926 return thread_storage + ArchiveSpacePerThread();
8930 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8931 Iterate(v, isolate->relocatable_top());
8935 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8936 Relocatable* current = top;
8937 while (current != NULL) {
8938 current->IterateInstance(v);
8939 current = current->prev_;
8944 FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
8945 : Relocatable(isolate),
8946 str_(str.location()),
8947 length_(str->length()) {
8948 PostGarbageCollection();
8952 FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
8953 : Relocatable(isolate),
8956 length_(input.length()),
8957 start_(input.start()) { }
8960 void FlatStringReader::PostGarbageCollection() {
8961 if (str_ == NULL) return;
8962 Handle<String> str(str_);
8963 ASSERT(str->IsFlat());
8964 DisallowHeapAllocation no_gc;
8965 // This does not actually prevent the vector from being relocated later.
8966 String::FlatContent content = str->GetFlatContent();
8967 ASSERT(content.IsFlat());
8968 is_ascii_ = content.IsAscii();
8970 start_ = content.ToOneByteVector().start();
8972 start_ = content.ToUC16Vector().start();
8977 void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) {
8978 ASSERT(cons_string != NULL);
8979 root_ = cons_string;
8981 // Force stack blown condition to trigger restart.
8983 maximum_depth_ = kStackSize + depth_;
8984 ASSERT(StackBlown());
8988 String* ConsStringIteratorOp::Continue(int* offset_out) {
8989 ASSERT(depth_ != 0);
8990 ASSERT_EQ(0, *offset_out);
8991 bool blew_stack = StackBlown();
8992 String* string = NULL;
8993 // Get the next leaf if there is one.
8994 if (!blew_stack) string = NextLeaf(&blew_stack);
8995 // Restart search from root.
8997 ASSERT(string == NULL);
8998 string = Search(offset_out);
9000 // Ensure future calls return null immediately.
9001 if (string == NULL) Reset(NULL);
9006 String* ConsStringIteratorOp::Search(int* offset_out) {
9007 ConsString* cons_string = root_;
9008 // Reset the stack, pushing the root string.
9011 frames_[0] = cons_string;
9012 const int consumed = consumed_;
9015 // Loop until the string is found which contains the target offset.
9016 String* string = cons_string->first();
9017 int length = string->length();
9019 if (consumed < offset + length) {
9020 // Target offset is in the left branch.
9021 // Keep going if we're still in a ConString.
9022 type = string->map()->instance_type();
9023 if ((type & kStringRepresentationMask) == kConsStringTag) {
9024 cons_string = ConsString::cast(string);
9025 PushLeft(cons_string);
9028 // Tell the stack we're done descending.
9029 AdjustMaximumDepth();
9032 // Update progress through the string.
9034 // Keep going if we're still in a ConString.
9035 string = cons_string->second();
9036 type = string->map()->instance_type();
9037 if ((type & kStringRepresentationMask) == kConsStringTag) {
9038 cons_string = ConsString::cast(string);
9039 PushRight(cons_string);
9042 // Need this to be updated for the current string.
9043 length = string->length();
9044 // Account for the possibility of an empty right leaf.
9045 // This happens only if we have asked for an offset outside the string.
9047 // Reset so future operations will return null immediately.
9051 // Tell the stack we're done descending.
9052 AdjustMaximumDepth();
9053 // Pop stack so next iteration is in correct place.
9056 ASSERT(length != 0);
9057 // Adjust return values and exit.
9058 consumed_ = offset + length;
9059 *offset_out = consumed - offset;
9067 String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) {
9069 // Tree traversal complete.
9071 *blew_stack = false;
9074 // We've lost track of higher nodes.
9080 ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
9081 String* string = cons_string->second();
9082 int32_t type = string->map()->instance_type();
9083 if ((type & kStringRepresentationMask) != kConsStringTag) {
9084 // Pop stack so next iteration is in correct place.
9086 int length = string->length();
9087 // Could be a flattened ConsString.
9088 if (length == 0) continue;
9089 consumed_ += length;
9092 cons_string = ConsString::cast(string);
9093 PushRight(cons_string);
9094 // Need to traverse all the way left.
9097 string = cons_string->first();
9098 type = string->map()->instance_type();
9099 if ((type & kStringRepresentationMask) != kConsStringTag) {
9100 AdjustMaximumDepth();
9101 int length = string->length();
9102 ASSERT(length != 0);
9103 consumed_ += length;
9106 cons_string = ConsString::cast(string);
9107 PushLeft(cons_string);
9115 uint16_t ConsString::ConsStringGet(int index) {
9116 ASSERT(index >= 0 && index < this->length());
9118 // Check for a flattened cons string
9119 if (second()->length() == 0) {
9120 String* left = first();
9121 return left->Get(index);
9124 String* string = String::cast(this);
9127 if (StringShape(string).IsCons()) {
9128 ConsString* cons_string = ConsString::cast(string);
9129 String* left = cons_string->first();
9130 if (left->length() > index) {
9133 index -= left->length();
9134 string = cons_string->second();
9137 return string->Get(index);
9146 uint16_t SlicedString::SlicedStringGet(int index) {
9147 return parent()->Get(offset() + index);
// Copies the characters [from, to) of |src| into the flat buffer |sink|,
// dispatching on the string's representation tag. Sequential and external
// strings copy directly; cons strings recurse over the shorter side and
// iterate over the longer side; sliced strings redirect into their parent.
// |sinkchar| is uint8_t or uint16_t depending on the destination encoding.
9151 template <typename sinkchar>
9152 void String::WriteToFlat(String* src,
9156 String* source = src;
9160 ASSERT(0 <= from && from <= to && to <= source->length());
9161 switch (StringShape(source).full_representation_tag()) {
9162 case kOneByteStringTag | kExternalStringTag: {
9164 ExternalAsciiString::cast(source)->GetChars() + from,
9168 case kTwoByteStringTag | kExternalStringTag: {
9170 ExternalTwoByteString::cast(source)->GetChars();
9176 case kOneByteStringTag | kSeqStringTag: {
9178 SeqOneByteString::cast(source)->GetChars() + from,
9182 case kTwoByteStringTag | kSeqStringTag: {
9184 SeqTwoByteString::cast(source)->GetChars() + from,
9188 case kOneByteStringTag | kConsStringTag:
9189 case kTwoByteStringTag | kConsStringTag: {
9190 ConsString* cons_string = ConsString::cast(source);
9191 String* first = cons_string->first();
9192 int boundary = first->length();
// Recurse only over the shorter half so recursion depth stays
// logarithmic in the common unbalanced-append case.
9193 if (to - boundary >= boundary - from) {
9194 // Right hand side is longer. Recurse over left.
9195 if (from < boundary) {
9196 WriteToFlat(first, sink, from, boundary);
9197 sink += boundary - from;
9203 source = cons_string->second();
9205 // Left hand side is longer. Recurse over right.
9206 if (to > boundary) {
9207 String* second = cons_string->second();
9208 // When repeatedly appending to a string, we get a cons string that
9209 // is unbalanced to the left, a list, essentially. We inline the
9210 // common case of sequential ascii right child.
9211 if (to - boundary == 1) {
9212 sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
9213 } else if (second->IsSeqOneByteString()) {
9214 CopyChars(sink + boundary - from,
9215 SeqOneByteString::cast(second)->GetChars(),
9219 sink + boundary - from,
9229 case kOneByteStringTag | kSlicedStringTag:
9230 case kTwoByteStringTag | kSlicedStringTag: {
// Redirect into the parent string, shifted by the slice offset.
9231 SlicedString* slice = SlicedString::cast(source);
9232 unsigned offset = slice->offset();
9233 WriteToFlat(slice->parent(), sink, from + offset, to + offset);
9242 template <typename SourceChar>
9243 static void CalculateLineEndsImpl(Isolate* isolate,
9244 List<int>* line_ends,
9245 Vector<const SourceChar> src,
9246 bool include_ending_line) {
9247 const int src_len = src.length();
9248 StringSearch<uint8_t, SourceChar> search(isolate, STATIC_ASCII_VECTOR("\n"));
9250 // Find and record line ends.
9252 while (position != -1 && position < src_len) {
9253 position = search.Search(src, position);
9254 if (position != -1) {
9255 line_ends->Add(position);
9257 } else if (include_ending_line) {
9258 // Even if the last line misses a line end, it is counted.
9259 line_ends->Add(src_len);
// Computes the line-end offsets of |src| and returns them as a
// FixedArray of Smis. |src| must be flat; the one-byte/two-byte scan is
// chosen from the flat content. No GC may happen while the raw character
// vectors are in use, hence the DisallowHeapAllocation scope.
9266 Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
9267 bool include_ending_line) {
9269 // Rough estimate of line count based on a roughly estimated average
9270 // length of (unpacked) code.
9271 int line_count_estimate = src->length() >> 4;
9272 List<int> line_ends(line_count_estimate);
9273 Isolate* isolate = src->GetIsolate();
9274 { DisallowHeapAllocation no_allocation; // ensure vectors stay valid.
9275 // Dispatch on type of strings.
9276 String::FlatContent content = src->GetFlatContent();
9277 ASSERT(content.IsFlat());
9278 if (content.IsAscii()) {
9279 CalculateLineEndsImpl(isolate,
9281 content.ToOneByteVector(),
9282 include_ending_line);
9284 CalculateLineEndsImpl(isolate,
9286 content.ToUC16Vector(),
9287 include_ending_line);
// Box the collected offsets into a heap-allocated FixedArray of Smis.
9290 int line_count = line_ends.length();
9291 Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
9292 for (int i = 0; i < line_count; i++) {
9293 array->set(i, Smi::FromInt(line_ends[i]));
9299 // Compares the contents of two strings by reading and comparing
9300 // int-sized blocks of characters.
// Returns true iff the first |length| characters of |a| and |b| are
// equal. On platforms that cannot read unaligned words the blockwise
// path is only taken when both pointers are word-aligned; any remainder
// (or the whole string on the fallback path) is compared char by char.
9301 template <typename Char>
9302 static inline bool CompareRawStringContents(const Char* const a,
9303 const Char* const b,
9306 #ifndef V8_HOST_CAN_READ_UNALIGNED
9307 // If this architecture isn't comfortable reading unaligned ints
9308 // then we have to check that the strings are aligned before
9309 // comparing them blockwise.
9310 const int kAlignmentMask = sizeof(uint32_t) - 1; // NOLINT
9311 uint32_t pa_addr = reinterpret_cast<uint32_t>(a);
9312 uint32_t pb_addr = reinterpret_cast<uint32_t>(b);
9313 if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
9315 const int kStepSize = sizeof(int) / sizeof(Char); // NOLINT
9316 int endpoint = length - kStepSize;
9317 // Compare blocks until we reach near the end of the string.
9318 for (; i <= endpoint; i += kStepSize) {
9319 uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
9320 uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
9325 #ifndef V8_HOST_CAN_READ_UNALIGNED
9328 // Compare the remaining characters that didn't fit into a block.
9329 for (; i < length; i++) {
// Compile-time dispatch for raw character comparison. The primary
// template handles mixed encodings (one-byte vs two-byte) with a plain
// per-character loop; the two same-encoding specializations below use the
// faster blockwise CompareRawStringContents.
9338 template<typename Chars1, typename Chars2>
9339 class RawStringComparator : public AllStatic {
9341 static inline bool compare(const Chars1* a, const Chars2* b, int len) {
9342 ASSERT(sizeof(Chars1) != sizeof(Chars2));
9343 for (int i = 0; i < len; i++) {
// Specialization: both sides two-byte — compare blockwise.
9354 class RawStringComparator<uint16_t, uint16_t> {
9356 static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
9357 return CompareRawStringContents(a, b, len);
// Specialization: both sides one-byte — compare blockwise.
9363 class RawStringComparator<uint8_t, uint8_t> {
9365 static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
9366 return CompareRawStringContents(a, b, len);
// Compares two potentially non-flat strings chunk by chunk. Each State
// tracks a cursor over one string: VisitFlat fills in the current flat
// segment (one- or two-byte), and a ConsStringIteratorOp advances across
// cons-tree leaves when a segment is exhausted.
9371 class StringComparator {
9374 explicit inline State(ConsStringIteratorOp* op)
9375 : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}
// Positions the cursor at the first flat segment of |string|.
9377 inline void Init(String* string) {
9378 ConsString* cons_string = String::VisitFlat(this, string);
9379 op_->Reset(cons_string);
9380 if (cons_string != NULL) {
9382 string = op_->Next(&offset);
9383 String::VisitFlat(this, string, offset);
// Visitor callbacks: record the segment's encoding and characters.
9387 inline void VisitOneByteString(const uint8_t* chars, int length) {
9388 is_one_byte_ = true;
9393 inline void VisitTwoByteString(const uint16_t* chars, int length) {
9394 is_one_byte_ = false;
// Consumes |consumed| characters; fetches the next cons leaf when the
// current segment is fully consumed.
9399 void Advance(int consumed) {
9400 ASSERT(consumed <= length_);
9402 if (length_ != consumed) {
9404 buffer8_ += consumed;
9406 buffer16_ += consumed;
9408 length_ -= consumed;
9413 String* next = op_->Next(&offset);
9414 ASSERT_EQ(0, offset);
9415 ASSERT(next != NULL);
9416 String::VisitFlat(this, next);
9419 ConsStringIteratorOp* const op_;
// buffer8_/buffer16_ alias the current segment; is_one_byte_ selects
// which one is valid. NOTE(review): they appear to share storage via a
// union — the declaration lines are not visible here; confirm.
9423 const uint8_t* buffer8_;
9424 const uint16_t* buffer16_;
9428 DISALLOW_IMPLICIT_CONSTRUCTORS(State);
9432 inline StringComparator(ConsStringIteratorOp* op_1,
9433 ConsStringIteratorOp* op_2)
// Compares |to_check| characters from both states with the encoding-
// appropriate RawStringComparator.
9438 template<typename Chars1, typename Chars2>
9439 static inline bool Equals(State* state_1, State* state_2, int to_check) {
9440 const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
9441 const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
9442 return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
// Main loop: repeatedly compare the overlap of the two current
// segments until a mismatch or all |length| characters are checked.
9445 bool Equals(String* string_1, String* string_2) {
9446 int length = string_1->length();
9447 state_1_.Init(string_1);
9448 state_2_.Init(string_2);
9450 int to_check = Min(state_1_.length_, state_2_.length_);
9451 ASSERT(to_check > 0 && to_check <= length);
9453 if (state_1_.is_one_byte_) {
9454 if (state_2_.is_one_byte_) {
9455 is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
9457 is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
9460 if (state_2_.is_one_byte_) {
9461 is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
9463 is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
9467 if (!is_equal) return false;
9469 // Exit condition. Strings are equal.
9470 if (length == 0) return true;
9471 state_1_.Advance(to_check);
9472 state_2_.Advance(to_check);
9479 DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
// Deep equality check between this string and |other|, used when the
// fast pointer/shape checks have already failed. Applies cheap negative
// checks (length, cached hashes, first character) before falling back to
// a raw sequential compare or the chunked StringComparator.
9483 bool String::SlowEquals(String* other) {
9484 DisallowHeapAllocation no_gc;
9485 // Fast check: negative check with lengths.
9487 if (len != other->length()) return false;
9488 if (len == 0) return true;
9490 // Fast check: if hash code is computed for both strings
9491 // a fast negative check can be performed.
9492 if (HasHashCode() && other->HasHashCode()) {
9493 #ifdef ENABLE_SLOW_ASSERTS
// Slow-mode sanity check: differing hashes must imply at least one
// differing character.
9494 if (FLAG_enable_slow_asserts) {
9495 if (Hash() != other->Hash()) {
9496 bool found_difference = false;
9497 for (int i = 0; i < len; i++) {
9498 if (Get(i) != other->Get(i)) {
9499 found_difference = true;
9503 ASSERT(found_difference);
9507 if (Hash() != other->Hash()) return false;
9510 // We know the strings are both non-empty. Compare the first chars
9511 // before we try to flatten the strings.
9512 if (this->Get(0) != other->Get(0)) return false;
// Fast path: both already flat sequential one-byte strings.
9514 if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
9515 const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
9516 const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
9517 return CompareRawStringContents(str1, str2, len);
// General path: chunked comparison using the isolate's pre-allocated
// iterator pair (valid because allocation is disallowed above).
9520 Isolate* isolate = GetIsolate();
9521 StringComparator comparator(isolate->objects_string_compare_iterator_a(),
9522 isolate->objects_string_compare_iterator_b());
9524 return comparator.Equals(this, other);
// Handle-based variant of SlowEquals: may allocate, so it flattens both
// strings first and then compares their flat contents. Same cheap
// negative checks (length, hashes, first char) as the raw-pointer
// overload above.
9528 bool String::SlowEquals(Handle<String> one, Handle<String> two) {
9529 // Fast check: negative check with lengths.
9530 int one_length = one->length();
9531 if (one_length != two->length()) return false;
9532 if (one_length == 0) return true;
9534 // Fast check: if hash code is computed for both strings
9535 // a fast negative check can be performed.
9536 if (one->HasHashCode() && two->HasHashCode()) {
9537 #ifdef ENABLE_SLOW_ASSERTS
// Slow-mode sanity check: differing hashes must imply a differing char.
9538 if (FLAG_enable_slow_asserts) {
9539 if (one->Hash() != two->Hash()) {
9540 bool found_difference = false;
9541 for (int i = 0; i < one_length; i++) {
9542 if (one->Get(i) != two->Get(i)) {
9543 found_difference = true;
9547 ASSERT(found_difference);
9551 if (one->Hash() != two->Hash()) return false;
9554 // We know the strings are both non-empty. Compare the first chars
9555 // before we try to flatten the strings.
9556 if (one->Get(0) != two->Get(0)) return false;
// Flattening may allocate; afterwards raw access is safe under no_gc.
9558 one = String::Flatten(one);
9559 two = String::Flatten(two);
9561 DisallowHeapAllocation no_gc;
9562 String::FlatContent flat1 = one->GetFlatContent();
9563 String::FlatContent flat2 = two->GetFlatContent();
9565 if (flat1.IsAscii() && flat2.IsAscii()) {
9566 return CompareRawStringContents(flat1.ToOneByteVector().start(),
9567 flat2.ToOneByteVector().start(),
// Mixed/two-byte fallback: per-character comparison.
9570 for (int i = 0; i < one_length; i++) {
9571 if (flat1.Get(i) != flat2.Get(i)) return false;
// Switches this string's map to the undetectable variant, making it
// behave as undefined in type checks. Only plain sequential string maps
// can be swapped; internalized strings and all other shapes refuse.
// Returns whether the mark succeeded.
9578 bool String::MarkAsUndetectable() {
9579 if (StringShape(this).IsInternalized()) return false;
9581 Map* map = this->map();
9582 Heap* heap = GetHeap();
9583 if (map == heap->string_map()) {
9584 this->set_map(heap->undetectable_string_map());
9586 } else if (map == heap->ascii_string_map()) {
9587 this->set_map(heap->undetectable_ascii_string_map());
9590 // Rest cannot be marked as undetectable
// Compares this (UTF-16) string against the UTF-8 bytes in |str|,
// decoding |str| one code point at a time. Supplementary-plane code
// points are matched against a lead/trail surrogate pair. When
// |allow_prefix_match| is set, |str| only needs to match a prefix of
// this string (but must itself be fully consumed).
9595 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
9596 int slen = length();
9597 // Can't check exact length equality, but we can check bounds.
9598 int str_len = str.length();
9599 if (!allow_prefix_match &&
9601 str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
9605 unsigned remaining_in_str = static_cast<unsigned>(str_len);
9606 const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
9607 for (i = 0; i < slen && remaining_in_str > 0; i++) {
9608 unsigned cursor = 0;
9609 uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
9610 ASSERT(cursor > 0 && cursor <= remaining_in_str);
9611 if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
// Non-BMP code point: requires two UTF-16 code units here.
9612 if (i > slen - 1) return false;
9613 if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
9614 if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
9616 if (Get(i) != r) return false;
9618 utf8_data += cursor;
9619 remaining_in_str -= cursor;
// Equal iff both inputs were consumed (or prefix match was allowed).
9621 return (allow_prefix_match || i == slen) && remaining_in_str == 0;
// Returns true iff this string has exactly the characters of the
// one-byte vector |str|. Uses a raw memcmp-style compare when the flat
// content is one-byte, otherwise compares character by character.
9625 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
9626 int slen = length();
9627 if (str.length() != slen) return false;
9628 DisallowHeapAllocation no_gc;
9629 FlatContent content = GetFlatContent();
9630 if (content.IsAscii()) {
9631 return CompareChars(content.ToOneByteVector().start(),
9632 str.start(), slen) == 0;
9634 for (int i = 0; i < slen; i++) {
9635 if (Get(i) != static_cast<uint16_t>(str[i])) return false;
// Returns true iff this string has exactly the characters of the
// two-byte vector |str|. Mirrors IsOneByteEqualTo for uc16 data.
9641 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
9642 int slen = length();
9643 if (str.length() != slen) return false;
9644 DisallowHeapAllocation no_gc;
9645 FlatContent content = GetFlatContent();
9646 if (content.IsTwoByte()) {
9647 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
9649 for (int i = 0; i < slen; i++) {
9650 if (Get(i) != str[i]) return false;
// StringHasher that can hash non-flat strings: flat segments are fed in
// via the VisitFlat visitor callbacks; a ConsString is walked leaf by
// leaf with a ConsStringIteratorOp.
9656 class IteratingStringHasher: public StringHasher {
// Computes the hash field for |string| with the given |seed|.
9658 static inline uint32_t Hash(String* string, uint32_t seed) {
9659 IteratingStringHasher hasher(string->length(), seed);
// Short strings get a length-based trivial hash; nothing to iterate.
9661 if (hasher.has_trivial_hash()) return hasher.GetHashField();
9662 ConsString* cons_string = String::VisitFlat(&hasher, string);
9663 // The string was flat.
9664 if (cons_string == NULL) return hasher.GetHashField();
9665 // This is a ConsString, iterate across it.
9666 ConsStringIteratorOp op(cons_string);
9668 while (NULL != (string = op.Next(&offset))) {
9669 String::VisitFlat(&hasher, string, offset);
9671 return hasher.GetHashField();
// Visitor callbacks: fold each flat segment into the running hash.
9673 inline void VisitOneByteString(const uint8_t* chars, int length) {
9674 AddCharacters(chars, length);
9676 inline void VisitTwoByteString(const uint16_t* chars, int length) {
9677 AddCharacters(chars, length);
9681 inline IteratingStringHasher(int len, uint32_t seed)
9682 : StringHasher(len, seed) {
9684 DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
// Computes this string's hash with the heap's seed, caches it in the
// hash field, and returns the hash value (hash field shifted past the
// flag bits). Must only be called when no hash is cached yet.
9688 uint32_t String::ComputeAndSetHash() {
9689 // Should only be called if hash code has not yet been computed.
9690 ASSERT(!HasHashCode());
9692 // Store the hash code in the object.
9693 uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
9694 set_hash_field(field);
9696 // Check the hash code is there.
9697 ASSERT(HasHashCode());
9698 uint32_t result = field >> kHashShift;
9699 ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
// Attempts to interpret this string as a uint32 array index. Returns
// false for empty/too-long strings, non-digit characters, leading zeros
// (a "0" prefix is only valid for the string "0"), and values that would
// overflow 32 bits; otherwise stores the parsed value in |*index|.
9704 bool String::ComputeArrayIndex(uint32_t* index) {
9705 int length = this->length();
9706 if (length == 0 || length > kMaxArrayIndexSize) return false;
9707 ConsStringIteratorOp op;
9708 StringCharacterStream stream(this, &op);
9709 uint16_t ch = stream.GetNext();
9711 // If the string begins with a '0' character, it must only consist
9712 // of it to be a legal array index.
9718 // Convert string to uint32 array index; character by character.
9720 if (d < 0 || d > 9) return false;
9721 uint32_t result = d;
9722 while (stream.HasMore()) {
9723 d = stream.GetNext() - '0';
9724 if (d < 0 || d > 9) return false;
9725 // Check that the new result is below the 32 bit limit.
// 429496729 == 2^32 / 10; the correction term handles the last digit
// range so that result * 10 + d cannot wrap around.
9726 if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
9727 result = (result * 10) + d;
// Returns whether this string is a valid array index and stores the
// index in |*index|. Short strings get their index extracted from the
// (forcibly computed) cached hash field; longer strings fall back to a
// full character-by-character parse.
9735 bool String::SlowAsArrayIndex(uint32_t* index) {
9736 if (length() <= kMaxCachedArrayIndexLength) {
9737 Hash(); // force computation of hash code
9738 uint32_t field = hash_field();
9739 if ((field & kIsNotArrayIndexMask) != 0) return false;
9740 // Isolate the array index form the full hash field.
9741 *index = (kArrayIndexHashMask & field) >> kHashShift;
9744 return ComputeArrayIndex(index);
// Truncates a sequential string in place to |new_length| characters,
// releasing the freed tail either by lowering the new-space allocation
// top (when the string is the most recently allocated object) or by
// installing a filler object over the leftover bytes. Returns |string|,
// or the canonical empty string when new_length == 0.
9749 Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
9750 int new_size, old_size;
9751 int old_length = string->length();
// Never grows a string; a no-op when already short enough.
9752 if (old_length <= new_length) return string;
9754 if (string->IsSeqOneByteString()) {
9755 old_size = SeqOneByteString::SizeFor(old_length);
9756 new_size = SeqOneByteString::SizeFor(new_length);
9758 ASSERT(string->IsSeqTwoByteString());
9759 old_size = SeqTwoByteString::SizeFor(old_length);
9760 new_size = SeqTwoByteString::SizeFor(new_length);
9763 int delta = old_size - new_size;
9765 Address start_of_string = string->address();
9766 ASSERT_OBJECT_ALIGNED(start_of_string);
9767 ASSERT_OBJECT_ALIGNED(start_of_string + new_size);
9769 Heap* heap = string->GetHeap();
9770 NewSpace* newspace = heap->new_space();
9771 if (newspace->Contains(start_of_string) &&
9772 newspace->top() == start_of_string + old_size) {
9773 // Last allocated object in new space. Simply lower allocation top.
9774 newspace->set_top(start_of_string + new_size);
9776 // Sizes are pointer size aligned, so that we can use filler objects
9777 // that are a multiple of pointer size.
9778 heap->CreateFillerObjectAt(start_of_string + new_size, delta);
9780 heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
9782 // We are storing the new length using release store after creating a filler
9783 // for the left-over space to avoid races with the sweeper thread.
9784 string->synchronized_set_length(new_length);
9786 if (new_length == 0) return heap->isolate()->factory()->empty_string();
// Packs an array-index |value| together with |length| into a hash field:
// the index goes above the hash shift, the length into the dedicated
// length bits, so the hash field doubles as a cached array index.
9791 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
9792 // For array indexes mix the length into the hash as an array index could
9795 ASSERT(length <= String::kMaxArrayIndexSize);
9796 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
9797 (1 << String::kArrayIndexValueBits));
9799 value <<= String::kHashShift;
9800 value |= length << String::kArrayIndexHashLengthShift;
// The result must still read as "is an array index" and, for short
// indices, as "contains a cached array index".
9802 ASSERT((value & String::kIsNotArrayIndexMask) == 0);
9803 ASSERT((length > String::kMaxCachedArrayIndexLength) ||
9804 (value & String::kContainsCachedArrayIndexMask) == 0);
// Produces the final hash field: an array-index hash when the string
// parsed as one, the running hash for normal strings, or a trivial
// length-based hash for strings longer than kMaxHashCalcLength.
9809 uint32_t StringHasher::GetHashField() {
9810 if (length_ <= String::kMaxHashCalcLength) {
9811 if (is_array_index_) {
9812 return MakeArrayIndexHash(array_index_, length_);
9814 return (GetHashCore(raw_running_hash_) << String::kHashShift) |
9815 String::kIsNotArrayIndexMask;
9817 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
// Hashes a UTF-8 byte sequence as if it were the equivalent UTF-16
// string, simultaneously computing the UTF-16 length (returned via
// |utf16_length_out|) and tracking whether the string is a valid array
// index. Supplementary code points contribute a surrogate pair.
9822 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
9824 int* utf16_length_out) {
9825 int vector_length = chars.length();
9826 // Handle some edge cases
9827 if (vector_length <= 1) {
// 0- or 1-byte input: a single byte must be ASCII, so the sequential
// one-byte hasher applies directly.
9828 ASSERT(vector_length == 0 ||
9829 static_cast<uint8_t>(chars.start()[0]) <=
9830 unibrow::Utf8::kMaxOneByteChar);
9831 *utf16_length_out = vector_length;
9832 return HashSequentialString(chars.start(), vector_length, seed);
9834 // Start with a fake length which won't affect computation.
9835 // It will be updated later.
9836 StringHasher hasher(String::kMaxArrayIndexSize, seed);
9837 unsigned remaining = static_cast<unsigned>(vector_length);
9838 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
9839 int utf16_length = 0;
9840 bool is_index = true;
9841 ASSERT(hasher.is_array_index_);
// Decode code points one at a time, feeding each UTF-16 code unit to
// the hasher and the array-index tracker.
9842 while (remaining > 0) {
9843 unsigned consumed = 0;
9844 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
9845 ASSERT(consumed > 0 && consumed <= remaining);
9847 remaining -= consumed;
9848 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
9849 utf16_length += is_two_characters ? 2 : 1;
9850 // No need to keep hashing. But we do need to calculate utf16_length.
9851 if (utf16_length > String::kMaxHashCalcLength) continue;
9852 if (is_two_characters) {
9853 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
9854 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
9855 hasher.AddCharacter(c1);
9856 hasher.AddCharacter(c2);
9857 if (is_index) is_index = hasher.UpdateIndex(c1);
9858 if (is_index) is_index = hasher.UpdateIndex(c2);
9860 hasher.AddCharacter(c);
9861 if (is_index) is_index = hasher.UpdateIndex(c);
9864 *utf16_length_out = static_cast<int>(utf16_length);
9865 // Must set length here so that hash computation is correct.
9866 hasher.length_ = utf16_length;
9867 return hasher.GetHashField();
// Prints the string's characters to |file| one at a time. Two-byte
// characters are passed through "%c" and will be narrowed.
9871 void String::PrintOn(FILE* file) {
9872 int length = this->length();
9873 for (int i = 0; i < length; i++) {
9874 PrintF(file, "%c", Get(i));
// Shrinks |descriptors|' enum cache (and the parallel enum-indices
// cache, if present) down to the number of live enumerable properties of
// |map|, clearing the cache entirely when nothing is live.
9879 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
9880 int live_enum = map->EnumLength();
9881 if (live_enum == kInvalidEnumCacheSentinel) {
// No cached enum length: recount the live enumerable own properties.
9882 live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
9884 if (live_enum == 0) return descriptors->ClearEnumCache();
9886 FixedArray* enum_cache = descriptors->GetEnumCache();
9888 int to_trim = enum_cache->length() - live_enum;
9889 if (to_trim <= 0) return;
9890 RightTrimFixedArray<Heap::FROM_GC>(
9891 heap, descriptors->GetEnumCache(), to_trim);
9893 if (!descriptors->HasEnumIndicesCache()) return;
9894 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
9895 RightTrimFixedArray<Heap::FROM_GC>(heap, enum_indices_cache, to_trim);
// Right-trims |descriptors| to hold exactly |number_of_own_descriptors|
// entries, trims the enum cache accordingly, and re-sorts. Called during
// GC when the descriptor array's former owner died.
9899 static void TrimDescriptorArray(Heap* heap,
9901 DescriptorArray* descriptors,
9902 int number_of_own_descriptors) {
9903 int number_of_descriptors = descriptors->number_of_descriptors_storage();
9904 int to_trim = number_of_descriptors - number_of_own_descriptors;
9905 if (to_trim == 0) return;
9907 RightTrimFixedArray<Heap::FROM_GC>(
9908 heap, descriptors, to_trim * DescriptorArray::kDescriptorSize);
9909 descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
9911 if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
9912 descriptors->Sort();
9916 // Clear a possible back pointer in case the transition leads to a dead map.
9917 // Return true in case a back pointer has been cleared and false otherwise.
// |target| is dead iff its mark bit is unset at this point in the GC.
9918 static bool ClearBackPointer(Heap* heap, Map* target) {
9919 if (Marking::MarkBitFrom(target).Get()) return false;
9920 target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
9925 // TODO(mstarzinger): This method should be moved into MarkCompactCollector,
9926 // because it cannot be called from outside the GC and we already have methods
9927 // depending on the transitions layout in the GC anyways.
// Removes transitions to dead maps from this map's transition array,
// compacting live entries to the left and right-trimming the array.
// If the dead targets shared this map's descriptor array, the array is
// trimmed back to this map's own descriptors.
9928 void Map::ClearNonLiveTransitions(Heap* heap) {
9929 // If there are no transitions to be cleared, return.
9930 // TODO(verwaest) Should be an assert, otherwise back pointers are not
9931 // properly cleared.
9932 if (!HasTransitionArray()) return;
9934 TransitionArray* t = transitions();
9935 MarkCompactCollector* collector = heap->mark_compact_collector();
9937 int transition_index = 0;
9939 DescriptorArray* descriptors = instance_descriptors();
9940 bool descriptors_owner_died = false;
9942 // Compact all live descriptors to the left.
9943 for (int i = 0; i < t->number_of_transitions(); ++i) {
9944 Map* target = t->GetTarget(i);
9945 if (ClearBackPointer(heap, target)) {
// Dead target: drop the transition; remember if it shared our
// descriptor array so we can trim it below.
9946 if (target->instance_descriptors() == descriptors) {
9947 descriptors_owner_died = true;
9950 if (i != transition_index) {
// Live target being moved left: re-record the key slot so the
// collector tracks the updated location.
9951 Name* key = t->GetKey(i);
9952 t->SetKey(transition_index, key);
9953 Object** key_slot = t->GetKeySlot(transition_index);
9954 collector->RecordSlot(key_slot, key_slot, key);
9955 // Target slots do not need to be recorded since maps are not compacted.
9956 t->SetTarget(transition_index, t->GetTarget(i));
9962 // If there are no transitions to be cleared, return.
9963 // TODO(verwaest) Should be an assert, otherwise back pointers are not
9964 // properly cleared.
9965 if (transition_index == t->number_of_transitions()) return;
9967 int number_of_own_descriptors = NumberOfOwnDescriptors();
9969 if (descriptors_owner_died) {
9970 if (number_of_own_descriptors > 0) {
9971 TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
9972 ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
9973 set_owns_descriptors(true);
9975 ASSERT(descriptors == GetHeap()->empty_descriptor_array());
9979 // Note that we never eliminate a transition array, though we might right-trim
9980 // such that number_of_transitions() == 0. If this assumption changes,
9981 // TransitionArray::CopyInsert() will need to deal with the case that a
9982 // transition array disappeared during GC.
9983 int trim = t->number_of_transitions() - transition_index;
9985 RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition()
9986 ? trim : trim * TransitionArray::kTransitionSize);
9988 ASSERT(HasTransitionArray());
9993 // For performance reasons we only hash the 3 most variable fields of a map:
9994 // constructor, prototype and bit_field2.
// NOTE(review): the function signature line is not visible in this
// excerpt; from the fields used this is presumably Map's hash function.
9996 // Shift away the tag.
9997 int hash = (static_cast<uint32_t>(
9998 reinterpret_cast<uintptr_t>(constructor())) >> 2);
10000 // XOR-ing the prototype and constructor directly yields too many zero bits
10001 // when the two pointers are close (which is fairly common).
10002 // To avoid this we shift the prototype 4 bits relatively to the constructor.
10003 hash ^= (static_cast<uint32_t>(
10004 reinterpret_cast<uintptr_t>(prototype())) << 2);
10006 return hash ^ (hash >> 16) ^ bit_field2();
// Two maps are considered equivalent when they agree on constructor,
// prototype, instance type, both bit fields, observedness, and the
// function-with-prototype flag.
10010 static bool CheckEquivalent(Map* first, Map* second) {
10012 first->constructor() == second->constructor() &&
10013 first->prototype() == second->prototype() &&
10014 first->instance_type() == second->instance_type() &&
10015 first->bit_field() == second->bit_field() &&
10016 first->bit_field2() == second->bit_field2() &&
10017 first->is_observed() == second->is_observed() &&
10018 first->function_with_prototype() == second->function_with_prototype();
// Equivalence check used when deciding whether a map transition can be
// shared; delegates to the field-by-field comparison above.
10022 bool Map::EquivalentToForTransition(Map* other) {
10023 return CheckEquivalent(this, other);
// Equivalence check for map normalization: same as the transition check
// but additionally requires matching in-object property counts (treated
// as zero when in-object properties are being cleared).
10027 bool Map::EquivalentToForNormalization(Map* other,
10028 PropertyNormalizationMode mode) {
10029 int properties = mode == CLEAR_INOBJECT_PROPERTIES
10030 ? 0 : other->inobject_properties();
10031 return CheckEquivalent(this, other) && inobject_properties() == properties;
// Visits the pointer-bearing entries of this constant pool: code-pointer
// entries via VisitCodeEntry and heap-pointer entries via VisitPointer.
10035 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
10036 for (int i = 0; i < count_of_code_ptr_entries(); i++) {
10037 int index = first_code_ptr_index() + i;
10038 v->VisitCodeEntry(reinterpret_cast<Address>(RawFieldOfElementAt(index)));
10040 for (int i = 0; i < count_of_heap_ptr_entries(); i++) {
10041 int index = first_heap_ptr_index() + i;
10042 v->VisitPointer(RawFieldOfElementAt(index));
// Visits all pointer fields of a JSFunction, handling the code entry
// slot specially (it holds a code entry address, not a tagged pointer).
10047 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
10048 // Iterate over all fields in the body but take care in dealing with
10050 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
10051 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
10052 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
// Marks this function for (non-concurrent) optimization by installing
// the CompileOptimized builtin as its code; the next invocation will
// trigger optimized compilation.
10056 void JSFunction::MarkForOptimization() {
10057 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
10058 ASSERT(!IsOptimized());
10059 ASSERT(shared()->allows_lazy_compilation() ||
10060 code()->optimizable());
10061 ASSERT(!shared()->is_generator());
10062 set_code_no_write_barrier(
10063 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
10064 // No write barrier required, since the builtin is part of the root set.
// Marks this function for optimization on the concurrent recompilation
// thread by installing the CompileOptimizedConcurrent builtin.
10068 void JSFunction::MarkForConcurrentOptimization() {
10069 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
10070 ASSERT(!IsOptimized());
10071 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
10072 ASSERT(!shared()->is_generator());
10073 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
10074 if (FLAG_trace_concurrent_recompilation) {
10075 PrintF(" ** Marking ");
10077 PrintF(" for concurrent recompilation.\n");
10079 set_code_no_write_barrier(
10080 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
10081 // No write barrier required, since the builtin is part of the root set.
// Records that this function's optimized compile is queued on the
// concurrent recompiler by installing the InOptimizationQueue builtin.
10085 void JSFunction::MarkInOptimizationQueue() {
10086 // We can only arrive here via the concurrent-recompilation builtin. If
10087 // break points were set, the code would point to the lazy-compile builtin.
10088 ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
10089 ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
10090 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
10091 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
10092 if (FLAG_trace_concurrent_recompilation) {
10093 PrintF(" ** Queueing ");
10095 PrintF(" for concurrent recompilation.\n");
10097 set_code_no_write_barrier(
10098 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
10099 // No write barrier required, since the builtin is part of the root set.
// Caches optimized |code| (plus its |literals|) for |shared|, keyed by
// (native_context, osr_ast_id), in the shared function info's optimized
// code map. Creates the map on first use, otherwise copies it with one
// extra kEntryLength-sized entry appended.
10103 void SharedFunctionInfo::AddToOptimizedCodeMap(
10104 Handle<SharedFunctionInfo> shared,
10105 Handle<Context> native_context,
10107 Handle<FixedArray> literals,
10108 BailoutId osr_ast_id) {
10109 Isolate* isolate = shared->GetIsolate();
10110 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
10111 ASSERT(native_context->IsNativeContext());
10112 STATIC_ASSERT(kEntryLength == 4);
10113 Handle<FixedArray> new_code_map;
10114 Handle<Object> value(shared->optimized_code_map(), isolate);
10116 if (value->IsSmi()) {
10117 // No optimized code map.
10118 ASSERT_EQ(0, Smi::cast(*value)->value());
10119 // Create 3 entries per context {context, code, literals}.
10120 new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
10121 old_length = kEntriesStart;
10123 // Copy old map and append one new entry.
10124 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
10125 ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
10126 old_length = old_code_map->length();
10127 new_code_map = FixedArray::CopySize(
10128 old_code_map, old_length + kEntryLength);
10129 // Zap the old map for the sake of the heap verifier.
10130 if (Heap::ShouldZapGarbage()) {
10131 Object** data = old_code_map->data_start();
10132 MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
// Fill the appended entry: {context, code, literals, osr ast id}.
10135 new_code_map->set(old_length + kContextOffset, *native_context);
10136 new_code_map->set(old_length + kCachedCodeOffset, *code);
10137 new_code_map->set(old_length + kLiteralsOffset, *literals);
10138 new_code_map->set(old_length + kOsrAstIdOffset,
10139 Smi::FromInt(osr_ast_id.ToInt()));
// Debug-mode validation of every entry in the new map.
10142 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
10143 ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
10144 ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
10145 ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
10146 Code::OPTIMIZED_FUNCTION);
10147 ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
10148 ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
10151 shared->set_optimized_code_map(*new_code_map);
// Returns the literals array cached at |index| in the optimized code
// map. |index| points at an entry's code slot; the literals live one
// slot further.
10155 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
10156 ASSERT(index > kEntriesStart);
10157 FixedArray* code_map = FixedArray::cast(optimized_code_map());
10159 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
10160 ASSERT_NE(NULL, cached_literals);
10161 return cached_literals;
// Returns the optimized Code object cached at |index| in the optimized
// code map.
10167 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
10168 ASSERT(index > kEntriesStart);
10169 FixedArray* code_map = FixedArray::cast(optimized_code_map());
10170 Code* code = Code::cast(code_map->get(index));
10171 ASSERT_NE(NULL, code);
// Drops the optimized code map entirely (resets the field to Smi 0),
// first evicting it from the code flusher if it was enqueued there.
10176 void SharedFunctionInfo::ClearOptimizedCodeMap() {
10177 FixedArray* code_map = FixedArray::cast(optimized_code_map());
10179 // If the next map link slot is already used then the function was
10180 // enqueued with code flushing and we remove it now.
10181 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
10182 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
10183 flusher->EvictOptimizedCodeMap(this);
10186 ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
10187 set_optimized_code_map(Smi::FromInt(0));
// Removes every optimized-code-map entry whose cached code is
// |optimized_code| by compacting the surviving entries to the front and
// right-trimming the array; clears the map if no entries remain.
// |reason| is used only for --trace-opt output.
10191 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
10192 const char* reason) {
10193 DisallowHeapAllocation no_gc;
10194 if (optimized_code_map()->IsSmi()) return;
10196 FixedArray* code_map = FixedArray::cast(optimized_code_map());
10197 int dst = kEntriesStart;
10198 int length = code_map->length();
10199 for (int src = kEntriesStart; src < length; src += kEntryLength) {
10200 ASSERT(code_map->get(src)->IsNativeContext());
10201 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
10202 // Evict the src entry by not copying it to the dst entry.
10203 if (FLAG_trace_opt) {
10204 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
10206 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
10207 if (osr.IsNone()) {
10210 PrintF(" (osr ast id %d)]\n", osr.ToInt());
10214 // Keep the src entry by copying it to the dst entry.
10216 code_map->set(dst + kContextOffset,
10217 code_map->get(src + kContextOffset));
10218 code_map->set(dst + kCachedCodeOffset,
10219 code_map->get(src + kCachedCodeOffset));
10220 code_map->set(dst + kLiteralsOffset,
10221 code_map->get(src + kLiteralsOffset));
10222 code_map->set(dst + kOsrAstIdOffset,
10223 code_map->get(src + kOsrAstIdOffset));
10225 dst += kEntryLength;
10228 if (dst != length) {
10229 // Always trim even when array is cleared because of heap verifier.
10230 RightTrimFixedArray<Heap::FROM_MUTATOR>(GetHeap(), code_map, length - dst);
10231 if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
// Right-trims |shrink_by| slots (a multiple of kEntryLength) off the
// optimized code map during GC, clearing the map when it becomes empty.
10236 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
10237 FixedArray* code_map = FixedArray::cast(optimized_code_map());
10238 ASSERT(shrink_by % kEntryLength == 0);
10239 ASSERT(shrink_by <= code_map->length() - kEntriesStart);
10240 // Always trim even when array is cleared because of heap verifier.
10241 RightTrimFixedArray<Heap::FROM_GC>(GetHeap(), code_map, shrink_by);
10242 if (code_map->length() == kEntriesStart) {
10243 ClearOptimizedCodeMap();
// Prepares |object| for use as a prototype: ensures it has fast properties.
// Global objects are excluded (they have their own property representation).
10248 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
10249 if (object->IsGlobalObject()) return;
10251 // Make sure prototypes are fast objects and their maps have the bit set
10252 // so they remain fast.
10253 if (!object->HasFastProperties()) {
10254 TransformToFastProperties(object, 0);
// Builds the per-elements-kind array of initial JSArray maps for
// |native_context|, starting from |initial_map| and following/creating
// elements-kind transitions for each faster-to-slower kind in sequence.
// Returns |initial_map| unchanged.
10259 Handle<Object> CacheInitialJSArrayMaps(
10260 Handle<Context> native_context, Handle<Map> initial_map) {
10261 // Replace all of the cached initial array maps in the native context with
10262 // the appropriate transitioned elements kind maps.
10263 Factory* factory = native_context->GetIsolate()->factory();
10264 Handle<FixedArray> maps = factory->NewFixedArrayWithHoles(
10265 kElementsKindCount, TENURED);
10267 Handle<Map> current_map = initial_map;
10268 ElementsKind kind = current_map->elements_kind();
10269 ASSERT(kind == GetInitialFastElementsKind());
10270 maps->set(kind, *current_map);
10271 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
10272 i < kFastElementsKindCount; ++i) {
10273 Handle<Map> new_map;
10274 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
// Reuse an existing transition map if one exists; otherwise copy the map
// with the next elements kind and insert a transition.
10275 if (current_map->HasElementsTransition()) {
10276 new_map = handle(current_map->elements_transition_map());
10277 ASSERT(new_map->elements_kind() == next_kind);
10279 new_map = Map::CopyAsElementsKind(
10280 current_map, next_kind, INSERT_TRANSITION);
10282 maps->set(next_kind, *new_map);
10283 current_map = new_map;
10285 native_context->set_js_array_maps(*maps);
10286 return initial_map;
// Installs |value| as the prototype used for instances constructed by
// |function|.  If an initial map already exists it is copied with the new
// prototype (and re-cached for the global Array function); otherwise the
// value is parked in the prototype_or_initial_map field.
10290 void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
10291 Handle<Object> value) {
10292 ASSERT(value->IsJSReceiver());
10294 // First some logic for the map of the prototype to make sure it is in fast
10296 if (value->IsJSObject()) {
10297 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
10300 // Now some logic for the maps of the objects that are created by using this
10301 // function as a constructor.
10302 if (function->has_initial_map()) {
10303 // If the function has allocated the initial map replace it with a
10304 // copy containing the new prototype. Also complete any in-object
10305 // slack tracking that is in progress at this point because it is
10306 // still tracking the old copy.
10307 if (function->shared()->IsInobjectSlackTrackingInProgress()) {
10308 function->shared()->CompleteInobjectSlackTracking();
10310 Handle<Map> new_map = Map::Copy(handle(function->initial_map()));
10311 new_map->set_prototype(*value);
10313 // If the function is used as the global Array function, cache the
10314 // initial map (and transitioned versions) in the native context.
10315 Context* native_context = function->context()->native_context();
10316 Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
10317 if (array_function->IsJSFunction() &&
10318 *function == JSFunction::cast(array_function)) {
10319 CacheInitialJSArrayMaps(handle(native_context), new_map);
10322 function->set_initial_map(*new_map);
10324 // Put the value in the initial map field until an initial map is
10325 // needed. At that point, a new initial map is created and the
10326 // prototype is put into the initial map where it belongs.
10327 function->set_prototype_or_initial_map(*value);
// Prototype changed: cached instanceof results may be stale.
10329 function->GetHeap()->ClearInstanceofCache();
// Sets the "prototype" property of |function|.  Non-JSReceiver values get
// the ES5 13.2.2 treatment: the raw value is stored on a fresh map (marked
// non_instance_prototype) and instances are constructed with the initial
// object prototype instead.
10333 void JSFunction::SetPrototype(Handle<JSFunction> function,
10334 Handle<Object> value) {
10335 ASSERT(function->should_have_prototype());
10336 Handle<Object> construct_prototype = value;
10338 // If the value is not a JSReceiver, store the value in the map's
10339 // constructor field so it can be accessed. Also, set the prototype
10340 // used for constructing objects to the original object prototype.
10341 // See ECMA-262 13.2.2.
10342 if (!value->IsJSReceiver()) {
10343 // Copy the map so this does not affect unrelated functions.
10344 // Remove map transitions because they point to maps with a
10345 // different prototype.
10346 Handle<Map> new_map = Map::Copy(handle(function->map()));
10348 JSObject::MigrateToMap(function, new_map);
// The raw non-receiver value is parked in the constructor slot of the
// private map copy so "prototype" reads can still find it.
10349 new_map->set_constructor(*value);
10350 new_map->set_non_instance_prototype(true);
10351 Isolate* isolate = new_map->GetIsolate();
10352 construct_prototype = handle(
10353 isolate->context()->native_context()->initial_object_prototype(),
10356 function->map()->set_non_instance_prototype(false);
10359 return SetInstancePrototype(function, construct_prototype);
// Switches this function to the prototype-less function map for its strict
// mode.  Returns true if already prototype-less.
// NOTE(review): the branch body for the "unexpected map" case (original
// lines ~10375-10378) is missing from this excerpt.
10363 bool JSFunction::RemovePrototype() {
10364 Context* native_context = context()->native_context();
10365 Map* no_prototype_map = shared()->strict_mode() == SLOPPY
10366 ? native_context->sloppy_function_without_prototype_map()
10367 : native_context->strict_function_without_prototype_map();
10369 if (map() == no_prototype_map) return true;
// Only functions still on the canonical (sloppy/strict) function map can
// be converted safely.
10372 if (map() != (shared()->strict_mode() == SLOPPY
10373 ? native_context->sloppy_function_map()
10374 : native_context->strict_function_map())) {
10379 set_map(no_prototype_map);
10380 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
// Lazily creates the initial map for instances of |function| (generator
// object map for generators, plain JS object map otherwise), fetches or
// allocates the prototype, and links map and function together.
// NOTE(review): the declaration of the instance size local (original line
// ~10392) is missing from this excerpt.
10385 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
10386 if (function->has_initial_map()) return;
10387 Isolate* isolate = function->GetIsolate();
10389 // First create a new map with the size and number of in-object properties
10390 // suggested by the function.
10391 InstanceType instance_type;
10393 int in_object_properties;
10394 if (function->shared()->is_generator()) {
10395 instance_type = JS_GENERATOR_OBJECT_TYPE;
10396 instance_size = JSGeneratorObject::kSize;
10397 in_object_properties = 0;
10399 instance_type = JS_OBJECT_TYPE;
10400 instance_size = function->shared()->CalculateInstanceSize();
10401 in_object_properties = function->shared()->CalculateInObjectProperties();
10403 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
10405 // Fetch or allocate prototype.
10406 Handle<Object> prototype;
10407 if (function->has_instance_prototype()) {
10408 prototype = handle(function->instance_prototype(), isolate);
10410 prototype = isolate->factory()->NewFunctionPrototype(function);
// All in-object slots start out unused; slack tracking may reclaim them.
10412 map->set_inobject_properties(in_object_properties);
10413 map->set_unused_property_fields(in_object_properties);
10414 map->set_prototype(*prototype);
10415 ASSERT(map->has_fast_object_elements());
10417 if (!function->shared()->is_generator()) {
10418 function->shared()->StartInobjectSlackTracking(*map);
10421 // Finally link initial map and constructor function.
10422 function->set_initial_map(*map);
10423 map->set_constructor(*function);
// Forwards the instance class name to the shared function info.
10427 void JSFunction::SetInstanceClassName(String* name) {
10428 shared()->set_instance_class_name(name);
// Prints this function's debug name to |out| (debugging aid).
10432 void JSFunction::PrintName(FILE* out) {
10433 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
10434 PrintF(out, "%s", name.get());
// Extracts the native context stored in a function's literals array.
10438 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
10439 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
10443 // The filter is a pattern that matches function names in this way:
10444 //   "*"      all; the default
10445 //   "-"      all but the top-level function
10446 //   "-name"  all but the function "name"
10447 //   ""       only the top-level function
10448 //   "name"   only the function "name"
10449 //   "name*"  only functions starting with "name"
// Returns whether this function's debug name matches |raw_filter| per the
// grammar above.  NOTE(review): several return statements / closing braces
// (original lines ~10460-10477) are missing from this excerpt.
10450 bool JSFunction::PassesFilter(const char* raw_filter) {
10451 if (*raw_filter == '*') return true;
10452 String* name = shared()->DebugName();
10453 Vector<const char> filter = CStrVector(raw_filter);
// Empty filter matches only the (nameless) top-level function.
10454 if (filter.length() == 0) return name->length() == 0;
10455 if (filter[0] == '-') {
10456 // Negative filter.
10457 if (filter.length() == 1) {
10458 return (name->length() != 0);
10459 } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
// Trailing '*' makes the (negated) match a prefix match.
10462 if (filter[filter.length() - 1] == '*' &&
10463 name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
10468 } else if (name->IsUtf8EqualTo(filter)) {
10471 if (filter[filter.length() - 1] == '*' &&
10472 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
// Fills in an Oddball's string form (internalized), numeric form and kind.
// NOTE(review): the final parameter declaration (the oddball kind, original
// line ~10483) is missing from this excerpt.
10479 void Oddball::Initialize(Isolate* isolate,
10480 Handle<Oddball> oddball,
10481 const char* to_string,
10482 Handle<Object> to_number,
10484 Handle<String> internalized_to_string =
10485 isolate->factory()->InternalizeUtf8String(to_string);
10486 oddball->set_to_string(*internalized_to_string);
10487 oddball->set_to_number(*to_number);
10488 oddball->set_kind(kind);
// Lazily computes and caches the array of line-end positions for |script|.
// A script without source gets an empty array so the field is always a
// FixedArray afterwards.
10492 void Script::InitLineEnds(Handle<Script> script) {
// Already computed — line_ends() is only undefined before the first call.
10493 if (!script->line_ends()->IsUndefined()) return;
10495 Isolate* isolate = script->GetIsolate();
10497 if (!script->source()->IsString()) {
10498 ASSERT(script->source()->IsUndefined());
10499 Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
10500 script->set_line_ends(*empty);
10501 ASSERT(script->line_ends()->IsFixedArray());
10505 Handle<String> src(String::cast(script->source()), isolate);
10507 Handle<FixedArray> array = String::CalculateLineEnds(src, true);
// Mark the result copy-on-write unless it is the shared empty array.
10509 if (*array != isolate->heap()->empty_fixed_array()) {
10510 array->set_map(isolate->heap()->fixed_cow_array_map());
10513 script->set_line_ends(*array);
10514 ASSERT(script->line_ends()->IsFixedArray());
// Returns the 0-based column of |code_pos| within its line, adjusting the
// first line by the script's column offset.  Returns -1 if the position
// maps to no line.
10518 int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
10519 int line_number = GetLineNumber(script, code_pos);
10520 if (line_number == -1) return -1;
10522 DisallowHeapAllocation no_allocation;
10523 FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
10524 line_number = line_number - script->line_offset()->value();
// Only the script's first line is shifted by column_offset (e.g. inline
// scripts embedded mid-line in HTML).
10525 if (line_number == 0) return code_pos + script->column_offset()->value();
10526 int prev_line_end_pos =
10527 Smi::cast(line_ends_array->get(line_number - 1))->value();
10528 return code_pos - (prev_line_end_pos + 1);
// Binary-searches the cached line_ends array for the line containing
// |code_pos|; returns it offset by the script's line_offset, or -1 when
// there are no line ends.  NOTE(review): the initialization of the left
// search bound and part of the loop body (original lines ~10541-10551) are
// missing from this excerpt.
10532 int Script::GetLineNumberWithArray(int code_pos) {
10533 DisallowHeapAllocation no_allocation;
10534 ASSERT(line_ends()->IsFixedArray());
10535 FixedArray* line_ends_array = FixedArray::cast(line_ends());
10536 int line_ends_len = line_ends_array->length();
10537 if (line_ends_len == 0) return -1;
// Fast path: position on the first line.
10539 if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
10540 return line_offset()->value();
10544 int right = line_ends_len;
10545 while (int half = (right - left) / 2) {
10546 if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
10552 return right + line_offset()->value();
// Handle-taking variant: ensures line ends are computed, then delegates.
10556 int Script::GetLineNumber(Handle<Script> script, int code_pos) {
10557 InitLineEnds(script);
10558 return script->GetLineNumberWithArray(code_pos);
// Raw-pointer variant (no allocation allowed): uses the cached line-ends
// array when present, otherwise counts '\n' characters in the source up to
// |code_pos|.  NOTE(review): the line-counter declaration and the final
// return (original lines ~10570, 10575-10577) are missing from this excerpt.
10562 int Script::GetLineNumber(int code_pos) {
10563 DisallowHeapAllocation no_allocation;
10564 if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);
10566 // Slow mode: we do not have line_ends. We have to iterate through source.
10567 if (!source()->IsString()) return -1;
10569 String* source_string = String::cast(source());
10571 int len = source_string->length();
10572 for (int pos = 0; pos < len; pos++) {
10573 if (pos == code_pos) break;
10574 if (source_string->Get(pos) == '\n') line++;
// Invokes the JS-side "nameOrSourceURL" accessor on the script's wrapper
// object.  Returns undefined if the call throws (an exception may already
// be pending, so failure is swallowed deliberately).
10580 Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
10581 Isolate* isolate = script->GetIsolate();
10582 Handle<String> name_or_source_url_key =
10583 isolate->factory()->InternalizeOneByteString(
10584 STATIC_ASCII_VECTOR("nameOrSourceURL"));
10585 Handle<JSObject> script_wrapper = Script::GetWrapper(script);
10586 Handle<Object> property = Object::GetProperty(
10587 script_wrapper, name_or_source_url_key).ToHandleChecked();
10588 ASSERT(property->IsJSFunction());
10589 Handle<JSFunction> method = Handle<JSFunction>::cast(property);
10590 Handle<Object> result;
10591 // Do not check against pending exception, since this function may be called
10592 // when an exception has already been pending.
10593 if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
10594 return isolate->factory()->undefined_value();
10600 // Wrappers for scripts are kept alive and cached in weak global
10601 // handles referred from foreign objects held by the scripts as long as
10602 // they are used. When they are not used anymore, the garbage
10603 // collector will call the weak callback on the global handle
10604 // associated with the wrapper and get rid of both the wrapper and the
// Weak-handle callback: drops the cached wrapper by zeroing the script's
// foreign back-pointer and destroying the global handle.
10606 static void ClearWrapperCache(
10607 const v8::WeakCallbackData<v8::Value, void>& data) {
// The callback parameter is the address of the global handle slot.
10608 Object** location = reinterpret_cast<Object**>(data.GetParameter());
10609 JSValue* wrapper = JSValue::cast(*location);
10610 Foreign* foreign = Script::cast(wrapper->value())->wrapper();
10611 ASSERT_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location));
10612 foreign->set_foreign_address(0);
10613 GlobalHandles::Destroy(location);
10614 Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
10615 isolate->counters()->script_wrappers()->Decrement();
// Returns the JSValue wrapper for |script|, creating and caching it in a
// weak global handle on first use (cleared by ClearWrapperCache above).
10619 Handle<JSObject> Script::GetWrapper(Handle<Script> script) {
// Non-null foreign address means a wrapper is already cached.
10620 if (script->wrapper()->foreign_address() != NULL) {
10621 // Return a handle for the existing script wrapper from the cache.
10622 return Handle<JSValue>(
10623 *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address()));
10625 Isolate* isolate = script->GetIsolate();
10626 // Construct a new script wrapper.
10627 isolate->counters()->script_wrappers()->Increment();
10628 Handle<JSFunction> constructor = isolate->script_function();
10629 Handle<JSValue> result =
10630 Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));
10632 result->set_value(*script);
10634 // Create a new weak global handle and use it to cache the wrapper
10635 // for future use. The cache will automatically be cleared by the
10636 // garbage collector when it is not used anymore.
10637 Handle<Object> handle = isolate->global_handles()->Create(*result);
10638 GlobalHandles::MakeWeak(handle.location(),
10639 reinterpret_cast<void*>(handle.location()),
10640 &ClearWrapperCache);
10641 script->wrapper()->set_foreign_address(
10642 reinterpret_cast<Address>(handle.location()));
// Returns the function's name for debugging: the declared name when it is
// a non-empty string, otherwise the inferred name.
10647 String* SharedFunctionInfo::DebugName() {
10648 Object* n = name();
10649 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
10650 return String::cast(n);
// True when the function comes from a script whose source is available.
// reinterpret_cast avoids Script::cast's assertions (see SourceCodePrint).
10654 bool SharedFunctionInfo::HasSourceCode() {
10655 return !script()->IsUndefined() &&
10656 !reinterpret_cast<Script*>(script())->source()->IsUndefined();
// Returns this function's source text as a substring of the script source,
// or undefined when no source is available.
10660 Handle<Object> SharedFunctionInfo::GetSourceCode() {
10661 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
10662 Handle<String> source(String::cast(Script::cast(script())->source()));
10663 return GetIsolate()->factory()->NewSubString(
10664 source, start_position(), end_position());
// Decides whether the optimizer may inline this function.
10668 bool SharedFunctionInfo::IsInlineable() {
10669 // Check that the function has a script associated with it.
10670 if (!script()->IsScript()) return false;
10671 if (optimization_disabled()) return false;
10672 // If we never ran this (unlikely) then lets try to optimize it.
10673 if (code()->kind() != Code::FUNCTION) return true;
10674 return code()->optimizable();
// Length of this function's source text in characters.
10678 int SharedFunctionInfo::SourceSize() {
10679 return end_position() - start_position();
// Instance size for objects constructed by this function: header plus one
// pointer slot per expected property, capped at the maximum instance size.
10683 int SharedFunctionInfo::CalculateInstanceSize() {
10684 int instance_size =
10685 JSObject::kHeaderSize +
10686 expected_nof_properties() * kPointerSize;
10687 if (instance_size > JSObject::kMaxInstanceSize) {
10688 instance_size = JSObject::kMaxInstanceSize;
10690 return instance_size;
// Number of in-object property slots implied by the calculated instance size.
10694 int SharedFunctionInfo::CalculateInObjectProperties() {
10695 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
10699 // Support function for printing the source code to a StringStream
10700 // without any allocation in the heap.
// Prints up to |max_length| characters of this function's source into
// |accumulator| (negative max_length = unlimited), with placeholders for
// missing or invalid source.  NOTE(review): the max_length parameter line
// and some closing braces are missing from this excerpt.
10701 void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator,
10703 // For some native functions there is no source.
10704 if (!HasSourceCode()) {
10705 accumulator->Add("<No Source>");
10709 // Get the source for the script which this function came from.
10710 // Don't use String::cast because we don't want more assertion errors while
10711 // we are already creating a stack dump.
10712 String* script_source =
10713 reinterpret_cast<String*>(Script::cast(script())->source());
10715 if (!script_source->LooksValid()) {
10716 accumulator->Add("<Invalid Source>");
10720 if (!is_toplevel()) {
10721 accumulator->Add("function ");
10722 Object* name = this->name();
10723 if (name->IsString() && String::cast(name)->length() > 0) {
10724 accumulator->PrintName(name);
10728 int len = end_position() - start_position();
// Short enough: print everything; otherwise truncate with an ellipsis.
10729 if (len <= max_length || max_length < 0) {
10730 accumulator->Put(script_source, start_position(), end_position());
10732 accumulator->Put(script_source,
10734 start_position() + max_length);
10735 accumulator->Add("...\n");
// Returns true when |recompiled| is byte-for-byte equivalent to |code|
// (same instruction size and identical relocation info), so deopt data can
// be transplanted between them.
10740 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
10741 if (code->instruction_size() != recompiled->instruction_size()) return false;
10742 ByteArray* code_relocation = code->relocation_info();
10743 ByteArray* recompiled_relocation = recompiled->relocation_info();
10744 int length = code_relocation->length();
10745 if (length != recompiled_relocation->length()) return false;
10746 int compare = memcmp(code_relocation->GetDataStartAddress(),
10747 recompiled_relocation->GetDataStartAddress(),
10749 return compare == 0;
// Attaches deoptimization support to this function's unoptimized code.  If
// |recompiled| is equivalent to the current code, only the deopt data is
// copied over; otherwise the code is replaced outright (losing IC state).
10753 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
10754 ASSERT(!has_deoptimization_support());
10755 DisallowHeapAllocation no_allocation;
10756 Code* code = this->code();
10757 if (IsCodeEquivalent(code, recompiled)) {
10758 // Copy the deoptimization data from the recompiled code.
10759 code->set_deoptimization_data(recompiled->deoptimization_data());
10760 code->set_has_deoptimization_support(true);
10762 // TODO(3025757): In case the recompiled isn't equivalent to the
10763 // old code, we have to replace it. We should try to avoid this
10764 // altogether because it flushes valuable type feedback by
10765 // effectively resetting all IC state.
10766 ReplaceCode(recompiled);
10768 ASSERT(has_deoptimization_support());
// Permanently disables optimization of this function, recording |reason|,
// marking existing FUNCTION code non-optimizable, and emitting profiler /
// --trace-opt diagnostics.
10772 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
10773 // Disable optimization for the shared function info and mark the
10774 // code as non-optimizable. The marker on the shared function info
10775 // is there because we flush non-optimized code thereby loosing the
10776 // non-optimizable information for the code. When the code is
10777 // regenerated and set on the shared function info it is marked as
10778 // non-optimizable if optimization is disabled for the shared
10780 set_optimization_disabled(true);
10781 set_bailout_reason(reason);
10782 // Code should be the lazy compilation stub or else unoptimized. If the
10783 // latter, disable optimization for the code too.
10784 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
10785 if (code()->kind() == Code::FUNCTION) {
10786 code()->set_optimizable(false);
10788 PROFILE(GetIsolate(),
10789 LogExistingFunction(Handle<SharedFunctionInfo>(this),
10790 Handle<Code>(code())));
10791 if (FLAG_trace_opt) {
10792 PrintF("[disabled optimization for ");
10794 PrintF(", reason: %s]\n", GetBailoutReason(reason));
// Debug check: looks up |id| in the unoptimized code's deoptimization
// output data; GetOutputInfo asserts if the id is unknown.
10799 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
10800 ASSERT(!id.IsNone());
10801 Code* unoptimized = code();
10802 DeoptimizationOutputData* data =
10803 DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
10804 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
10806 return true; // Return true if there was no ASSERT.
// Begins in-object slack tracking on the freshly created initial |map|:
// remembers the map, arms the construction countdown, and swaps the
// construct stub for the counting variant.
10810 void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
10811 ASSERT(!IsInobjectSlackTrackingInProgress());
10813 if (!FLAG_clever_optimizations) return;
10815 // Only initiate the tracking the first time.
10816 if (live_objects_may_exist()) return;
10817 set_live_objects_may_exist(true);
10819 // No tracking during the snapshot construction phase.
10820 Isolate* isolate = GetIsolate();
10821 if (Serializer::enabled(isolate)) return;
// Nothing to reclaim if the map has no unused slots.
10823 if (map->unused_property_fields() == 0) return;
10825 // Nonzero counter is a leftover from the previous attempt interrupted
10827 if (construction_count() == 0) {
10828 set_construction_count(kGenerousAllocationCount);
10830 set_initial_map(map);
10831 Builtins* builtins = isolate->builtins();
10832 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
10834 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
10838 // Called from GC, hence reinterpret_cast and unchecked accessors.
// Temporarily unlinks the initial map during GC so an unreferenced map can
// die; AttachInitialMap restores the link if the map survives.
10839 void SharedFunctionInfo::DetachInitialMap() {
10840 Map* map = reinterpret_cast<Map*>(initial_map());
10842 // Make the map remember to restore the link if it survives the GC.
10843 map->set_bit_field2(
10844 map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
10846 // Undo state changes made by StartInobjectTracking (except the
10847 // construction_count). This way if the initial map does not survive the GC
10848 // then StartInobjectTracking will be called again the next time the
10849 // constructor is called. The countdown will continue and (possibly after
10850 // several more GCs) CompleteInobjectSlackTracking will eventually be called.
10851 Heap* heap = map->GetHeap();
10852 set_initial_map(heap->undefined_value());
10853 Builtins* builtins = heap->isolate()->builtins();
10854 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
10855 *RawField(this, kConstructStubOffset));
10856 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
10857 // It is safe to clear the flag: it will be set again if the map is live.
10858 set_live_objects_may_exist(false);
10862 // Called from GC, hence reinterpret_cast and unchecked accessors.
// Inverse of DetachInitialMap: relinks a surviving initial map and resumes
// the countdown construct stub.
10863 void SharedFunctionInfo::AttachInitialMap(Map* map) {
// Clear the marker bit set by DetachInitialMap.
10864 map->set_bit_field2(
10865 map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
10867 // Resume inobject slack tracking.
10868 set_initial_map(map);
10869 Builtins* builtins = map->GetHeap()->isolate()->builtins();
10870 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
10871 *RawField(this, kConstructStubOffset));
10872 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
10873 // The map survived the gc, so there may be objects referencing it.
10874 set_live_objects_may_exist(true);
// Resets per-context feedback when this function is reused in a new
// context: clears ICs and type feedback, bumps the IC age, and re-enables
// optimization if it had been disabled purely by the opt-count limit.
10878 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
10879 code()->ClearInlineCaches();
10880 // If we clear ICs, we need to clear the type feedback vector too, since
10881 // CallICs are synced with a feedback vector slot.
10882 ClearTypeFeedbackInfo();
10883 set_ic_age(new_ic_age);
10884 if (code()->kind() == Code::FUNCTION) {
10885 code()->set_profiler_ticks(0);
10886 if (optimization_disabled() &&
10887 opt_count() >= FLAG_max_opt_count) {
10888 // Re-enable optimizations if they were disabled due to opt_count limit.
10889 set_optimization_disabled(false);
10890 code()->set_optimizable(true);
10893 set_deopt_count(0);
// Transition-tree visitor: folds the minimum unused-property-field count of
// all visited maps into the int pointed to by |data|.
10898 static void GetMinInobjectSlack(Map* map, void* data) {
10899 int slack = map->unused_property_fields();
10900 if (*reinterpret_cast<int*>(data) > slack) {
10901 *reinterpret_cast<int*>(data) = slack;
// Transition-tree visitor: shrinks |map| by the slack (in pointer slots)
// pointed to by |data|, adjusting in-object property counts and size.
10906 static void ShrinkInstanceSize(Map* map, void* data) {
10907 int slack = *reinterpret_cast<int*>(data);
10908 map->set_inobject_properties(map->inobject_properties() - slack);
10909 map->set_unused_property_fields(map->unused_property_fields() - slack);
10910 map->set_instance_size(map->instance_size() - slack * kPointerSize);
10912 // Visitor id might depend on the instance size, recalculate it.
10913 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
// Finishes in-object slack tracking: restores the generic construct stub,
// computes the minimum slack across the initial map's transition tree, and
// shrinks every map (and the expected property count) by that slack.
10917 void SharedFunctionInfo::CompleteInobjectSlackTracking() {
10918 ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
10919 Map* map = Map::cast(initial_map());
10921 Heap* heap = map->GetHeap();
10922 set_initial_map(heap->undefined_value());
10923 Builtins* builtins = heap->isolate()->builtins();
10924 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
10926 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
10928 int slack = map->unused_property_fields();
10929 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
10931 // Resize the initial map and all maps in its transition tree.
10932 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
10934 // Give the correct expected_nof_properties to initial maps created later.
10935 ASSERT(expected_nof_properties() >= slack);
10936 set_expected_nof_properties(expected_nof_properties() - slack);
// Looks up cached optimized code for (native_context, osr_ast_id) in the
// optimized code map; returns the index of the cached-code slot on a hit.
// NOTE(review): the miss path (trace output and the return of the
// not-found sentinel, original lines ~10959-10964) is missing from this
// excerpt.
10941 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10942 BailoutId osr_ast_id) {
10943 DisallowHeapAllocation no_gc;
10944 ASSERT(native_context->IsNativeContext());
10945 if (!FLAG_cache_optimized_code) return -1;
10946 Object* value = optimized_code_map();
// A Smi means the map is empty.
10947 if (!value->IsSmi()) {
10948 FixedArray* optimized_code_map = FixedArray::cast(value);
10949 int length = optimized_code_map->length();
10950 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
10951 for (int i = kEntriesStart; i < length; i += kEntryLength) {
10952 if (optimized_code_map->get(i + kContextOffset) == native_context &&
10953 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10954 return i + kCachedCodeOffset;
10957 if (FLAG_trace_opt) {
10958 PrintF("[didn't find optimized code in optimized code map for ");
// Tag string tables for heap-visitor synchronization, generated from the
// shared VISITOR_SYNCHRONIZATION_TAGS_LIST macro (second column = tag,
// third column = human-readable name).
10967 #define DECLARE_TAG(ignore1, name, ignore2) name,
10968 const char* const VisitorSynchronization::kTags[
10969 VisitorSynchronization::kNumberOfSyncTags] = {
10970 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Second table: the display names for the same tag list.
10975 #define DECLARE_TAG(ignore1, ignore2, name) name,
10976 const char* const VisitorSynchronization::kTagNames[
10977 VisitorSynchronization::kNumberOfSyncTags] = {
10978 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Default handling for a code-target reloc entry: visit the target Code
// object as a plain pointer; the default visitor must not move it.
10983 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10984 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
10985 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10986 Object* old_target = target;
10987 VisitPointer(&target);
10988 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Default handling for a code-age-sequence reloc entry: visit the age stub.
10992 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10993 ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10994 Object* stub = rinfo->code_age_stub();
10996 VisitPointer(&stub);
// Visits a raw code-entry slot; if the visitor moved the Code object, the
// slot is rewritten to the new entry address.
11001 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
11002 Object* code = Code::GetObjectFromEntryAddress(entry_address);
11003 Object* old_code = code;
11004 VisitPointer(&code);
11005 if (code != old_code) {
11006 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
// Visits a CELL reloc entry; rewrites the reloc target if the visitor
// moved the cell.
11011 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
11012 ASSERT(rinfo->rmode() == RelocInfo::CELL);
11013 Object* cell = rinfo->target_cell();
11014 Object* old_cell = cell;
11015 VisitPointer(&cell);
11016 if (cell != old_cell) {
11017 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
// Visits the call target of a patched return / debug-break-slot sequence;
// the default visitor must not move it.
11022 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
11023 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
11024 rinfo->IsPatchedReturnSequence()) ||
11025 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
11026 rinfo->IsPatchedDebugBreakSlotSequence()));
11027 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
11028 Object* old_target = target;
11029 VisitPointer(&target);
11030 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits an object embedded in code.  NOTE(review): the VisitPointer call
// on |p| (original line ~11037) is missing from this excerpt.
11034 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
11035 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
11036 Object* p = rinfo->target_object();
// Visits an external (non-heap) reference embedded in code.
11041 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
11042 Address p = rinfo->target_reference();
11043 VisitExternalReference(&p);
// Drops all relocation info by pointing at the shared empty byte array.
11047 void Code::InvalidateRelocation() {
11048 set_relocation_info(GetHeap()->empty_byte_array());
// Replaces every embedded object/cell reference in this code with
// undefined, severing its links into the heap (write barrier skipped since
// undefined is immortal).
11052 void Code::InvalidateEmbeddedObjects() {
11053 Object* undefined = GetHeap()->undefined_value();
11054 Cell* undefined_cell = GetHeap()->undefined_cell();
11055 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
11056 RelocInfo::ModeMask(RelocInfo::CELL);
11057 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
11058 RelocInfo::Mode mode = it.rinfo()->rmode();
11059 if (mode == RelocInfo::EMBEDDED_OBJECT) {
11060 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
11061 } else if (mode == RelocInfo::CELL) {
11062 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
// Applies |delta| to every relocatable entry after the code object moved,
// then flushes the instruction cache for the new location.
11068 void Code::Relocate(intptr_t delta) {
11069 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
11070 it.rinfo()->apply(delta);
11072 CPU::FlushICache(instruction_start(), instruction_size());
// Copies freshly assembled instructions and relocation info from |desc|
// into this Code object, then walks the reloc entries to unbox handles
// (objects, cells, code targets, runtime entries, age stubs) into direct
// pointers and relocate pc-relative data by |delta|.
11076 void Code::CopyFrom(const CodeDesc& desc) {
11077 ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);
11080 CopyBytes(instruction_start(), desc.buffer,
11081 static_cast<size_t>(desc.instr_size));
// Reloc info is written backwards from the end of the assembler buffer.
11084 CopyBytes(relocation_start(),
11085 desc.buffer + desc.buffer_size - desc.reloc_size,
11086 static_cast<size_t>(desc.reloc_size));
11088 // unbox handles and relocate
11089 intptr_t delta = instruction_start() - desc.buffer;
11090 int mode_mask = RelocInfo::kCodeTargetMask |
11091 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
11092 RelocInfo::ModeMask(RelocInfo::CELL) |
11093 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
11094 RelocInfo::kApplyMask;
11095 // Needed to find target_object and runtime_entry on X64
11096 Assembler* origin = desc.origin;
11097 AllowDeferredHandleDereference embedding_raw_address;
11098 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
11099 RelocInfo::Mode mode = it.rinfo()->rmode();
11100 if (mode == RelocInfo::EMBEDDED_OBJECT) {
11101 Handle<Object> p = it.rinfo()->target_object_handle(origin);
11102 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER);
11103 } else if (mode == RelocInfo::CELL) {
11104 Handle<Cell> cell = it.rinfo()->target_cell_handle();
11105 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER);
11106 } else if (RelocInfo::IsCodeTarget(mode)) {
11107 // rewrite code handles in inline cache targets to direct
11108 // pointers to the first instruction in the code object
11109 Handle<Object> p = it.rinfo()->target_object_handle(origin);
11110 Code* code = Code::cast(*p);
11111 it.rinfo()->set_target_address(code->instruction_start(),
11112 SKIP_WRITE_BARRIER);
11113 } else if (RelocInfo::IsRuntimeEntry(mode)) {
11114 Address p = it.rinfo()->target_runtime_entry(origin);
11115 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER);
11116 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
11117 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
11118 Code* code = Code::cast(*p);
11119 it.rinfo()->set_code_age_stub(code);
11121 it.rinfo()->apply(delta);
11124 CPU::FlushICache(instruction_start(), instruction_size());
11128 // Locate the source position which is closest to the address in the code. This
11129 // is using the source position information embedded in the relocation info.
11130 // The position returned is relative to the beginning of the script where the
11131 // source for this function is found.
// NOTE(review): the candidate-update statements, iterator advance and final
// return (original lines ~11152-11159) are missing from this excerpt.
11132 int Code::SourcePosition(Address pc) {
11133 int distance = kMaxInt;
11134 int position = RelocInfo::kNoPosition; // Initially no position found.
11135 // Run through all the relocation info to find the best matching source
11136 // position. All the code needs to be considered as the sequence of the
11137 // instructions in the code does not necessarily follow the same order as the
11139 RelocIterator it(this, RelocInfo::kPositionMask);
11140 while (!it.done()) {
11141 // Only look at positions after the current pc.
11142 if (it.rinfo()->pc() < pc) {
11143 // Get position and distance.
11145 int dist = static_cast<int>(pc - it.rinfo()->pc());
11146 int pos = static_cast<int>(it.rinfo()->data());
11147 // If this position is closer than the current candidate or if it has the
11148 // same distance as the current candidate and the position is higher then
11149 // this position is the new candidate.
11150 if ((dist < distance) ||
11151 (dist == distance && pos > position)) {
11162 // Same as Code::SourcePosition above except it only looks for statement
// Like Code::SourcePosition, but returns the closest *statement* position
// that does not exceed the plain source position found for |pc|.
11164 int Code::SourceStatementPosition(Address pc) {
11165 // First find the position as close as possible using all position
11167 int position = SourcePosition(pc);
11168 // Now find the closest statement position before the position.
11169 int statement_position = 0;
11170 RelocIterator it(this, RelocInfo::kPositionMask);
11171 while (!it.done()) {
11172 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
11173 int p = static_cast<int>(it.rinfo()->data());
// Keep the largest statement position that is still <= |position|.
11174 if (statement_position < p && p <= position) {
11175 statement_position = p;
// NOTE(review): the iterator advance and loop close are missing from this
// listing — confirm against the full source.
11180 return statement_position;
// Looks up the safepoint entry for |pc| in this code object's safepoint table.
11184 SafepointEntry Code::GetSafepointEntry(Address pc) {
11185 SafepointTable table(this);
11186 return table.FindEntry(pc);
// Scans embedded-object reloc entries of this IC stub and returns the n-th
// heap object whose map equals |match_map|. Returns via the early `return`
// below; the not-found path is outside this listing.
11190 Object* Code::FindNthObject(int n, Map* match_map) {
11191 ASSERT(is_inline_cache_stub());
11192 DisallowHeapAllocation no_allocation;
11193 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
11194 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11195 RelocInfo* info = it.rinfo();
11196 Object* object = info->target_object();
11197 if (object->IsHeapObject()) {
11198 if (HeapObject::cast(object)->map() == match_map) {
11199 if (--n == 0) return object;
// Returns the first embedded AllocationSite in this code, or NULL if none.
11207 AllocationSite* Code::FindFirstAllocationSite() {
11208 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
11209 return (result != NULL) ? AllocationSite::cast(result) : NULL;
// Returns the first embedded Map in this code (objects whose map is the
// meta-map are Maps), or NULL if none.
11213 Map* Code::FindFirstMap() {
11214 Object* result = FindNthObject(1, GetHeap()->meta_map());
11215 return (result != NULL) ? Map::cast(result) : NULL;
// Walks embedded objects and, in order, replaces objects whose map matches
// the current pattern entry with the corresponding replacement. Patterns are
// consumed sequentially; returns early once all |count_| patterns are used.
11219 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
11220 ASSERT(is_inline_cache_stub() || is_handler());
11221 DisallowHeapAllocation no_allocation;
11222 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
11223 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
11224 int current_pattern = 0;
11225 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11226 RelocInfo* info = it.rinfo();
11227 Object* object = info->target_object();
11228 if (object->IsHeapObject()) {
11229 Map* map = HeapObject::cast(object)->map();
11230 if (map == *pattern.find_[current_pattern]) {
11231 info->set_target_object(*pattern.replace_[current_pattern]);
11232 if (++current_pattern == pattern.count_) return;
// Collects every Map embedded in this IC stub into |maps|.
11240 void Code::FindAllMaps(MapHandleList* maps) {
11241 ASSERT(is_inline_cache_stub());
11242 DisallowHeapAllocation no_allocation;
11243 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
11244 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11245 RelocInfo* info = it.rinfo();
11246 Object* object = info->target_object();
11247 if (object->IsMap()) maps->Add(handle(Map::cast(object)));
// Returns the first HANDLER-kind code target referenced by this IC stub;
// the not-found return path falls outside this listing.
11252 Code* Code::FindFirstHandler() {
11253 ASSERT(is_inline_cache_stub());
11254 DisallowHeapAllocation no_allocation;
11255 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
11256 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11257 RelocInfo* info = it.rinfo();
11258 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
11259 if (code->kind() == Code::HANDLER) return code;
// Collects up to |length| handler code objects referenced by this IC stub
// into |code_list|. Returns true iff |length| handlers were found.
// NOTE(review): the declaration/initialization of |i| (and its increment) is
// missing from this listing — confirm against the full source.
11265 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
11266 ASSERT(is_inline_cache_stub());
11267 DisallowHeapAllocation no_allocation;
11268 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
11270 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11271 if (i == length) return true;
11272 RelocInfo* info = it.rinfo();
11273 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
11274 // IC stubs with handlers never contain non-handler code objects before
11275 // handler targets.
11276 if (code->kind() != Code::HANDLER) break;
11277 code_list->Add(Handle<Code>(code));
11280 return i == length;
// Returns the first Name embedded in this IC stub; the not-found return path
// falls outside this listing.
11284 Name* Code::FindFirstName() {
11285 ASSERT(is_inline_cache_stub());
11286 DisallowHeapAllocation no_allocation;
11287 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
11288 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11289 RelocInfo* info = it.rinfo();
11290 Object* object = info->target_object();
11291 if (object->IsName()) return Name::cast(object);
// Clears all inline caches in this code, regardless of IC kind.
11297 void Code::ClearInlineCaches() {
11298 ClearInlineCaches(NULL);
// Clears only the inline caches of the given |kind|.
11302 void Code::ClearInlineCaches(Code::Kind kind) {
11303 ClearInlineCaches(&kind);
// Shared implementation: walks call targets and clears each IC stub target,
// filtered by |kind| when non-NULL (NULL means clear every IC kind).
11307 void Code::ClearInlineCaches(Code::Kind* kind) {
11308 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
11309 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
11310 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
11311 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11312 RelocInfo* info = it.rinfo();
11313 Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
11314 if (target->is_inline_cache_stub()) {
11315 if (kind == NULL || *kind == target->kind()) {
11316 IC::Clear(this->GetIsolate(), info->pc(),
11317 info->host()->constant_pool());
// Resets the type feedback vector, preserving AllocationSite entries and
// writing the uninitialized sentinel elsewhere.
// NOTE(review): the vector->set(...) call that receives the sentinel below is
// partially missing from this listing — confirm against the full source.
11324 void SharedFunctionInfo::ClearTypeFeedbackInfo() {
11325 FixedArray* vector = feedback_vector();
11326 Heap* heap = GetHeap();
11327 for (int i = 0; i < vector->length(); i++) {
11328 Object* obj = vector->get(i);
// AllocationSites are kept so allocation-site tracking survives the reset.
11329 if (!obj->IsAllocationSite()) {
11332 TypeFeedbackInfo::RawUninitializedSentinel(heap),
11333 SKIP_WRITE_BARRIER);
// Looks up the AST id for a back edge with the given |pc_offset| in this
// full-codegen function's back-edge table; BailoutId::None() if absent.
11339 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
11340 DisallowHeapAllocation no_gc;
11341 ASSERT(kind() == FUNCTION);
11342 BackEdgeTable back_edges(this, &no_gc);
11343 for (uint32_t i = 0; i < back_edges.length(); i++) {
11344 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
11346 return BailoutId::None();
// Inverse of TranslatePcOffsetToAstId: finds the pc offset for |ast_id|.
// The back edge must exist; hitting the end is a fatal error (UNREACHABLE).
11350 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
11351 DisallowHeapAllocation no_gc;
11352 ASSERT(kind() == FUNCTION);
11353 BackEdgeTable back_edges(this, &no_gc);
11354 for (uint32_t i = 0; i < back_edges.length(); i++) {
11355 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
11357 UNREACHABLE();  // We expect to find the back edge.
// Rewrites the code-age |sequence| so the code is considered young again.
11362 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
11363 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
// Patches the code-age |sequence| to record that the code has run once.
11367 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
11368 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
11369 NO_MARKING_PARITY);
// Normalizes the special pseudo-ages: never-executed code counts as old,
// executed-once code counts as pre-aged. The final `return age;` falls
// outside this listing.
11373 static Code::Age EffectiveAge(Code::Age age) {
11374 if (age == Code::kNotExecutedCodeAge) {
11375 // Treat that's never been executed as old immediately.
11376 age = Code::kIsOldCodeAge;
11377 } else if (age == Code::kExecutedOnceCodeAge) {
11378 // Pre-age code that has only been executed once.
11379 age = Code::kPreAgedCodeAge;
// Advances this code's age by one step during GC marking, but only once per
// marking cycle (skipped when the code's parity already matches
// |current_parity|) and never past kLastCodeAge.
// NOTE(review): the declaration of |age| (and the sequence argument line) is
// missing from this listing — confirm against the full source.
11385 void Code::MakeOlder(MarkingParity current_parity) {
11386 byte* sequence = FindCodeAgeSequence();
11387 if (sequence != NULL) {
11389 MarkingParity code_parity;
11390 GetCodeAgeAndParity(sequence, &age, &code_parity);
11391 age = EffectiveAge(age);
11392 if (age != kLastCodeAge && code_parity != current_parity) {
11393 PatchPlatformCodeAge(GetIsolate(),
11395 static_cast<Age>(age + 1),
// True when the (effective) age has reached the "old" threshold.
11402 bool Code::IsOld() {
11403 return GetAge() >= kIsOldCodeAge;
// Returns a pointer to this code's age sequence (at the prologue), or NULL
// when aging does not apply (flag off, no prologue, or a kind/debug-slot
// combination that is not aged). The NULL branch of the conditional falls
// outside this listing.
11407 byte* Code::FindCodeAgeSequence() {
11408 return FLAG_age_code &&
11409 prologue_offset() != Code::kPrologueOffsetNotSet &&
11410 (kind() == OPTIMIZED_FUNCTION ||
11411 (kind() == FUNCTION && !has_debug_break_slots()))
11412 ? instruction_start() + prologue_offset()
// The code's age with pseudo-ages folded in (see EffectiveAge).
11417 Code::Age Code::GetAge() {
11418 return EffectiveAge(GetRawAge());
// Reads the raw age from the code-age sequence; kNoAgeCodeAge when there is
// no sequence.
// NOTE(review): the declaration of |age| and the final return are missing
// from this listing — confirm against the full source.
11422 Code::Age Code::GetRawAge() {
11423 byte* sequence = FindCodeAgeSequence();
11424 if (sequence == NULL) {
11425 return kNoAgeCodeAge;
11428 MarkingParity parity;
11429 GetCodeAgeAndParity(sequence, &age, &parity);
// Decodes age and marking parity by comparing |code| against the per-age
// builtin stubs (even/odd marking variants plus the executed-once/twice
// markers).
// NOTE(review): the declaration of |stub| is missing from this listing —
// confirm against the full source.
11434 void Code::GetCodeAgeAndParity(Code* code, Age* age,
11435 MarkingParity* parity) {
11436 Isolate* isolate = code->GetIsolate();
11437 Builtins* builtins = isolate->builtins();
11439 #define HANDLE_CODE_AGE(AGE) \
11440 stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
11441 if (code == stub) { \
11442 *age = k##AGE##CodeAge; \
11443 *parity = EVEN_MARKING_PARITY; \
11446 stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
11447 if (code == stub) { \
11448 *age = k##AGE##CodeAge; \
11449 *parity = ODD_MARKING_PARITY; \
11452 CODE_AGE_LIST(HANDLE_CODE_AGE)
11453 #undef HANDLE_CODE_AGE
11454 stub = *builtins->MarkCodeAsExecutedOnce();
11455 if (code == stub) {
11456 *age = kNotExecutedCodeAge;
11457 *parity = NO_MARKING_PARITY;
11460 stub = *builtins->MarkCodeAsExecutedTwice();
11461 if (code == stub) {
11462 *age = kExecutedOnceCodeAge;
11463 *parity = NO_MARKING_PARITY;
// Inverse of GetCodeAgeAndParity: returns the builtin stub that encodes the
// given |age| and |parity|.
// NOTE(review): the `switch (age)` header is missing from this listing —
// confirm against the full source.
11470 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
11471 Builtins* builtins = isolate->builtins();
11473 #define HANDLE_CODE_AGE(AGE) \
11474 case k##AGE##CodeAge: { \
11475 Code* stub = parity == EVEN_MARKING_PARITY \
11476 ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
11477 : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
11480 CODE_AGE_LIST(HANDLE_CODE_AGE)
11481 #undef HANDLE_CODE_AGE
11482 case kNotExecutedCodeAge: {
11483 ASSERT(parity == NO_MARKING_PARITY);
11484 return *builtins->MarkCodeAsExecutedOnce();
11486 case kExecutedOnceCodeAge: {
11487 ASSERT(parity == NO_MARKING_PARITY);
11488 return *builtins->MarkCodeAsExecutedTwice();
// Prints the comment preceding the deopt runtime entry whose eager or soft
// deoptimization id matches |bailout_id|. Relies on COMMENT reloc entries
// appearing before their associated RUNTIME_ENTRY.
11498 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
11499 const char* last_comment = NULL;
11500 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
11501 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
11502 for (RelocIterator it(this, mask); !it.done(); it.next()) {
11503 RelocInfo* info = it.rinfo();
11504 if (info->rmode() == RelocInfo::COMMENT) {
// Remember the latest comment; it describes the next runtime entry.
11505 last_comment = reinterpret_cast<const char*>(info->data());
11506 } else if (last_comment != NULL) {
11507 if ((bailout_id == Deoptimizer::GetDeoptimizationId(
11508 GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
11509 (bailout_id == Deoptimizer::GetDeoptimizationId(
11510 GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
11511 CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
11512 PrintF(out, "            %s\n", last_comment);
// True if |pc| is one of the recorded deopt points of this optimized code.
// Entries with pc == -1 are placeholders and are skipped. The final
// `return false;` falls outside this listing.
11520 bool Code::CanDeoptAt(Address pc) {
11521 DeoptimizationInputData* deopt_data =
11522 DeoptimizationInputData::cast(deoptimization_data());
11523 Address code_start_address = instruction_start();
11524 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
11525 if (deopt_data->Pc(i)->value() == -1) continue;
11526 Address address = code_start_address + deopt_data->Pc(i)->value();
11527 if (address == pc) return true;
11533 // Identify kind of code.
// Returns the symbolic name of a code kind (e.g. "FUNCTION"); the enclosing
// switch header/footer falls outside this listing.
11534 const char* Code::Kind2String(Kind kind) {
11536 #define CASE(name) case name: return #name;
11537 CODE_KIND_LIST(CASE)
11539 case NUMBER_OF_KINDS: break;
11546 #ifdef ENABLE_DISASSEMBLER
// Debug printer (ENABLE_DISASSEMBLER only): dumps every deopt point of an
// optimized function and, with --print-code-verbose, decodes each frame
// translation opcode-by-opcode.
11548 void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
11549 disasm::NameConverter converter;
11550 int deopt_count = DeoptCount();
11551 PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
11552 if (0 == deopt_count) return;
11554 PrintF(out, "%6s  %6s  %6s %6s %12s\n", "index", "ast id", "argc", "pc",
11555 FLAG_print_code_verbose ? "commands" : "");
11556 for (int i = 0; i < deopt_count; i++) {
11557 PrintF(out, "%6d  %6d  %6d %6d",
11560 ArgumentsStackHeight(i)->value(),
11563 if (!FLAG_print_code_verbose) {
11567 // Print details of the frame translation.
11568 int translation_index = TranslationIndex(i)->value();
11569 TranslationIterator iterator(TranslationByteArray(), translation_index);
11570 Translation::Opcode opcode =
11571 static_cast<Translation::Opcode>(iterator.Next());
11572 ASSERT(Translation::BEGIN == opcode);
11573 int frame_count = iterator.Next();
11574 int jsframe_count = iterator.Next();
11575 PrintF(out, "  %s {frame count=%d, js frame count=%d}\n",
11576 Translation::StringFor(opcode),
// Decode translation commands until the next BEGIN or end of stream.
11580 while (iterator.HasNext() &&
11581 Translation::BEGIN !=
11582 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
11583 PrintF(out, "%24s    %s ", "", Translation::StringFor(opcode));
11586 case Translation::BEGIN:
11590 case Translation::JS_FRAME: {
11591 int ast_id = iterator.Next();
11592 int function_id = iterator.Next();
11593 unsigned height = iterator.Next();
11594 PrintF(out, "{ast_id=%d, function=", ast_id);
11595 if (function_id != Translation::kSelfLiteralId) {
11596 Object* function = LiteralArray()->get(function_id);
11597 JSFunction::cast(function)->PrintName(out);
11599 PrintF(out, "<self>");
11601 PrintF(out, ", height=%u}", height);
11605 case Translation::COMPILED_STUB_FRAME: {
11606 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
11607 PrintF(out, "{kind=%d}", stub_kind);
11611 case Translation::ARGUMENTS_ADAPTOR_FRAME:
11612 case Translation::CONSTRUCT_STUB_FRAME: {
11613 int function_id = iterator.Next();
11614 JSFunction* function =
11615 JSFunction::cast(LiteralArray()->get(function_id));
11616 unsigned height = iterator.Next();
11617 PrintF(out, "{function=");
11618 function->PrintName(out);
11619 PrintF(out, ", height=%u}", height);
11623 case Translation::GETTER_STUB_FRAME:
11624 case Translation::SETTER_STUB_FRAME: {
11625 int function_id = iterator.Next();
11626 JSFunction* function =
11627 JSFunction::cast(LiteralArray()->get(function_id));
11628 PrintF(out, "{function=");
11629 function->PrintName(out);
11634 case Translation::REGISTER: {
11635 int reg_code = iterator.Next();
11636 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
11640 case Translation::INT32_REGISTER: {
11641 int reg_code = iterator.Next();
11642 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
11646 case Translation::UINT32_REGISTER: {
11647 int reg_code = iterator.Next();
11648 PrintF(out, "{input=%s (unsigned)}",
11649 converter.NameOfCPURegister(reg_code));
11653 case Translation::DOUBLE_REGISTER: {
11654 int reg_code = iterator.Next();
11655 PrintF(out, "{input=%s}",
11656 DoubleRegister::AllocationIndexToString(reg_code));
11660 case Translation::FLOAT32x4_REGISTER: {
11661 int reg_code = iterator.Next();
11662 PrintF(out, "{input=%s}",
11663 SIMD128Register::AllocationIndexToString(reg_code));
11667 case Translation::FLOAT64x2_REGISTER: {
11668 int reg_code = iterator.Next();
11669 PrintF(out, "{input=%s}",
11670 SIMD128Register::AllocationIndexToString(reg_code));
11674 case Translation::INT32x4_REGISTER: {
11675 int reg_code = iterator.Next();
11676 PrintF(out, "{input=%s}",
11677 SIMD128Register::AllocationIndexToString(reg_code));
11681 case Translation::STACK_SLOT: {
11682 int input_slot_index = iterator.Next();
11683 PrintF(out, "{input=%d}", input_slot_index);
11687 case Translation::INT32_STACK_SLOT: {
11688 int input_slot_index = iterator.Next();
11689 PrintF(out, "{input=%d}", input_slot_index);
11693 case Translation::UINT32_STACK_SLOT: {
11694 int input_slot_index = iterator.Next();
11695 PrintF(out, "{input=%d (unsigned)}", input_slot_index);
11699 case Translation::DOUBLE_STACK_SLOT: {
11700 int input_slot_index = iterator.Next();
11701 PrintF(out, "{input=%d}", input_slot_index);
11705 case Translation::FLOAT32x4_STACK_SLOT: {
11706 int input_slot_index = iterator.Next();
11707 PrintF(out, "{input=%d}", input_slot_index);
11711 case Translation::FLOAT64x2_STACK_SLOT: {
11712 int input_slot_index = iterator.Next();
11713 PrintF(out, "{input=%d}", input_slot_index);
11717 case Translation::INT32x4_STACK_SLOT: {
11718 int input_slot_index = iterator.Next();
11719 PrintF(out, "{input=%d}", input_slot_index);
11723 case Translation::LITERAL: {
11724 unsigned literal_index = iterator.Next();
11725 PrintF(out, "{literal_id=%u}", literal_index);
11729 case Translation::DUPLICATED_OBJECT: {
11730 int object_index = iterator.Next();
11731 PrintF(out, "{object_index=%d}", object_index);
11735 case Translation::ARGUMENTS_OBJECT:
11736 case Translation::CAPTURED_OBJECT: {
11737 int args_length = iterator.Next();
11738 PrintF(out, "{length=%d}", args_length);
// Debug printer: dumps the (ast id, pc, full-codegen state) triple for every
// deopt point of a full-codegen function.
11748 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
11749 PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
11750 this->DeoptPoints());
11751 if (this->DeoptPoints() == 0) return;
11753 PrintF(out, "%6s  %8s  %s\n", "ast id", "pc", "state");
11754 for (int i = 0; i < this->DeoptPoints(); i++) {
// pc and state are bit-packed into one Smi; decode both fields.
11755 int pc_and_state = this->PcAndState(i)->value();
11756 PrintF(out, "%6d  %8d  %s\n",
11757 this->AstId(i).ToInt(),
11758 FullCodeGenerator::PcField::decode(pc_and_state),
11759 FullCodeGenerator::State2String(
11760 FullCodeGenerator::StateField::decode(pc_and_state)));
// Returns the symbolic name of an inline-cache state; the enclosing switch
// header falls outside this listing.
11765 const char* Code::ICState2String(InlineCacheState state) {
11767 case UNINITIALIZED: return "UNINITIALIZED";
11768 case PREMONOMORPHIC: return "PREMONOMORPHIC";
11769 case MONOMORPHIC: return "MONOMORPHIC";
11770 case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE";
11771 case POLYMORPHIC: return "POLYMORPHIC";
11772 case MEGAMORPHIC: return "MEGAMORPHIC";
11773 case GENERIC: return "GENERIC";
11774 case DEBUG_STUB: return "DEBUG_STUB";
// Returns the symbolic name of a stub type; the enclosing switch header
// falls outside this listing.
11781 const char* Code::StubType2String(StubType type) {
11783 case NORMAL: return "NORMAL";
11784 case FAST: return "FAST";
11786 UNREACHABLE();  // keep the compiler happy
// Prints the extra IC state, symbolically where a name is known (currently
// only STRICT for keyed stores), otherwise as a raw integer. The enclosing
// switch header falls outside this listing.
11791 void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
11792 PrintF(out, "extra_ic_state = ");
11793 const char* name = NULL;
11796 case KEYED_STORE_IC:
11797 if (extra == STRICT) name = "STRICT";
11802 if (name != NULL) {
11803 PrintF(out, "%s\n", name);
11805 PrintF(out, "%d\n", extra);
// Full debug dump of a code object (ENABLE_DISASSEMBLER only): kind, IC
// state, disassembled instructions, deopt data, safepoints or back edges,
// type feedback, and relocation info.
11810 void Code::Disassemble(const char* name, FILE* out) {
11811 PrintF(out, "kind = %s\n", Kind2String(kind()));
11812 if (has_major_key()) {
11813 PrintF(out, "major_key = %s\n",
11814 CodeStub::MajorName(CodeStub::GetMajorKey(this), true));
11816 if (is_inline_cache_stub()) {
11817 PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
11818 PrintExtraICState(out, kind(), extra_ic_state());
11819 if (ic_state() == MONOMORPHIC) {
11820 PrintF(out, "type = %s\n", StubType2String(type()));
11822 if (is_compare_ic_stub()) {
11823 ASSERT(major_key() == CodeStub::CompareIC);
11824 CompareIC::State left_state, right_state, handler_state;
11826 ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state,
11827 &handler_state, &op);
11828 PrintF(out, "compare_state = %s*%s -> %s\n",
11829 CompareIC::GetStateName(left_state),
11830 CompareIC::GetStateName(right_state),
11831 CompareIC::GetStateName(handler_state));
11832 PrintF(out, "compare_operation = %s\n", Token::Name(op));
11835 if ((name != NULL) && (name[0] != '\0')) {
11836 PrintF(out, "name = %s\n", name);
11838 if (kind() == OPTIMIZED_FUNCTION) {
11839 PrintF(out, "stack_slots = %d\n", stack_slots());
11842 PrintF(out, "Instructions (size = %d)\n", instruction_size());
11843 Disassembler::Decode(out, this);
// Deopt data layout differs by kind: full-codegen uses output data,
// optimized code uses input data.
11846 if (kind() == FUNCTION) {
11847 DeoptimizationOutputData* data =
11848 DeoptimizationOutputData::cast(this->deoptimization_data());
11849 data->DeoptimizationOutputDataPrint(out);
11850 } else if (kind() == OPTIMIZED_FUNCTION) {
11851 DeoptimizationInputData* data =
11852 DeoptimizationInputData::cast(this->deoptimization_data());
11853 data->DeoptimizationInputDataPrint(out);
11857 if (is_crankshafted()) {
11858 SafepointTable table(this);
11859 PrintF(out, "Safepoints (size = %u)\n", table.size());
11860 for (unsigned i = 0; i < table.length(); i++) {
11861 unsigned pc_offset = table.GetPcOffset(i);
11862 PrintF(out, "%p  %4d  ", (instruction_start() + pc_offset), pc_offset);
11863 table.PrintEntry(i, out);
11864 PrintF(out, " (sp -> fp)");
11865 SafepointEntry entry = table.GetEntry(i);
11866 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
11867 PrintF(out, "  %6d", entry.deoptimization_index());
11869 PrintF(out, "  <none>");
11871 if (entry.argument_count() > 0) {
11872 PrintF(out, " argc: %d", entry.argument_count());
11877 } else if (kind() == FUNCTION) {
11878 unsigned offset = back_edge_table_offset();
11879 // If there is no back edge table, the "table start" will be at or after
11880 // (due to alignment) the end of the instruction stream.
11881 if (static_cast<int>(offset) < instruction_size()) {
11882 DisallowHeapAllocation no_gc;
11883 BackEdgeTable back_edges(this, &no_gc);
11885 PrintF(out, "Back edges (size = %u)\n", back_edges.length());
11886 PrintF(out, "ast_id  pc_offset  loop_depth\n");
11888 for (uint32_t i = 0; i < back_edges.length(); i++) {
11889 PrintF(out, "%6d  %9u  %10u\n", back_edges.ast_id(i).ToInt(),
11890 back_edges.pc_offset(i),
11891 back_edges.loop_depth(i));
11896 #ifdef OBJECT_PRINT
11897 if (!type_feedback_info()->IsUndefined()) {
11898 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
11904 PrintF(out, "RelocInfo (size = %d)\n", relocation_size());
11905 for (RelocIterator it(this); !it.done(); it.next()) {
11906 it.rinfo()->Print(GetIsolate(), out);
11910 #endif // ENABLE_DISASSEMBLER
// Grows (or shrinks) an object's fast elements backing store to |capacity|,
// choosing SMI vs object element kind per |smi_mode|, copying existing
// elements, transitioning the map (and allocation site), and updating a
// JSArray's length. Returns the new backing store.
// NOTE(review): this listing omits the |capacity|/|length| parameter lines
// and several closing braces — confirm against the full source.
11913 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
11914 Handle<JSObject> object,
11917 SetFastElementsCapacitySmiMode smi_mode) {
11918 // We should never end in here with a pixel or external array.
11919 ASSERT(!object->HasExternalArrayElements());
11921 // Allocate a new fast elements backing store.
11922 Handle<FixedArray> new_elements =
11923 object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
11925 ElementsKind elements_kind = object->GetElementsKind();
11926 ElementsKind new_elements_kind;
11927 // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
11928 // or if it's allowed and the old elements array contained only SMIs.
11929 bool has_fast_smi_elements =
11930 (smi_mode == kForceSmiElements) ||
11931 ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
11932 if (has_fast_smi_elements) {
11933 if (IsHoleyElementsKind(elements_kind)) {
11934 new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
11936 new_elements_kind = FAST_SMI_ELEMENTS;
11939 if (IsHoleyElementsKind(elements_kind)) {
11940 new_elements_kind = FAST_HOLEY_ELEMENTS;
11942 new_elements_kind = FAST_ELEMENTS;
11945 Handle<FixedArrayBase> old_elements(object->elements());
11946 ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
11947 accessor->CopyElements(object, new_elements, elements_kind);
11949 if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
11950 Handle<Map> new_map = (new_elements_kind != elements_kind)
11951 ? GetElementsTransitionMap(object, new_elements_kind)
11952 : handle(object->map());
11953 JSObject::ValidateElements(object);
11954 JSObject::SetMapAndElements(object, new_map, new_elements);
11956 // Transition through the allocation site as well if present.
11957 JSObject::UpdateAllocationSite(object, new_elements_kind);
// Sloppy-arguments objects keep their parameter map; only the backing
// arguments store (slot 1) is swapped.
11959 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11960 parameter_map->set(1, *new_elements);
11963 if (FLAG_trace_elements_transitions) {
11964 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11965 object->GetElementsKind(), new_elements);
11968 if (object->IsJSArray()) {
11969 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11971 return new_elements;
// Double-elements counterpart of SetFastElementsCapacityAndLength: allocates
// a FixedDoubleArray of |capacity|, copies elements, transitions the map to
// a FAST_(HOLEY_)DOUBLE kind, and updates a JSArray's length.
// NOTE(review): the |capacity|/|length| parameter lines are omitted from
// this listing — confirm against the full source.
11975 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11978 // We should never end in here with a pixel or external array.
11979 ASSERT(!object->HasExternalArrayElements());
11981 Handle<FixedArrayBase> elems =
11982 object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11984 ElementsKind elements_kind = object->GetElementsKind();
11985 CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11986 ElementsKind new_elements_kind = elements_kind;
11987 if (IsHoleyElementsKind(elements_kind)) {
11988 new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11990 new_elements_kind = FAST_DOUBLE_ELEMENTS;
11993 Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11995 Handle<FixedArrayBase> old_elements(object->elements());
11996 ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11997 accessor->CopyElements(object, elems, elements_kind);
11999 JSObject::ValidateElements(object);
12000 JSObject::SetMapAndElements(object, new_map, elems);
12002 if (FLAG_trace_elements_transitions) {
12003 PrintElementsTransition(stdout, object, elements_kind, old_elements,
12004 object->GetElementsKind(), elems);
12007 if (object->IsJSArray()) {
12008 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
// Allocates hole-initialized element storage for a fresh JSArray.
12014 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
12015 ASSERT(capacity >= 0);
12016 array->GetIsolate()->factory()->NewJSArrayStorage(
12017 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
// Grows the array so both capacity and length equal |required_size|,
// delegating to the elements accessor for the current elements kind.
12021 void JSArray::Expand(Handle<JSArray> array, int required_size) {
12022 ElementsAccessor* accessor = array->GetElementsAccessor();
12023 accessor->SetCapacityAndLength(array, required_size, required_size);
12027 // Returns false if the passed-in index is marked non-configurable,
12028 // which will cause the ES5 truncation operation to halt, and thus
12029 // no further old values need be collected.
// Records the element at |index| (value + index) for Object.observe change
// records before array truncation. Accessor-backed elements are recorded as
// the-hole so "oldValue" can be elided later. Returns false when the element
// is non-configurable (DONT_DELETE), which stops the truncation walk.
12030 static bool GetOldValue(Isolate* isolate,
12031 Handle<JSObject> object,
12033 List<Handle<Object> >* old_values,
12034 List<uint32_t>* indices) {
12035 PropertyAttributes attributes =
12036 JSReceiver::GetLocalElementAttribute(object, index);
12037 ASSERT(attributes != ABSENT);
12038 if (attributes == DONT_DELETE) return false;
12039 Handle<Object> value;
12040 if (!JSObject::GetLocalElementAccessorPair(object, index).is_null()) {
12041 value = Handle<Object>::cast(isolate->factory()->the_hole_value());
12043 value = Object::GetElement(isolate, object, index).ToHandleChecked();
12045 old_values->Add(value);
12046 indices->Add(index);
// Enqueues an Object.observe "splice" record by calling the JS-side
// observers_enqueue_splice helper with (array, index, deleted, addCount).
12050 static void EnqueueSpliceRecord(Handle<JSArray> object,
12052 Handle<JSArray> deleted,
12053 uint32_t add_count) {
12054 Isolate* isolate = object->GetIsolate();
12055 HandleScope scope(isolate);
12056 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
12057 Handle<Object> add_count_object =
12058 isolate->factory()->NewNumberFromUint(add_count);
12060 Handle<Object> args[] =
12061 { object, index_object, deleted, add_count_object };
12063 Execution::Call(isolate,
12064 Handle<JSFunction>(isolate->observers_enqueue_splice()),
12065 isolate->factory()->undefined_value(),
// Notifies the JS-side observation machinery that a compound splice
// operation on |object| is starting (suppresses individual records).
12071 static void BeginPerformSplice(Handle<JSArray> object) {
12072 Isolate* isolate = object->GetIsolate();
12073 HandleScope scope(isolate);
12074 Handle<Object> args[] = { object };
12076 Execution::Call(isolate,
12077 Handle<JSFunction>(isolate->observers_begin_perform_splice()),
12078 isolate->factory()->undefined_value(),
// Counterpart of BeginPerformSplice: marks the compound splice as finished.
12084 static void EndPerformSplice(Handle<JSArray> object) {
12085 Isolate* isolate = object->GetIsolate();
12086 HandleScope scope(isolate);
12087 Handle<Object> args[] = { object };
12089 Execution::Call(isolate,
12090 Handle<JSFunction>(isolate->observers_end_perform_splice()),
12091 isolate->factory()->undefined_value(),
// Sets the length of a JSArray. Fast path delegates straight to the elements
// accessor; for observed arrays it additionally captures removed elements
// beforehand and afterwards emits "delete"/"update" change records plus a
// single splice record. Returns the accessor's result.
12097 MaybeHandle<Object> JSArray::SetElementsLength(
12098 Handle<JSArray> array,
12099 Handle<Object> new_length_handle) {
12100 // We should never end in here with a pixel or external array.
12101 ASSERT(array->AllowsSetElementsLength());
12102 if (!array->map()->is_observed()) {
12103 return array->GetElementsAccessor()->SetLength(array, new_length_handle);
12106 Isolate* isolate = array->GetIsolate();
12107 List<uint32_t> indices;
12108 List<Handle<Object> > old_values;
12109 Handle<Object> old_length_handle(array->length(), isolate);
12110 uint32_t old_length = 0;
12111 CHECK(old_length_handle->ToArrayIndex(&old_length));
12112 uint32_t new_length = 0;
12113 CHECK(new_length_handle->ToArrayIndex(&new_length));
12115 static const PropertyAttributes kNoAttrFilter = NONE;
12116 int num_elements = array->NumberOfLocalElements(kNoAttrFilter);
12117 if (num_elements > 0) {
12118 if (old_length == static_cast<uint32_t>(num_elements)) {
12119 // Simple case for arrays without holes.
// |i + 1 > new_length| avoids unsigned underflow when new_length == 0.
12120 for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
12121 if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
12124 // For sparse arrays, only iterate over existing elements.
12125 // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
12126 // the to-be-removed indices twice.
12127 Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
12128 array->GetLocalElementKeys(*keys, kNoAttrFilter);
12129 while (num_elements-- > 0) {
12130 uint32_t index = NumberToUint32(keys->get(num_elements));
12131 if (index < new_length) break;
12132 if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
12137 Handle<Object> hresult;
12138 ASSIGN_RETURN_ON_EXCEPTION(
12140 array->GetElementsAccessor()->SetLength(array, new_length_handle),
// Re-read the actual new length: the accessor may have clamped it.
12143 CHECK(array->length()->ToArrayIndex(&new_length));
12144 if (old_length == new_length) return hresult;
12146 BeginPerformSplice(array);
12148 for (int i = 0; i < indices.length(); ++i) {
12149 // For deletions where the property was an accessor, old_values[i]
12150 // will be the hole, which instructs EnqueueChangeRecord to elide
12151 // the "oldValue" property.
12152 JSObject::EnqueueChangeRecord(
12153 array, "delete", isolate->factory()->Uint32ToString(indices[i]),
12156 JSObject::EnqueueChangeRecord(
12157 array, "update", isolate->factory()->length_string(),
12158 old_length_handle);
12160 EndPerformSplice(array);
12162 uint32_t index = Min(old_length, new_length);
12163 uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
12164 uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
12165 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
12166 if (delete_count > 0) {
// Rebuild the deleted-elements array for the splice record, skipping
// accessor-backed entries (recorded as holes).
12167 for (int i = indices.length() - 1; i >= 0; i--) {
12168 // Skip deletions where the property was an accessor, leaving holes
12169 // in the array of old values.
12170 if (old_values[i]->IsTheHole()) continue;
12171 JSObject::SetElement(
12172 deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert();
12175 SetProperty(deleted, isolate->factory()->length_string(),
12176 isolate->factory()->NewNumberFromUint(delete_count),
12177 NONE, SLOPPY).Assert();
12180 EnqueueSpliceRecord(array, index, deleted, add_count);
// Linear lookup of a cached prototype transition: returns the map this map
// transitions to when its prototype is set to |prototype|, or a null handle
// when no cached transition exists.
12186 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
12187 Handle<Object> prototype) {
12188 FixedArray* cache = map->GetPrototypeTransitions();
12189 int number_of_transitions = map->NumberOfProtoTransitions();
12190 const int proto_offset =
12191 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
12192 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
12193 const int step = kProtoTransitionElementsPerEntry;
12194 for (int i = 0; i < number_of_transitions; i++) {
12195 if (cache->get(proto_offset + i * step) == *prototype) {
12196 Object* result = cache->get(map_offset + i * step);
12197 return Handle<Map>(Map::cast(result));
12200 return Handle<Map>();
// Caches (prototype -> target_map) in this map's prototype-transition array,
// growing the cache (2x headroom) when full, up to a fixed limit. Shared
// maps are never cached. Returns |map| (possibly unchanged).
12204 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
12205 Handle<Object> prototype,
12206 Handle<Map> target_map) {
12207 ASSERT(target_map->IsMap());
12208 ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
12209 // Don't cache prototype transition if this map is shared.
12210 if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;
12212 const int step = kProtoTransitionElementsPerEntry;
12213 const int header = kProtoTransitionHeaderSize;
12215 Handle<FixedArray> cache(map->GetPrototypeTransitions());
12216 int capacity = (cache->length() - header) / step;
12217 int transitions = map->NumberOfProtoTransitions() + 1;
12219 if (transitions > capacity) {
12220 if (capacity > kMaxCachedPrototypeTransitions) return map;
12222 // Grow array by factor 2 over and above what we need.
12223 cache = FixedArray::CopySize(cache, transitions * 2 * step + header);
12225 SetPrototypeTransitions(map, cache);
12228 // Reload number of transitions as GC might shrink them.
12229 int last = map->NumberOfProtoTransitions();
12230 int entry = header + last * step;
12232 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
12233 cache->set(entry + kProtoTransitionMapOffset, *target_map);
12234 map->SetNumberOfProtoTransitions(last + 1);
// Overwrites every slot of this map's transition array with the hole value
// so stale pointers are not kept alive / are caught by the heap verifier.
12240 void Map::ZapTransitions() {
12241 TransitionArray* transition_array = transitions();
12242 // TODO(mstarzinger): Temporarily use a slower version instead of the faster
12243 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
12244 Object** data = transition_array->data_start();
12245 Object* the_hole = GetHeap()->the_hole_value();
12246 int length = transition_array->length();
12247 for (int i = 0; i < length; i++) {
12248 data[i] = the_hole;
// Overwrites every slot of this map's prototype-transition cache with the
// hole value (bulk version of ZapTransitions, using MemsetPointer).
12253 void Map::ZapPrototypeTransitions() {
12254 FixedArray* proto_transitions = GetPrototypeTransitions();
12255 MemsetPointer(proto_transitions->data_start(),
12256 GetHeap()->the_hole_value(),
12257 proto_transitions->length());
// Registers the (in-progress) compilation |info| as dependent on |map| under
// the given dependency |group|, and records the map on the CompilationInfo so
// the dependency can be removed/committed when compilation finishes.
12262 void Map::AddDependentCompilationInfo(Handle<Map> map,
12263 DependentCode::DependencyGroup group,
12264 CompilationInfo* info) {
12265 Handle<DependentCode> codes =
12266 DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
12267 group, info->object_wrapper());
// Insert may return a new (grown) array; only write back if it changed.
12268 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
12269 info->dependencies(group)->Add(map, info->zone());
// Registers a finished |code| object as dependent on |map| under |group|.
12274 void Map::AddDependentCode(Handle<Map> map,
12275 DependentCode::DependencyGroup group,
12276 Handle<Code> code) {
12277 Handle<DependentCode> codes = DependentCode::Insert(
12278 Handle<DependentCode>(map->dependent_code()), group, code);
// Insert may return a new (grown) array; only write back if it changed.
12279 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
// Registers an IC |stub| as a weak-IC dependent of |map|.  Weak IC stubs are
// chained through their next_code_link, so only the list head lives in the
// dependent-code array.
// NOTE(review): the conditional selecting the slow vs. fast path (based on
// |n|) appears elided in this copy of the source — confirm against upstream.
12284 void Map::AddDependentIC(Handle<Map> map,
12285 Handle<Code> stub) {
// The stub must not already be linked into some other list.
12286 ASSERT(stub->next_code_link()->IsUndefined());
12287 int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
12289 // Slow path: insert the head of the list with possible heap allocation.
12290 Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
12292 // Fast path: link the stub to the existing head of the list without any
12293 // heap allocation.
12295 map->dependent_code()->AddToDependentICList(stub);
// Computes the start index of every dependency group in |entries| by
// delegating to Recompute.
12300 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
12301 Recompute(entries);
// Rebuilds the prefix-sum table of group start indexes: group g's entries
// occupy [start_indexes_[g], start_indexes_[g + 1]) within |entries|.
12305 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
12306 start_indexes_[0] = 0;
12307 for (int g = 1; g <= kGroupCount; g++) {
12308 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
12309 start_indexes_[g] = start_indexes_[g - 1] + count;
// Returns the dependent-code list owned by |object|, picking the accessor
// appropriate for the object's kind as implied by the dependency |group|:
// property cells, allocation sites, or (by default) maps.
12314 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
12315 DependencyGroup group) {
12316 AllowDeferredHandleDereference dependencies_are_safe;
12317 if (group == DependentCode::kPropertyCellChangedGroup) {
12318 return Handle<PropertyCell>::cast(object)->dependent_code();
12319 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
12320 group == DependentCode::kAllocationSiteTransitionChangedGroup) {
12321 return Handle<AllocationSite>::cast(object)->dependent_code();
12323 return Handle<Map>::cast(object)->dependent_code();
// Inserts |object| (a Code object or a CompilationInfo wrapper) into the
// |group| section of |entries|, growing the backing array when needed.
// Returns the array holding the entry, which may be a new, larger array —
// callers must write it back if it differs from the input.
12327 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
12328 DependencyGroup group,
12329 Handle<Object> object) {
12330 GroupStartIndexes starts(*entries);
12331 int start = starts.at(group);
12332 int end = starts.at(group + 1);
12333 int number_of_entries = starts.number_of_entries();
12334 // Check for existing entry to avoid duplicates.
12335 for (int i = start; i < end; i++) {
12336 if (entries->object_at(i) == *object) return entries;
// Grow when full; capacity is padded by 25% once past a small threshold.
12338 if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
12339 int capacity = kCodesStartIndex + number_of_entries + 1;
12340 if (capacity > 5) capacity = capacity * 5 / 4;
12341 Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
12342 FixedArray::CopySize(entries, capacity, TENURED));
12343 // The number of codes can change after GC.
12344 starts.Recompute(*entries);
12345 start = starts.at(group);
12346 end = starts.at(group + 1);
12347 number_of_entries = starts.number_of_entries();
// Clear the old array so it does not keep its entries alive.
12348 for (int i = 0; i < number_of_entries; i++) {
12349 entries->clear_at(i);
12351 // If the old fixed array was empty, we need to reset counters of the
12353 if (number_of_entries == 0) {
12354 for (int g = 0; g < kGroupCount; g++) {
12355 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
// Continue the insertion against the freshly copied array.
12358 entries = new_entries;
// Shift later groups up by one slot, then store the new entry at the end
// of this group and bump the group's count.
12360 entries->ExtendGroup(group);
12361 entries->set_object_at(end, *object);
12362 entries->set_number_of_entries(group, end + 1 - start);
// Replaces the CompilationInfo wrapper entry for |info| (inserted while the
// code was still compiling) with the now-finished code object, within the
// given dependency |group|.
// NOTE(review): the parameter line carrying the finished code (presumably
// `Code* code`) appears elided in this copy — confirm against upstream.
12367 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
12368 CompilationInfo* info,
12370 DisallowHeapAllocation no_gc;
12371 AllowDeferredHandleDereference get_object_wrapper;
12372 Foreign* info_wrapper = *info->object_wrapper();
12373 GroupStartIndexes starts(this);
12374 int start = starts.at(group);
12375 int end = starts.at(group + 1);
// Find the wrapper for this compilation and swap in the code object.
12376 for (int i = start; i < end; i++) {
12377 if (object_at(i) == info_wrapper) {
12378 set_object_at(i, code);
// Debug-only sweep: no entry for |info| may remain in this group.
12384 for (int i = start; i < end; i++) {
12385 ASSERT(is_code_at(i) || compilation_info_at(i) != info);
// Removes the entry for the aborted/finished compilation |info| from |group|,
// compacting the array by cascading the last element of each following group
// into the hole.  No-op if |info| has no entry.
12391 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
12392 CompilationInfo* info) {
12393 DisallowHeapAllocation no_allocation;
12394 AllowDeferredHandleDereference get_object_wrapper;
12395 Foreign* info_wrapper = *info->object_wrapper();
12396 GroupStartIndexes starts(this);
12397 int start = starts.at(group);
12398 int end = starts.at(group + 1);
12399 // Find compilation info wrapper.
12401 for (int i = start; i < end; i++) {
12402 if (object_at(i) == info_wrapper) {
12407 if (info_pos == -1) return; // Not found.
12408 int gap = info_pos;
12409 // Use the last of each group to fill the gap in the previous group.
12410 for (int i = group; i < kGroupCount; i++) {
12411 int last_of_group = starts.at(i + 1) - 1;
12412 ASSERT(last_of_group >= gap);
12413 if (last_of_group == gap) continue;
12414 copy(last_of_group, gap);
12415 gap = last_of_group;
// After cascading, the hole has migrated to the very last used slot.
12417 ASSERT(gap == starts.number_of_entries() - 1);
12418 clear_at(gap); // Clear last gap.
12419 set_number_of_entries(group, end - start - 1);
// Debug-only sweep: no entry for |info| may remain in this group.
12422 for (int i = start; i < end - 1; i++) {
12423 ASSERT(is_code_at(i) || compilation_info_at(i) != info);
// Returns true if |code| appears in the singly-linked list of Code objects
// starting at |head| and chained through next_code_link (undefined-terminated).
12429 static bool CodeListContains(Object* head, Code* code) {
12430 while (!head->IsUndefined()) {
12431 if (head == code) return true;
12432 head = Code::cast(head)->next_code_link();
// Returns true if |code| is registered in |group|.  The weak-IC group stores
// a linked list in its single slot, so it is searched via CodeListContains;
// all other groups are flat arrays searched linearly.
12438 bool DependentCode::Contains(DependencyGroup group, Code* code) {
12439 GroupStartIndexes starts(this);
12440 int start = starts.at(group);
12441 int end = starts.at(group + 1);
12442 if (group == kWeakICGroup) {
12443 return CodeListContains(object_at(start), code);
12445 for (int i = start; i < end; i++) {
12446 if (object_at(i) == code) return true;
// Marks every code object in |group| for deoptimization and aborts any
// still-compiling entries, then removes the whole group from this array.
// Returns whether at least one code object was newly marked.
// NOTE(review): an `Isolate* isolate` parameter line appears elided in this
// copy (the caller below passes one) — confirm against upstream.
12452 bool DependentCode::MarkCodeForDeoptimization(
12454 DependentCode::DependencyGroup group) {
12455 DisallowHeapAllocation no_allocation_scope;
12456 DependentCode::GroupStartIndexes starts(this);
12457 int start = starts.at(group);
12458 int end = starts.at(group + 1);
12459 int code_entries = starts.number_of_entries();
12460 if (start == end) return false;
12462 // Mark all the code that needs to be deoptimized.
12463 bool marked = false;
12464 for (int i = start; i < end; i++) {
12465 if (is_code_at(i)) {
12466 Code* code = code_at(i);
// Only mark once; already-marked code is skipped.
12467 if (!code->marked_for_deoptimization()) {
12468 code->set_marked_for_deoptimization(true);
// Entry is a still-running compilation: abort it instead of marking.
12472 CompilationInfo* info = compilation_info_at(i);
12473 info->AbortDueToDependencyChange();
12476 // Compact the array by moving all subsequent groups to fill in the new holes.
12477 for (int src = end, dst = start; src < code_entries; src++, dst++) {
12480 // Now the holes are at the end of the array, zap them for heap-verifier.
12481 int removed = end - start;
12482 for (int i = code_entries - removed; i < code_entries; i++) {
// The group is now empty.
12485 set_number_of_entries(group, 0);
// Marks all code in |group| for deoptimization and, if anything was marked,
// triggers deoptimization of the marked code.
// NOTE(review): an `Isolate* isolate` parameter line appears elided in this
// copy (|isolate| is used in the body) — confirm against upstream.
12490 void DependentCode::DeoptimizeDependentCodeGroup(
12492 DependentCode::DependencyGroup group) {
12493 ASSERT(AllowCodeDependencyChange::IsAllowed());
12494 DisallowHeapAllocation no_allocation_scope;
12495 bool marked = MarkCodeForDeoptimization(isolate, group);
12497 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
// Prepends |stub| to the weak-IC linked list whose head lives in the single
// kWeakICGroup slot; the list is chained through next_code_link.
12501 void DependentCode::AddToDependentICList(Handle<Code> stub) {
12502 DisallowHeapAllocation no_heap_allocation;
12503 GroupStartIndexes starts(this);
12504 int i = starts.at(kWeakICGroup);
12505 stub->set_next_code_link(object_at(i));
12506 set_object_at(i, *stub);
// Returns a map identical to |map| but with |prototype| as its prototype,
// using the prototype-transition cache when possible and creating (and
// caching) a fresh copy on a cache miss.
12510 Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
12511 Handle<Object> prototype) {
12512 Handle<Map> new_map = GetPrototypeTransition(map, prototype);
12513 if (new_map.is_null()) {
12514 new_map = Copy(map);
12515 PutPrototypeTransition(map, prototype, new_map);
12516 new_map->set_prototype(*prototype);
// Sets |value| as the [[Prototype]] of |object|, enforcing the invariants of
// ES5 8.6.2: non-JSReceiver/non-null values are silently ignored, setting on
// a non-extensible object throws a TypeError, and prototype cycles throw.
// When |skip_hidden_prototypes| is set, the write is applied to the first
// object in the chain whose prototype is not hidden.  Returns |value| on
// success or an exception via MaybeHandle.
12522 MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
12523 Handle<Object> value,
12524 bool skip_hidden_prototypes) {
// Debug aid: the object's size must not change across this operation
// (checked by the ASSERT at the end).
12526 int size = object->Size();
12529 Isolate* isolate = object->GetIsolate();
12530 Heap* heap = isolate->heap();
12531 // Silently ignore the change if value is not a JSObject or null.
12532 // SpiderMonkey behaves this way.
12533 if (!value->IsJSReceiver() && !value->IsNull()) return value;
12535 // From 8.6.2 Object Internal Methods
12537 // In addition, if [[Extensible]] is false the value of the [[Class]] and
12538 // [[Prototype]] internal properties of the object may not be modified.
12540 // Implementation specific extensions that modify [[Class]], [[Prototype]]
12541 // or [[Extensible]] must not violate the invariants defined in the preceding
12543 if (!object->map()->is_extensible()) {
12544 Handle<Object> args[] = { object };
12545 Handle<Object> error = isolate->factory()->NewTypeError(
12546 "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
12547 return isolate->Throw<Object>(error);
12550 // Before we can set the prototype we need to be sure
12551 // prototype cycles are prevented.
12552 // It is sufficient to validate that the receiver is not in the new prototype
12554 for (Object* pt = *value;
12555 pt != heap->null_value();
12556 pt = pt->GetPrototype(isolate)) {
12557 if (JSReceiver::cast(pt) == *object) {
12559 Handle<Object> error = isolate->factory()->NewError(
12560 "cyclic_proto", HandleVector<Object>(NULL, 0));
12561 return isolate->Throw<Object>(error);
// Remember whether the old chain already had dictionary-only elements so we
// can detect a transition to that state after the write (see below).
12565 bool dictionary_elements_in_chain =
12566 object->map()->DictionaryElementsInPrototypeChainOnly();
12567 Handle<JSObject> real_receiver = object;
12569 if (skip_hidden_prototypes) {
12570 // Find the first object in the chain whose prototype object is not
12571 // hidden and set the new prototype on that object.
12572 Object* current_proto = real_receiver->GetPrototype();
12573 while (current_proto->IsJSObject() &&
12574 JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
12575 real_receiver = handle(JSObject::cast(current_proto), isolate);
12576 current_proto = current_proto->GetPrototype(isolate);
12580 // Set the new prototype of the object.
12581 Handle<Map> map(real_receiver->map());
12583 // Nothing to do if prototype is already set.
12584 if (map->prototype() == *value) return value;
// New prototypes get prepared for fast prototype use before installation.
12586 if (value->IsJSObject()) {
12587 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
12590 Handle<Map> new_map = Map::TransitionToPrototype(map, value);
12591 ASSERT(new_map->prototype() == *value);
12592 JSObject::MigrateToMap(real_receiver, new_map);
12594 if (!dictionary_elements_in_chain &&
12595 new_map->DictionaryElementsInPrototypeChainOnly()) {
12596 // If the prototype chain didn't previously have element callbacks, then
12597 // KeyedStoreICs need to be cleared to ensure any that involve this
12599 object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Cached instanceof results may reference the old prototype chain.
12602 heap->ClearInstanceofCache();
12603 ASSERT(size == object->Size());
// Arguments-based overload: ensures |object|'s elements kind can hold the
// |arg_count| stack arguments starting at |first_arg|.
// NOTE(review): the parameter line carrying the arguments object (presumably
// `Arguments* args,`) appears elided in this copy — confirm against upstream.
12608 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
12610 uint32_t first_arg,
12611 uint32_t arg_count,
12612 EnsureElementsMode mode) {
12613 // Elements in |Arguments| are ordered backwards (because they're on the
12614 // stack), but the method that's called here iterates over them in forward
// Hence the pointer arithmetic: start at the last argument's address.
12616 return EnsureCanContainElements(
12617 object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
// Returns the AccessorPair for the own property |name| on |object|, if the
// property exists and is an accessor; otherwise an empty MaybeHandle.
// Array-index names are routed to the element variant.
12621 MaybeHandle<AccessorPair> JSObject::GetLocalPropertyAccessorPair(
12622 Handle<JSObject> object,
12623 Handle<Name> name) {
12624 uint32_t index = 0;
12625 if (name->AsArrayIndex(&index)) {
12626 return GetLocalElementAccessorPair(object, index);
12629 Isolate* isolate = object->GetIsolate();
12630 LookupResult lookup(isolate);
12631 object->LocalLookupRealNamedProperty(name, &lookup);
// Only callback properties backed by an AccessorPair qualify.
12633 if (lookup.IsPropertyCallbacks() &&
12634 lookup.GetCallbackObject()->IsAccessorPair()) {
12635 return handle(AccessorPair::cast(lookup.GetCallbackObject()), isolate);
12637 return MaybeHandle<AccessorPair>();
// Returns the AccessorPair for the own element |index| on |object|, or an
// empty MaybeHandle.  Global proxies forward to their global object;
// objects with an indexed interceptor never report accessors here.
// NOTE(review): the `uint32_t index) {` parameter line appears elided in
// this copy — confirm against upstream.
12641 MaybeHandle<AccessorPair> JSObject::GetLocalElementAccessorPair(
12642 Handle<JSObject> object,
12644 if (object->IsJSGlobalProxy()) {
12645 Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
12646 if (proto->IsNull()) return MaybeHandle<AccessorPair>();
12647 ASSERT(proto->IsJSGlobalObject());
12648 return GetLocalElementAccessorPair(Handle<JSObject>::cast(proto), index);
12651 // Check for lookup interceptor.
12652 if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();
12654 return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
// Stores |value| at |index| via the object's indexed interceptor, if the
// interceptor defines a setter and it intercepts the store (non-empty
// result).  Otherwise falls through to SetElementWithoutInterceptor.
12658 MaybeHandle<Object> JSObject::SetElementWithInterceptor(
12659 Handle<JSObject> object,
12661 Handle<Object> value,
12662 PropertyAttributes attributes,
12663 StrictMode strict_mode,
12664 bool check_prototype,
12665 SetPropertyMode set_mode) {
12666 Isolate* isolate = object->GetIsolate();
12668 // Make sure that the top context does not change when doing
12669 // callbacks or interceptor calls.
12670 AssertNoContextChange ncc(isolate);
12672 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
12673 if (!interceptor->setter()->IsUndefined()) {
12674 v8::IndexedPropertySetterCallback setter =
12675 v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
12677 ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
12678 PropertyCallbackArguments args(isolate, interceptor->data(), *object,
12680 v8::Handle<v8::Value> result =
12681 args.Call(setter, index, v8::Utils::ToLocal(value));
// The embedder callback may have scheduled an exception.
12682 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the store.
12683 if (!result.IsEmpty()) return value;
12686 return SetElementWithoutInterceptor(object, index, value, attributes,
// Loads element |index| through the accessor |structure|, which is one of:
// an ExecutableAccessorInfo (API-style getter callback), an AccessorPair
// (__defineGetter__-style JS getter), or a DeclaredAccessorInfo.  |receiver|
// is the original receiver of the load, |holder| the object holding the
// accessor.  Returns the loaded value or an empty MaybeHandle on exception.
12693 MaybeHandle<Object> JSObject::GetElementWithCallback(
12694 Handle<JSObject> object,
12695 Handle<Object> receiver,
12696 Handle<Object> structure,
12698 Handle<Object> holder) {
12699 Isolate* isolate = object->GetIsolate();
12700 ASSERT(!structure->IsForeign());
12701 // api style callbacks.
12702 if (structure->IsExecutableAccessorInfo()) {
12703 Handle<ExecutableAccessorInfo> data =
12704 Handle<ExecutableAccessorInfo>::cast(structure);
12705 Object* fun_obj = data->getter();
12706 v8::AccessorGetterCallback call_fun =
12707 v8::ToCData<v8::AccessorGetterCallback>(fun_obj);
// A missing getter callback reads as undefined.
12708 if (call_fun == NULL) return isolate->factory()->undefined_value();
12709 Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
// API getters receive the element index as a string key.
12710 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12711 Handle<String> key = isolate->factory()->NumberToString(number);
12712 LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
12713 PropertyCallbackArguments
12714 args(isolate, data->data(), *receiver, *holder_handle);
12715 v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
12716 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
12717 if (result.IsEmpty()) return isolate->factory()->undefined_value();
12718 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
12719 result_internal->VerifyApiCallResultType();
12720 // Rebox handle before return.
12721 return handle(*result_internal, isolate);
12724 // __defineGetter__ callback
12725 if (structure->IsAccessorPair()) {
12726 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
12728 if (getter->IsSpecFunction()) {
12729 // TODO(rossberg): nicer would be to cast to some JSCallable here...
12730 return GetPropertyWithDefinedGetter(
12731 object, receiver, Handle<JSReceiver>::cast(getter));
12733 // Getter is not a function.
12734 return isolate->factory()->undefined_value();
12737 if (structure->IsDeclaredAccessorInfo()) {
12738 return GetDeclaredAccessorProperty(
12739 receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
// Unreachable for known accessor kinds; empty handle otherwise.
12743 return MaybeHandle<Object>();
// Stores |value| at |index| through the accessor |structure| (the setter
// counterpart of GetElementWithCallback): API-style setter callback,
// AccessorPair (JS setter), or DeclaredAccessorInfo.  An AccessorPair with
// no function setter throws a TypeError in strict mode and is a silent
// no-op in sloppy mode.
12747 MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
12748 Handle<Object> structure,
12750 Handle<Object> value,
12751 Handle<JSObject> holder,
12752 StrictMode strict_mode) {
12753 Isolate* isolate = object->GetIsolate();
12755 // We should never get here to initialize a const with the hole
12756 // value since a const declaration would conflict with the setter.
12757 ASSERT(!value->IsTheHole());
12758 ASSERT(!structure->IsForeign());
12759 if (structure->IsExecutableAccessorInfo()) {
12760 // api style callbacks
12761 Handle<ExecutableAccessorInfo> data =
12762 Handle<ExecutableAccessorInfo>::cast(structure);
12763 Object* call_obj = data->setter();
12764 v8::AccessorSetterCallback call_fun =
12765 v8::ToCData<v8::AccessorSetterCallback>(call_obj);
// A missing setter callback silently succeeds.
12766 if (call_fun == NULL) return value;
// API setters receive the element index as a string key.
12767 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12768 Handle<String> key(isolate->factory()->NumberToString(number));
12769 LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
12770 PropertyCallbackArguments
12771 args(isolate, data->data(), *object, *holder);
12772 args.Call(call_fun,
12773 v8::Utils::ToLocal(key),
12774 v8::Utils::ToLocal(value));
12775 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
12779 if (structure->IsAccessorPair()) {
12780 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
12781 if (setter->IsSpecFunction()) {
12782 // TODO(rossberg): nicer would be to cast to some JSCallable here...
12783 return SetPropertyWithDefinedSetter(
12784 object, Handle<JSReceiver>::cast(setter), value);
// No callable setter: sloppy mode ignores the store, strict mode throws.
12786 if (strict_mode == SLOPPY) return value;
12787 Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
12788 Handle<Object> args[2] = { key, holder };
12789 Handle<Object> error = isolate->factory()->NewTypeError(
12790 "no_setter_in_callback", HandleVector(args, 2));
12791 return isolate->Throw<Object>(error);
12795 // TODO(dcarney): Handle correctly.
12796 if (structure->IsDeclaredAccessorInfo()) return value;
// Unreachable for known accessor kinds; empty handle otherwise.
12799 return MaybeHandle<Object>();
// Returns true if this object has sloppy-arguments elements whose backing
// store (slot 1 of the elements array) is NOT a dictionary.
12803 bool JSObject::HasFastArgumentsElements() {
12804 Heap* heap = GetHeap();
12805 if (!elements()->IsFixedArray()) return false;
12806 FixedArray* elements = FixedArray::cast(this->elements());
// Not an arguments object at all unless it uses the sloppy-arguments map.
12807 if (elements->map() != heap->sloppy_arguments_elements_map()) {
12810 FixedArray* arguments = FixedArray::cast(elements->get(1));
12811 return !arguments->IsDictionary();
// Returns true if this object has sloppy-arguments elements whose backing
// store (slot 1 of the elements array) IS a dictionary.  Mirror image of
// HasFastArgumentsElements above.
12815 bool JSObject::HasDictionaryArgumentsElements() {
12816 Heap* heap = GetHeap();
12817 if (!elements()->IsFixedArray()) return false;
12818 FixedArray* elements = FixedArray::cast(this->elements());
12819 if (elements->map() != heap->sloppy_arguments_elements_map()) {
12822 FixedArray* arguments = FixedArray::cast(elements->get(1));
12823 return arguments->IsDictionary();
12827 // Adding n elements in fast case is O(n*n).
12828 // Note: revisit design to have dual undefined values to capture absent
// Stores |value| at |index| into fast (Smi/object) or fast-arguments
// elements, handling: prototype-chain setter interception, transitions to
// holey kinds when the store introduces a hole, growth of the backing store
// (or normalization to dictionary elements when growth is unreasonable),
// Smi-to-double and Smi-to-object kind transitions, and JSArray length
// updates.  Returns |value| or an exception via MaybeHandle.
12830 MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
12832 Handle<Object> value,
12833 StrictMode strict_mode,
12834 bool check_prototype) {
12835 ASSERT(object->HasFastSmiOrObjectElements() ||
12836 object->HasFastArgumentsElements());
12838 Isolate* isolate = object->GetIsolate();
12840 // Array optimizations rely on the prototype lookups of Array objects always
12841 // returning undefined. If there is a store to the initial prototype object,
12842 // make sure all of these optimizations are invalidated.
12843 if (isolate->is_initial_object_prototype(*object) ||
12844 isolate->is_initial_array_prototype(*object)) {
12845 object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
12846 DependentCode::kElementsCantBeAddedGroup);
// For sloppy arguments, the real backing store is in slot 1; otherwise make
// sure the elements are writable (not shared copy-on-write).
12849 Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
12850 if (backing_store->map() ==
12851 isolate->heap()->sloppy_arguments_elements_map()) {
12852 backing_store = handle(FixedArray::cast(backing_store->get(1)));
12854 backing_store = EnsureWritableFastElements(object);
12856 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
// A store to a nonexistent element may be captured by a setter on the
// prototype chain.
12858 if (check_prototype &&
12859 (index >= capacity || backing_store->get(index)->IsTheHole())) {
12861 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12862 object, index, value, &found, strict_mode);
12863 if (found) return result;
12866 uint32_t new_capacity = capacity;
12867 // Check if the length property of this object needs to be updated.
12868 uint32_t array_length = 0;
12869 bool must_update_array_length = false;
12870 bool introduces_holes = true;
12871 if (object->IsJSArray()) {
12872 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
// Writing exactly at the current length appends without creating holes.
12873 introduces_holes = index > array_length;
12874 if (index >= array_length) {
12875 must_update_array_length = true;
12876 array_length = index + 1;
12879 introduces_holes = index >= capacity;
12882 // If the array is growing, and it's not growth by a single element at the
12883 // end, make sure that the ElementsKind is HOLEY.
12884 ElementsKind elements_kind = object->GetElementsKind();
12885 if (introduces_holes &&
12886 IsFastElementsKind(elements_kind) &&
12887 !IsFastHoleyElementsKind(elements_kind)) {
12888 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12889 TransitionElementsKind(object, transitioned_kind);
12892 // Check if the capacity of the backing store needs to be increased, or if
12893 // a transition to slow elements is necessary.
12894 if (index >= capacity) {
12895 bool convert_to_slow = true;
// Gaps larger than kMaxGap always force dictionary (slow) elements.
12896 if ((index - capacity) < kMaxGap) {
12897 new_capacity = NewElementsCapacity(index + 1);
12898 ASSERT(new_capacity > index);
12899 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12900 convert_to_slow = false;
12903 if (convert_to_slow) {
12904 NormalizeElements(object);
12905 return SetDictionaryElement(object, index, value, NONE, strict_mode,
12909 // Convert to fast double elements if appropriate.
12910 if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12911 // Consider fixing the boilerplate as well if we have one.
12912 ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12913 ? FAST_HOLEY_DOUBLE_ELEMENTS
12914 : FAST_DOUBLE_ELEMENTS;
12916 UpdateAllocationSite(object, to_kind);
12918 SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12919 FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12920 JSObject::ValidateElements(object);
12923 // Change elements kind from Smi-only to generic FAST if necessary.
12924 if (object->HasFastSmiElements() && !value->IsSmi()) {
12925 ElementsKind kind = object->HasFastHoleyElements()
12926 ? FAST_HOLEY_ELEMENTS
12929 UpdateAllocationSite(object, kind);
12930 Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12931 JSObject::MigrateToMap(object, new_map);
12932 ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
12934 // Increase backing store capacity if that's been decided previously.
12935 if (new_capacity != capacity) {
12936 SetFastElementsCapacitySmiMode smi_mode =
12937 value->IsSmi() && object->HasFastSmiElements()
12938 ? kAllowSmiElements
12939 : kDontAllowSmiElements;
12940 Handle<FixedArray> new_elements =
12941 SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12943 new_elements->set(index, *value);
12944 JSObject::ValidateElements(object);
12948 // Finally, set the new element and length.
12949 ASSERT(object->elements()->IsFixedArray());
12950 backing_store->set(index, *value);
12951 if (must_update_array_length) {
12952 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
// Stores |value| at |index| into dictionary (slow-mode) elements, covering:
// updates to existing entries (including CALLBACKS dispatch, read-only
// checks, and aliased sloppy-arguments slots), insertion of new entries
// (with prototype-setter interception and extensibility checks), JSArray
// length maintenance, and an opportunistic conversion back to fast elements.
// Returns |value| or an exception via MaybeHandle.
12958 MaybeHandle<Object> JSObject::SetDictionaryElement(
12959 Handle<JSObject> object,
12961 Handle<Object> value,
12962 PropertyAttributes attributes,
12963 StrictMode strict_mode,
12964 bool check_prototype,
12965 SetPropertyMode set_mode) {
12966 ASSERT(object->HasDictionaryElements() ||
12967 object->HasDictionaryArgumentsElements());
12968 Isolate* isolate = object->GetIsolate();
12970 // Insert element in the dictionary.
// For sloppy arguments, the dictionary lives in slot 1 of the elements.
12971 Handle<FixedArray> elements(FixedArray::cast(object->elements()));
12972 bool is_arguments =
12973 (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
12974 Handle<SeededNumberDictionary> dictionary(is_arguments
12975 ? SeededNumberDictionary::cast(elements->get(1))
12976 : SeededNumberDictionary::cast(*elements));
12978 int entry = dictionary->FindEntry(index);
12979 if (entry != SeededNumberDictionary::kNotFound) {
// Existing entry: update in place.
12980 Handle<Object> element(dictionary->ValueAt(entry), isolate);
12981 PropertyDetails details = dictionary->DetailsAt(entry);
12982 if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
12983 return SetElementWithCallback(object, element, index, value, object,
12986 dictionary->UpdateMaxNumberKey(index);
12987 // If a value has not been initialized we allow writing to it even if it
12988 // is read-only (a declared const that has not been initialized). If a
12989 // value is being defined we skip attribute checks completely.
12990 if (set_mode == DEFINE_PROPERTY) {
12991 details = PropertyDetails(
12992 attributes, NORMAL, details.dictionary_index());
12993 dictionary->DetailsAtPut(entry, details);
12994 } else if (details.IsReadOnly() && !element->IsTheHole()) {
// Writing a read-only element: silent in sloppy mode, throws in strict.
12995 if (strict_mode == SLOPPY) {
12996 return isolate->factory()->undefined_value();
12998 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12999 Handle<Object> args[2] = { number, object };
13000 Handle<Object> error =
13001 isolate->factory()->NewTypeError("strict_read_only_property",
13002 HandleVector(args, 2));
13003 return isolate->Throw<Object>(error);
13006 // Elements of the arguments object in slow mode might be slow aliases.
13007 if (is_arguments && element->IsAliasedArgumentsEntry()) {
// An aliased entry mirrors a context slot; write through to the context.
13008 Handle<AliasedArgumentsEntry> entry =
13009 Handle<AliasedArgumentsEntry>::cast(element);
13010 Handle<Context> context(Context::cast(elements->get(0)));
13011 int context_index = entry->aliased_context_slot();
13012 ASSERT(!context->get(context_index)->IsTheHole());
13013 context->set(context_index, *value);
13014 // For elements that are still writable we keep slow aliasing.
13015 if (!details.IsReadOnly()) value = element;
13017 dictionary->ValueAtPut(entry, *value);
13020 // Index not already used. Look for an accessor in the prototype chain.
13022 if (check_prototype) {
13024 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
13025 object, index, value, &found, strict_mode);
13026 if (found) return result;
13029 // When we set the is_extensible flag to false we always force the
13030 // element into dictionary mode (and force them to stay there).
13031 if (!object->map()->is_extensible()) {
13032 if (strict_mode == SLOPPY) {
13033 return isolate->factory()->undefined_value();
13035 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
13036 Handle<String> name = isolate->factory()->NumberToString(number);
13037 Handle<Object> args[1] = { name };
13038 Handle<Object> error =
13039 isolate->factory()->NewTypeError("object_not_extensible",
13040 HandleVector(args, 1));
13041 return isolate->Throw<Object>(error);
// Insert the new entry; AddNumberEntry may return a grown dictionary.
13045 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
13046 Handle<SeededNumberDictionary> new_dictionary =
13047 SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
13049 if (*dictionary != *new_dictionary) {
13050 if (is_arguments) {
13051 elements->set(1, *new_dictionary);
13053 object->set_elements(*new_dictionary);
13055 dictionary = new_dictionary;
13059 // Update the array length if this JSObject is an array.
13060 if (object->IsJSArray()) {
13061 JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
13065 // Attempt to put this object back in fast case.
13066 if (object->ShouldConvertToFastElements()) {
13067 uint32_t new_length = 0;
13068 if (object->IsJSArray()) {
13069 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
13071 new_length = dictionary->max_number_key() + 1;
13073 SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
13074 ? kAllowSmiElements
13075 : kDontAllowSmiElements;
13076 bool has_smi_only_elements = false;
13077 bool should_convert_to_fast_double_elements =
13078 object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
13079 if (has_smi_only_elements) {
13080 smi_mode = kForceSmiElements;
13083 if (should_convert_to_fast_double_elements) {
13084 SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
13086 SetFastElementsCapacityAndLength(object, new_length, new_length,
13089 JSObject::ValidateElements(object);
13091 if (FLAG_trace_normalization) {
13092 PrintF("Object elements are fast case again:\n");
// Stores |value| at |index| into fast-double elements: intercepts via
// prototype setters when storing past the end or into a hole, falls back to
// generic fast elements when |value| is not a number, transitions to a
// holey kind when the store introduces holes, grows the backing store for
// small gaps, and normalizes to dictionary elements otherwise.  Returns
// |value| or an exception via MaybeHandle.
13100 MaybeHandle<Object> JSObject::SetFastDoubleElement(
13101 Handle<JSObject> object,
13103 Handle<Object> value,
13104 StrictMode strict_mode,
13105 bool check_prototype) {
13106 ASSERT(object->HasFastDoubleElements());
13108 Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
13109 uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
13111 // If storing to an element that isn't in the array, pass the store request
13112 // up the prototype chain before storing in the receiver's elements.
13113 if (check_prototype &&
13114 (index >= elms_length ||
13115 Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
13117 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
13118 object, index, value, &found, strict_mode);
13119 if (found) return result;
13122 // If the value object is not a heap number, switch to fast elements and try
// Determine whether this store creates a hole (see JSArray length logic in
// SetFastElement above).
13124 bool value_is_smi = value->IsSmi();
13125 bool introduces_holes = true;
13126 uint32_t length = elms_length;
13127 if (object->IsJSArray()) {
13128 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
13129 introduces_holes = index > length;
13131 introduces_holes = index >= elms_length;
// Non-numbers cannot live in a double array: transition to generic fast
// elements and retry through SetFastElement.
13134 if (!value->IsNumber()) {
13135 SetFastElementsCapacityAndLength(object, elms_length, length,
13136 kDontAllowSmiElements);
13137 Handle<Object> result;
13138 ASSIGN_RETURN_ON_EXCEPTION(
13139 object->GetIsolate(), result,
13140 SetFastElement(object, index, value, strict_mode, check_prototype),
13142 JSObject::ValidateElements(object);
13146 double double_value = value_is_smi
13147 ? static_cast<double>(Handle<Smi>::cast(value)->value())
13148 : Handle<HeapNumber>::cast(value)->value();
13150 // If the array is growing, and it's not growth by a single element at the
13151 // end, make sure that the ElementsKind is HOLEY.
13152 ElementsKind elements_kind = object->GetElementsKind();
13153 if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
13154 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
13155 TransitionElementsKind(object, transitioned_kind);
13158 // Check whether there is extra space in the fixed array.
13159 if (index < elms_length) {
13160 Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
13161 elms->set(index, double_value);
13162 if (object->IsJSArray()) {
13163 // Update the length of the array if needed.
13164 uint32_t array_length = 0;
13166 Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
13167 if (index >= array_length) {
13168 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
13174 // Allow gap in fast case.
13175 if ((index - elms_length) < kMaxGap) {
13176 // Try allocating extra space.
13177 int new_capacity = NewElementsCapacity(index+1);
13178 if (!object->ShouldConvertToSlowElements(new_capacity)) {
13179 ASSERT(static_cast<uint32_t>(new_capacity) > index);
13180 SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
13181 FixedDoubleArray::cast(object->elements())->set(index, double_value);
13182 JSObject::ValidateElements(object);
13187 // Otherwise default to slow case.
13188 ASSERT(object->HasFastDoubleElements());
13189 ASSERT(object->map()->has_fast_double_elements());
13190 ASSERT(object->elements()->IsFixedDoubleArray() ||
13191 object->elements()->length() == 0);
// Large gap or oversized backing store: normalize and store via the
// dictionary path.
13193 NormalizeElements(object);
13194 ASSERT(object->HasDictionaryElements());
13195 return SetElement(object, index, value, NONE, strict_mode, check_prototype);
// Generic element store on any JSReceiver: proxies are dispatched to
// their handler trap, everything else goes to JSObject::SetElement.
13199 MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
13201 Handle<Object> value,
13202 PropertyAttributes attributes,
13203 StrictMode strict_mode) {
13204 if (object->IsJSProxy()) {
13205 return JSProxy::SetElementWithHandler(
13206 Handle<JSProxy>::cast(object), object, index, value, strict_mode);
13208 return JSObject::SetElement(
13209 Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
// Stores an element on the object itself: check_prototype is false, so
// prototype-chain setters are not consulted. Not valid for external
// array elements (asserted below).
13213 MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
13215 Handle<Object> value,
13216 StrictMode strict_mode) {
13217 ASSERT(!object->HasExternalArrayElements());
13218 return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
// Main element-store entry point for JSObjects. Handles, in order:
// coercion of values stored into typed arrays, access checks, global
// proxy forwarding, the external-array redefinition error, attribute
// normalization, and finally the actual store — with Object.observe
// change records (add/update/reconfigure/splice) emitted when the map
// is observed.
// NOTE(review): elided listing — some lines between those shown are
// missing from this view.
13222 MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
13224 Handle<Object> value,
13225 PropertyAttributes attributes,
13226 StrictMode strict_mode,
13227 bool check_prototype,
13228 SetPropertyMode set_mode) {
13229 Isolate* isolate = object->GetIsolate();
13231 if (object->HasExternalArrayElements() ||
13232 object->HasFixedTypedArrayElements()) {
13233 // TODO(ningxin): Throw an error if setting a Float32x4Array element
13234 // while the value is not Float32x4Object.
// Values stored into typed arrays are coerced to number first, except
// SIMD values and undefined, which the element writers handle themselves.
13235 if (!value->IsNumber() && !value->IsFloat32x4() && !value->IsFloat64x2() &&
13236 !value->IsInt32x4() && !value->IsUndefined()) {
13237 ASSIGN_RETURN_ON_EXCEPTION(
13239 Execution::ToNumber(isolate, value), Object);
13243 // Check access rights if needed.
13244 if (object->IsAccessCheckNeeded()) {
13245 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) {
13246 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
13247 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Global proxies delegate the store to the actual global object.
13252 if (object->IsJSGlobalProxy()) {
13253 Handle<Object> proto(object->GetPrototype(), isolate);
13254 if (proto->IsNull()) return value;
13255 ASSERT(proto->IsJSGlobalObject());
13256 return SetElement(Handle<JSObject>::cast(proto), index, value, attributes,
13262 // Don't allow element properties to be redefined for external arrays.
13263 if ((object->HasExternalArrayElements() ||
13264 object->HasFixedTypedArrayElements()) &&
13265 set_mode == DEFINE_PROPERTY) {
13266 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
13267 Handle<Object> args[] = { object, number };
13268 Handle<Object> error = isolate->factory()->NewTypeError(
13269 "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
13270 return isolate->Throw<Object>(error);
13273 // Normalize the elements to enable attributes on the property.
13274 if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
13275 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
13276 // Make sure that we never go back to fast case.
13277 dictionary->set_requires_slow_elements();
// Fast path: unobserved objects store directly, with or without an
// indexed interceptor.
13280 if (!object->map()->is_observed()) {
13281 return object->HasIndexedInterceptor()
13282 ? SetElementWithInterceptor(object, index, value, attributes,
13283 strict_mode, check_prototype, set_mode)
13284 : SetElementWithoutInterceptor(object, index, value, attributes,
13285 strict_mode, check_prototype, set_mode);
// Observed path: capture the pre-store state so change records can be
// computed after the store.
13288 PropertyAttributes old_attributes =
13289 JSReceiver::GetLocalElementAttribute(object, index);
13290 Handle<Object> old_value = isolate->factory()->the_hole_value();
13291 Handle<Object> old_length_handle;
13292 Handle<Object> new_length_handle;
13294 if (old_attributes != ABSENT) {
13295 if (GetLocalElementAccessorPair(object, index).is_null()) {
13296 old_value = Object::GetElement(isolate, object, index).ToHandleChecked();
13298 } else if (object->IsJSArray()) {
13299 // Store old array length in case adding an element grows the array.
13300 old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
13304 // Check for lookup interceptor
13305 Handle<Object> result;
13306 ASSIGN_RETURN_ON_EXCEPTION(
13308 object->HasIndexedInterceptor()
13309 ? SetElementWithInterceptor(
13310 object, index, value, attributes,
13311 strict_mode, check_prototype, set_mode)
13312 : SetElementWithoutInterceptor(
13313 object, index, value, attributes,
13314 strict_mode, check_prototype, set_mode),
// Post-store: compare old/new attributes and values to decide which
// Object.observe records to enqueue.
13317 Handle<String> name = isolate->factory()->Uint32ToString(index);
13318 PropertyAttributes new_attributes = GetLocalElementAttribute(object, index);
13319 if (old_attributes == ABSENT) {
13320 if (object->IsJSArray() &&
13321 !old_length_handle->SameValue(
13322 Handle<JSArray>::cast(object)->length())) {
// The store grew the array: emit "add" + length "update" wrapped in a
// splice so observers see one coherent mutation.
13323 new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
13325 uint32_t old_length = 0;
13326 uint32_t new_length = 0;
13327 CHECK(old_length_handle->ToArrayIndex(&old_length));
13328 CHECK(new_length_handle->ToArrayIndex(&new_length));
13330 BeginPerformSplice(Handle<JSArray>::cast(object));
13331 EnqueueChangeRecord(object, "add", name, old_value);
13332 EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
13333 old_length_handle);
13334 EndPerformSplice(Handle<JSArray>::cast(object));
13335 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
13336 EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
13337 new_length - old_length);
13339 EnqueueChangeRecord(object, "add", name, old_value);
13341 } else if (old_value->IsTheHole()) {
// Previously an accessor (old_value stayed the hole): reconfigure.
13342 EnqueueChangeRecord(object, "reconfigure", name, old_value);
13344 Handle<Object> new_value =
13345 Object::GetElement(isolate, object, index).ToHandleChecked();
13346 bool value_changed = !old_value->SameValue(*new_value);
13347 if (old_attributes != new_attributes) {
13348 if (!value_changed) old_value = isolate->factory()->the_hole_value();
13349 EnqueueChangeRecord(object, "reconfigure", name, old_value);
13350 } else if (value_changed) {
13351 EnqueueChangeRecord(object, "update", name, old_value);
// Performs the actual element store once interceptors/observation have
// been handled, dispatching on the receiver's ElementsKind: fast
// (smi/object/double), typed arrays, dictionary, and sloppy-arguments
// (which may alias context slots via the parameter map).
// NOTE(review): elided listing — closing braces and some statements
// between the visible lines are not shown here.
13359 MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
13360 Handle<JSObject> object,
13362 Handle<Object> value,
13363 PropertyAttributes attributes,
13364 StrictMode strict_mode,
13365 bool check_prototype,
13366 SetPropertyMode set_mode) {
// Non-default attributes are only legal on dictionary-mode elements.
13367 ASSERT(object->HasDictionaryElements() ||
13368 object->HasDictionaryArgumentsElements() ||
13369 (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
13370 Isolate* isolate = object->GetIsolate();
13371 if (FLAG_trace_external_array_abuse &&
13372 IsExternalArrayElementsKind(object->GetElementsKind())) {
13373 CheckArrayAbuse(object, "external elements write", index);
13375 if (FLAG_trace_js_array_abuse &&
13376 !IsExternalArrayElementsKind(object->GetElementsKind())) {
13377 if (object->IsJSArray()) {
13378 CheckArrayAbuse(object, "elements write", index, true);
13381 switch (object->GetElementsKind()) {
13382 case FAST_SMI_ELEMENTS:
13383 case FAST_ELEMENTS:
13384 case FAST_HOLEY_SMI_ELEMENTS:
13385 case FAST_HOLEY_ELEMENTS:
13386 return SetFastElement(object, index, value, strict_mode, check_prototype);
13387 case FAST_DOUBLE_ELEMENTS:
13388 case FAST_HOLEY_DOUBLE_ELEMENTS:
13389 return SetFastDoubleElement(object, index, value, strict_mode,
// Expands to one case per typed-array element kind, external and fixed.
13392 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13393 case EXTERNAL_##TYPE##_ELEMENTS: { \
13394 Handle<External##Type##Array> array( \
13395 External##Type##Array::cast(object->elements())); \
13396 return External##Type##Array::SetValue(array, index, value); \
13398 case TYPE##_ELEMENTS: { \
13399 Handle<Fixed##Type##Array> array( \
13400 Fixed##Type##Array::cast(object->elements())); \
13401 return Fixed##Type##Array::SetValue(array, index, value); \
13404 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13406 #undef TYPED_ARRAY_CASE
13408 case DICTIONARY_ELEMENTS:
13409 return SetDictionaryElement(object, index, value, attributes, strict_mode,
13412 case SLOPPY_ARGUMENTS_ELEMENTS: {
// parameter_map layout: slot 0 = context, slot 1 = backing store,
// slots 2+ = per-parameter aliases into the context (or the hole).
13413 Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
13414 uint32_t length = parameter_map->length();
13415 Handle<Object> probe = index < length - 2 ?
13416 Handle<Object>(parameter_map->get(index + 2), isolate) :
13418 if (!probe.is_null() && !probe->IsTheHole()) {
// Aliased parameter: write straight through to the context slot.
13419 Handle<Context> context(Context::cast(parameter_map->get(0)));
13420 int context_index = Handle<Smi>::cast(probe)->value();
13421 ASSERT(!context->get(context_index)->IsTheHole());
13422 context->set(context_index, *value);
13423 // Redefining attributes of an aliased element destroys fast aliasing.
13424 if (set_mode == SET_PROPERTY || attributes == NONE) return value;
13425 parameter_map->set_the_hole(index + 2);
13426 // For elements that are still writable we re-establish slow aliasing.
13427 if ((attributes & READ_ONLY) == 0) {
13428 value = Handle<Object>::cast(
13429 isolate->factory()->NewAliasedArgumentsEntry(context_index));
// Unaliased (or de-aliased) element: store into the backing store,
// which may itself be fast or dictionary mode.
13432 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
13433 if (arguments->IsDictionary()) {
13434 return SetDictionaryElement(object, index, value, attributes,
13439 return SetFastElement(object, index, value, strict_mode,
13444 // All possible cases have been handled above. Add a return to avoid the
13445 // complaints from the compiler.
13447 return isolate->factory()->null_value();
// Fraction of mementos that must be found in old space before an
// allocation site decides to pretenure (see GetPretenureMode below).
13451 const double AllocationSite::kPretenureRatio = 0.85;
// Resets this site's pretenuring state: decision back to kUndecided and
// both memento counters cleared.
13454 void AllocationSite::ResetPretenureDecision() {
13455 set_pretenure_decision(kUndecided);
13456 set_memento_found_count(0);
13457 set_memento_create_count(0);
// Maps the site's pretenure decision to an allocation flag: only an
// explicit kTenure decision yields TENURED; every other state
// (including zombie sites) allocates in new space.
13461 PretenureFlag AllocationSite::GetPretenureMode() {
13462 PretenureDecision mode = pretenure_decision();
13463 // Zombie objects "decide" to be untenured.
13464 return mode == kTenure ? TENURED : NOT_TENURED;
// Returns whether this site appears as the nested_site of any site on
// the heap's allocation-sites list, i.e. it belongs to a nested literal.
// Only used for --trace-track-allocation-sites output (asserted below).
13468 bool AllocationSite::IsNestedSite() {
13469 ASSERT(FLAG_trace_track_allocation_sites);
13470 Object* current = GetHeap()->allocation_sites_list();
// Walk the weak linked list of all allocation sites.
13471 while (current->IsAllocationSite()) {
13472 AllocationSite* current_site = AllocationSite::cast(current);
13473 if (current_site->nested_site() == this) {
13476 current = current_site->weak_next();
// Records an elements-kind transition observed at this allocation site.
// If the site points at a boilerplate JSArray literal, the boilerplate
// itself is transitioned; otherwise the site just updates its stored
// ElementsKind. Either way, code depending on the site's transition
// info is deoptimized.
// NOTE(review): elided listing — the else branch separating the two
// halves is among the lines not shown.
13482 void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
13483 ElementsKind to_kind) {
13484 Isolate* isolate = site->GetIsolate();
13486 if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
13487 Handle<JSArray> transition_info =
13488 handle(JSArray::cast(site->transition_info()));
13489 ElementsKind kind = transition_info->GetElementsKind();
13490 // if kind is holey ensure that to_kind is as well.
13491 if (IsHoleyElementsKind(kind)) {
13492 to_kind = GetHoleyElementsKind(to_kind);
13494 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
13495 // If the array is huge, it's not likely to be defined in a local
13496 // function, so we shouldn't make new instances of it very often.
13497 uint32_t length = 0;
13498 CHECK(transition_info->length()->ToArrayIndex(&length));
13499 if (length <= kMaximumArrayBytesToPretransition) {
13500 if (FLAG_trace_track_allocation_sites) {
13501 bool is_nested = site->IsNestedSite();
13503 "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
13504 reinterpret_cast<void*>(*site),
13505 is_nested ? "(nested)" : "",
13506 ElementsKindToString(kind),
13507 ElementsKindToString(to_kind));
13509 JSObject::TransitionElementsKind(transition_info, to_kind);
13510 site->dependent_code()->DeoptimizeDependentCodeGroup(
13511 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Non-literal site: update the kind stored on the site itself.
13515 ElementsKind kind = site->GetElementsKind();
13516 // if kind is holey ensure that to_kind is as well.
13517 if (IsHoleyElementsKind(kind)) {
13518 to_kind = GetHoleyElementsKind(to_kind);
13520 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
13521 if (FLAG_trace_track_allocation_sites) {
13522 PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
13523 reinterpret_cast<void*>(*site),
13524 ElementsKindToString(kind),
13525 ElementsKindToString(to_kind));
13527 site->SetElementsKind(to_kind);
13528 site->dependent_code()->DeoptimizeDependentCodeGroup(
13529 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Registers the compiled code described by `info` as dependent on this
// allocation site, so the code is deoptimized if the site changes in a
// way covered by `reason`'s dependency group.
13536 void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
13538 CompilationInfo* info) {
13539 DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
13540 Handle<DependentCode> dep(site->dependent_code());
13541 Handle<DependentCode> codes =
13542 DependentCode::Insert(dep, group, info->object_wrapper());
// Insert may return a new (grown) DependentCode array; store it back.
13543 if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
13544 info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
// Feeds an observed elements-kind transition back to the allocation
// site that created `object`, if one can be found. Only applies to
// new-space JSArrays — old-space objects have no trailing memento.
13548 void JSObject::UpdateAllocationSite(Handle<JSObject> object,
13549 ElementsKind to_kind) {
13550 if (!object->IsJSArray()) return;
13552 Heap* heap = object->GetHeap();
13553 if (!heap->InNewSpace(*object)) return;
13555 Handle<AllocationSite> site;
// Scope the raw memento pointer: no allocation may happen while it is
// live, since GC could move the object out from under it.
13557 DisallowHeapAllocation no_allocation;
13559 AllocationMemento* memento = heap->FindAllocationMemento(*object);
13560 if (memento == NULL) return;
13562 // Walk through to the Allocation Site
13563 site = handle(memento->GetAllocationSite());
13565 AllocationSite::DigestTransitionFeedback(site, to_kind);
// Transitions `object` to a more general fast ElementsKind. Map-only
// transitions (same backing-store representation) just swap the map;
// smi->double and double->object transitions rewrite the backing store.
// Also feeds the transition back to the object's allocation site.
// NOTE(review): elided listing — early returns / closing braces between
// the visible lines are not shown.
13569 void JSObject::TransitionElementsKind(Handle<JSObject> object,
13570 ElementsKind to_kind) {
13571 ElementsKind from_kind = object->map()->elements_kind();
// Holeyness is sticky: once holey, stay holey.
13573 if (IsFastHoleyElementsKind(from_kind)) {
13574 to_kind = GetHoleyElementsKind(to_kind);
13577 if (from_kind == to_kind) return;
13578 // Don't update the site if to_kind isn't fast
13579 if (IsFastElementsKind(to_kind)) {
13580 UpdateAllocationSite(object, to_kind);
13583 Isolate* isolate = object->GetIsolate();
// These transitions keep the same backing-store representation, so a
// map change alone suffices (empty store, smi/object<->smi/object, or
// packed-double -> holey-double).
13584 if (object->elements() == isolate->heap()->empty_fixed_array() ||
13585 (IsFastSmiOrObjectElementsKind(from_kind) &&
13586 IsFastSmiOrObjectElementsKind(to_kind)) ||
13587 (from_kind == FAST_DOUBLE_ELEMENTS &&
13588 to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
13589 ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
13590 // No change is needed to the elements() buffer, the transition
13591 // only requires a map change.
13592 Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
13593 MigrateToMap(object, new_map);
13594 if (FLAG_trace_elements_transitions) {
13595 Handle<FixedArrayBase> elms(object->elements());
13596 PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
13601 Handle<FixedArrayBase> elms(object->elements());
13602 uint32_t capacity = static_cast<uint32_t>(elms->length());
13603 uint32_t length = capacity;
13605 if (object->IsJSArray()) {
13606 Object* raw_length = Handle<JSArray>::cast(object)->length();
13607 if (raw_length->IsUndefined()) {
13608 // If length is undefined, then JSArray is being initialized and has no
13609 // elements, assume a length of zero.
13612 CHECK(raw_length->ToArrayIndex(&length));
// Smi -> double: re-box every element into a FixedDoubleArray.
13616 if (IsFastSmiElementsKind(from_kind) &&
13617 IsFastDoubleElementsKind(to_kind)) {
13618 SetFastDoubleElementsCapacityAndLength(object, capacity, length);
13619 JSObject::ValidateElements(object);
// Double -> object: re-box every double into a heap value.
13623 if (IsFastDoubleElementsKind(from_kind) &&
13624 IsFastObjectElementsKind(to_kind)) {
13625 SetFastElementsCapacityAndLength(object, capacity, length,
13626 kDontAllowSmiElements);
13627 JSObject::ValidateElements(object);
13631 // This method should never be called for any other case than the ones
// Returns whether an elements-kind transition from `from_kind` to
// `to_kind` is legal: it must move toward a more general kind and must
// never drop holeyness (HOLEY -> PACKED is forbidden).
13638 bool Map::IsValidElementsTransition(ElementsKind from_kind,
13639 ElementsKind to_kind) {
13640 // Transitions can't go backwards.
13641 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
13645 // Transitions from HOLEY -> PACKED are not allowed.
13646 return !IsFastHoleyElementsKind(from_kind) ||
13647 IsFastHoleyElementsKind(to_kind);
// Grows the array's length to index + 1 after a store at `index`, if
// the store landed at or past the current length. Index 0xffffffff is
// excluded — it is not a valid array index (max index is 2^32 - 2).
13651 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
13653 Handle<Object> value) {
13654 uint32_t old_len = 0;
13655 CHECK(array->length()->ToArrayIndex(&old_len));
13656 // Check to see if we need to update the length. For now, we make
13657 // sure that the length stays within 32-bits (unsigned).
13658 if (index >= old_len && index != 0xffffffff) {
// NewNumber is used because index + 1 may exceed Smi range.
13659 Handle<Object> len = array->GetIsolate()->factory()->NewNumber(
13660 static_cast<double>(index) + 1);
13661 array->set_length(*len);
// Loads element `index` from an object with an indexed interceptor:
// first asks the interceptor's getter callback; if it declines, falls
// back to the elements accessor, then to the prototype chain.
13666 MaybeHandle<Object> JSObject::GetElementWithInterceptor(
13667 Handle<JSObject> object,
13668 Handle<Object> receiver,
13670 Isolate* isolate = object->GetIsolate();
13672 // Make sure that the top context does not change when doing
13673 // callbacks or interceptor calls.
13674 AssertNoContextChange ncc(isolate);
13676 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
13677 if (!interceptor->getter()->IsUndefined()) {
13678 v8::IndexedPropertyGetterCallback getter =
13679 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
13681 ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
13682 PropertyCallbackArguments
13683 args(isolate, interceptor->data(), *receiver, *object);
13684 v8::Handle<v8::Value> result = args.Call(getter, index);
13685 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the load.
13686 if (!result.IsEmpty()) {
13687 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13688 result_internal->VerifyApiCallResultType();
13689 // Rebox handle before return.
13690 return handle(*result_internal, isolate);
// Interceptor declined: read through the normal elements accessor.
13694 ElementsAccessor* handler = object->GetElementsAccessor();
13695 Handle<Object> result;
13696 ASSIGN_RETURN_ON_EXCEPTION(
13697 isolate, result, handler->Get(receiver, object, index),
13699 if (!result->IsTheHole()) return result;
// Not present locally: continue the lookup up the prototype chain.
13701 Handle<Object> proto(object->GetPrototype(), isolate);
13702 if (proto->IsNull()) return isolate->factory()->undefined_value();
13703 return Object::GetElementWithReceiver(isolate, proto, receiver, index);
// Returns whether the elements are dense enough to stay in (or return
// to) fast mode: an empty store, or more than half the capacity used.
13707 bool JSObject::HasDenseElements() {
13710 GetElementsCapacityAndUsage(&capacity, &used);
13711 return (capacity == 0) || (used > (capacity / 2));
// Reports the elements backing store's capacity and the number of
// actually-used slots, per ElementsKind. Used-slot counting differs by
// kind: holey kinds scan for non-holes, dictionaries report element
// count, typed/external arrays are considered 100% used.
// NOTE(review): elided listing — break statements and some case labels
// between the visible lines are not shown.
13715 void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
13719 FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
13720 FixedArray* backing_store = NULL;
13721 switch (GetElementsKind()) {
13722 case SLOPPY_ARGUMENTS_ELEMENTS:
// Slot 1 of the parameter map holds the real backing store, which may
// itself be a dictionary.
13723 backing_store_base =
13724 FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
13725 backing_store = FixedArray::cast(backing_store_base);
13726 if (backing_store->IsDictionary()) {
13727 SeededNumberDictionary* dictionary =
13728 SeededNumberDictionary::cast(backing_store);
13729 *capacity = dictionary->Capacity();
13730 *used = dictionary->NumberOfElements();
13734 case FAST_SMI_ELEMENTS:
13735 case FAST_ELEMENTS:
// Packed JSArray: length IS the used count.
13737 *capacity = backing_store_base->length();
13738 *used = Smi::cast(JSArray::cast(this)->length())->value();
13741 // Fall through if packing is not guaranteed.
13742 case FAST_HOLEY_SMI_ELEMENTS:
13743 case FAST_HOLEY_ELEMENTS:
// Holey object elements: count the non-hole slots.
13744 backing_store = FixedArray::cast(backing_store_base);
13745 *capacity = backing_store->length();
13746 for (int i = 0; i < *capacity; ++i) {
13747 if (!backing_store->get(i)->IsTheHole()) ++(*used);
13750 case DICTIONARY_ELEMENTS: {
13751 SeededNumberDictionary* dictionary = element_dictionary();
13752 *capacity = dictionary->Capacity();
13753 *used = dictionary->NumberOfElements();
13756 case FAST_DOUBLE_ELEMENTS:
13758 *capacity = backing_store_base->length();
13759 *used = Smi::cast(JSArray::cast(this)->length())->value();
13762 // Fall through if packing is not guaranteed.
13763 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13764 *capacity = elements()->length();
13765 if (*capacity == 0) break;
13766 FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
13767 for (int i = 0; i < *capacity; i++) {
13768 if (!elms->is_the_hole(i)) ++(*used);
13773 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13774 case EXTERNAL_##TYPE##_ELEMENTS: \
13775 case TYPE##_ELEMENTS: \
13777 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13778 #undef TYPED_ARRAY_CASE
13780 // External arrays are considered 100% used.
13781 FixedArrayBase* external_array = FixedArrayBase::cast(elements());
13782 *capacity = external_array->length();
13783 *used = external_array->length();
// Predicts whether storing at `key` would force a fast-elements object
// into dictionary mode: either the gap past the current capacity is too
// large, or the grown capacity would fail the slow-elements heuristic.
13790 bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
13792 if (HasFastElements() && key->ToArrayIndex(&index)) {
13793 Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
13794 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
13795 if (index >= capacity) {
13796 if ((index - capacity) >= kMaxGap) return true;
13797 uint32_t new_capacity = NewElementsCapacity(index + 1);
13798 return ShouldConvertToSlowElements(new_capacity);
// Decides whether growing the fast backing store to `new_capacity`
// would waste enough memory that dictionary elements are preferable.
// Small capacities (and larger new-space capacities) are always fine.
13805 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
13806 STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
13807 kMaxUncheckedFastElementsLength);
13808 if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
13809 (new_capacity <= kMaxUncheckedFastElementsLength &&
13810 GetHeap()->InNewSpace(this))) {
13813 // If the fast-case backing storage takes up roughly three times as
13814 // much space (in machine words) as a dictionary backing storage
13815 // would, the object should have slow elements.
13816 int old_capacity = 0;
13817 int used_elements = 0;
13818 GetElementsCapacityAndUsage(&old_capacity, &used_elements);
13819 int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
13820 SeededNumberDictionary::kEntrySize;
13821 return 3 * dictionary_size <= new_capacity;
// Decides whether a dictionary-mode object should convert back to fast
// elements: elements must be dense, the object must not require access
// checks or be observed, the dictionary must not demand slow elements,
// and the dictionary must be at least half the size a fast store would be.
13825 bool JSObject::ShouldConvertToFastElements() {
13826 ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
13827 // If the elements are sparse, we should not go back to fast case.
13828 if (!HasDenseElements()) return false;
13829 // An object requiring access checks is never allowed to have fast
13830 // elements. If it had fast elements we would skip security checks.
13831 if (IsAccessCheckNeeded()) return false;
13832 // Observed objects may not go to fast mode because they rely on map checks,
13833 // and for fast element accesses we sometimes check element kinds only.
13834 if (map()->is_observed()) return false;
13836 FixedArray* elements = FixedArray::cast(this->elements());
13837 SeededNumberDictionary* dictionary = NULL;
// Sloppy-arguments objects keep their dictionary in slot 1 of the
// parameter map; otherwise elements() is the dictionary itself.
13838 if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
13839 dictionary = SeededNumberDictionary::cast(elements->get(1));
13841 dictionary = SeededNumberDictionary::cast(elements);
13843 // If an element has been added at a very high index in the elements
13844 // dictionary, we cannot go back to fast case.
13845 if (dictionary->requires_slow_elements()) return false;
13846 // If the dictionary backing storage takes up roughly half as much
13847 // space (in machine words) as a fast-case backing storage would,
13848 // the object should have fast elements.
13849 uint32_t array_size = 0;
13851 CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
13853 array_size = dictionary->max_number_key();
13855 uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
13856 SeededNumberDictionary::kEntrySize;
13857 return 2 * dictionary_size >= array_size;
// Decides whether a dictionary-mode object should convert to
// FAST_DOUBLE_ELEMENTS: true only if every value is a number and at
// least one is a non-Smi double. Also reports (via the out-parameter)
// whether the values are all Smis, so the caller can pick a Smi kind.
13861 bool JSObject::ShouldConvertToFastDoubleElements(
13862 bool* has_smi_only_elements) {
13863 *has_smi_only_elements = false;
13864 if (HasSloppyArgumentsElements()) return false;
13865 if (FLAG_unbox_double_arrays) {
13866 ASSERT(HasDictionaryElements());
13867 SeededNumberDictionary* dictionary = element_dictionary();
13868 bool found_double = false;
// Scan every dictionary entry: any non-number value rules out a
// double-array representation immediately.
13869 for (int i = 0; i < dictionary->Capacity(); i++) {
13870 Object* key = dictionary->KeyAt(i);
13871 if (key->IsNumber()) {
13872 Object* value = dictionary->ValueAt(i);
13873 if (!value->IsNumber()) return false;
13874 if (!value->IsSmi()) {
13875 found_double = true;
13879 *has_smi_only_elements = !found_double;
13880 return found_double;
13887 // Certain compilers request function template instantiation when they
13888 // see the definition of the other template functions in the
13889 // class. This requires us to have the template functions put
13890 // together, so even though this function belongs in objects-debug.cc,
13891 // we keep it here instead to satisfy certain compilers.
13892 #ifdef OBJECT_PRINT
// Debug helper: prints every key/value pair in the dictionary to `out`.
// String keys are printed as strings, other keys via ShortPrint.
13893 template<typename Derived, typename Shape, typename Key>
13894 void Dictionary<Derived, Shape, Key>::Print(FILE* out) {
13895 int capacity = DerivedHashTable::Capacity();
13896 for (int i = 0; i < capacity; i++) {
13897 Object* k = DerivedHashTable::KeyAt(i);
// Skip empty/deleted slots — only real keys are printed.
13898 if (DerivedHashTable::IsKey(k)) {
13900 if (k->IsString()) {
13901 String::cast(k)->StringPrint(out);
13903 k->ShortPrint(out);
13906 ValueAt(i)->ShortPrint(out);
// Copies the dictionary's values (in slot order) into `elements`, which
// must be exactly the right size — asserted after the copy.
13914 template<typename Derived, typename Shape, typename Key>
13915 void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
13917 int capacity = DerivedHashTable::Capacity();
// No allocation may happen while using a raw write-barrier mode.
13918 DisallowHeapAllocation no_gc;
13919 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13920 for (int i = 0; i < capacity; i++) {
13921 Object* k = Dictionary::KeyAt(i);
13922 if (Dictionary::IsKey(k)) {
13923 elements->set(pos++, ValueAt(i), mode);
13926 ASSERT(pos == elements->length());
// Returns the named-property interceptor installed on this object's
// constructor (an API function). Only valid when the map says a named
// interceptor exists (asserted).
13930 InterceptorInfo* JSObject::GetNamedInterceptor() {
13931 ASSERT(map()->has_named_interceptor());
13932 JSFunction* constructor = JSFunction::cast(map()->constructor());
13933 ASSERT(constructor->shared()->IsApiFunction());
13935 constructor->shared()->get_api_func_data()->named_property_handler();
13936 return InterceptorInfo::cast(result);
// Returns the indexed-property interceptor installed on this object's
// constructor (an API function). Mirrors GetNamedInterceptor above.
13940 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13941 ASSERT(map()->has_indexed_interceptor());
13942 JSFunction* constructor = JSFunction::cast(map()->constructor());
13943 ASSERT(constructor->shared()->IsApiFunction());
13945 constructor->shared()->get_api_func_data()->indexed_property_handler();
13946 return InterceptorInfo::cast(result);
// Property load that runs after a named interceptor declined: look up
// the real (non-interceptor) local property on the holder, then fall
// back to the prototype chain. Sets *attributes to ABSENT when the
// chain is exhausted.
13950 MaybeHandle<Object> JSObject::GetPropertyPostInterceptor(
13951 Handle<JSObject> object,
13952 Handle<Object> receiver,
13954 PropertyAttributes* attributes) {
13955 // Check local property in holder, ignore interceptor.
13956 Isolate* isolate = object->GetIsolate();
13957 LookupResult lookup(isolate);
13958 object->LocalLookupRealNamedProperty(name, &lookup);
13959 if (lookup.IsFound()) {
13960 return GetProperty(object, receiver, &lookup, name, attributes);
13962 // Continue searching via the prototype chain.
13963 Handle<Object> prototype(object->GetPrototype(), isolate);
13964 *attributes = ABSENT;
13965 if (prototype->IsNull()) return isolate->factory()->undefined_value();
13966 return GetPropertyWithReceiver(prototype, receiver, name, attributes);
// Property load through a named interceptor: invokes the interceptor's
// getter callback first; if it declines (empty result), falls through
// to GetPropertyPostInterceptor. Symbols are not supported by the API
// and load as undefined.
13971 MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
13972 Handle<JSObject> object,
13973 Handle<Object> receiver,
13975 PropertyAttributes* attributes) {
13976 Isolate* isolate = object->GetIsolate();
13978 // TODO(rossberg): Support symbols in the API.
13979 if (name->IsSymbol()) return isolate->factory()->undefined_value();
13981 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor(), isolate);
13982 Handle<String> name_string = Handle<String>::cast(name);
13984 if (!interceptor->getter()->IsUndefined()) {
13985 v8::NamedPropertyGetterCallback getter =
13986 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
13988 ApiNamedPropertyAccess("interceptor-named-get", *object, *name));
13989 PropertyCallbackArguments
13990 args(isolate, interceptor->data(), *receiver, *object);
13991 v8::Handle<v8::Value> result =
13992 args.Call(getter, v8::Utils::ToLocal(name_string));
13993 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the load.
13994 if (!result.IsEmpty()) {
13995 *attributes = NONE;
13996 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13997 result_internal->VerifyApiCallResultType();
13998 // Rebox handle before return.
13999 return handle(*result_internal, isolate);
14003 return GetPropertyPostInterceptor(object, receiver, name, attributes);
14007 // Compute the property keys from the interceptor.
14008 // TODO(rossberg): support symbols in API, and filter here if needed.
// Asks the named interceptor's enumerator callback for the object's
// property names. Returns an empty MaybeHandle when the interceptor
// provides no enumerator or yields no result.
14009 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor(
14010 Handle<JSObject> object, Handle<JSReceiver> receiver) {
14011 Isolate* isolate = receiver->GetIsolate();
14012 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
14013 PropertyCallbackArguments
14014 args(isolate, interceptor->data(), *receiver, *object);
14015 v8::Handle<v8::Object> result;
14016 if (!interceptor->enumerator()->IsUndefined()) {
14017 v8::NamedPropertyEnumeratorCallback enum_fun =
14018 v8::ToCData<v8::NamedPropertyEnumeratorCallback>(
14019 interceptor->enumerator());
14020 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
14021 result = args.Call(enum_fun);
14023 if (result.IsEmpty()) return MaybeHandle<JSObject>();
// Extra checking: the API contract requires a JSArray-like result.
14024 #if ENABLE_EXTRA_CHECKS
14025 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
14026 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
14028 // Rebox before returning.
14029 return handle(*v8::Utils::OpenHandle(*result), isolate);
14033 // Compute the element keys from the interceptor.
// Indexed-interceptor analogue of GetKeysForNamedInterceptor: asks the
// indexed enumerator callback for the object's element indices.
14034 MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor(
14035 Handle<JSObject> object, Handle<JSReceiver> receiver) {
14036 Isolate* isolate = receiver->GetIsolate();
14037 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
14038 PropertyCallbackArguments
14039 args(isolate, interceptor->data(), *receiver, *object);
14040 v8::Handle<v8::Object> result;
14041 if (!interceptor->enumerator()->IsUndefined()) {
14042 v8::IndexedPropertyEnumeratorCallback enum_fun =
14043 v8::ToCData<v8::IndexedPropertyEnumeratorCallback>(
14044 interceptor->enumerator());
14045 LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
14046 result = args.Call(enum_fun);
14048 if (result.IsEmpty()) return MaybeHandle<JSObject>();
// Extra checking: the API contract requires a JSArray-like result.
14049 #if ENABLE_EXTRA_CHECKS
14050 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
14051 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
14053 // Rebox before returning.
14054 return handle(*v8::Utils::OpenHandle(*result), isolate);
// Returns whether `object` has a real (non-interceptor) local property
// named `key`, after passing any required access check.
14058 bool JSObject::HasRealNamedProperty(Handle<JSObject> object,
14059 Handle<Name> key) {
14060 Isolate* isolate = object->GetIsolate();
// SealHandleScope: this path must not allocate any new handles.
14061 SealHandleScope shs(isolate);
14062 // Check access rights if needed.
14063 if (object->IsAccessCheckNeeded()) {
14064 if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) {
14065 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
14066 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
14071 LookupResult result(isolate);
14072 object->LocalLookupRealNamedProperty(key, &result);
14073 return result.IsFound() && !result.IsInterceptor();
// Returns true if |object| has a real element at |index|, following the
// global proxy to its hidden global object if necessary.
14077 bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) {
14078 Isolate* isolate = object->GetIsolate();
14079 HandleScope scope(isolate);
14080 // Check access rights if needed.
14081 if (object->IsAccessCheckNeeded()) {
14082 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
14083 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
14084 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// A global proxy delegates to its (sole) prototype, the global object.
14089 if (object->IsJSGlobalProxy()) {
14090 HandleScope scope(isolate);
14091 Handle<Object> proto(object->GetPrototype(), isolate);
14092 if (proto->IsNull()) return false;
14093 ASSERT(proto->IsJSGlobalObject());
14094 return HasRealElementProperty(Handle<JSObject>::cast(proto), index);
// ABSENT means no own element at that index.
14097 return GetElementAttributeWithoutInterceptor(
14098 object, object, index, false) != ABSENT;
// Returns true if |object| itself has an accessor (callbacks) property
// named |key|, subject to the usual access check.
14102 bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
14103 Handle<Name> key) {
14104 Isolate* isolate = object->GetIsolate();
// No handle allocation is expected past this point.
14105 SealHandleScope shs(isolate);
14106 // Check access rights if needed.
14107 if (object->IsAccessCheckNeeded()) {
14108 if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) {
14109 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
14110 // TODO(yangguo): Issue 3269, check for scheduled exception missing?
14115 LookupResult result(isolate);
14116 object->LocalLookupRealNamedProperty(key, &result);
14117 return result.IsPropertyCallbacks();
// Counts own properties matching |filter|. Fast-properties objects can
// often answer from the map alone; otherwise the dictionary is consulted.
14121 int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
14122 if (HasFastProperties()) {
14123 Map* map = this->map();
14124 if (filter == NONE) return map->NumberOfOwnDescriptors();
14125 if (filter & DONT_ENUM) {
// The enum cache, when valid, already counts the enumerable properties.
14126 int result = map->EnumLength();
14127 if (result != kInvalidEnumCacheSentinel) return result;
14129 return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
14131 return property_dictionary()->NumberOfElementsFilterAttributes(filter);
// Swaps elements i and j in this array and, when |numbers| is a distinct
// array, the corresponding Smi entries in |numbers| as well — keeping the
// content/number pairs aligned during pair sorting.
14135 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
14136 Object* temp = get(i);
14139 if (this != numbers) {
14140 temp = numbers->get(i);
14141 numbers->set(i, Smi::cast(numbers->get(j)));
14142 numbers->set(j, Smi::cast(temp));
// Insertion sort of (content, numbers) pairs by the uint32 value of the
// numbers. Used for small arrays where insertion sort is cheapest.
14147 static void InsertionSortPairs(FixedArray* content,
14148 FixedArray* numbers,
14150 for (int i = 1; i < len; i++) {
14153 (NumberToUint32(numbers->get(j - 1)) >
14154 NumberToUint32(numbers->get(j)))) {
14155 content->SwapPairs(numbers, j - 1, j);
// In-place heap sort of (content, numbers) pairs keyed by the uint32
// value of the numbers. O(n log n), no allocation.
14162 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
14163 // In-place heap sort.
14164 ASSERT(content->length() == numbers->length());
14166 // Bottom-up max-heap construction.
14167 for (int i = 1; i < len; ++i) {
14168 int child_index = i;
// Sift the new element up until the max-heap property holds.
14169 while (child_index > 0) {
14170 int parent_index = ((child_index + 1) >> 1) - 1;
14171 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
14172 uint32_t child_value = NumberToUint32(numbers->get(child_index));
14173 if (parent_value < child_value) {
14174 content->SwapPairs(numbers, parent_index, child_index);
14178 child_index = parent_index;
14182 // Extract elements and create sorted array.
14183 for (int i = len - 1; i > 0; --i) {
14184 // Put max element at the back of the array.
14185 content->SwapPairs(numbers, 0, i);
14186 // Sift down the new top element.
14187 int parent_index = 0;
14189 int child_index = ((parent_index + 1) << 1) - 1;
14190 if (child_index >= i) break;
14191 uint32_t child1_value = NumberToUint32(numbers->get(child_index));
14192 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
14193 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
// Choose the larger child (or the only child) to compare against.
14194 if (child_index + 1 >= i || child1_value > child2_value) {
14195 if (parent_value > child1_value) break;
14196 content->SwapPairs(numbers, parent_index, child_index);
14197 parent_index = child_index;
14199 if (parent_value > child2_value) break;
14200 content->SwapPairs(numbers, parent_index, child_index + 1);
14201 parent_index = child_index + 1;
14208 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
// Strategy: insertion sort for small arrays; a linear-time cycle sort when
// the indices form a contiguous range; heap sort otherwise.
14209 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
14210 ASSERT(this->length() == numbers->length());
14211 // For small arrays, simply use insertion sort.
14213 InsertionSortPairs(this, numbers, len);
14216 // Check the range of indices.
14217 uint32_t min_index = NumberToUint32(numbers->get(0));
14218 uint32_t max_index = min_index;
14220 for (i = 1; i < len; i++) {
14221 if (NumberToUint32(numbers->get(i)) < min_index) {
14222 min_index = NumberToUint32(numbers->get(i));
14223 } else if (NumberToUint32(numbers->get(i)) > max_index) {
14224 max_index = NumberToUint32(numbers->get(i));
14227 if (max_index - min_index + 1 == len) {
14228 // Indices form a contiguous range, unless there are duplicates.
14229 // Do an in-place linear time sort assuming distinct numbers, but
14230 // avoid hanging in case they are not.
14231 for (i = 0; i < len; i++) {
14234 // While the current element at i is not at its correct position p,
14235 // swap the elements at these two positions.
14236 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
14238 SwapPairs(numbers, i, p);
14242 HeapSortPairs(this, numbers, len);
14248 // Fill in the names of local properties into the supplied storage. The main
14249 // purpose of this function is to provide reflection information for the object
// Writes the own property names that pass |filter| into |storage|,
// starting at |index|. Caller must size |storage| appropriately.
14251 void JSObject::GetLocalPropertyNames(
14252 FixedArray* storage, int index, PropertyAttributes filter) {
14253 ASSERT(storage->length() >= (NumberOfLocalProperties(filter) - index));
14254 if (HasFastProperties()) {
14255 int real_size = map()->NumberOfOwnDescriptors();
14256 DescriptorArray* descs = map()->instance_descriptors();
14257 for (int i = 0; i < real_size; i++) {
// Skip properties excluded by the attribute filter or by FilterKey.
14258 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
14259 !FilterKey(descs->GetKey(i), filter)) {
14260 storage->set(index++, descs->GetKey(i));
// Slow-properties path: copy keys out of the name dictionary.
14264 property_dictionary()->CopyKeysTo(storage,
14267 NameDictionary::UNSORTED);
// Counts own elements matching |filter| without materializing the keys
// (NULL storage makes GetLocalElementKeys count only).
14272 int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
14273 return GetLocalElementKeys(NULL, filter);
// Counts the enumerable own elements of this object.
14277 int JSObject::NumberOfEnumElements() {
14278 // Fast case for objects with no elements.
14279 if (!IsJSValue() && HasFastObjectElements()) {
14280 uint32_t length = IsJSArray() ?
14281 static_cast<uint32_t>(
14282 Smi::cast(JSArray::cast(this)->length())->value()) :
14283 static_cast<uint32_t>(FixedArray::cast(elements())->length());
14284 if (length == 0) return 0;
14286 // Compute the number of enumerable elements.
// DONT_ENUM as the filter excludes non-enumerable elements from the count.
14287 return NumberOfLocalElements(static_cast<PropertyAttributes>(DONT_ENUM));
// Writes the own element indices that pass |filter| into |storage| (when
// non-NULL) and returns the number of matching elements. With NULL storage
// this only counts. Dispatches on the object's elements kind; string
// wrappers (JSValue over a String) additionally expose index keys for
// every character.
14291 int JSObject::GetLocalElementKeys(FixedArray* storage,
14292 PropertyAttributes filter) {
14293 switch (GetElementsKind()) {
14295 case FAST_SMI_ELEMENTS:
14296 case FAST_ELEMENTS:
14297 case FAST_HOLEY_SMI_ELEMENTS:
14298 case FAST_HOLEY_ELEMENTS: {
14299 int length = IsJSArray() ?
14300 Smi::cast(JSArray::cast(this)->length())->value() :
14301 FixedArray::cast(elements())->length();
14302 for (int i = 0; i < length; i++) {
// Holes are not own elements and are skipped.
14303 if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
14304 if (storage != NULL) {
14305 storage->set(counter, Smi::FromInt(i));
14310 ASSERT(!storage || storage->length() >= counter);
14313 case FAST_DOUBLE_ELEMENTS:
14314 case FAST_HOLEY_DOUBLE_ELEMENTS: {
14315 int length = IsJSArray() ?
14316 Smi::cast(JSArray::cast(this)->length())->value() :
14317 FixedDoubleArray::cast(elements())->length();
14318 for (int i = 0; i < length; i++) {
14319 if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
14320 if (storage != NULL) {
14321 storage->set(counter, Smi::FromInt(i));
14326 ASSERT(!storage || storage->length() >= counter);
// Typed arrays: every index in [0, length) is an own element.
14330 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
14331 case EXTERNAL_##TYPE##_ELEMENTS: \
14332 case TYPE##_ELEMENTS: \
14334 TYPED_ARRAYS(TYPED_ARRAY_CASE)
14335 #undef TYPED_ARRAY_CASE
14337 int length = FixedArrayBase::cast(elements())->length();
14338 while (counter < length) {
14339 if (storage != NULL) {
14340 storage->set(counter, Smi::FromInt(counter));
14344 ASSERT(!storage || storage->length() >= counter);
14348 case DICTIONARY_ELEMENTS: {
14349 if (storage != NULL) {
14350 element_dictionary()->CopyKeysTo(storage,
14352 SeededNumberDictionary::SORTED);
14354 counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
14357 case SLOPPY_ARGUMENTS_ELEMENTS: {
// parameter_map layout: [context, arguments backing store, mapped slots...].
14358 FixedArray* parameter_map = FixedArray::cast(elements());
14359 int mapped_length = parameter_map->length() - 2;
14360 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
14361 if (arguments->IsDictionary()) {
14362 // Copy the keys from arguments first, because Dictionary::CopyKeysTo
14363 // will insert in storage starting at index 0.
14364 SeededNumberDictionary* dictionary =
14365 SeededNumberDictionary::cast(arguments);
14366 if (storage != NULL) {
14367 dictionary->CopyKeysTo(
14368 storage, filter, SeededNumberDictionary::UNSORTED);
14370 counter += dictionary->NumberOfElementsFilterAttributes(filter);
14371 for (int i = 0; i < mapped_length; ++i) {
14372 if (!parameter_map->get(i + 2)->IsTheHole()) {
14373 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// Keys from the dictionary and the mapped slots are interleaved
// unsorted above, so sort them (SortPairs with storage as its own
// numbers array sorts the keys in place).
14377 if (storage != NULL) storage->SortPairs(storage, counter);
14380 int backing_length = arguments->length();
14382 for (; i < mapped_length; ++i) {
14383 if (!parameter_map->get(i + 2)->IsTheHole()) {
14384 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
14386 } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
14387 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
14391 for (; i < backing_length; ++i) {
14392 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// String wrappers expose one element key per character.
14400 if (this->IsJSValue()) {
14401 Object* val = JSValue::cast(this)->value();
14402 if (val->IsString()) {
14403 String* str = String::cast(val);
14405 for (int i = 0; i < str->length(); i++) {
14406 storage->set(counter + i, Smi::FromInt(i));
14409 counter += str->length();
14412 ASSERT(!storage || storage->length() == counter);
// Collects the enumerable own element keys into |storage| and returns the
// count; filtering on DONT_ENUM drops non-enumerable elements.
14417 int JSObject::GetEnumElementKeys(FixedArray* storage) {
14418 return GetLocalElementKeys(storage,
14419 static_cast<PropertyAttributes>(DONT_ENUM));
14423 // StringKey simply carries a string object as key.
// Hash-table key wrapper around a raw String*; hash is precomputed in the
// constructor so repeated probes are cheap.
14424 class StringKey : public HashTableKey {
14426 explicit StringKey(String* string) :
14428 hash_(HashForObject(string)) { }
14430 bool IsMatch(Object* string) {
14431 // We know that all entries in a hash table had their hash keys created.
14432 // Use that knowledge to have fast failure.
14433 if (hash_ != HashForObject(string)) {
14436 return string_->Equals(String::cast(string));
14439 uint32_t Hash() { return hash_; }
14441 uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }
// The stored string itself serves as the table entry; no allocation.
14443 Object* AsObject(Heap* heap) { return string_; }
14450 // StringSharedKeys are used as keys in the eval cache.
// Key = (source string, calling SharedFunctionInfo, strict mode, scope
// position). Stored in the table as a 4-element FixedArray in that layout.
14451 class StringSharedKey : public HashTableKey {
14453 StringSharedKey(Handle<String> source,
14454 Handle<SharedFunctionInfo> shared,
14455 StrictMode strict_mode,
14456 int scope_position)
14459 strict_mode_(strict_mode),
14460 scope_position_(scope_position) { }
14462 bool IsMatch(Object* other) V8_OVERRIDE {
14463 DisallowHeapAllocation no_allocation;
14464 if (!other->IsFixedArray()) return false;
14465 FixedArray* other_array = FixedArray::cast(other);
// Entry layout: [0]=shared, [1]=source, [2]=strict mode, [3]=scope pos.
14466 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
14467 if (shared != *shared_) return false;
14468 int strict_unchecked = Smi::cast(other_array->get(2))->value();
14469 ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
14470 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
14471 if (strict_mode != strict_mode_) return false;
14472 int scope_position = Smi::cast(other_array->get(3))->value();
14473 if (scope_position != scope_position_) return false;
14474 String* source = String::cast(other_array->get(1));
14475 return source->Equals(*source_);
14478 static uint32_t StringSharedHashHelper(String* source,
14479 SharedFunctionInfo* shared,
14480 StrictMode strict_mode,
14481 int scope_position) {
14482 uint32_t hash = source->Hash();
14483 if (shared->HasSourceCode()) {
14484 // Instead of using the SharedFunctionInfo pointer in the hash
14485 // code computation, we use a combination of the hash of the
14486 // script source code and the start position of the calling scope.
14487 // We do this to ensure that the cache entries can survive garbage
14489 Script* script(Script::cast(shared->script()));
14490 hash ^= String::cast(script->source())->Hash();
14491 if (strict_mode == STRICT) hash ^= 0x8000;
14492 hash += scope_position;
14497 uint32_t Hash() V8_OVERRIDE {
14498 return StringSharedHashHelper(*source_, *shared_, strict_mode_,
14502 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
14503 DisallowHeapAllocation no_allocation;
14504 FixedArray* other_array = FixedArray::cast(obj);
14505 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
14506 String* source = String::cast(other_array->get(1));
14507 int strict_unchecked = Smi::cast(other_array->get(2))->value();
14508 ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
14509 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
14510 int scope_position = Smi::cast(other_array->get(3))->value();
14511 return StringSharedHashHelper(
14512 source, shared, strict_mode, scope_position);
// Materializes the key as a FixedArray in the layout IsMatch expects.
14516 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
14517 Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
14518 array->set(0, *shared_);
14519 array->set(1, *source_);
14520 array->set(2, Smi::FromInt(strict_mode_));
14521 array->set(3, Smi::FromInt(scope_position_));
14526 Handle<String> source_;
14527 Handle<SharedFunctionInfo> shared_;
14528 StrictMode strict_mode_;
14529 int scope_position_;
14533 // RegExpKey carries the source and flags of a regular expression as key.
14534 class RegExpKey : public HashTableKey {
14536 RegExpKey(Handle<String> string, JSRegExp::Flags flags)
14538 flags_(Smi::FromInt(flags.value())) { }
14540 // Rather than storing the key in the hash table, a pointer to the
14541 // stored value is stored where the key should be. IsMatch then
14542 // compares the search key to the found object, rather than comparing
14544 bool IsMatch(Object* obj) V8_OVERRIDE {
14545 FixedArray* val = FixedArray::cast(obj);
14546 return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
14547 && (flags_ == val->get(JSRegExp::kFlagsIndex));
14550 uint32_t Hash() V8_OVERRIDE { return RegExpHash(*string_, flags_); }
14552 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
14553 // Plain hash maps, which is where regexp keys are used, don't
14554 // use this function.
// Deliberately unreachable: extracting from an empty MaybeHandle aborts.
14556 return MaybeHandle<Object>().ToHandleChecked();
14559 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
14560 FixedArray* val = FixedArray::cast(obj);
14561 return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
14562 Smi::cast(val->get(JSRegExp::kFlagsIndex)));
// Hash combines the pattern's string hash with the flags bits.
14565 static uint32_t RegExpHash(String* string, Smi* flags) {
14566 return string->Hash() + flags->value();
14569 Handle<String> string_;
// Creates the internalized one-byte string for this key; Hash() is forced
// first so hash_field_ is valid when the string is allocated.
14574 Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
14575 if (hash_field_ == 0) Hash();
14576 return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
// Two-byte counterpart of OneByteStringKey::AsHandle: ensure the hash
// field is computed, then allocate the internalized string.
14580 Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
14581 if (hash_field_ == 0) Hash();
14582 return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
// Returns the raw one-byte character data of the underlying string,
// whether it is a sequential or an external ASCII string.
14587 const uint8_t* SubStringKey<uint8_t>::GetChars() {
14588 return string_->IsSeqOneByteString()
14589 ? SeqOneByteString::cast(*string_)->GetChars()
14590 : ExternalAsciiString::cast(*string_)->GetChars();
// Returns the raw two-byte character data of the underlying string,
// whether it is a sequential or an external two-byte string.
14595 const uint16_t* SubStringKey<uint16_t>::GetChars() {
14596 return string_->IsSeqTwoByteString()
14597 ? SeqTwoByteString::cast(*string_)->GetChars()
14598 : ExternalTwoByteString::cast(*string_)->GetChars();
// Internalizes the [from_, from_+length_) one-byte slice of the backing
// string; forces the hash first so hash_field_ is valid.
14603 Handle<Object> SubStringKey<uint8_t>::AsHandle(Isolate* isolate) {
14604 if (hash_field_ == 0) Hash();
14605 Vector<const uint8_t> chars(GetChars() + from_, length_);
14606 return isolate->factory()->NewOneByteInternalizedString(chars, hash_field_);
// Internalizes the [from_, from_+length_) two-byte slice of the backing
// string; forces the hash first so hash_field_ is valid.
14611 Handle<Object> SubStringKey<uint16_t>::AsHandle(Isolate* isolate) {
14612 if (hash_field_ == 0) Hash();
14613 Vector<const uint16_t> chars(GetChars() + from_, length_);
14614 return isolate->factory()->NewTwoByteInternalizedString(chars, hash_field_);
// True if |string| equals this key's one-byte substring slice.
14619 bool SubStringKey<uint8_t>::IsMatch(Object* string) {
14620 Vector<const uint8_t> chars(GetChars() + from_, length_);
14621 return String::cast(string)->IsOneByteEqualTo(chars);
// True if |string| equals this key's two-byte substring slice.
14626 bool SubStringKey<uint16_t>::IsMatch(Object* string) {
14627 Vector<const uint16_t> chars(GetChars() + from_, length_);
14628 return String::cast(string)->IsTwoByteEqualTo(chars);
// Explicit instantiations for the two character widths used above.
14632 template class SubStringKey<uint8_t>;
14633 template class SubStringKey<uint16_t>;
14636 // InternalizedStringKey carries a string/internalized-string object as key.
14637 class InternalizedStringKey : public HashTableKey {
14639 explicit InternalizedStringKey(Handle<String> string)
14640 : string_(string) { }
14642 virtual bool IsMatch(Object* string) V8_OVERRIDE {
14643 return String::cast(string)->Equals(*string_);
14646 virtual uint32_t Hash() V8_OVERRIDE { return string_->Hash(); }
14648 virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
14649 return String::cast(other)->Hash();
// Produces the internalized string to store in the table.
14652 virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
14653 // Internalize the string if possible.
14654 MaybeHandle<Map> maybe_map =
14655 isolate->factory()->InternalizedStringMapForString(string_);
14657 if (maybe_map.ToHandle(&map)) {
// In-place internalization: just switch the string's map.
14658 string_->set_map_no_write_barrier(*map);
14659 ASSERT(string_->IsInternalizedString());
14662 // Otherwise allocate a new internalized string.
14663 return isolate->factory()->NewInternalizedStringImpl(
14664 string_, string_->length(), string_->hash_field());
14667 static uint32_t StringHash(Object* obj) {
14668 return String::cast(obj)->Hash();
14671 Handle<String> string_;
// Visits the pointers in the table's prefix area (before the elements).
14675 template<typename Derived, typename Shape, typename Key>
14676 void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
14677 IteratePointers(v, 0, kElementsStartOffset);
// Visits the pointers in the element area of the table.
14681 template<typename Derived, typename Shape, typename Key>
14682 void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
14684 kElementsStartOffset,
14685 kHeaderSize + length() * kPointerSize);
// Allocates a new hash table with room for |at_least_space_for| elements.
// With USE_CUSTOM_MINIMUM_CAPACITY the given (power-of-two) value is used
// verbatim; otherwise capacity is computed from the element count. Aborts
// the process if the requested capacity exceeds kMaxCapacity.
14689 template<typename Derived, typename Shape, typename Key>
14690 Handle<Derived> HashTable<Derived, Shape, Key>::New(
14692 int at_least_space_for,
14693 MinimumCapacity capacity_option,
14694 PretenureFlag pretenure) {
14695 ASSERT(0 <= at_least_space_for);
14696 ASSERT(!capacity_option || IsPowerOf2(at_least_space_for));
14697 int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
14698 ? at_least_space_for
14699 : ComputeCapacity(at_least_space_for);
14700 if (capacity > HashTable::kMaxCapacity) {
14701 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
14704 Factory* factory = isolate->factory();
14705 int length = EntryToIndex(capacity);
// A hash table is a FixedArray with the hash_table map and bookkeeping
// slots initialized below.
14706 Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
14707 array->set_map_no_write_barrier(*factory->hash_table_map());
14708 Handle<Derived> table = Handle<Derived>::cast(array);
14710 table->SetNumberOfElements(0);
14711 table->SetNumberOfDeletedElements(0);
14712 table->SetCapacity(capacity);
14717 // Find entry for key otherwise return kNotFound.
14718 int NameDictionary::FindEntry(Handle<Name> key) {
// Non-unique names take the generic (hash-comparing) lookup path.
14719 if (!key->IsUniqueName()) {
14720 return DerivedHashTable::FindEntry(key);
14723 // Optimized for unique names. Knowledge of the key type allows:
14724 // 1. Move the check if the key is unique out of the loop.
14725 // 2. Avoid comparing hash codes in unique-to-unique comparison.
14726 // 3. Detect a case when a dictionary key is not unique but the key is.
14727 // In case of positive result the dictionary key may be replaced by the
14728 // internalized string with minimal performance penalty. It gives a chance
14729 // to perform further lookups in code stubs (and significant performance
14730 // boost a certain style of code).
14732 // EnsureCapacity will guarantee the hash table is never full.
14733 uint32_t capacity = Capacity();
14734 uint32_t entry = FirstProbe(key->Hash(), capacity);
14735 uint32_t count = 1;
14738 int index = EntryToIndex(entry);
14739 Object* element = get(index);
14740 if (element->IsUndefined()) break; // Empty entry.
// Unique names can be compared by pointer identity.
14741 if (*key == element) return entry;
14742 if (!element->IsUniqueName() &&
14743 !element->IsTheHole() &&
14744 Name::cast(element)->Equals(*key)) {
14745 // Replace a key that is a non-internalized string by the equivalent
14746 // internalized string for faster further lookups.
14750 ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(*key));
14751 entry = NextProbe(entry, count++, capacity);
// Rehashes all elements of this table into |new_table| (which must have
// spare capacity), copying the prefix area first and re-probing each
// entry with its recomputed hash. Runs under DisallowHeapAllocation, so
// raw pointers stay valid throughout.
14757 template<typename Derived, typename Shape, typename Key>
14758 void HashTable<Derived, Shape, Key>::Rehash(
14759 Handle<Derived> new_table,
14761 ASSERT(NumberOfElements() < new_table->Capacity());
14763 DisallowHeapAllocation no_gc;
14764 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
14766 // Copy prefix to new array.
14767 for (int i = kPrefixStartIndex;
14768 i < kPrefixStartIndex + Shape::kPrefixSize;
14770 new_table->set(i, get(i), mode);
14773 // Rehash the elements.
14774 int capacity = Capacity();
14775 for (int i = 0; i < capacity; i++) {
14776 uint32_t from_index = EntryToIndex(i);
14777 Object* k = get(from_index);
14779 uint32_t hash = HashTable::HashForObject(key, k);
14780 uint32_t insertion_index =
14781 EntryToIndex(new_table->FindInsertionEntry(hash));
// Copy the whole entry (key plus value slots) into the new position.
14782 for (int j = 0; j < Shape::kEntrySize; j++) {
14783 new_table->set(insertion_index + j, get(from_index + j), mode);
14787 new_table->SetNumberOfElements(NumberOfElements());
14788 new_table->SetNumberOfDeletedElements(0);
// Computes where element |k| would land when probing up to |probe| steps;
// if the probe sequence passes through |expected| first, |expected| is
// returned (the element is already well-placed for in-place rehashing).
14792 template<typename Derived, typename Shape, typename Key>
14793 uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
14797 uint32_t expected) {
14798 uint32_t hash = HashTable::HashForObject(key, k);
14799 uint32_t capacity = Capacity();
14800 uint32_t entry = FirstProbe(hash, capacity);
14801 for (int i = 1; i < probe; i++) {
14802 if (entry == expected) return expected;
14803 entry = NextProbe(entry, i, capacity);
// Swaps two whole entries (all kEntrySize slots each) via a small
// stack-allocated temporary.
14809 template<typename Derived, typename Shape, typename Key>
14810 void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
14812 WriteBarrierMode mode) {
14813 int index1 = EntryToIndex(entry1);
14814 int index2 = EntryToIndex(entry2);
14815 Object* temp[Shape::kEntrySize];
14816 for (int j = 0; j < Shape::kEntrySize; j++) {
14817 temp[j] = get(index1 + j);
14819 for (int j = 0; j < Shape::kEntrySize; j++) {
14820 set(index1 + j, get(index2 + j), mode);
14822 for (int j = 0; j < Shape::kEntrySize; j++) {
14823 set(index2 + j, temp[j], mode);
// In-place rehash: iteratively moves elements toward their correct probe
// positions, increasing the number of "settled" probe levels each pass
// until no element needs to move.
14828 template<typename Derived, typename Shape, typename Key>
14829 void HashTable<Derived, Shape, Key>::Rehash(Key key) {
14830 DisallowHeapAllocation no_gc;
14831 WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
14832 uint32_t capacity = Capacity();
14834 for (int probe = 1; !done; probe++) {
14835 // All elements at entries given by one of the first _probe_ probes
14836 // are placed correctly. Other elements might need to be moved.
14838 for (uint32_t current = 0; current < capacity; current++) {
14839 Object* current_key = get(EntryToIndex(current));
14840 if (IsKey(current_key)) {
14841 uint32_t target = EntryForProbe(key, current_key, probe, current);
14842 if (current == target) continue;
14843 Object* target_key = get(EntryToIndex(target));
// Move only if the target slot is free or its occupant is misplaced.
14844 if (!IsKey(target_key) ||
14845 EntryForProbe(key, target_key, probe, target) != target) {
14846 // Put the current element into the correct position.
14847 Swap(current, target, mode);
14848 // The other element will be processed on the next iteration.
14851 // The place for the current element is occupied. Leave the element
14852 // for the next probe.
// Returns |table| unchanged if it can absorb |n| more elements while
// staying at most ~50% full with few deleted entries; otherwise allocates
// a larger table and rehashes into it. Large or old-space tables are
// pretenured to avoid repeated promotion.
14861 template<typename Derived, typename Shape, typename Key>
14862 Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
14863 Handle<Derived> table,
14866 PretenureFlag pretenure) {
14867 Isolate* isolate = table->GetIsolate();
14868 int capacity = table->Capacity();
14869 int nof = table->NumberOfElements() + n;
14870 int nod = table->NumberOfDeletedElements();
14872 // 50% is still free after adding n elements and
14873 // at most 50% of the free elements are deleted elements.
14874 if (nod <= (capacity - nof) >> 1) {
14875 int needed_free = nof >> 1;
14876 if (nof + needed_free <= capacity) return table;
14879 const int kMinCapacityForPretenure = 256;
14880 bool should_pretenure = pretenure == TENURED ||
14881 ((capacity > kMinCapacityForPretenure) &&
14882 !isolate->heap()->InNewSpace(*table));
14883 Handle<Derived> new_table = HashTable::New(
14886 USE_DEFAULT_MINIMUM_CAPACITY,
14887 should_pretenure ? TENURED : NOT_TENURED);
14889 table->Rehash(new_table, key);
// Shrinks |table| to fit its current element count when it has become
// at most a quarter full, never below room for 16 elements. Returns the
// original table when shrinking is not worthwhile.
14894 template<typename Derived, typename Shape, typename Key>
14895 Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
14897 int capacity = table->Capacity();
14898 int nof = table->NumberOfElements();
14900 // Shrink to fit the number of elements if only a quarter of the
14901 // capacity is filled with elements.
14902 if (nof > (capacity >> 2)) return table;
14903 // Allocate a new dictionary with room for at least the current
14904 // number of elements. The allocation method will make sure that
14905 // there is extra room in the dictionary for additions. Don't go
14906 // lower than room for 16 elements.
14907 int at_least_room_for = nof;
14908 if (at_least_room_for < 16) return table;
14910 Isolate* isolate = table->GetIsolate();
14911 const int kMinCapacityForPretenure = 256;
14913 (at_least_room_for > kMinCapacityForPretenure) &&
14914 !isolate->heap()->InNewSpace(*table);
14915 Handle<Derived> new_table = HashTable::New(
14918 USE_DEFAULT_MINIMUM_CAPACITY,
14919 pretenure ? TENURED : NOT_TENURED);
14921 table->Rehash(new_table, key);
// Probes for the first free (undefined or hole) entry for |hash|.
// Termination is guaranteed because EnsureCapacity keeps the table from
// ever becoming full.
14926 template<typename Derived, typename Shape, typename Key>
14927 uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
14928 uint32_t capacity = Capacity();
14929 uint32_t entry = FirstProbe(hash, capacity);
14930 uint32_t count = 1;
14931 // EnsureCapacity will guarantee the hash table is never full.
14933 Object* element = KeyAt(entry);
14934 if (element->IsUndefined() || element->IsTheHole()) break;
14935 entry = NextProbe(entry, count++, capacity);
14941 // Force instantiation of template instances class.
14942 // Please note this list is compiler dependent.
// Explicit instantiations of the HashTable/Dictionary templates (and of
// individual members) so their definitions in this translation unit are
// emitted for every (Derived, Shape, Key) combination used elsewhere.
14944 template class HashTable<StringTable, StringTableShape, HashTableKey*>;
14946 template class HashTable<CompilationCacheTable,
14947 CompilationCacheShape,
14950 template class HashTable<MapCache, MapCacheShape, HashTableKey*>;
14952 template class HashTable<ObjectHashTable,
14953 ObjectHashTableShape,
14956 template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;
14958 template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;
14960 template class Dictionary<SeededNumberDictionary,
14961 SeededNumberDictionaryShape,
14964 template class Dictionary<UnseededNumberDictionary,
14965 UnseededNumberDictionaryShape,
// Member-function instantiations follow.
14968 template Handle<SeededNumberDictionary>
14969 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14970 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14972 template Handle<UnseededNumberDictionary>
14973 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14974 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14976 template Handle<NameDictionary>
14977 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14978 New(Isolate*, int n, PretenureFlag pretenure);
14980 template Handle<SeededNumberDictionary>
14981 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14982 AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);
14984 template Handle<UnseededNumberDictionary>
14985 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14986 AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);
14989 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14990 SlowReverseLookup(Object* value);
14993 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14994 SlowReverseLookup(Object* value);
14997 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14998 SlowReverseLookup(Object* value);
15001 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15004 PropertyAttributes,
15005 Dictionary<SeededNumberDictionary,
15006 SeededNumberDictionaryShape,
15007 uint32_t>::SortMode);
15009 template Handle<Object>
15010 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
15011 Handle<NameDictionary>, int, JSObject::DeleteMode);
15013 template Handle<Object>
15014 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15015 DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);
15017 template Handle<NameDictionary>
15018 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
15019 New(Isolate*, int, MinimumCapacity, PretenureFlag);
15021 template Handle<NameDictionary>
15022 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
15023 Shrink(Handle<NameDictionary>, Handle<Name>);
15025 template Handle<SeededNumberDictionary>
15026 HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15027 Shrink(Handle<SeededNumberDictionary>, uint32_t);
15029 template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
15033 PropertyAttributes,
15035 NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);
15038 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
15039 NumberOfElementsFilterAttributes(PropertyAttributes);
15041 template Handle<NameDictionary>
15042 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
15043 Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);
15046 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
15047 GenerateNewEnumerationIndices(Handle<NameDictionary>);
15050 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15051 NumberOfElementsFilterAttributes(PropertyAttributes);
15053 template Handle<SeededNumberDictionary>
15054 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15055 Add(Handle<SeededNumberDictionary>,
15060 template Handle<UnseededNumberDictionary>
15061 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
15062 Add(Handle<UnseededNumberDictionary>,
15067 template Handle<SeededNumberDictionary>
15068 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15069 EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);
15071 template Handle<UnseededNumberDictionary>
15072 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
15073 EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);
15075 template Handle<NameDictionary>
15076 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
15077 EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);
15080 int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15081 NumberOfEnumElements();
15084 int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
15085 NumberOfEnumElements();
15088 int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
15089 FindEntry(uint32_t);
// Compacts a dictionary-elements object for sorting: copies defined values
// into a fresh SeededNumberDictionary at consecutive small keys, trailing
// undefineds after them, and returns the count of defined values as a Number.
// Returns bailout (Smi -1) when the element layout forces the sort to be
// redone in JS (CALLBACKS / read-only / beyond-Smi keys).
// NOTE(review): several statements (e.g. pos/undefs increments, bailout
// returns, closing braces) are elided from this listing — confirm against
// the full source before relying on the exact control flow.
15092 Handle<Object> JSObject::PrepareSlowElementsForSort(
15093 Handle<JSObject> object, uint32_t limit) {
15094 ASSERT(object->HasDictionaryElements());
15095 Isolate* isolate = object->GetIsolate();
15096 // Must stay in dictionary mode, either because of requires_slow_elements,
15097 // or because we are not going to sort (and therefore compact) all of the
15099 Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
15100 Handle<SeededNumberDictionary> new_dict =
15101 SeededNumberDictionary::New(isolate, dict->NumberOfElements());
15104 uint32_t undefs = 0;
15105 int capacity = dict->Capacity();
15106 Handle<Smi> bailout(Smi::FromInt(-1), isolate);
15107 // Entry to the new dictionary does not cause it to grow, as we have
15108 // allocated one that is large enough for all entries.
15109 DisallowHeapAllocation no_gc;
15110 for (int i = 0; i < capacity; i++) {
15111 Object* k = dict->KeyAt(i);
15112 if (!dict->IsKey(k)) continue;
15114 ASSERT(k->IsNumber());
15115 ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
15116 ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
15117 ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
15119 HandleScope scope(isolate);
15120 Handle<Object> value(dict->ValueAt(i), isolate);
15121 PropertyDetails details = dict->DetailsAt(i);
15122 if (details.type() == CALLBACKS || details.IsReadOnly()) {
15123 // Bail out and do the sorting of undefineds and array holes in JS.
15124 // Also bail out if the element is not supposed to be moved.
15128 uint32_t key = NumberToUint32(k);
15130 if (value->IsUndefined()) {
15132 } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
15133 // Adding an entry with the key beyond smi-range requires
15134 // allocation. Bailout.
15137 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
15138 new_dict, pos, value, details);
15139 ASSERT(result.is_identical_to(new_dict));
15143 } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
15144 // Adding an entry with the key beyond smi-range requires
15145 // allocation. Bailout.
15148 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
15149 new_dict, key, value, details);
15150 ASSERT(result.is_identical_to(new_dict));
15155 uint32_t result = pos;
15156 PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
// Append one undefined entry per undefined value seen above, after the
// defined values.
15157 while (undefs > 0) {
15158 if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
15159 // Adding an entry with the key beyond smi-range requires
15160 // allocation. Bailout.
15163 HandleScope scope(isolate);
15164 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
15165 new_dict, pos, isolate->factory()->undefined_value(), no_details);
15166 ASSERT(result.is_identical_to(new_dict));
15172 object->set_elements(*new_dict);
15174 AllowHeapAllocation allocate_return_value;
15175 return isolate->factory()->NewNumberFromUint(result);
15179 // Collects all defined (non-hole) and non-undefined (array) elements at
15180 // the start of the elements array.
15181 // If the object is in dictionary mode, it is converted to fast elements
// (unless it must stay slow, in which case PrepareSlowElementsForSort is
// used). Returns the number of defined, non-undefined values as a Number,
// or Smi -1 as a bailout for sloppy-arguments / observed objects.
// NOTE(review): this listing elides statements (loop counters, closing
// braces, early returns) — confirm exact control flow against full source.
15183 Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
15185 Isolate* isolate = object->GetIsolate();
15186 if (object->HasSloppyArgumentsElements() ||
15187 object->map()->is_observed()) {
15188 return handle(Smi::FromInt(-1), isolate);
15191 if (object->HasDictionaryElements()) {
15192 // Convert to fast elements containing only the existing properties.
15193 // Ordering is irrelevant, since we are going to sort anyway.
15194 Handle<SeededNumberDictionary> dict(object->element_dictionary());
15195 if (object->IsJSArray() || dict->requires_slow_elements() ||
15196 dict->max_number_key() >= limit) {
15197 return JSObject::PrepareSlowElementsForSort(object, limit);
15199 // Convert to fast elements.
15201 Handle<Map> new_map =
15202 JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);
// Allocate the fast backing store in old space if the object itself is
// already tenured.
15204 PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
15205 NOT_TENURED: TENURED;
15206 Handle<FixedArray> fast_elements =
15207 isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
15208 dict->CopyValuesTo(*fast_elements);
15209 JSObject::ValidateElements(object);
15211 JSObject::SetMapAndElements(object, new_map, fast_elements);
15212 } else if (object->HasExternalArrayElements() ||
15213 object->HasFixedTypedArrayElements()) {
15214 // Typed arrays cannot have holes or undefined elements.
15215 return handle(Smi::FromInt(
15216 FixedArrayBase::cast(object->elements())->length()), isolate);
15217 } else if (!object->HasFastDoubleElements()) {
15218 EnsureWritableFastElements(object);
15220 ASSERT(object->HasFastSmiOrObjectElements() ||
15221 object->HasFastDoubleElements());
15223 // Collect holes at the end, undefined before that and the rest at the
15224 // start, and return the number of non-hole, non-undefined values.
15226 Handle<FixedArrayBase> elements_base(object->elements());
15227 uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
15228 if (limit > elements_length) {
15229 limit = elements_length ;
15232 return handle(Smi::FromInt(0), isolate);
15235 uint32_t result = 0;
15236 if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
15237 FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
15238 // Split elements into defined and the_hole, in that order.
15239 unsigned int holes = limit;
15240 // Assume most arrays contain no holes and undefined values, so minimize the
15241 // number of stores of non-undefined, non-the-hole values.
15242 for (unsigned int i = 0; i < holes; i++) {
15243 if (elements->is_the_hole(i)) {
15248 // Position i needs to be filled.
// Scan backwards from |holes| for a non-hole value to move into slot i.
15249 while (holes > i) {
15250 if (elements->is_the_hole(holes)) {
15253 elements->set(i, elements->get_scalar(holes));
// Fill the tail [holes, limit) with holes.
15259 while (holes < limit) {
15260 elements->set_the_hole(holes);
15264 FixedArray* elements = FixedArray::cast(*elements_base);
15265 DisallowHeapAllocation no_gc;
15267 // Split elements into defined, undefined and the_hole, in that order. Only
15268 // count locations for undefined and the hole, and fill them afterwards.
15269 WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
15270 unsigned int undefs = limit;
15271 unsigned int holes = limit;
15272 // Assume most arrays contain no holes and undefined values, so minimize the
15273 // number of stores of non-undefined, non-the-hole values.
15274 for (unsigned int i = 0; i < undefs; i++) {
15275 Object* current = elements->get(i);
15276 if (current->IsTheHole()) {
15279 } else if (current->IsUndefined()) {
15284 // Position i needs to be filled.
15285 while (undefs > i) {
15286 current = elements->get(undefs);
15287 if (current->IsTheHole()) {
15290 } else if (current->IsUndefined()) {
15293 elements->set(i, current, write_barrier);
// Fill [undefs, holes) with undefined and [holes, limit) with holes.
15299 while (undefs < holes) {
15300 elements->set_undefined(undefs);
15303 while (holes < limit) {
15304 elements->set_the_hole(holes);
15309 return isolate->factory()->NewNumberFromUint(result);
// Maps the elements backing store's instance type (external or fixed typed
// array) to the corresponding ExternalArrayType; -1 for unknown types.
15313 ExternalArrayType JSTypedArray::type() {
15314 switch (elements()->map()->instance_type()) {
15315 #define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size) \
15316 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
15317 case FIXED_##TYPE##_ARRAY_TYPE: \
15318 return kExternal##Type##Array;
15320 TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
15321 #undef INSTANCE_TYPE_TO_ARRAY_TYPE
15325 return static_cast<ExternalArrayType>(-1);
// Returns the per-element byte size for this typed array, keyed off the
// backing store's instance type. NOTE(review): the size-returning case body
// and default case are elided from this listing.
15330 size_t JSTypedArray::element_size() {
15331 switch (elements()->map()->instance_type()) {
15332 #define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size) \
15333 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
15336 TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
15337 #undef INSTANCE_TYPE_TO_ELEMENT_SIZE
// Stores |value| clamped to [0, 255] at |index| (out-of-range indices are
// ignored); Smis clamp directly, HeapNumbers round to nearest via lrint,
// NaN/negative clamp to 0. Returns the stored value as a Smi.
15346 Handle<Object> ExternalUint8ClampedArray::SetValue(
15347 Handle<ExternalUint8ClampedArray> array,
15349 Handle<Object> value) {
15350 uint8_t clamped_value = 0;
15351 if (index < static_cast<uint32_t>(array->length())) {
15352 if (value->IsSmi()) {
15353 int int_value = Handle<Smi>::cast(value)->value();
15354 if (int_value < 0) {
15356 } else if (int_value > 255) {
15357 clamped_value = 255;
15359 clamped_value = static_cast<uint8_t>(int_value);
15361 } else if (value->IsHeapNumber()) {
15362 double double_value = Handle<HeapNumber>::cast(value)->value();
// !(double_value > 0) is true for NaN as well as values <= 0.
15363 if (!(double_value > 0)) {
15364 // NaN and less than zero clamp to zero.
15366 } else if (double_value > 255) {
15367 // Greater than 255 clamp to 255.
15368 clamped_value = 255;
15370 // Other doubles are rounded to the nearest integer.
15371 clamped_value = static_cast<uint8_t>(lrint(double_value));
15374 // Clamp undefined to zero (default). All other types have been
15375 // converted to a number type further up in the call chain.
15376 ASSERT(value->IsUndefined());
15378 array->set(index, clamped_value);
15380 return handle(Smi::FromInt(clamped_value), array->GetIsolate());
// Shared helper for the integer external-array setters: converts |value|
// (Smi, HeapNumber via DoubleToInt32, or undefined -> 0) to ValueType and
// stores it at |index| when in range. Returns the stored value as a Number.
15384 template<typename ExternalArrayClass, typename ValueType>
15385 static Handle<Object> ExternalArrayIntSetter(
15387 Handle<ExternalArrayClass> receiver,
15389 Handle<Object> value) {
15390 ValueType cast_value = 0;
15391 if (index < static_cast<uint32_t>(receiver->length())) {
15392 if (value->IsSmi()) {
15393 int int_value = Handle<Smi>::cast(value)->value();
15394 cast_value = static_cast<ValueType>(int_value);
15395 } else if (value->IsHeapNumber()) {
15396 double double_value = Handle<HeapNumber>::cast(value)->value();
15397 cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
15399 // Clamp undefined to zero (default). All other types have been
15400 // converted to a number type further up in the call chain.
15401 ASSERT(value->IsUndefined());
15403 receiver->set(index, cast_value);
15405 return isolate->factory()->NewNumberFromInt(cast_value);
// int8_t store; delegates conversion and bounds handling to
// ExternalArrayIntSetter.
15409 Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
15411 Handle<Object> value) {
15412 return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
15413 array->GetIsolate(), array, index, value);
// uint8_t store (non-clamped); delegates to ExternalArrayIntSetter.
15417 Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
15419 Handle<Object> value) {
15420 return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
15421 array->GetIsolate(), array, index, value);
// int16_t store; delegates to ExternalArrayIntSetter.
15425 Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
15427 Handle<Object> value) {
15428 return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
15429 array->GetIsolate(), array, index, value);
// uint16_t store; delegates to ExternalArrayIntSetter.
15433 Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
15435 Handle<Object> value) {
15436 return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
15437 array->GetIsolate(), array, index, value);
// int32_t store; delegates to ExternalArrayIntSetter.
15441 Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
15443 Handle<Object> value) {
15444 return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
15445 array->GetIsolate(), array, index, value);
// uint32_t store: Smi values are reinterpreted via static_cast, HeapNumbers
// go through DoubleToUint32, undefined stores 0. Out-of-range indices are
// ignored. Returns the stored value as a Number.
15449 Handle<Object> ExternalUint32Array::SetValue(
15450 Handle<ExternalUint32Array> array,
15452 Handle<Object> value) {
15453 uint32_t cast_value = 0;
15454 if (index < static_cast<uint32_t>(array->length())) {
15455 if (value->IsSmi()) {
15456 int int_value = Handle<Smi>::cast(value)->value();
15457 cast_value = static_cast<uint32_t>(int_value);
15458 } else if (value->IsHeapNumber()) {
15459 double double_value = Handle<HeapNumber>::cast(value)->value();
15460 cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
15462 // Clamp undefined to zero (default). All other types have been
15463 // converted to a number type further up in the call chain.
15464 ASSERT(value->IsUndefined());
15466 array->set(index, cast_value);
15468 return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
// float store: numbers are narrowed to float, undefined stores NaN.
// Out-of-range indices are ignored. Returns the stored value as a Number.
15472 Handle<Object> ExternalFloat32Array::SetValue(
15473 Handle<ExternalFloat32Array> array,
15475 Handle<Object> value) {
15476 float cast_value = static_cast<float>(OS::nan_value());
15477 if (index < static_cast<uint32_t>(array->length())) {
15478 if (value->IsSmi()) {
15479 int int_value = Handle<Smi>::cast(value)->value();
15480 cast_value = static_cast<float>(int_value);
15481 } else if (value->IsHeapNumber()) {
15482 double double_value = Handle<HeapNumber>::cast(value)->value();
15483 cast_value = static_cast<float>(double_value);
15485 // Clamp undefined to NaN (default). All other types have been
15486 // converted to a number type further up in the call chain.
15487 ASSERT(value->IsUndefined());
15489 array->set(index, cast_value);
15491 return array->GetIsolate()->factory()->NewNumber(cast_value);
// double store: any Number is stored as-is, undefined stores NaN.
// Out-of-range indices are ignored. Returns the stored value as a Number.
15495 Handle<Object> ExternalFloat64Array::SetValue(
15496 Handle<ExternalFloat64Array> array,
15498 Handle<Object> value) {
15499 double double_value = OS::nan_value();
15500 if (index < static_cast<uint32_t>(array->length())) {
15501 if (value->IsNumber()) {
15502 double_value = value->Number();
15504 // Clamp undefined to NaN (default). All other types have been
15505 // converted to a number type further up in the call chain.
15506 ASSERT(value->IsUndefined());
15508 array->set(index, double_value);
15510 return array->GetIsolate()->factory()->NewNumber(double_value);
// float32x4 SIMD store: copies a Float32x4 value, undefined stores four
// NaN lanes. Out-of-range indices are ignored. Returns a boxed Float32x4.
15514 Handle<Object> ExternalFloat32x4Array::SetValue(
15515 Handle<ExternalFloat32x4Array> array,
15517 Handle<Object> value) {
15518 float32x4_value_t cast_value;
15519 cast_value.storage[0] = static_cast<float>(OS::nan_value());
15520 cast_value.storage[1] = static_cast<float>(OS::nan_value());
15521 cast_value.storage[2] = static_cast<float>(OS::nan_value());
15522 cast_value.storage[3] = static_cast<float>(OS::nan_value());
15523 if (index < static_cast<uint32_t>(array->length())) {
15524 if (value->IsFloat32x4()) {
15525 cast_value = Handle<Float32x4>::cast(value)->value();
15527 // Clamp undefined to NaN (default). All other types have been
15528 // converted to a number type further up in the call chain.
15529 ASSERT(value->IsUndefined());
15531 array->set(index, cast_value);
15533 return array->GetIsolate()->factory()->NewFloat32x4(cast_value);
// int32x4 SIMD store: copies an Int32x4 value, undefined stores four zero
// lanes. Out-of-range indices are ignored. Returns a boxed Int32x4.
15537 Handle<Object> ExternalInt32x4Array::SetValue(
15538 Handle<ExternalInt32x4Array> array, uint32_t index, Handle<Object> value) {
15539 int32x4_value_t cast_value;
15540 cast_value.storage[0] = 0;
15541 cast_value.storage[1] = 0;
15542 cast_value.storage[2] = 0;
15543 cast_value.storage[3] = 0;
15544 if (index < static_cast<uint32_t>(array->length())) {
15545 if (value->IsInt32x4()) {
15546 cast_value = Handle<Int32x4>::cast(value)->value();
15548 // Clamp undefined to zero (default). All other types have been
15549 // converted to a number type further up in the call chain.
15550 ASSERT(value->IsUndefined());
15552 array->set(index, cast_value);
15554 return array->GetIsolate()->factory()->NewInt32x4(cast_value);
// float64x2 SIMD store: copies a Float64x2 value, undefined stores two NaN
// lanes. Out-of-range indices are ignored. Returns a boxed Float64x2.
15558 Handle<Object> ExternalFloat64x2Array::SetValue(
15559 Handle<ExternalFloat64x2Array> array,
15561 Handle<Object> value) {
15562 float64x2_value_t cast_value;
15563 cast_value.storage[0] = OS::nan_value();
15564 cast_value.storage[1] = OS::nan_value();
15565 if (index < static_cast<uint32_t>(array->length())) {
15566 if (value->IsFloat64x2()) {
15567 cast_value = Handle<Float64x2>::cast(value)->value();
15569 // Clamp undefined to NaN (default). All other types have been
15570 // converted to a number type further up in the call chain.
15571 ASSERT(value->IsUndefined());
15573 array->set(index, cast_value);
15575 return array->GetIsolate()->factory()->NewFloat64x2(cast_value);
// Returns the PropertyCell stored as the dictionary value for the property
// found by |result|. Only valid for slow-mode (dictionary) global objects.
15579 PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
15580 ASSERT(!HasFastProperties());
15581 Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
15582 return PropertyCell::cast(value);
// Looks up |name| in the global's property dictionary and returns its
// PropertyCell, creating a the-hole-valued cell (with deleted details) and
// inserting it when the property does not yet exist.
// NOTE(review): the return for the newly-created-cell path is elided from
// this listing.
15586 Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
15587 Handle<JSGlobalObject> global,
15588 Handle<Name> name) {
15589 ASSERT(!global->HasFastProperties());
15590 int entry = global->property_dictionary()->FindEntry(name);
15591 if (entry == NameDictionary::kNotFound) {
15592 Isolate* isolate = global->GetIsolate();
15593 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
15594 isolate->factory()->the_hole_value());
// Mark the cell's property as deleted until a real value is written.
15595 PropertyDetails details(NONE, NORMAL, 0);
15596 details = details.AsDeleted();
15597 Handle<NameDictionary> dictionary = NameDictionary::Add(
15598 handle(global->property_dictionary()), name, cell, details);
15599 global->set_properties(*dictionary);
15602 Object* value = global->property_dictionary()->ValueAt(entry);
15603 ASSERT(value->IsPropertyCell());
15604 return handle(PropertyCell::cast(value));
15609 // This class is used for looking up two character strings in the string table.
15610 // If we don't have a hit we don't want to waste much time so we unroll the
15611 // string hash calculation loop here for speed. Doesn't work if the two
15612 // characters form a decimal integer, since such strings have a different hash
// (array-index hash) — callers must not pass two digits.
15614 class TwoCharHashTableKey : public HashTableKey {
// Precomputes the hash of the two-character string (c1, c2) using the same
// mixing steps as StringHasher, seeded with |seed|.
15616 TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
15617 : c1_(c1), c2_(c2) {
15619 uint32_t hash = seed;
15621 hash += hash << 10;
15625 hash += hash << 10;
15629 hash ^= hash >> 11;
15630 hash += hash << 15;
// Zero hashes are reserved; substitute the canonical non-zero value.
15631 if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
15634 // If this assert fails then we failed to reproduce the two-character
15635 // version of the string hashing algorithm above. One reason could be
15636 // that we were passed two digits as characters, since the hash
15637 // algorithm is different in that case.
15638 uint16_t chars[2] = {c1, c2};
15639 uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
15640 hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
15641 ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
// Matches only two-character strings whose characters equal c1_ and c2_.
15645 bool IsMatch(Object* o) V8_OVERRIDE {
15646 if (!o->IsString()) return false;
15647 String* other = String::cast(o);
15648 if (other->length() != 2) return false;
15649 if (other->Get(0) != c1_) return false;
15650 return other->Get(1) == c2_;
15653 uint32_t Hash() V8_OVERRIDE { return hash_; }
15654 uint32_t HashForObject(Object* key) V8_OVERRIDE {
15655 if (!key->IsString()) return 0;
15656 return String::cast(key)->Hash();
15659 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
15660 // The TwoCharHashTableKey is only used for looking in the string
15661 // table, not for adding to it.
// Deliberately aborts: ToHandleChecked() on an empty MaybeHandle.
15663 return MaybeHandle<Object>().ToHandleChecked();
// Returns the internalized version of |string| if one exists: the string
// itself if already internalized, otherwise a string-table lookup.
// NOTE(review): the early-return body for the internalized case is elided.
15673 MaybeHandle<String> StringTable::InternalizeStringIfExists(
15675 Handle<String> string) {
15676 if (string->IsInternalizedString()) {
15679 return LookupStringIfExists(isolate, string);
// Looks |string| up in the isolate's string table without inserting it;
// returns an empty MaybeHandle when absent.
15683 MaybeHandle<String> StringTable::LookupStringIfExists(
15685 Handle<String> string) {
15686 Handle<StringTable> string_table = isolate->factory()->string_table();
15687 InternalizedStringKey key(string);
15688 int entry = string_table->FindEntry(&key);
15689 if (entry == kNotFound) {
15690 return MaybeHandle<String>();
15692 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
15693 ASSERT(StringShape(*result).IsInternalized());
// Looks up the two-character string (c1, c2) in the string table without
// inserting; returns an empty MaybeHandle when absent.
15699 MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
15703 Handle<StringTable> string_table = isolate->factory()->string_table();
15704 TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
15705 int entry = string_table->FindEntry(&key);
15706 if (entry == kNotFound) {
15707 return MaybeHandle<String>();
15709 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
15710 ASSERT(StringShape(*result).IsInternalized());
// Internalizes |string|: finds or inserts it in the string table via
// LookupKey.
15716 Handle<String> StringTable::LookupString(Isolate* isolate,
15717 Handle<String> string) {
15718 InternalizedStringKey key(string);
15719 return LookupKey(isolate, &key);
// Finds |key| in the string table, or inserts the string it produces
// (growing the table if necessary) and publishes the updated table back to
// the factory. Returns the canonical (internalized) string.
15723 Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
15724 Handle<StringTable> table = isolate->factory()->string_table();
15725 int entry = table->FindEntry(key);
15727 // String already in table.
15728 if (entry != kNotFound) {
15729 return handle(String::cast(table->KeyAt(entry)), isolate);
15732 // Adding new string. Grow table if needed.
15733 table = StringTable::EnsureCapacity(table, 1, key);
15735 // Create string object.
15736 Handle<Object> string = key->AsHandle(isolate);
15737 // There must be no attempts to internalize strings that could throw
15738 // InvalidStringLength error.
15739 CHECK(!string.is_null());
15741 // Add the new string and return it along with the string table.
15742 entry = table->FindInsertionEntry(key->Hash());
15743 table->set(EntryToIndex(entry), *string);
15744 table->ElementAdded();
15746 isolate->factory()->set_string_table(table);
15747 return Handle<String>::cast(string);
// Looks up the cached compilation result for script source |src| in
// |context|; returns undefined on a miss. The value lives in the slot after
// the key.
15751 Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
15752 Handle<Context> context) {
15753 Isolate* isolate = GetIsolate();
15754 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15755 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
15756 RelocInfo::kNoPosition);
15757 int entry = FindEntry(&key);
15758 if (entry == kNotFound) return isolate->factory()->undefined_value();
15759 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Looks up a cached eval() compilation keyed by source, calling context's
// shared function, strict mode, and scope position; undefined on a miss.
15763 Handle<Object> CompilationCacheTable::LookupEval(Handle<String> src,
15764 Handle<Context> context,
15765 StrictMode strict_mode,
15766 int scope_position) {
15767 Isolate* isolate = GetIsolate();
15768 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15769 StringSharedKey key(src, shared, strict_mode, scope_position);
15770 int entry = FindEntry(&key);
15771 if (entry == kNotFound) return isolate->factory()->undefined_value();
15772 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Looks up cached compiled-regexp data for (src, flags); undefined on miss.
15776 Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
15777 JSRegExp::Flags flags) {
15778 Isolate* isolate = GetIsolate();
15779 DisallowHeapAllocation no_allocation;
15780 RegExpKey key(src, flags);
15781 int entry = FindEntry(&key);
15782 if (entry == kNotFound) return isolate->factory()->undefined_value();
15783 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Inserts a script compilation result keyed by (src, context closure,
// strictness). Grows the cache if needed; the possibly-reallocated cache is
// the return value (elided from this listing).
15787 Handle<CompilationCacheTable> CompilationCacheTable::Put(
15788 Handle<CompilationCacheTable> cache, Handle<String> src,
15789 Handle<Context> context, Handle<Object> value) {
15790 Isolate* isolate = cache->GetIsolate();
15791 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15792 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
15793 RelocInfo::kNoPosition);
15794 cache = EnsureCapacity(cache, 1, &key);
15795 Handle<Object> k = key.AsHandle(isolate);
15796 int entry = cache->FindInsertionEntry(key.Hash());
15797 cache->set(EntryToIndex(entry), *k);
15798 cache->set(EntryToIndex(entry) + 1, *value);
15799 cache->ElementAdded();
// Inserts an eval() compilation result keyed by (src, context closure,
// value's strict mode, scope position). Grows the cache if needed; the
// possibly-reallocated cache is the return value (elided from this listing).
15804 Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
15805 Handle<CompilationCacheTable> cache, Handle<String> src,
15806 Handle<Context> context, Handle<SharedFunctionInfo> value,
15807 int scope_position) {
15808 Isolate* isolate = cache->GetIsolate();
15809 Handle<SharedFunctionInfo> shared(context->closure()->shared());
15810 StringSharedKey key(src, shared, value->strict_mode(), scope_position);
15811 cache = EnsureCapacity(cache, 1, &key);
15812 Handle<Object> k = key.AsHandle(isolate);
15813 int entry = cache->FindInsertionEntry(key.Hash());
15814 cache->set(EntryToIndex(entry), *k);
15815 cache->set(EntryToIndex(entry) + 1, *value);
15816 cache->ElementAdded();
// Inserts compiled-regexp data for (src, flags). Note the value is stored in
// both the key and value slots — see comment below.
15821 Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
15822 Handle<CompilationCacheTable> cache, Handle<String> src,
15823 JSRegExp::Flags flags, Handle<FixedArray> value) {
15824 RegExpKey key(src, flags);
15825 cache = EnsureCapacity(cache, 1, &key);
15826 int entry = cache->FindInsertionEntry(key.Hash());
15827 // We store the value in the key slot, and compare the search key
15828 // to the stored value with a custom IsMatch function during lookups.
15829 cache->set(EntryToIndex(entry), *value);
15830 cache->set(EntryToIndex(entry) + 1, *value);
15831 cache->ElementAdded();
// Removes every entry whose value slot equals |value| by overwriting both
// key and value slots with the-hole (write barrier skipped — the-hole is
// an immortal immovable object).
15836 void CompilationCacheTable::Remove(Object* value) {
15837 DisallowHeapAllocation no_allocation;
15838 Object* the_hole_value = GetHeap()->the_hole_value();
15839 for (int entry = 0, size = Capacity(); entry < size; entry++) {
15840 int entry_index = EntryToIndex(entry);
15841 int value_index = entry_index + 1;
15842 if (get(value_index) == value) {
15843 NoWriteBarrierSet(this, entry_index, the_hole_value);
15844 NoWriteBarrierSet(this, value_index, the_hole_value);
15852 // StringsKey used for HashTable where key is array of internalized strings.
15853 class StringsKey : public HashTableKey {
15855 explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }
// Two keys match when their arrays have equal length and identical (by
// pointer) elements at every index.
15857 bool IsMatch(Object* strings) V8_OVERRIDE {
15858 FixedArray* o = FixedArray::cast(strings);
15859 int len = strings_->length();
15860 if (o->length() != len) return false;
15861 for (int i = 0; i < len; i++) {
15862 if (o->get(i) != strings_->get(i)) return false;
15867 uint32_t Hash() V8_OVERRIDE { return HashForObject(*strings_); }
// Hash is the XOR of the element strings' hashes (initialization of the
// accumulator is elided from this listing).
15869 uint32_t HashForObject(Object* obj) V8_OVERRIDE {
15870 FixedArray* strings = FixedArray::cast(obj);
15871 int len = strings->length();
15873 for (int i = 0; i < len; i++) {
15874 hash ^= String::cast(strings->get(i))->Hash();
15879 Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { return strings_; }
15882 Handle<FixedArray> strings_;
// Looks up the cached Map for the given array of internalized strings;
// returns undefined on a miss.
15886 Object* MapCache::Lookup(FixedArray* array) {
15887 DisallowHeapAllocation no_alloc;
15888 StringsKey key(handle(array));
15889 int entry = FindEntry(&key);
15890 if (entry == kNotFound) return GetHeap()->undefined_value();
15891 return get(EntryToIndex(entry) + 1);
// Inserts (array -> value) into the map cache, growing if needed. The
// possibly-reallocated cache (new_cache) is the return value (elided from
// this listing).
15895 Handle<MapCache> MapCache::Put(
15896 Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
15897 StringsKey key(array);
15899 Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
15900 int entry = new_cache->FindInsertionEntry(key.Hash());
15901 new_cache->set(EntryToIndex(entry), *array);
15902 new_cache->set(EntryToIndex(entry) + 1, *value);
15903 new_cache->ElementAdded();
// Allocates a dictionary with room for |at_least_space_for| entries and
// seeds its next enumeration index. The pretenure flag line is elided from
// this listing.
15908 template<typename Derived, typename Shape, typename Key>
15909 Handle<Derived> Dictionary<Derived, Shape, Key>::New(
15911 int at_least_space_for,
15912 PretenureFlag pretenure) {
15913 ASSERT(0 <= at_least_space_for);
15914 Handle<Derived> dict = DerivedHashTable::New(isolate,
15915 at_least_space_for,
15916 USE_DEFAULT_MINIMUM_CAPACITY,
15919 // Initialize the next enumeration index.
15920 dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
// Renumbers all entries' enumeration indices to a dense range starting at
// kInitialIndex, preserving the existing enumeration order, then bumps the
// dictionary's next enumeration index past the live entries. Used when the
// index space is about to overflow.
15925 template<typename Derived, typename Shape, typename Key>
15926 void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
15927 Handle<Derived> dictionary) {
15928 Factory* factory = dictionary->GetIsolate()->factory();
15929 int length = dictionary->NumberOfElements();
15931 // Allocate and initialize iteration order array.
15932 Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
15933 for (int i = 0; i < length; i++) {
15934 iteration_order->set(i, Smi::FromInt(i));
15937 // Allocate array with enumeration order.
15938 Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);
15940 // Fill the enumeration order array with property details.
15941 int capacity = dictionary->Capacity();
15943 for (int i = 0; i < capacity; i++) {
15944 if (dictionary->IsKey(dictionary->KeyAt(i))) {
15945 int index = dictionary->DetailsAt(i).dictionary_index();
15946 enumeration_order->set(pos++, Smi::FromInt(index));
15950 // Sort the arrays wrt. enumeration order.
15951 iteration_order->SortPairs(*enumeration_order, enumeration_order->length());
15953 // Overwrite the enumeration_order with the enumeration indices.
15954 for (int i = 0; i < length; i++) {
15955 int index = Smi::cast(iteration_order->get(i))->value();
15956 int enum_index = PropertyDetails::kInitialIndex + i;
15957 enumeration_order->set(index, Smi::FromInt(enum_index));
15960 // Update the dictionary with new indices.
15961 capacity = dictionary->Capacity();
15963 for (int i = 0; i < capacity; i++) {
15964 if (dictionary->IsKey(dictionary->KeyAt(i))) {
15965 int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
15966 PropertyDetails details = dictionary->DetailsAt(i);
15967 PropertyDetails new_details = PropertyDetails(
15968 details.attributes(), details.type(), enum_index);
15969 dictionary->DetailsAtPut(i, new_details);
15973 // Set the next enumeration index.
15974 dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
// Grows the dictionary for n additional elements; first compacts the
// enumeration-index space if adding n entries would overflow it.
15978 template<typename Derived, typename Shape, typename Key>
15979 Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
15980 Handle<Derived> dictionary, int n, Key key) {
15981 // Check whether there are enough enumeration indices to add n elements.
15982 if (Shape::kIsEnumerable &&
15983 !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
15984 // If not, we generate new indices for the properties.
15985 GenerateNewEnumerationIndices(dictionary);
15987 return DerivedHashTable::EnsureCapacity(dictionary, n, key);
// Deletes the entry at |entry|, honoring DONT_DELETE unless the deletion is
// forced. Returns true_value on success, false_value when blocked.
15991 template<typename Derived, typename Shape, typename Key>
15992 Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
15993 Handle<Derived> dictionary,
15995 JSObject::DeleteMode mode) {
15996 Factory* factory = dictionary->GetIsolate()->factory();
15997 PropertyDetails details = dictionary->DetailsAt(entry);
15998 // Ignore attributes if forcing a deletion.
15999 if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
16000 return factory->false_value();
16003 dictionary->SetEntry(
16004 entry, factory->the_hole_value(), factory->the_hole_value());
16005 dictionary->ElementRemoved();
16006 return factory->true_value();
// Sets key -> value: overwrites in place when the key exists, otherwise
// grows if needed and adds a fresh entry with default (NONE, NORMAL)
// details. Returns the (possibly reallocated) dictionary; the return
// statements are elided from this listing.
16010 template<typename Derived, typename Shape, typename Key>
16011 Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
16012 Handle<Derived> dictionary, Key key, Handle<Object> value) {
16013 int entry = dictionary->FindEntry(key);
16015 // If the entry is present set the value;
16016 if (entry != Dictionary::kNotFound) {
16017 dictionary->ValueAtPut(entry, *value);
16021 // Check whether the dictionary should be extended.
16022 dictionary = EnsureCapacity(dictionary, 1, key);
// USE() keeps the AsHandle call (and any side effect/assert in it) without
// a binding for the result.
16024 USE(Shape::AsHandle(dictionary->GetIsolate(), key));
16026 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
16028 AddEntry(dictionary, key, value, details, dictionary->Hash(key));
// Adds a new (key, value, details) entry; the key must not already be
// present. Returns the (possibly reallocated) dictionary; the return
// statement is elided from this listing.
16033 template<typename Derived, typename Shape, typename Key>
16034 Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
16035 Handle<Derived> dictionary,
16037 Handle<Object> value,
16038 PropertyDetails details) {
16039 // Validate key is absent.
16040 SLOW_ASSERT((dictionary->FindEntry(key) == Dictionary::kNotFound));
16041 // Check whether the dictionary should be extended.
16042 dictionary = EnsureCapacity(dictionary, 1, key);
16044 AddEntry(dictionary, key, value, details, dictionary->Hash(key));
16049 // Add a key, value pair to the dictionary.
// Writes (key, value, details) into the insertion slot for |hash|; for
// enumerable shapes, fresh non-deleted entries get the next enumeration
// index assigned here.
16050 template<typename Derived, typename Shape, typename Key>
16051 void Dictionary<Derived, Shape, Key>::AddEntry(
16052 Handle<Derived> dictionary,
16054 Handle<Object> value,
16055 PropertyDetails details,
16057 // Compute the key object.
16058 Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);
16060 uint32_t entry = dictionary->FindInsertionEntry(hash);
16061 // Insert element at empty or deleted entry
16062 if (!details.IsDeleted() &&
16063 details.dictionary_index() == 0 &&
16064 Shape::kIsEnumerable) {
16065 // Assign an enumeration index to the property and update
16066 // SetNextEnumerationIndex.
16067 int index = dictionary->NextEnumerationIndex();
16068 details = PropertyDetails(details.attributes(), details.type(), index);
16069 dictionary->SetNextEnumerationIndex(index + 1);
16071 dictionary->SetEntry(entry, k, value, details);
16072 ASSERT((dictionary->KeyAt(entry)->IsNumber() ||
16073 dictionary->KeyAt(entry)->IsName()));
16074 dictionary->ElementAdded();
// Records the largest numeric key seen so far, and flips the dictionary
// into "requires slow elements" mode once a key exceeds the limit.
// Must not allocate: only raw field updates are performed.
16078 void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
16079 DisallowHeapAllocation no_allocation;
16080 // If the dictionary requires slow elements an element has already
16081 // been added at a high index.
16082 if (requires_slow_elements()) return;
16083 // Check if this index is high enough that we should require slow
16085 if (key > kRequiresSlowElementsLimit) {
16086 set_requires_slow_elements();
16089 // Update max key value.
16090 Object* max_index_object = get(kMaxNumberKeyIndex);
16091 if (!max_index_object->IsSmi() || max_number_key() < key) {
// The key is stored shifted left so the low bits can carry the
// "requires slow elements" tag.
16092 FixedArray::set(kMaxNumberKeyIndex,
16093 Smi::FromInt(key << kRequiresSlowElementsTagSize));
// Adds a numeric key (must be absent), keeping the max-key bookkeeping
// up to date before delegating to the generic Add.
// NOTE(review): the |uint32_t key| parameter line is elided here.
16098 Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
16099 Handle<SeededNumberDictionary> dictionary,
16101 Handle<Object> value,
16102 PropertyDetails details) {
16103 dictionary->UpdateMaxNumberKey(key);
16104 SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound);
16105 return Add(dictionary, key, value, details);
// Adds a numeric key (must be absent) with default property details.
// Unseeded dictionaries do no max-key tracking.
16109 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
16110 Handle<UnseededNumberDictionary> dictionary,
16112 Handle<Object> value) {
16113 SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound);
16114 return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
// Add-or-overwrite for a numeric key; updates max-key tracking first.
16118 Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
16119 Handle<SeededNumberDictionary> dictionary,
16121 Handle<Object> value) {
16122 dictionary->UpdateMaxNumberKey(key);
16123 return AtPut(dictionary, key, value);
// Add-or-overwrite for a numeric key (no max-key tracking here).
16127 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
16128 Handle<UnseededNumberDictionary> dictionary,
16130 Handle<Object> value) {
16131 return AtPut(dictionary, key, value);
// Sets value and details for a numeric key. Adds the entry when absent;
// otherwise overwrites in place while preserving the entry's existing
// enumeration index (only the attributes/type come from |details|).
16135 Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
16136 Handle<SeededNumberDictionary> dictionary,
16138 Handle<Object> value,
16139 PropertyDetails details) {
16140 int entry = dictionary->FindEntry(key);
16141 if (entry == kNotFound) {
16142 return AddNumberEntry(dictionary, key, value, details);
16144 // Preserve enumeration index.
16145 details = PropertyDetails(details.attributes(),
16147 dictionary->DetailsAt(entry).dictionary_index());
16148 Handle<Object> object_key =
16149 SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
16150 dictionary->SetEntry(entry, object_key, value, details);
// Sets the value for a numeric key, adding the entry when absent.
16155 Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
16156 Handle<UnseededNumberDictionary> dictionary,
16158 Handle<Object> value) {
16159 int entry = dictionary->FindEntry(key);
16160 if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
16161 Handle<Object> object_key =
16162 UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
16163 dictionary->SetEntry(entry, object_key, value);
// Counts live entries whose attributes are NOT excluded by |filter|
// (an entry counts when (attr & filter) == 0). Deleted entries and
// filtered keys are skipped. O(capacity) linear scan.
16169 template<typename Derived, typename Shape, typename Key>
16170 int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
16171 PropertyAttributes filter) {
16172 int capacity = DerivedHashTable::Capacity();
16174 for (int i = 0; i < capacity; i++) {
16175 Object* k = DerivedHashTable::KeyAt(i);
16176 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
16177 PropertyDetails details = DetailsAt(i);
16178 if (details.IsDeleted()) continue;
16179 PropertyAttributes attr = details.attributes();
16180 if ((attr & filter) == 0) result++;
// Counts entries visible to for-in style enumeration: excludes DONT_ENUM
// properties and symbolic keys.
16187 template<typename Derived, typename Shape, typename Key>
16188 int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
16189 return NumberOfElementsFilterAttributes(
16190 static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
// Copies keys passing |filter| into |storage| (which the caller must have
// sized via NumberOfElementsFilterAttributes), optionally sorting them.
// NOTE(review): the declaration/initialization of |index| is elided in
// this listing.
16194 template<typename Derived, typename Shape, typename Key>
16195 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
16196 FixedArray* storage,
16197 PropertyAttributes filter,
16198 typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
16199 ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
16200 int capacity = DerivedHashTable::Capacity();
16202 for (int i = 0; i < capacity; i++) {
16203 Object* k = DerivedHashTable::KeyAt(i);
16204 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
16205 PropertyDetails details = DetailsAt(i);
16206 if (details.IsDeleted()) continue;
16207 PropertyAttributes attr = details.attributes();
16208 if ((attr & filter) == 0) storage->set(index++, k);
16211 if (sort_mode == Dictionary::SORTED) {
16212 storage->SortPairs(storage, index);
16214 ASSERT(storage->length() >= index);
// Strict-weak-ordering functor for std::sort: orders dictionary entry
// indices (boxed as Smis) by their properties' enumeration indices.
// Holds a raw NameDictionary pointer, so it must only be used while
// allocation is disallowed.
16218 struct EnumIndexComparator {
16219 explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
16220 bool operator() (Smi* a, Smi* b) {
16221 PropertyDetails da(dict->DetailsAt(a->value()));
16222 PropertyDetails db(dict->DetailsAt(b->value()));
16223 return da.dictionary_index() < db.dictionary_index();
16225 NameDictionary* dict;
// Fills |storage| with the dictionary's enumerable, non-symbol keys in
// enumeration-index order. Two passes: first collect the entry indices
// (as Smis), then sort them by enumeration index and replace each index
// with the actual key.
16229 void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
16230 int length = storage->length();
16231 int capacity = Capacity();
16232 int properties = 0;
16233 for (int i = 0; i < capacity; i++) {
16234 Object* k = KeyAt(i);
16235 if (IsKey(k) && !k->IsSymbol()) {
16236 PropertyDetails details = DetailsAt(i);
16237 if (details.IsDeleted() || details.IsDontEnum()) continue;
16238 storage->set(properties, Smi::FromInt(i));
// Stop once |storage| is full.
16240 if (properties == length) break;
16243 EnumIndexComparator cmp(this);
// Sort the raw Smi-encoded indices in place inside |storage|.
16244 Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
16245 std::sort(start, start + length, cmp);
16246 for (int i = 0; i < length; i++) {
16247 int index = Smi::cast(storage->get(i))->value();
16248 storage->set(i, KeyAt(index));
// Overload of CopyKeysTo that starts writing at a caller-supplied index
// into |storage| (the |int index| parameter line is elided in this
// listing). Otherwise identical to the two-argument variant above.
16253 template<typename Derived, typename Shape, typename Key>
16254 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
16255 FixedArray* storage,
16257 PropertyAttributes filter,
16258 typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
16259 ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
16260 int capacity = DerivedHashTable::Capacity();
16261 for (int i = 0; i < capacity; i++) {
16262 Object* k = DerivedHashTable::KeyAt(i);
16263 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
16264 PropertyDetails details = DetailsAt(i);
16265 if (details.IsDeleted()) continue;
16266 PropertyAttributes attr = details.attributes();
16267 if ((attr & filter) == 0) storage->set(index++, k);
16270 if (sort_mode == Dictionary::SORTED) {
16271 storage->SortPairs(storage, index);
16273 ASSERT(storage->length() >= index);
16277 // Backwards lookup (slow).
// Returns the first key whose value equals |value| (unwrapping
// PropertyCells for global dictionaries), or undefined if none matches.
// O(capacity) — intended for debugging/rare paths only.
16278 template<typename Derived, typename Shape, typename Key>
16279 Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
16280 int capacity = DerivedHashTable::Capacity();
16281 for (int i = 0; i < capacity; i++) {
16282 Object* k = DerivedHashTable::KeyAt(i);
16283 if (Dictionary::IsKey(k)) {
16284 Object* e = ValueAt(i);
16285 if (e->IsPropertyCell()) {
16286 e = PropertyCell::cast(e)->value();
16288 if (e == value) return k;
16291 Heap* heap = Dictionary::GetHeap();
16292 return heap->undefined_value();
// Returns the value mapped to |key|, or the hole if absent. A key with
// no identity hash can never have been inserted, so it short-circuits.
16296 Object* ObjectHashTable::Lookup(Handle<Object> key) {
16297 DisallowHeapAllocation no_gc;
16298 ASSERT(IsKey(*key));
16300 // If the object does not have an identity hash, it was never used as a key.
16301 Object* hash = key->GetHash();
16302 if (hash->IsUndefined()) {
16303 return GetHeap()->the_hole_value();
16305 int entry = FindEntry(key);
16306 if (entry == kNotFound) return GetHeap()->the_hole_value();
// Values are stored in the slot following the key.
16307 return get(EntryToIndex(entry) + 1);
// Maps |key| to |value|. Storing the hole removes the mapping (and may
// shrink the table); an existing key is overwritten in place; otherwise
// the table is grown and a new entry added at the key's identity hash.
// NOTE(review): lines are elided in this listing (e.g. the returns after
// the overwrite branch and the final AddEntry arguments).
16311 Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
16312 Handle<Object> key,
16313 Handle<Object> value) {
16314 ASSERT(table->IsKey(*key));
16316 Isolate* isolate = table->GetIsolate();
16318 // Make sure the key object has an identity hash code.
16319 Handle<Object> hash = Object::GetOrCreateHash(key, isolate);
16321 int entry = table->FindEntry(key);
16323 // Check whether to perform removal operation.
16324 if (value->IsTheHole()) {
16325 if (entry == kNotFound) return table;
16326 table->RemoveEntry(entry);
16327 return Shrink(table, key);
16330 // Key is already in table, just overwrite value.
16331 if (entry != kNotFound) {
16332 table->set(EntryToIndex(entry) + 1, *value);
16336 // Check whether the hash table should be extended.
16337 table = EnsureCapacity(table, 1, key);
16338 table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()),
// Writes key and value into the (already located) entry's two slots.
16345 void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
16346 set(EntryToIndex(entry), key);
16347 set(EntryToIndex(entry) + 1, value);
// Clears both slots of an entry to the hole (tombstone).
16352 void ObjectHashTable::RemoveEntry(int entry) {
16353 set_the_hole(EntryToIndex(entry));
16354 set_the_hole(EntryToIndex(entry) + 1);
// Returns the value mapped to |key|, or the hole if absent.
16359 Object* WeakHashTable::Lookup(Handle<Object> key) {
16360 DisallowHeapAllocation no_gc;
16361 ASSERT(IsKey(*key));
16362 int entry = FindEntry(key);
16363 if (entry == kNotFound) return GetHeap()->the_hole_value();
16364 return get(EntryToValueIndex(entry));
// Maps |key| to |value|: overwrites in place when present, otherwise
// grows (tenured allocation) and adds a new entry.
// NOTE(review): elided lines here presumably include the return after the
// overwrite branch and the final return of |table| — confirm.
16368 Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
16369 Handle<Object> key,
16370 Handle<Object> value) {
16371 ASSERT(table->IsKey(*key));
16372 int entry = table->FindEntry(key);
16373 // Key is already in table, just overwrite value.
16374 if (entry != kNotFound) {
16375 table->set(EntryToValueIndex(entry), *value);
16379 // Check whether the hash table should be extended.
16380 table = EnsureCapacity(table, 1, key, TENURED);
16382 table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
// Writes key and value into the (already located) entry's slots.
// Must not allocate while the raw writes happen.
16387 void WeakHashTable::AddEntry(int entry,
16388 Handle<Object> key,
16389 Handle<Object> value) {
16390 DisallowHeapAllocation no_allocation;
16391 set(EntryToIndex(entry), *key);
16392 set(EntryToValueIndex(entry), *value);
// Allocates an empty ordered hash table with room for |capacity| entries
// (rounded up to a power of two, min kMinCapacity). Layout: header,
// |capacity / kLoadFactor| bucket heads, then the entry data area.
// NOTE(review): the trailing return of |table| is elided in this listing.
16397 template<class Derived, class Iterator, int entrysize>
16398 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
16399 Isolate* isolate, int capacity, PretenureFlag pretenure) {
16400 // Capacity must be a power of two, since we depend on being able
16401 // to divide and multiple by 2 (kLoadFactor) to derive capacity
16402 // from number of buckets. If we decide to change kLoadFactor
16403 // to something other than 2, capacity should be stored as another
16404 // field of this object.
16405 capacity = RoundUpToPowerOf2(Max(kMinCapacity, capacity));
16406 if (capacity > kMaxCapacity) {
16407 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
16409 int num_buckets = capacity / kLoadFactor;
16410 Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
16411 kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
16412 backing_store->set_map_no_write_barrier(
16413 isolate->heap()->ordered_hash_table_map());
16414 Handle<Derived> table = Handle<Derived>::cast(backing_store);
// Every bucket starts out empty (chain head = kNotFound).
16415 for (int i = 0; i < num_buckets; ++i) {
16416 table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
16418 table->SetNumberOfBuckets(num_buckets);
16419 table->SetNumberOfElements(0);
16420 table->SetNumberOfDeletedElements(0);
16421 table->set_iterators(isolate->heap()->undefined_value());
// Guarantees room for at least one more entry. Entries are appended, so
// the table is "full" once used capacity (live + deleted) hits capacity;
// rehashing then either compacts (if many tombstones) or doubles.
16426 template<class Derived, class Iterator, int entrysize>
16427 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
16428 Handle<Derived> table) {
16429 int nof = table->NumberOfElements();
16430 int nod = table->NumberOfDeletedElements();
16431 int capacity = table->Capacity();
16432 if ((nof + nod) < capacity) return table;
16433 // Don't need to grow if we can simply clear out deleted entries instead.
16434 // Note that we can't compact in place, though, so we always allocate
16436 return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
// Halves the table's capacity once it is no more than a quarter full.
16440 template<class Derived, class Iterator, int entrysize>
16441 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
16442 Handle<Derived> table) {
16443 int nof = table->NumberOfElements();
16444 int capacity = table->Capacity();
16445 if (nof > (capacity >> 2)) return table;
16446 return Rehash(table, capacity / 2);
// Replaces the table with a fresh empty one, transferring the linked list
// of live iterators to the new table and notifying each iterator that
// the table was cleared. Tenuring matches the old table's space.
// NOTE(review): the Allocate capacity argument and the final return of
// |new_table| are elided in this listing.
16450 template<class Derived, class Iterator, int entrysize>
16451 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
16452 Handle<Derived> table) {
16453 Handle<Derived> new_table =
16454 Allocate(table->GetIsolate(),
16456 table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
16458 new_table->set_iterators(table->iterators());
16459 table->set_iterators(table->GetHeap()->undefined_value());
16461 DisallowHeapAllocation no_allocation;
// Walk the iterator chain and repoint every iterator at the new table.
16462 for (Object* object = new_table->iterators();
16463 !object->IsUndefined();
16464 object = Iterator::cast(object)->next_iterator()) {
16465 Iterator::cast(object)->TableCleared();
16466 Iterator::cast(object)->set_table(*new_table);
// Copies all live entries into a freshly allocated table of
// |new_capacity|, rebuilding the bucket chains and dropping tombstones,
// then migrates and notifies the iterator chain.
// NOTE(review): elided lines include the Allocate capacity argument, the
// declaration/increment of |new_entry|, and the final return — confirm.
16473 template<class Derived, class Iterator, int entrysize>
16474 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
16475 Handle<Derived> table, int new_capacity) {
16476 Handle<Derived> new_table =
16477 Allocate(table->GetIsolate(),
16479 table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
16480 int nof = table->NumberOfElements();
16481 int nod = table->NumberOfDeletedElements();
16482 int new_buckets = new_table->NumberOfBuckets();
16484 for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
16485 Object* key = table->KeyAt(old_entry);
// Skip tombstones (holes) left by removed entries.
16486 if (key->IsTheHole()) continue;
16487 Object* hash = key->GetHash();
// Bucket count is a power of two, so masking selects the bucket.
16488 int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
// Prepend the new entry to its bucket's chain.
16489 Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
16490 new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
16491 int new_index = new_table->EntryToIndex(new_entry);
16492 int old_index = table->EntryToIndex(old_entry);
16493 for (int i = 0; i < entrysize; ++i) {
16494 Object* value = table->get(old_index + i);
16495 new_table->set(new_index + i, value);
16497 new_table->set(new_index + kChainOffset, chain_entry);
16500 new_table->SetNumberOfElements(nof);
16502 new_table->set_iterators(table->iterators());
16503 table->set_iterators(table->GetHeap()->undefined_value());
16505 DisallowHeapAllocation no_allocation;
16506 for (Object* object = new_table->iterators();
16507 !object->IsUndefined();
16508 object = Iterator::cast(object)->next_iterator()) {
16509 Iterator::cast(object)->TableCompacted();
16510 Iterator::cast(object)->set_table(*new_table);
// Finds |key|'s entry by walking its bucket chain, comparing with
// SameValue semantics. A key without an identity hash was never inserted.
// NOTE(review): the "return entry;" / "return kNotFound;" lines are
// elided in this listing.
16517 template<class Derived, class Iterator, int entrysize>
16518 int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
16519 Handle<Object> key) {
16520 DisallowHeapAllocation no_gc;
16521 ASSERT(!key->IsTheHole());
16522 Object* hash = key->GetHash();
16523 if (hash->IsUndefined()) return kNotFound;
16524 for (int entry = HashToEntry(Smi::cast(hash)->value());
16525 entry != kNotFound;
16526 entry = ChainAt(entry)) {
16527 Object* candidate = KeyAt(entry);
16528 if (candidate->SameValue(*key))
// Appends a new entry after the last used slot, links it into the front
// of its hash bucket's chain, and bumps the element count. The caller
// writes the key/value payload at the returned index.
// NOTE(review): the "return index;" line is elided in this listing.
16535 template<class Derived, class Iterator, int entrysize>
16536 int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) {
16537 int entry = UsedCapacity();
16538 int bucket = HashToBucket(hash);
16539 int index = EntryToIndex(entry);
16540 Object* chain_entry = get(kHashTableStartIndex + bucket);
16541 set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
16542 set(index + kChainOffset, chain_entry);
16543 SetNumberOfElements(NumberOfElements() + 1);
// Tombstones an entry (holes its payload slots), adjusts the live/deleted
// counts, and notifies every registered iterator so it can fix its
// position. The slot is reclaimed later by Rehash, not here.
16548 template<class Derived, class Iterator, int entrysize>
16549 void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
16550 int index = EntryToIndex(entry);
16551 for (int i = 0; i < entrysize; ++i) {
16552 set_the_hole(index + i);
16554 SetNumberOfElements(NumberOfElements() - 1);
16555 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
16557 DisallowHeapAllocation no_allocation;
16558 for (Object* object = iterators();
16559 !object->IsUndefined();
16560 object = Iterator::cast(object)->next_iterator()) {
16561 Iterator::cast(object)->EntryRemoved(entry);
// Explicit template instantiations of OrderedHashTable for the two
// concrete table types (OrderedHashSet: 1 slot/entry, OrderedHashMap:
// 2 slots/entry), so the definitions above can live in this .cc file.
16566 template Handle<OrderedHashSet>
16567 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
16568 Isolate* isolate, int capacity, PretenureFlag pretenure);
16570 template Handle<OrderedHashSet>
16571 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
16572 Handle<OrderedHashSet> table);
16574 template Handle<OrderedHashSet>
16575 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
16576 Handle<OrderedHashSet> table);
16578 template Handle<OrderedHashSet>
16579 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
16580 Handle<OrderedHashSet> table);
16583 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
16584 Handle<Object> key);
16587 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);
16590 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);
16593 template Handle<OrderedHashMap>
16594 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
16595 Isolate* isolate, int capacity, PretenureFlag pretenure);
16597 template Handle<OrderedHashMap>
16598 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
16599 Handle<OrderedHashMap> table);
16601 template Handle<OrderedHashMap>
16602 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
16603 Handle<OrderedHashMap> table);
16605 template Handle<OrderedHashMap>
16606 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
16607 Handle<OrderedHashMap> table);
16610 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
16611 Handle<Object> key);
16614 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);
16617 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);
// Membership test: true iff |key| has an entry in the set.
16620 bool OrderedHashSet::Contains(Handle<Object> key) {
16621 return FindEntry(key) != kNotFound;
// Inserts |key| if absent (no-op otherwise), creating the key's identity
// hash on demand. Returns the (possibly reallocated) table.
// NOTE(review): the trailing "return table;" is elided in this listing.
16625 Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
16626 Handle<Object> key) {
16627 if (table->FindEntry(key) != kNotFound) return table;
16629 table = EnsureGrowable(table);
16631 Handle<Object> hash = GetOrCreateHash(key, table->GetIsolate());
16632 int index = table->AddEntry(Smi::cast(*hash)->value());
16633 table->set(index, *key);
// Removes |key| if present, shrinking the table when sparse enough.
16638 Handle<OrderedHashSet> OrderedHashSet::Remove(Handle<OrderedHashSet> table,
16639 Handle<Object> key) {
16640 int entry = table->FindEntry(key);
16641 if (entry == kNotFound) return table;
16642 table->RemoveEntry(entry);
16643 return Shrink(table);
// Returns the value mapped to |key|, or the hole if absent.
16647 Object* OrderedHashMap::Lookup(Handle<Object> key) {
16648 DisallowHeapAllocation no_gc;
16649 int entry = FindEntry(key);
16650 if (entry == kNotFound) return GetHeap()->the_hole_value();
16651 return ValueAt(entry);
// Maps |key| to |value|. Storing the hole deletes the mapping (and may
// shrink); an existing key is overwritten in place; otherwise the table
// grows and a new key/value entry is appended.
// NOTE(review): elided lines here presumably include the returns after
// the overwrite branch and the final return of |table| — confirm.
16655 Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table,
16656 Handle<Object> key,
16657 Handle<Object> value) {
16658 int entry = table->FindEntry(key);
16660 if (value->IsTheHole()) {
16661 if (entry == kNotFound) return table;
16662 table->RemoveEntry(entry);
16663 return Shrink(table);
16666 if (entry != kNotFound) {
16667 table->set(table->EntryToIndex(entry) + kValueOffset, *value);
16671 table = EnsureGrowable(table);
16673 Handle<Object> hash = GetOrCreateHash(key, table->GetIsolate());
16674 int index = table->AddEntry(Smi::cast(*hash)->value());
16675 table->set(index, *key);
16676 table->set(index + kValueOffset, *value);
// Called by the table when an entry is tombstoned, so the iterator can
// keep its position/count consistent.
// NOTE(review): most of this body (lines 16684, 16686-16692) is elided in
// this listing; only the count decrement is visible.
16681 template<class Derived, class TableType>
16682 void OrderedHashTableIterator<Derived, TableType>::EntryRemoved(int index) {
16683 int i = this->index()->value();
16685 set_count(Smi::FromInt(count()->value() - 1));
// Detaches this iterator: unlinks it from the table's doubly linked list
// of live iterators (fixing head or neighbor pointers) and clears its
// own links and table reference. Idempotent via the Closed() guard.
16693 template<class Derived, class TableType>
16694 void OrderedHashTableIterator<Derived, TableType>::Close() {
16695 if (Closed()) return;
16697 DisallowHeapAllocation no_allocation;
16699 Object* undefined = GetHeap()->undefined_value();
16700 TableType* table = TableType::cast(this->table());
16701 Object* previous = previous_iterator();
16702 Object* next = next_iterator();
// No predecessor means we are the list head stored on the table.
16704 if (previous == undefined) {
16705 ASSERT_EQ(table->iterators(), this);
16706 table->set_iterators(next);
16708 ASSERT_EQ(Derived::cast(previous)->next_iterator(), this);
16709 Derived::cast(previous)->set_next_iterator(next);
16712 if (!next->IsUndefined()) {
16713 ASSERT_EQ(Derived::cast(next)->previous_iterator(), this);
16714 Derived::cast(next)->set_previous_iterator(previous);
16717 set_previous_iterator(undefined);
16718 set_next_iterator(undefined);
// A cleared table reference is what Closed() detects.
16719 set_table(undefined);
// Advances the stored index past tombstoned (hole) entries to the next
// live entry, or to used-capacity if none remain.
// NOTE(review): the loop's index increment line is elided in this listing.
16723 template<class Derived, class TableType>
16724 void OrderedHashTableIterator<Derived, TableType>::Seek() {
16727 DisallowHeapAllocation no_allocation;
16729 int index = this->index()->value();
16731 TableType* table = TableType::cast(this->table());
16732 int used_capacity = table->UsedCapacity();
16734 while (index < used_capacity && table->KeyAt(index)->IsTheHole()) {
16737 set_index(Smi::FromInt(index));
// Steps the iterator forward one entry, tracking how many entries have
// been consumed via the count field.
16741 template<class Derived, class TableType>
16742 void OrderedHashTableIterator<Derived, TableType>::MoveNext() {
16745 set_index(Smi::FromInt(index()->value() + 1));
16746 set_count(Smi::FromInt(count()->value() + 1));
// ES6-style iterator step: returns {value, done:false} for the next live
// entry, or {undefined, done:true} once the table is detached or
// exhausted. The concrete value shape is chosen by Derived::ValueForKind.
16751 template<class Derived, class TableType>
16752 Handle<JSObject> OrderedHashTableIterator<Derived, TableType>::Next(
16753 Handle<Derived> iterator) {
16754 Isolate* isolate = iterator->GetIsolate();
16755 Factory* factory = isolate->factory();
// A closed iterator has its table slot set to undefined.
16757 Handle<Object> object(iterator->table(), isolate);
16759 if (!object->IsUndefined()) {
16760 Handle<TableType> table = Handle<TableType>::cast(object);
16761 int index = iterator->index()->value();
16762 if (index < table->UsedCapacity()) {
16763 int entry_index = table->EntryToIndex(index);
16764 iterator->MoveNext();
16765 Handle<Object> value = Derived::ValueForKind(iterator, entry_index);
16766 return factory->NewIteratorResultObject(value, false);
16772 return factory->NewIteratorResultObject(factory->undefined_value(), true);
// Allocates a new iterator over |table| with the given result |kind| and
// pushes it onto the front of the table's doubly linked iterator list so
// the table can notify it of removals/rehashes.
// NOTE(review): the |int kind| parameter line is elided in this listing.
16776 template<class Derived, class TableType>
16777 Handle<Derived> OrderedHashTableIterator<Derived, TableType>::CreateInternal(
16779 Handle<TableType> table,
16781 Isolate* isolate = table->GetIsolate();
16783 Handle<Object> undefined = isolate->factory()->undefined_value();
16785 Handle<Derived> new_iterator = Handle<Derived>::cast(
16786 isolate->factory()->NewJSObjectFromMap(map));
16787 new_iterator->set_previous_iterator(*undefined);
16788 new_iterator->set_table(*table);
16789 new_iterator->set_index(Smi::FromInt(0));
16790 new_iterator->set_count(Smi::FromInt(0));
16791 new_iterator->set_kind(Smi::FromInt(kind));
// Link in at the head of the table's iterator list.
16793 Handle<Object> old_iterator(table->iterators(), isolate);
16794 if (!old_iterator->IsUndefined()) {
16795 Handle<Derived>::cast(old_iterator)->set_previous_iterator(*new_iterator);
16796 new_iterator->set_next_iterator(*old_iterator);
16798 new_iterator->set_next_iterator(*undefined);
16801 table->set_iterators(*new_iterator);
16803 return new_iterator;
// Explicit instantiations of OrderedHashTableIterator for the JS Set and
// JS Map iterator types.
16808 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::EntryRemoved(
16812 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Close();
16814 template Handle<JSObject>
16815 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next(
16816 Handle<JSSetIterator> iterator);
16818 template Handle<JSSetIterator>
16819 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::CreateInternal(
16820 Handle<Map> map, Handle<OrderedHashSet> table, int kind);
16824 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::EntryRemoved(
16828 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Close();
16830 template Handle<JSObject>
16831 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
16832 Handle<JSMapIterator> iterator);
16834 template Handle<JSMapIterator>
16835 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::CreateInternal(
16836 Handle<Map> map, Handle<OrderedHashMap> table, int kind);
// Produces the iterator result value for a set entry: the element itself
// for kKindValues, or a [value, value] pair array for kKindEntries
// (matching Set.prototype.entries semantics).
// NOTE(review): the plain "return value;" for kKindValues is elided here.
16839 Handle<Object> JSSetIterator::ValueForKind(
16840 Handle<JSSetIterator> iterator, int entry_index) {
16841 int kind = iterator->kind()->value();
16842 // Set.prototype only has values and entries.
16843 ASSERT(kind == kKindValues || kind == kKindEntries);
16845 Isolate* isolate = iterator->GetIsolate();
16846 Factory* factory = isolate->factory();
16848 Handle<OrderedHashSet> table(
16849 OrderedHashSet::cast(iterator->table()), isolate);
16850 Handle<Object> value = Handle<Object>(table->get(entry_index), isolate);
16852 if (kind == kKindEntries) {
16853 Handle<FixedArray> array = factory->NewFixedArray(2);
16854 array->set(0, *value);
16855 array->set(1, *value);
16856 return factory->NewJSArrayWithElements(array);
// Produces the iterator result value for a map entry: key, value, or a
// [key, value] pair array, depending on the iterator kind. Keys live at
// entry_index, values at entry_index + 1.
// NOTE(review): the switch header and some case labels are elided in
// this listing; the final undefined return is the unreachable fallback.
16863 Handle<Object> JSMapIterator::ValueForKind(
16864 Handle<JSMapIterator> iterator, int entry_index) {
16865 int kind = iterator->kind()->value();
16866 ASSERT(kind == kKindKeys || kind == kKindValues || kind == kKindEntries);
16868 Isolate* isolate = iterator->GetIsolate();
16869 Factory* factory = isolate->factory();
16871 Handle<OrderedHashMap> table(
16872 OrderedHashMap::cast(iterator->table()), isolate);
16876 return Handle<Object>(table->get(entry_index), isolate);
16879 return Handle<Object>(table->get(entry_index + 1), isolate);
16881 case kKindEntries: {
16882 Handle<Object> key(table->get(entry_index), isolate);
16883 Handle<Object> value(table->get(entry_index + 1), isolate);
16884 Handle<FixedArray> array = factory->NewFixedArray(2);
16885 array->set(0, *key);
16886 array->set(1, *value);
16887 return factory->NewJSArrayWithElements(array);
16892 return factory->undefined_value();
// Iterator over the raw serialized data of a DeclaredAccessorDescriptor.
// Caches the data start address and length; presumably also initializes
// the cursor (offset_) to 0 on the elided init-list line — confirm.
16896 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
16897 DeclaredAccessorDescriptor* descriptor)
16898 : array_(descriptor->serialized_data()->GetDataStartAddress()),
16899 length_(descriptor->serialized_data()->length()),
// Returns a pointer to the next fixed-size data record in the serialized
// stream and advances the cursor. Asserts proper alignment and bounds.
// NOTE(review): the "return data;" line is elided in this listing.
16904 const DeclaredAccessorDescriptorData*
16905 DeclaredAccessorDescriptorIterator::Next() {
16906 ASSERT(offset_ < length_);
16907 uint8_t* ptr = &array_[offset_];
16908 ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
16909 const DeclaredAccessorDescriptorData* data =
16910 reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
16911 offset_ += sizeof(*data);
16912 ASSERT(offset_ <= length_);
// Builds a descriptor whose serialized data is the concatenation of
// |previous|'s data (if any) followed by one new fixed-size record
// copied from |descriptor|.
// NOTE(review): the Isolate* parameter line and the final return of
// |value| are elided in this listing.
16917 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
16919 const DeclaredAccessorDescriptorData& descriptor,
16920 Handle<DeclaredAccessorDescriptor> previous) {
16921 int previous_length =
16922 previous.is_null() ? 0 : previous->serialized_data()->length();
16923 int length = sizeof(descriptor) + previous_length;
16924 Handle<ByteArray> serialized_descriptor =
16925 isolate->factory()->NewByteArray(length);
16926 Handle<DeclaredAccessorDescriptor> value =
16927 isolate->factory()->NewDeclaredAccessorDescriptor();
16928 value->set_serialized_data(*serialized_descriptor);
16929 // Copy in the data.
// Raw pointers into the byte array are held below, so GC must be off.
16931 DisallowHeapAllocation no_allocation;
16932 uint8_t* array = serialized_descriptor->GetDataStartAddress();
16933 if (previous_length != 0) {
16934 uint8_t* previous_array =
16935 previous->serialized_data()->GetDataStartAddress();
16936 OS::MemCopy(array, previous_array, previous_length);
16937 array += previous_length;
16939 ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
16940 DeclaredAccessorDescriptorData* data =
16941 reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
16942 *data = descriptor;
16948 // Check if there is a break point at this code position.
// True only when a BreakPointInfo exists for the position AND it holds
// at least one break point object.
16949 bool DebugInfo::HasBreakPoint(int code_position) {
16950 // Get the break point info object for this code position.
16951 Object* break_point_info = GetBreakPointInfo(code_position);
16953 // If there is no break point info object or no break points in the break
16954 // point info object there is no break point at this code position.
16955 if (break_point_info->IsUndefined()) return false;
16956 return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
16960 // Get the break point info object for this code position.
// Returns undefined when no BreakPointInfo exists for the position.
16961 Object* DebugInfo::GetBreakPointInfo(int code_position) {
16962 // Find the index of the break point info object for this code position.
16963 int index = GetBreakPointInfoIndex(code_position);
16965 // Return the break point info object if any.
16966 if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
16967 return BreakPointInfo::cast(break_points()->get(index));
16971 // Clear a break point at the specified code position.
// No-op when the position has no BreakPointInfo; the |int code_position|
// parameter line is elided in this listing.
16972 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
16974 Handle<Object> break_point_object) {
16975 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16976 debug_info->GetIsolate());
16977 if (break_point_info->IsUndefined()) return;
16978 BreakPointInfo::ClearBreakPoint(
16979 Handle<BreakPointInfo>::cast(break_point_info),
16980 break_point_object);
// Adds |break_point_object| at |code_position|. Reuses the existing
// BreakPointInfo for that position when present; otherwise finds (or
// creates, by growing the break_points array) a free slot and allocates
// a new BreakPointInfo carrying the code/source/statement positions.
// NOTE(review): several lines are elided in this listing (e.g. the
// |int code_position| parameter, the early return after reusing an
// existing info, the free-slot bookkeeping, and the copy of old entries
// ordering) — confirm against the full source.
16984 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
16986 int source_position,
16987 int statement_position,
16988 Handle<Object> break_point_object) {
16989 Isolate* isolate = debug_info->GetIsolate();
16990 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16992 if (!break_point_info->IsUndefined()) {
16993 BreakPointInfo::SetBreakPoint(
16994 Handle<BreakPointInfo>::cast(break_point_info),
16995 break_point_object);
16999 // Adding a new break point for a code position which did not have any
17000 // break points before. Try to find a free slot.
17001 int index = kNoBreakPointInfo;
17002 for (int i = 0; i < debug_info->break_points()->length(); i++) {
17003 if (debug_info->break_points()->get(i)->IsUndefined()) {
17008 if (index == kNoBreakPointInfo) {
17009 // No free slot - extend break point info array.
17010 Handle<FixedArray> old_break_points =
17011 Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
17012 Handle<FixedArray> new_break_points =
17013 isolate->factory()->NewFixedArray(
17014 old_break_points->length() +
17015 Debug::kEstimatedNofBreakPointsInFunction);
17017 debug_info->set_break_points(*new_break_points);
17018 for (int i = 0; i < old_break_points->length(); i++) {
17019 new_break_points->set(i, old_break_points->get(i));
17021 index = old_break_points->length();
17023 ASSERT(index != kNoBreakPointInfo);
17025 // Allocate new BreakPointInfo object and set the break point.
17026 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
17027 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
17028 new_break_point_info->set_code_position(Smi::FromInt(code_position));
17029 new_break_point_info->set_source_position(Smi::FromInt(source_position));
17030 new_break_point_info->
17031 set_statement_position(Smi::FromInt(statement_position));
17032 new_break_point_info->set_break_point_objects(
17033 isolate->heap()->undefined_value());
17034 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
17035 debug_info->break_points()->set(index, *new_break_point_info);
17039 // Get the break point objects for a code position.
// Returns undefined when the position has no BreakPointInfo.
17040 Object* DebugInfo::GetBreakPointObjects(int code_position) {
17041 Object* break_point_info = GetBreakPointInfo(code_position);
17042 if (break_point_info->IsUndefined()) {
17043 return GetHeap()->undefined_value();
17045 return BreakPointInfo::cast(break_point_info)->break_point_objects();
17049 // Get the total number of break points.
// Sums the per-position counts over every populated break_points slot.
// NOTE(review): the declaration of |count| and the final return are
// elided in this listing.
17050 int DebugInfo::GetBreakPointCount() {
17051 if (break_points()->IsUndefined()) return 0;
17053 for (int i = 0; i < break_points()->length(); i++) {
17054 if (!break_points()->get(i)->IsUndefined()) {
17055 BreakPointInfo* break_point_info =
17056 BreakPointInfo::cast(break_points()->get(i));
17057 count += break_point_info->GetBreakPointCount();
17064 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
17065 Handle<Object> break_point_object) {
17066 Heap* heap = debug_info->GetHeap();
17067 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
17068 for (int i = 0; i < debug_info->break_points()->length(); i++) {
17069 if (!debug_info->break_points()->get(i)->IsUndefined()) {
17070 Handle<BreakPointInfo> break_point_info =
17071 Handle<BreakPointInfo>(BreakPointInfo::cast(
17072 debug_info->break_points()->get(i)));
17073 if (BreakPointInfo::HasBreakPointObject(break_point_info,
17074 break_point_object)) {
17075 return *break_point_info;
17079 return heap->undefined_value();
17083 // Find the index of the break point info object for the specified code
17085 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
17086 if (break_points()->IsUndefined()) return kNoBreakPointInfo;
17087 for (int i = 0; i < break_points()->length(); i++) {
17088 if (!break_points()->get(i)->IsUndefined()) {
17089 BreakPointInfo* break_point_info =
17090 BreakPointInfo::cast(break_points()->get(i));
17091 if (break_point_info->code_position()->value() == code_position) {
17096 return kNoBreakPointInfo;
17100 // Remove the specified break point object.
17101 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
17102 Handle<Object> break_point_object) {
17103 Isolate* isolate = break_point_info->GetIsolate();
17104 // If there are no break points just ignore.
17105 if (break_point_info->break_point_objects()->IsUndefined()) return;
17106 // If there is a single break point clear it if it is the same.
17107 if (!break_point_info->break_point_objects()->IsFixedArray()) {
17108 if (break_point_info->break_point_objects() == *break_point_object) {
17109 break_point_info->set_break_point_objects(
17110 isolate->heap()->undefined_value());
17114 // If there are multiple break points shrink the array
17115 ASSERT(break_point_info->break_point_objects()->IsFixedArray());
17116 Handle<FixedArray> old_array =
17117 Handle<FixedArray>(
17118 FixedArray::cast(break_point_info->break_point_objects()));
17119 Handle<FixedArray> new_array =
17120 isolate->factory()->NewFixedArray(old_array->length() - 1);
17121 int found_count = 0;
17122 for (int i = 0; i < old_array->length(); i++) {
17123 if (old_array->get(i) == *break_point_object) {
17124 ASSERT(found_count == 0);
17127 new_array->set(i - found_count, old_array->get(i));
17130 // If the break point was found in the list change it.
17131 if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
17135 // Add the specified break point object.
17136 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
17137 Handle<Object> break_point_object) {
17138 Isolate* isolate = break_point_info->GetIsolate();
17140 // If there was no break point objects before just set it.
17141 if (break_point_info->break_point_objects()->IsUndefined()) {
17142 break_point_info->set_break_point_objects(*break_point_object);
17145 // If the break point object is the same as before just ignore.
17146 if (break_point_info->break_point_objects() == *break_point_object) return;
17147 // If there was one break point object before replace with array.
17148 if (!break_point_info->break_point_objects()->IsFixedArray()) {
17149 Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
17150 array->set(0, break_point_info->break_point_objects());
17151 array->set(1, *break_point_object);
17152 break_point_info->set_break_point_objects(*array);
17155 // If there was more than one break point before extend array.
17156 Handle<FixedArray> old_array =
17157 Handle<FixedArray>(
17158 FixedArray::cast(break_point_info->break_point_objects()));
17159 Handle<FixedArray> new_array =
17160 isolate->factory()->NewFixedArray(old_array->length() + 1);
17161 for (int i = 0; i < old_array->length(); i++) {
17162 // If the break point was there before just ignore.
17163 if (old_array->get(i) == *break_point_object) return;
17164 new_array->set(i, old_array->get(i));
17166 // Add the new break point.
17167 new_array->set(old_array->length(), *break_point_object);
17168 break_point_info->set_break_point_objects(*new_array);
17172 bool BreakPointInfo::HasBreakPointObject(
17173 Handle<BreakPointInfo> break_point_info,
17174 Handle<Object> break_point_object) {
17176 if (break_point_info->break_point_objects()->IsUndefined()) return false;
17177 // Single break point.
17178 if (!break_point_info->break_point_objects()->IsFixedArray()) {
17179 return break_point_info->break_point_objects() == *break_point_object;
17181 // Multiple break points.
17182 FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
17183 for (int i = 0; i < array->length(); i++) {
17184 if (array->get(i) == *break_point_object) {
17192 // Get the number of break points.
17193 int BreakPointInfo::GetBreakPointCount() {
17195 if (break_point_objects()->IsUndefined()) return 0;
17196 // Single break point.
17197 if (!break_point_objects()->IsFixedArray()) return 1;
17198 // Multiple break points.
17199 return FixedArray::cast(break_point_objects())->length();
17203 Object* JSDate::GetField(Object* object, Smi* index) {
17204 return JSDate::cast(object)->DoGetField(
17205 static_cast<FieldIndex>(index->value()));
17209 Object* JSDate::DoGetField(FieldIndex index) {
17210 ASSERT(index != kDateValue);
17212 DateCache* date_cache = GetIsolate()->date_cache();
17214 if (index < kFirstUncachedField) {
17215 Object* stamp = cache_stamp();
17216 if (stamp != date_cache->stamp() && stamp->IsSmi()) {
17217 // Since the stamp is not NaN, the value is also not NaN.
17218 int64_t local_time_ms =
17219 date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
17220 SetLocalFields(local_time_ms, date_cache);
17223 case kYear: return year();
17224 case kMonth: return month();
17225 case kDay: return day();
17226 case kWeekday: return weekday();
17227 case kHour: return hour();
17228 case kMinute: return min();
17229 case kSecond: return sec();
17230 default: UNREACHABLE();
17234 if (index >= kFirstUTCField) {
17235 return GetUTCField(index, value()->Number(), date_cache);
17238 double time = value()->Number();
17239 if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
17241 int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
17242 int days = DateCache::DaysFromTime(local_time_ms);
17244 if (index == kDays) return Smi::FromInt(days);
17246 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
17247 if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
17248 ASSERT(index == kTimeInDay);
17249 return Smi::FromInt(time_in_day_ms);
17253 Object* JSDate::GetUTCField(FieldIndex index,
17255 DateCache* date_cache) {
17256 ASSERT(index >= kFirstUTCField);
17258 if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
17260 int64_t time_ms = static_cast<int64_t>(value);
17262 if (index == kTimezoneOffset) {
17263 return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
17266 int days = DateCache::DaysFromTime(time_ms);
17268 if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
17270 if (index <= kDayUTC) {
17271 int year, month, day;
17272 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
17273 if (index == kYearUTC) return Smi::FromInt(year);
17274 if (index == kMonthUTC) return Smi::FromInt(month);
17275 ASSERT(index == kDayUTC);
17276 return Smi::FromInt(day);
17279 int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
17281 case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
17282 case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
17283 case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
17284 case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
17285 case kDaysUTC: return Smi::FromInt(days);
17286 case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
17287 default: UNREACHABLE();
17295 void JSDate::SetValue(Object* value, bool is_value_nan) {
17297 if (is_value_nan) {
17298 HeapNumber* nan = GetIsolate()->heap()->nan_value();
17299 set_cache_stamp(nan, SKIP_WRITE_BARRIER);
17300 set_year(nan, SKIP_WRITE_BARRIER);
17301 set_month(nan, SKIP_WRITE_BARRIER);
17302 set_day(nan, SKIP_WRITE_BARRIER);
17303 set_hour(nan, SKIP_WRITE_BARRIER);
17304 set_min(nan, SKIP_WRITE_BARRIER);
17305 set_sec(nan, SKIP_WRITE_BARRIER);
17306 set_weekday(nan, SKIP_WRITE_BARRIER);
17308 set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
17313 void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
17314 int days = DateCache::DaysFromTime(local_time_ms);
17315 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
17316 int year, month, day;
17317 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
17318 int weekday = date_cache->Weekday(days);
17319 int hour = time_in_day_ms / (60 * 60 * 1000);
17320 int min = (time_in_day_ms / (60 * 1000)) % 60;
17321 int sec = (time_in_day_ms / 1000) % 60;
17322 set_cache_stamp(date_cache->stamp());
17323 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
17324 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
17325 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
17326 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
17327 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
17328 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
17329 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
17333 void JSArrayBuffer::Neuter() {
17334 ASSERT(is_external());
17335 set_backing_store(NULL);
17336 set_byte_length(Smi::FromInt(0));
17340 void JSArrayBufferView::NeuterView() {
17341 set_byte_offset(Smi::FromInt(0));
17342 set_byte_length(Smi::FromInt(0));
17346 void JSDataView::Neuter() {
17351 void JSTypedArray::Neuter() {
17353 set_length(Smi::FromInt(0));
17354 set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
17358 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
17359 switch (elements_kind) {
17360 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
17361 case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
17363 TYPED_ARRAYS(TYPED_ARRAY_CASE)
17364 #undef TYPED_ARRAY_CASE
17368 return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
17373 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
17374 Handle<JSTypedArray> typed_array) {
17376 Handle<Map> map(typed_array->map());
17377 Isolate* isolate = typed_array->GetIsolate();
17379 ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind()));
17381 Handle<Map> new_map = Map::TransitionElementsTo(
17383 FixedToExternalElementsKind(map->elements_kind()));
17385 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
17386 Handle<FixedTypedArrayBase> fixed_typed_array(
17387 FixedTypedArrayBase::cast(typed_array->elements()));
17388 Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
17389 fixed_typed_array->DataSize(), false);
17390 memcpy(buffer->backing_store(),
17391 fixed_typed_array->DataPtr(),
17392 fixed_typed_array->DataSize());
17393 Handle<ExternalArray> new_elements =
17394 isolate->factory()->NewExternalArray(
17395 fixed_typed_array->length(), typed_array->type(),
17396 static_cast<uint8_t*>(buffer->backing_store()));
17398 buffer->set_weak_first_view(*typed_array);
17399 ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value());
17400 typed_array->set_buffer(*buffer);
17401 JSObject::SetMapAndElements(typed_array, new_map, new_elements);
17407 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
17408 Handle<Object> result(buffer(), GetIsolate());
17409 if (*result != Smi::FromInt(0)) {
17410 ASSERT(IsExternalArrayElementsKind(map()->elements_kind()));
17411 return Handle<JSArrayBuffer>::cast(result);
17413 Handle<JSTypedArray> self(this);
17414 return MaterializeArrayBuffer(self);
17418 HeapType* PropertyCell::type() {
17419 return static_cast<HeapType*>(type_raw());
17423 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
17424 ASSERT(IsPropertyCell());
17425 set_type_raw(type, ignored);
17429 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
17430 Handle<Object> value) {
17431 Isolate* isolate = cell->GetIsolate();
17432 Handle<HeapType> old_type(cell->type(), isolate);
17433 // TODO(2803): Do not track ConsString as constant because they cannot be
17434 // embedded into code.
17435 Handle<HeapType> new_type = value->IsConsString() || value->IsTheHole()
17436 ? HeapType::Any(isolate) : HeapType::Constant(value, isolate);
17438 if (new_type->Is(old_type)) {
17442 cell->dependent_code()->DeoptimizeDependentCodeGroup(
17443 isolate, DependentCode::kPropertyCellChangedGroup);
17445 if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
17449 return HeapType::Any(isolate);
17453 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
17454 Handle<Object> value) {
17455 cell->set_value(*value);
17456 if (!HeapType::Any()->Is(cell->type())) {
17457 Handle<HeapType> new_type = UpdatedType(cell, value);
17458 cell->set_type(*new_type);
17464 void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
17465 CompilationInfo* info) {
17466 Handle<DependentCode> codes =
17467 DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
17468 DependentCode::kPropertyCellChangedGroup,
17469 info->object_wrapper());
17470 if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
17471 info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
17472 cell, info->zone());
17476 const char* GetBailoutReason(BailoutReason reason) {
17477 ASSERT(reason < kLastErrorMessage);
17478 #define ERROR_MESSAGES_TEXTS(C, T) T,
17479 static const char* error_messages_[] = {
17480 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
17482 #undef ERROR_MESSAGES_TEXTS
17483 return error_messages_[reason];
17487 } } // namespace v8::internal