1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/accessors.h"
8 #include "src/allocation-site-scopes.h"
10 #include "src/arguments.h"
11 #include "src/base/bits.h"
12 #include "src/bootstrapper.h"
13 #include "src/code-stubs.h"
14 #include "src/codegen.h"
15 #include "src/cpu-profiler.h"
17 #include "src/debug.h"
18 #include "src/deoptimizer.h"
19 #include "src/elements.h"
20 #include "src/execution.h"
21 #include "src/field-index-inl.h"
22 #include "src/field-index.h"
23 #include "src/full-codegen.h"
24 #include "src/heap/mark-compact.h"
25 #include "src/heap/objects-visiting-inl.h"
26 #include "src/hydrogen.h"
27 #include "src/ic/ic.h"
28 #include "src/isolate-inl.h"
30 #include "src/lookup.h"
31 #include "src/macro-assembler.h"
32 #include "src/objects-inl.h"
33 #include "src/prototype.h"
34 #include "src/safepoint-table.h"
35 #include "src/string-search.h"
36 #include "src/string-stream.h"
37 #include "src/utils.h"
39 #ifdef ENABLE_DISASSEMBLER
40 #include "src/disasm.h"
41 #include "src/disassembler.h"
// Returns the narrowest HeapType worth tracking for a field holding this
// value under the given representation: None for the none representation, a
// stable-map Class type when field-type tracking is enabled and the value is
// a heap object with a stable non-callable spec-object map, otherwise Any.
// NOTE(review): the embedded line numbers jump (57 -> 61), so closing braces
// are elided from this listing — the code below is incomplete as shown.
47 Handle<HeapType> Object::OptimalType(Isolate* isolate,
48 Representation representation) {
49 if (representation.IsNone()) return HeapType::None(isolate);
50 if (FLAG_track_field_types) {
51 if (representation.IsHeapObject() && IsHeapObject()) {
52 // We can track only JavaScript objects with stable maps.
53 Handle<Map> map(HeapObject::cast(this)->map(), isolate);
54 if (map->is_stable() &&
55 map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
56 map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
57 return HeapType::Class(map, isolate);
61 return HeapType::Any(isolate);
// ES ToObject: JSReceivers pass through unchanged; numbers, booleans,
// strings and symbols are boxed in a JSValue built from the corresponding
// wrapper constructor of |native_context|.
// NOTE(review): line numbers jump (77 -> 79 -> 81), so the else-branch
// bracketing around the empty-MaybeHandle return (presumably the
// undefined/null failure path — confirm upstream) and the final return of
// |result| are elided from this listing.
65 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
66 Handle<Object> object,
67 Handle<Context> native_context) {
68 if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
69 Handle<JSFunction> constructor;
70 if (object->IsNumber()) {
71 constructor = handle(native_context->number_function(), isolate);
72 } else if (object->IsBoolean()) {
73 constructor = handle(native_context->boolean_function(), isolate);
74 } else if (object->IsString()) {
75 constructor = handle(native_context->string_function(), isolate);
76 } else if (object->IsSymbol()) {
77 constructor = handle(native_context->symbol_function(), isolate);
79 return MaybeHandle<JSReceiver>();
81 Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
82 Handle<JSValue>::cast(result)->set_value(*object);
// ES ToBoolean: false for false/undefined/null/undetectable objects, zero
// Smis, empty strings and (via HeapNumberBooleanValue) +/-0 and NaN numbers.
// NOTE(review): the fall-through return for remaining objects (lines 94-95)
// is elided from this listing.
87 bool Object::BooleanValue() {
88 if (IsBoolean()) return IsTrue();
89 if (IsSmi()) return Smi::cast(this)->value() != 0;
90 if (IsUndefined() || IsNull()) return false;
91 if (IsUndetectableObject()) return false;   // Undetectable object is false.
92 if (IsString()) return String::cast(this)->length() != 0;
93 if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
// True if this object can be invoked as a function: unwraps chains of
// function proxies to their call trap, then accepts JSFunctions and heap
// objects whose map declares an instance call handler.
// NOTE(review): the closing brace of the while loop (line 102) is elided
// from this listing.
98 bool Object::IsCallable() const {
99 const Object* fun = this;
100 while (fun->IsJSFunctionProxy()) {
101 fun = JSFunctionProxy::cast(fun)->call_trap();
103 return fun->IsJSFunction() ||
104 (fun->IsHeapObject() &&
105 HeapObject::cast(fun)->map()->has_instance_call_handler());
109 MaybeHandle<Object> Object::GetProperty(LookupIterator* it) {
110 for (; it->IsFound(); it->Next()) {
111 switch (it->state()) {
112 case LookupIterator::NOT_FOUND:
113 case LookupIterator::TRANSITION:
115 case LookupIterator::JSPROXY:
116 return JSProxy::GetPropertyWithHandler(it->GetHolder<JSProxy>(),
117 it->GetReceiver(), it->name());
118 case LookupIterator::INTERCEPTOR: {
119 MaybeHandle<Object> maybe_result = JSObject::GetPropertyWithInterceptor(
120 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
121 if (!maybe_result.is_null()) return maybe_result;
122 if (it->isolate()->has_pending_exception()) return maybe_result;
125 case LookupIterator::ACCESS_CHECK:
126 if (it->HasAccess(v8::ACCESS_GET)) break;
127 return JSObject::GetPropertyWithFailedAccessCheck(it);
128 case LookupIterator::ACCESSOR:
129 return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
130 it->GetHolder<JSObject>(),
132 case LookupIterator::DATA:
133 return it->GetDataValue();
136 return it->factory()->undefined_value();
// Convenience overload: looks up |key| on |object| along the prototype
// chain, skipping interceptors, and delegates to the iterator overload.
// NOTE(review): line 141 (the Handle<Name> key parameter) is elided from
// this listing.
140 Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
142 LookupIterator it(object, key,
143 LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
144 return GetDataProperty(&it);
// Returns the property's value only if it is a plain data property reachable
// without side effects; accessors, proxies and failed access checks all
// yield undefined instead of invoking user code.
// NOTE(review): lines are elided at several points (153->155, 156->158,
// 158->160, 163->166) — presumably UNREACHABLE() and comment text — so the
// switch below is incomplete as shown.
148 Handle<Object> JSObject::GetDataProperty(LookupIterator* it) {
149 for (; it->IsFound(); it->Next()) {
150 switch (it->state()) {
151 case LookupIterator::INTERCEPTOR:
152 case LookupIterator::NOT_FOUND:
153 case LookupIterator::TRANSITION:
155 case LookupIterator::ACCESS_CHECK:
156 if (it->HasAccess(v8::ACCESS_GET)) continue;
158 case LookupIterator::JSPROXY:
160 return it->isolate()->factory()->undefined_value();
161 case LookupIterator::ACCESSOR:
162 // TODO(verwaest): For now this doesn't call into
163 // ExecutableAccessorInfo, since clients don't need it. Update once
166 return it->isolate()->factory()->undefined_value();
167 case LookupIterator::DATA:
168 return it->GetDataValue();
171 return it->isolate()->factory()->undefined_value();
// Writes this value to |*value| and returns true iff it is exactly
// representable as an int32 (a Smi, or a HeapNumber whose double survives a
// FastD2I/FastI2D round-trip unchanged).
// NOTE(review): the IsSmi() guard (line 176) and the true/false returns are
// elided from this listing.
175 bool Object::ToInt32(int32_t* value) {
177 *value = Smi::cast(this)->value();
180 if (IsHeapNumber()) {
181 double num = HeapNumber::cast(this)->value();
182 if (FastI2D(FastD2I(num)) == num) {
183 *value = FastD2I(num);
// Writes this value to |*value| and returns true iff it is exactly
// representable as a uint32 (a non-negative Smi, or a HeapNumber whose
// double survives a FastD2UI/FastUI2D round-trip unchanged).
// NOTE(review): the IsSmi()/num >= 0 guards and the return statements are
// elided from this listing (gaps at 191->193, 195->199).
191 bool Object::ToUint32(uint32_t* value) {
193 int num = Smi::cast(this)->value();
195 *value = static_cast<uint32_t>(num);
199 if (IsHeapNumber()) {
200 double num = HeapNumber::cast(this)->value();
201 if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
202 *value = FastD2UI(num);
// Object overload: Smis and other non-heap values can never match a
// template; for heap objects the check is delegated to the map overload.
210 bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
211 if (!object->IsHeapObject()) return false;
212 return IsTemplateFor(HeapObject::cast(object)->map());
// True if an object with |map| was created (directly or via template
// inheritance) from this FunctionTemplateInfo: walks the map's constructor
// function's parent_template chain looking for |this|.
// NOTE(review): the loop's closing brace and the final "return false;" are
// elided from this listing (gap 228 -> 230).
216 bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
217 // There is a constraint on the object; check.
218 if (!map->IsJSObjectMap()) return false;
219 // Fetch the constructor function of the object.
220 Object* cons_obj = map->constructor();
221 if (!cons_obj->IsJSFunction()) return false;
222 JSFunction* fun = JSFunction::cast(cons_obj);
223 // Iterate through the chain of inheriting function templates to
224 // see if the required one occurs.
225 for (Object* type = fun->shared()->function_data();
226 type->IsFunctionTemplateInfo();
227 type = FunctionTemplateInfo::cast(type)->parent_template()) {
228 if (type == this) return true;
230 // Didn't find the required type in the inheritance chain.
// Reinterpret-casts a raw pointer to To*, DCHECK-ing that the address is
// aligned for To first. Used by the declared-accessor readers below.
235 template<typename To>
236 static inline To* CheckedCast(void *from) {
237 uintptr_t temp = reinterpret_cast<uintptr_t>(from);
238 DCHECK(temp % sizeof(To) == 0);
239 return reinterpret_cast<To*>(temp);
// Loads an 8/16/32-bit value at |ptr| (per descriptor.size), masks both it
// and the compare value with the descriptor's bitmask, and returns the
// boolean result of the masked comparison; unknown sizes yield undefined.
// NOTE(review): the parameter lines (244-245: isolate, ptr — confirm
// upstream), case labels/breaks and the default branch are elided from this
// listing.
243 static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
246 uint32_t bitmask = descriptor.bitmask;
247 uint32_t compare_value = descriptor.compare_value;
249 switch (descriptor.size) {
251 value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
252 compare_value &= 0xff;
256 value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
257 compare_value &= 0xffff;
261 value = *CheckedCast<uint32_t>(ptr);
265 return isolate->factory()->undefined_value();
267 return isolate->factory()->ToBoolean(
268 (bitmask & value) == (bitmask & compare_value));
// Pointer-compare variant: reads a uintptr_t at |ptr| and returns the
// boolean equality with the descriptor's compare pointer.
// NOTE(review): parameter lines 273-274 (isolate, ptr — confirm upstream)
// are elided from this listing.
272 static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
275 uintptr_t compare_value =
276 reinterpret_cast<uintptr_t>(descriptor.compare_value);
277 uintptr_t value = *CheckedCast<uintptr_t>(ptr);
278 return isolate->factory()->ToBoolean(compare_value == value);
// Reads a primitive of the descriptor's data type from |ptr| and boxes it:
// int8..int32 accumulate into an int32 returned as a heap/Smi number at the
// end; uint32, float and double are boxed immediately; bool tests the bit at
// bool_offset. AllowHeapAllocation scopes mark where boxing may GC.
// NOTE(review): break statements between cases and some closing braces are
// elided from this listing (gaps such as 289->291, 306->308, 321->324).
282 static Handle<Object> GetPrimitiveValue(
283 const PrimitiveValueDescriptor& descriptor,
286 int32_t int32_value = 0;
287 switch (descriptor.data_type) {
288 case kDescriptorInt8Type:
289 int32_value = *CheckedCast<int8_t>(ptr);
291 case kDescriptorUint8Type:
292 int32_value = *CheckedCast<uint8_t>(ptr);
294 case kDescriptorInt16Type:
295 int32_value = *CheckedCast<int16_t>(ptr);
297 case kDescriptorUint16Type:
298 int32_value = *CheckedCast<uint16_t>(ptr);
300 case kDescriptorInt32Type:
301 int32_value = *CheckedCast<int32_t>(ptr);
303 case kDescriptorUint32Type: {
304 uint32_t value = *CheckedCast<uint32_t>(ptr);
305 AllowHeapAllocation allow_gc;
306 return isolate->factory()->NewNumberFromUint(value);
308 case kDescriptorBoolType: {
309 uint8_t byte = *CheckedCast<uint8_t>(ptr);
310 return isolate->factory()->ToBoolean(
311 byte & (0x1 << descriptor.bool_offset));
313 case kDescriptorFloatType: {
314 float value = *CheckedCast<float>(ptr);
315 AllowHeapAllocation allow_gc;
316 return isolate->factory()->NewNumber(value);
318 case kDescriptorDoubleType: {
319 double value = *CheckedCast<double>(ptr);
320 AllowHeapAllocation allow_gc;
321 return isolate->factory()->NewNumber(value);
324 AllowHeapAllocation allow_gc;
325 return isolate->factory()->NewNumberFromInt(int32_value);
// Interprets a DeclaredAccessorInfo descriptor program against |receiver|:
// starting from the receiver's raw address, each descriptor step either
// dereferences a pointer, shifts by a byte offset, loads an internal field,
// or terminates by returning an object / comparison result / primitive.
// Runs under DisallowHeapAllocation because it walks raw object memory.
// NOTE(review): the iterator loop header (line 336), break statements and
// several closing braces are elided from this listing.
329 static Handle<Object> GetDeclaredAccessorProperty(
330 Handle<Object> receiver,
331 Handle<DeclaredAccessorInfo> info,
333 DisallowHeapAllocation no_gc;
334 char* current = reinterpret_cast<char*>(*receiver);
335 DeclaredAccessorDescriptorIterator iterator(info->descriptor());
337 const DeclaredAccessorDescriptorData* data = iterator.Next();
338 switch (data->type) {
339 case kDescriptorReturnObject: {
340 DCHECK(iterator.Complete());
341 current = *CheckedCast<char*>(current);
342 return handle(*CheckedCast<Object*>(current), isolate);
344 case kDescriptorPointerDereference:
345 DCHECK(!iterator.Complete());
346 current = *reinterpret_cast<char**>(current);
348 case kDescriptorPointerShift:
349 DCHECK(!iterator.Complete());
350 current += data->pointer_shift_descriptor.byte_offset;
352 case kDescriptorObjectDereference: {
353 DCHECK(!iterator.Complete());
354 Object* object = CheckedCast<Object>(current);
355 int field = data->object_dereference_descriptor.internal_field;
356 Object* smi = JSObject::cast(object)->GetInternalField(field);
357 DCHECK(smi->IsSmi());
358 current = reinterpret_cast<char*>(smi);
361 case kDescriptorBitmaskCompare:
362 DCHECK(iterator.Complete());
363 return PerformCompare(data->bitmask_compare_descriptor,
366 case kDescriptorPointerCompare:
367 DCHECK(iterator.Complete());
368 return PerformCompare(data->pointer_compare_descriptor,
371 case kDescriptorPrimitiveValue:
372 DCHECK(iterator.Complete());
373 return GetPrimitiveValue(data->primitive_value_descriptor,
379 return isolate->factory()->undefined_value();
// Ensures the object's fast elements backing store is writable: if it is
// the shared copy-on-write FixedArray, clone it with a regular
// fixed_array_map, install the copy on the object, and bump the COW
// conversion counter. Returns the (possibly new) elements array.
383 Handle<FixedArray> JSObject::EnsureWritableFastElements(
384 Handle<JSObject> object) {
385 DCHECK(object->HasFastSmiOrObjectElements());
386 Isolate* isolate = object->GetIsolate();
387 Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
// Already writable — the COW map is the only read-only elements map here.
388 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
389 Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
390 elems, isolate->factory()->fixed_array_map());
391 object->set_elements(*writable_elems);
392 isolate->counters()->cow_arrays_converted()->Increment();
393 return writable_elems;
// Invokes the proxy's "get" trap (falling back to the derived trap) with
// (receiver, name). Symbol-named lookups short-circuit to undefined until
// the symbols-vs-proxies story is settled (see TODO below).
// NOTE(review): the parameter line for |name| (399) and the CallTrap call
// prefix (line 406) are elided from this listing.
397 MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
398 Handle<Object> receiver,
400 Isolate* isolate = proxy->GetIsolate();
402 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
403 if (name->IsSymbol()) return isolate->factory()->undefined_value();
405 Handle<Object> args[] = { receiver, name };
407 proxy, "get", isolate->derived_get_trap(), arraysize(args), args);
// Loads a property through an accessor |structure|: API AccessorInfo
// callbacks (with receiver-compatibility check, declared-accessor fast
// path, and exception/empty-result handling) or a JS AccessorPair getter
// invoked via GetPropertyWithDefinedGetter; a non-function getter yields
// undefined.
// NOTE(review): several lines are elided from this listing (e.g. 425-426
// after THROW_NEW_ERROR, 429/431-433 in the declared-accessor call, 456 for
// the isolate argument) — confirm against the upstream file.
411 MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver,
413 Handle<JSObject> holder,
414 Handle<Object> structure) {
415 Isolate* isolate = name->GetIsolate();
416 DCHECK(!structure->IsForeign());
417 // api style callbacks.
418 if (structure->IsAccessorInfo()) {
419 Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(structure);
420 if (!info->IsCompatibleReceiver(*receiver)) {
421 Handle<Object> args[2] = { name, receiver };
422 THROW_NEW_ERROR(isolate,
423 NewTypeError("incompatible_method_receiver",
424 HandleVector(args, arraysize(args))),
427 if (structure->IsDeclaredAccessorInfo()) {
428 return GetDeclaredAccessorProperty(
430 Handle<DeclaredAccessorInfo>::cast(structure),
434 Handle<ExecutableAccessorInfo> data =
435 Handle<ExecutableAccessorInfo>::cast(structure);
436 v8::AccessorNameGetterCallback call_fun =
437 v8::ToCData<v8::AccessorNameGetterCallback>(data->getter());
// A null getter callback means "no value": return undefined, don't throw.
438 if (call_fun == NULL) return isolate->factory()->undefined_value();
440 LOG(isolate, ApiNamedPropertyAccess("load", *holder, *name));
441 PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
442 v8::Handle<v8::Value> result =
443 args.Call(call_fun, v8::Utils::ToLocal(name));
444 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
445 if (result.IsEmpty()) {
446 return isolate->factory()->undefined_value();
448 Handle<Object> return_value = v8::Utils::OpenHandle(*result);
449 return_value->VerifyApiCallResultType();
450 // Rebox handle before return.
451 return handle(*return_value, isolate);
454 // __defineGetter__ callback
455 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
457 if (getter->IsSpecFunction()) {
458 // TODO(rossberg): nicer would be to cast to some JSCallable here...
459 return Object::GetPropertyWithDefinedGetter(
460 receiver, Handle<JSReceiver>::cast(getter));
462 // Getter is not a function.
463 return isolate->factory()->undefined_value();
// True if a receiver of |type| may be passed to this accessor: accessors
// with no expected receiver template accept anything; otherwise the type's
// map must be a JSObject map created from (a descendant of) the expected
// receiver's FunctionTemplateInfo.
467 bool AccessorInfo::IsCompatibleReceiverType(Isolate* isolate,
468 Handle<AccessorInfo> info,
469 Handle<HeapType> type) {
470 if (!info->HasExpectedReceiverType()) return true;
471 Handle<Map> map = IC::TypeToMap(*type, isolate);
472 if (!map->IsJSObjectMap()) return false;
473 return FunctionTemplateInfo::cast(info->expected_receiver_type())
474 ->IsTemplateFor(*map);
// Stores |value| through an accessor |structure|: ExecutableAccessorInfo
// setters are invoked as API callbacks (only for JSObject receivers, with a
// receiver-compatibility check); AccessorPair setters are called as JS
// functions, and a missing setter throws no_setter_in_callback in strict
// mode (sloppy mode silently returns the value). DeclaredAccessorInfo
// stores are not yet handled (see TODO) and yield an empty MaybeHandle.
// NOTE(review): multiple lines are elided from this listing (e.g. 496-497
// after THROW_NEW_ERROR, the args.Call(...) line 504, returns at 508-510,
// 517, and 520/522-525 around the strict-mode throw) — confirm upstream.
478 MaybeHandle<Object> Object::SetPropertyWithAccessor(
479 Handle<Object> receiver, Handle<Name> name, Handle<Object> value,
480 Handle<JSObject> holder, Handle<Object> structure, StrictMode strict_mode) {
481 Isolate* isolate = name->GetIsolate();
483 // We should never get here to initialize a const with the hole
484 // value since a const declaration would conflict with the setter.
485 DCHECK(!structure->IsForeign());
486 if (structure->IsExecutableAccessorInfo()) {
487 // Don't call executable accessor setters with non-JSObject receivers.
488 if (!receiver->IsJSObject()) return value;
489 // api style callbacks
490 ExecutableAccessorInfo* info = ExecutableAccessorInfo::cast(*structure);
491 if (!info->IsCompatibleReceiver(*receiver)) {
492 Handle<Object> args[2] = { name, receiver };
493 THROW_NEW_ERROR(isolate,
494 NewTypeError("incompatible_method_receiver",
495 HandleVector(args, arraysize(args))),
498 Object* call_obj = info->setter();
499 v8::AccessorNameSetterCallback call_fun =
500 v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
// A null setter callback is a no-op store.
501 if (call_fun == NULL) return value;
502 LOG(isolate, ApiNamedPropertyAccess("store", *holder, *name));
503 PropertyCallbackArguments args(isolate, info->data(), *receiver, *holder);
505 v8::Utils::ToLocal(name),
506 v8::Utils::ToLocal(value));
507 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
511 if (structure->IsAccessorPair()) {
512 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
513 if (setter->IsSpecFunction()) {
514 // TODO(rossberg): nicer would be to cast to some JSCallable here...
515 return SetPropertyWithDefinedSetter(
516 receiver, Handle<JSReceiver>::cast(setter), value);
518 if (strict_mode == SLOPPY) return value;
519 Handle<Object> args[2] = { name, holder };
521 isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
526 // TODO(dcarney): Handle correctly.
527 if (structure->IsDeclaredAccessorInfo()) {
532 return MaybeHandle<Object>();
// Calls a user-defined JS getter with |receiver| as the this-value,
// notifying the debugger first so "step into" can enter the getter.
// NOTE(review): the debug call line (544) and intervening braces are elided
// from this listing.
536 MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
537 Handle<Object> receiver,
538 Handle<JSReceiver> getter) {
539 Isolate* isolate = getter->GetIsolate();
540 Debug* debug = isolate->debug();
541 // Handle stepping into a getter if step into is active.
542 // TODO(rossberg): should this apply to getters that are function proxies?
543 if (debug->StepInActive() && getter->IsJSFunction()) {
545 Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
548 return Execution::Call(isolate, getter, receiver, 0, NULL, true);
// Calls a user-defined JS setter with |receiver| as the this-value and
// |value| as the single argument, notifying the debugger for step-into.
// The stored |value| (not the setter's result) is what callers observe.
// NOTE(review): the debug call line (562) and the trailing "return value;"
// are elided from this listing.
552 MaybeHandle<Object> Object::SetPropertyWithDefinedSetter(
553 Handle<Object> receiver,
554 Handle<JSReceiver> setter,
555 Handle<Object> value) {
556 Isolate* isolate = setter->GetIsolate();
558 Debug* debug = isolate->debug();
559 // Handle stepping into a setter if step into is active.
560 // TODO(rossberg): should this apply to getters that are function proxies?
561 if (debug->StepInActive() && setter->IsJSFunction()) {
563 Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
566 Handle<Object> argv[] = { value };
567 RETURN_ON_EXCEPTION(isolate, Execution::Call(isolate, setter, receiver,
568 arraysize(argv), argv, true),
// Advances the iterator to the first accessor whose AccessorInfo has
// all_can_read set, i.e. a property readable despite a failed access check.
// NOTE(review): closing braces and the final "return false;" are elided
// from this listing (gap after line 579).
574 static bool FindAllCanReadHolder(LookupIterator* it) {
575 for (; it->IsFound(); it->Next()) {
576 if (it->state() == LookupIterator::ACCESSOR) {
577 Handle<Object> accessors = it->GetAccessors();
578 if (accessors->IsAccessorInfo()) {
579 if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
// Fallback read after an access check failed: only all_can_read accessors
// may still be loaded; otherwise report the failure to the embedder and
// return undefined (or propagate a scheduled exception).
// NOTE(review): the accessor argument line (593) is elided from this
// listing.
587 MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
588 LookupIterator* it) {
589 Handle<JSObject> checked = it->GetHolder<JSObject>();
590 if (FindAllCanReadHolder(it)) {
591 return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
592 it->GetHolder<JSObject>(),
595 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_GET);
596 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
597 return it->factory()->undefined_value();
// Attribute query after a failed access check: all_can_read accessors
// expose their attributes; everything else reports the failure and appears
// ABSENT (or an empty Maybe when an exception is scheduled).
601 Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithFailedAccessCheck(
602 LookupIterator* it) {
603 Handle<JSObject> checked = it->GetHolder<JSObject>();
604 if (FindAllCanReadHolder(it))
605 return maybe(it->property_details().attributes());
606 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_HAS);
607 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(it->isolate(),
608 Maybe<PropertyAttributes>());
609 return maybe(ABSENT);
// Write-side twin of FindAllCanReadHolder: finds the first accessor with
// all_can_write set.
// NOTE(review): closing braces and the final "return false;" are elided
// from this listing (gap after line 618).
613 static bool FindAllCanWriteHolder(LookupIterator* it) {
614 for (; it->IsFound(); it->Next()) {
615 if (it->state() == LookupIterator::ACCESSOR) {
616 Handle<Object> accessors = it->GetAccessors();
617 if (accessors->IsAccessorInfo()) {
618 if (AccessorInfo::cast(*accessors)->all_can_write()) return true;
// Fallback store after an access check failed: only all_can_write accessors
// may be invoked; otherwise report the failure to the embedder.
// NOTE(review): the trailing return (presumably |value| — confirm upstream,
// lines 637-638) is elided from this listing.
626 MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
627 LookupIterator* it, Handle<Object> value, StrictMode strict_mode) {
628 Handle<JSObject> checked = it->GetHolder<JSObject>();
629 if (FindAllCanWriteHolder(it)) {
630 return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
631 it->GetHolder<JSObject>(),
632 it->GetAccessors(), strict_mode);
635 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_SET);
636 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
// Stores a property on a dictionary-mode (non-fast) object. Non-unique
// names are internalized first. A missing entry is added (global objects
// box the value in a PropertyCell); an existing entry is updated in place,
// preserving its enumeration index unless it was deleted, and for globals
// the cell value and details are both refreshed.
// NOTE(review): the Handle<Name> name parameter line (642) and several
// closing-brace/return lines are elided from this listing (gaps at 657->660,
// 662->666, 685->687).
641 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
643 Handle<Object> value,
644 PropertyDetails details) {
645 DCHECK(!object->HasFastProperties());
646 Handle<NameDictionary> property_dictionary(object->property_dictionary());
648 if (!name->IsUniqueName()) {
649 name = object->GetIsolate()->factory()->InternalizeString(
650 Handle<String>::cast(name));
653 int entry = property_dictionary->FindEntry(name);
654 if (entry == NameDictionary::kNotFound) {
655 Handle<Object> store_value = value;
656 if (object->IsGlobalObject()) {
657 store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
660 property_dictionary = NameDictionary::Add(
661 property_dictionary, name, store_value, details);
662 object->set_properties(*property_dictionary);
666 PropertyDetails original_details = property_dictionary->DetailsAt(entry);
667 int enumeration_index;
668 // Preserve the enumeration index unless the property was deleted.
669 if (original_details.IsDeleted()) {
670 enumeration_index = property_dictionary->NextEnumerationIndex();
671 property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
673 enumeration_index = original_details.dictionary_index();
674 DCHECK(enumeration_index > 0);
677 details = PropertyDetails(
678 details.attributes(), details.type(), enumeration_index);
680 if (object->IsGlobalObject()) {
681 Handle<PropertyCell> cell(
682 PropertyCell::cast(property_dictionary->ValueAt(entry)));
683 PropertyCell::SetValueInferType(cell, value);
684 // Please note we have to update the property details.
685 property_dictionary->DetailsAtPut(entry, details);
687 property_dictionary->SetEntry(entry, name, value, details);
// Deletes a property from a dictionary-mode object. Globals holed out their
// PropertyCell and mark the entry deleted (non-configurable cells require
// FORCE_DELETION, which also swaps in a fresh map to invalidate ICs that
// load from the cell). Non-globals delete from the dictionary and shrink it
// on success.
// NOTE(review): the name/mode parameter lines (693-694) and several
// brace/else/return lines are elided from this listing (gaps at 707->709,
// 711->713, 716->718, 723->728).
692 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
695 DCHECK(!object->HasFastProperties());
696 Isolate* isolate = object->GetIsolate();
697 Handle<NameDictionary> dictionary(object->property_dictionary());
698 int entry = dictionary->FindEntry(name);
699 if (entry != NameDictionary::kNotFound) {
700 // If we have a global object set the cell to the hole.
701 if (object->IsGlobalObject()) {
702 PropertyDetails details = dictionary->DetailsAt(entry);
703 if (!details.IsConfigurable()) {
704 if (mode != FORCE_DELETION) return isolate->factory()->false_value();
705 // When forced to delete global properties, we have to make a
706 // map change to invalidate any ICs that think they can load
707 // from the non-configurable cell without checking if it contains
709 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
710 DCHECK(new_map->is_dictionary_map());
711 JSObject::MigrateToMap(object, new_map);
713 Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
714 Handle<Object> value = isolate->factory()->the_hole_value();
715 PropertyCell::SetValueInferType(cell, value);
716 dictionary->DetailsAtPut(entry, details.AsDeleted());
718 Handle<Object> deleted(
719 NameDictionary::DeleteProperty(dictionary, entry, mode));
720 if (*deleted == isolate->heap()->true_value()) {
721 Handle<NameDictionary> new_properties =
722 NameDictionary::Shrink(dictionary, name);
723 object->set_properties(*new_properties);
728 return isolate->factory()->true_value();
// True if this API-constructed object may have been modified since
// creation: it is "dirty" when its map differs from the constructor's
// initial map, or it no longer has fast object elements / fast properties.
// NOTE(review): the bodies of the two early-out ifs (lines 735 and 738 —
// presumably "return false;") are elided from this listing.
732 bool JSObject::IsDirty() {
733 Object* cons_obj = map()->constructor();
734 if (!cons_obj->IsJSFunction())
736 JSFunction* fun = JSFunction::cast(cons_obj);
737 if (!fun->shared()->IsApiFunction())
739 // If the object is fully fast case and has the same map it was
740 // created with then no changes can have been made to it.
741 return map() != fun->initial_map()
742 || !HasFastObjectElements()
743 || !HasFastProperties();
// Loads element |index| starting at |object| and walking its prototype
// chain: proxies delegate to their handler, JSObjects go through access
// checks, indexed interceptors, and finally the elements accessor; a hole
// keeps walking, and exhausting the chain yields undefined.
// NOTE(review): the uint32_t index parameter line (750), the index argument
// to GetElementWithHandler (767), and assorted brace lines are elided from
// this listing.
747 MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
748 Handle<Object> object,
749 Handle<Object> receiver,
751 if (object->IsUndefined()) {
752 // TODO(verwaest): Why is this check here?
754 return isolate->factory()->undefined_value();
757 // Iterate up the prototype chain until an element is found or the null
758 // prototype is encountered.
759 for (PrototypeIterator iter(isolate, object,
760 object->IsJSProxy() || object->IsJSObject()
761 ? PrototypeIterator::START_AT_RECEIVER
762 : PrototypeIterator::START_AT_PROTOTYPE);
763 !iter.IsAtEnd(); iter.Advance()) {
764 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
765 return JSProxy::GetElementWithHandler(
766 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
770 // Inline the case for JSObjects. Doing so significantly improves the
771 // performance of fetching elements where checking the prototype chain is
773 Handle<JSObject> js_object =
774 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
776 // Check access rights if needed.
777 if (js_object->IsAccessCheckNeeded()) {
778 if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
779 isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
780 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
781 return isolate->factory()->undefined_value();
785 if (js_object->HasIndexedInterceptor()) {
786 return JSObject::GetElementWithInterceptor(js_object, receiver, index);
789 if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
790 Handle<Object> result;
791 ASSIGN_RETURN_ON_EXCEPTION(
793 js_object->GetElementsAccessor()->Get(receiver, js_object, index),
// The hole means "not present here" — continue up the prototype chain.
795 if (!result->IsTheHole()) return result;
799 return isolate->factory()->undefined_value();
// Returns the map at which a transition/IC walk for this value should
// start: Smis and heap numbers use the Number wrapper's initial map,
// strings/symbols/booleans their wrapper's initial map, JSReceivers their
// own map, and anything else (undefined, the hole, ...) the null value's
// map as a sentinel.
// NOTE(review): the IsSmi() guard (line 805) and intervening closing braces
// are elided from this listing.
803 Map* Object::GetRootMap(Isolate* isolate) {
804 DisallowHeapAllocation no_alloc;
806 Context* context = isolate->context()->native_context();
807 return context->number_function()->initial_map();
810 HeapObject* heap_object = HeapObject::cast(this);
812 // The object is either a number, a string, a boolean,
813 // a real JS object, or a Harmony proxy.
814 if (heap_object->IsJSReceiver()) {
815 return heap_object->map();
817 Context* context = isolate->context()->native_context();
819 if (heap_object->IsHeapNumber()) {
820 return context->number_function()->initial_map();
822 if (heap_object->IsString()) {
823 return context->string_function()->initial_map();
825 if (heap_object->IsSymbol()) {
826 return context->symbol_function()->initial_map();
828 if (heap_object->IsBoolean()) {
829 return context->boolean_function()->initial_map();
831 return isolate->heap()->null_value()->map();
835 Object* Object::GetHash() {
836 // The object is either a number, a name, an odd-ball,
837 // a real JS object, or a Harmony proxy.
839 uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
840 return Smi::FromInt(hash & Smi::kMaxValue);
843 uint32_t hash = Name::cast(this)->Hash();
844 return Smi::FromInt(hash);
847 uint32_t hash = Oddball::cast(this)->to_string()->Hash();
848 return Smi::FromInt(hash);
851 DCHECK(IsJSReceiver());
852 return JSReceiver::cast(this)->GetIdentityHash();
// Like GetHash, but creates and caches an identity hash for JSReceivers
// that do not have one yet, so the result is always a Smi.
856 Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) {
857 Handle<Object> hash(object->GetHash(), isolate);
858 if (hash->IsSmi()) return Handle<Smi>::cast(hash);
860 DCHECK(object->IsJSReceiver());
861 return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
// ES SameValue: identity, except NaN equals NaN and +0 differs from -0
// (distinguished via the sign of 1/x); strings compare by content.
// NOTE(review): the final fall-through "return false;" and closing braces
// are elided from this listing (gap after line 880).
865 bool Object::SameValue(Object* other) {
866 if (other == this) return true;
868 // The object is either a number, a name, an odd-ball,
869 // a real JS object, or a Harmony proxy.
870 if (IsNumber() && other->IsNumber()) {
871 double this_value = Number();
872 double other_value = other->Number();
873 bool equal = this_value == other_value;
874 // SameValue(NaN, NaN) is true.
875 if (!equal) return std::isnan(this_value) && std::isnan(other_value);
876 // SameValue(0.0, -0.0) is false.
877 return (this_value != 0) || ((1 / this_value) == (1 / other_value));
879 if (IsString() && other->IsString()) {
880 return String::cast(this)->Equals(String::cast(other));
// ES SameValueZero: like SameValue but +0 and -0 are equal (no sign-of-
// reciprocal check); NaN still equals NaN.
// NOTE(review): the final fall-through "return false;" and closing braces
// are elided from this listing (gap after line 899).
886 bool Object::SameValueZero(Object* other) {
887 if (other == this) return true;
889 // The object is either a number, a name, an odd-ball,
890 // a real JS object, or a Harmony proxy.
891 if (IsNumber() && other->IsNumber()) {
892 double this_value = Number();
893 double other_value = other->Number();
895 return this_value == other_value
896 || (std::isnan(this_value) && std::isnan(other_value));
898 if (IsString() && other->IsString()) {
899 return String::cast(this)->Equals(String::cast(other));
// Prints a short representation of this object to |out|.
// NOTE(review): the entire body (lines 906-908) is elided from this
// listing.
905 void Object::ShortPrint(FILE* out) {
// Prints a short representation of this object into a StringStream.
// NOTE(review): lines 912-913 (presumably an OStringStream |os| being
// filled — confirm upstream) are elided from this listing.
911 void Object::ShortPrint(StringStream* accumulator) {
914 accumulator->Add(os.c_str());
// Streams a Brief-wrapped object: Smis print directly, heap objects go
// through HeapObjectShortPrint (const_cast needed since that printer is not
// const-correct — see TODO).
// NOTE(review): the closing brace and "return os;" are elided from this
// listing (after line 924).
918 OStream& operator<<(OStream& os, const Brief& v) {
919 if (v.value->IsSmi()) {
920 Smi::cast(v.value)->SmiPrint(os);
922 // TODO(svenpanne) Const-correct HeapObjectShortPrint!
923 HeapObject* obj = const_cast<HeapObject*>(HeapObject::cast(v.value));
924 obj->HeapObjectShortPrint(os);
// Streams this Smi's integer value.
// NOTE(review): the body (line 931) is elided from this listing.
930 void Smi::SmiPrint(OStream& os) const {  // NOLINT
935 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
936 // English? Returns false for non-ASCII or words that don't start with
937 // a capital letter. The a/an rule follows pronunciation in English.
938 // We don't use the BBC's overcorrect "an historic occasion" though if
939 // you speak a dialect you may well say "an 'istoric occasion".
// NOTE(review): line-number gaps (943->946, 946->948, 953->...) show that
// at least one condition (presumably the 'U' special case feeding the
// "An Umpire, but a UTF8String" return — confirm upstream) and the final
// "return false;" are elided from this listing.
940 static bool AnWord(String* str) {
941 if (str->length() == 0) return false;  // A nothing.
942 int c0 = str->Get(0);
943 int c1 = str->length() > 1 ? str->Get(1) : 0;
946 return true;  // An Umpire, but a UTF8String, a U.
948 } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
949 return true;  // An Ape, an ABCBook.
950 } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
951 (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
952 c0 == 'S' || c0 == 'X')) {
953 return true;  // An MP3File, an M.
// Flattens a non-trivial ConsString: allocates a sequential one- or
// two-byte string of the full length (keeping the cons's pretenure decision
// for old-space strings), copies the tree via WriteToFlat, then rewrites
// the cons in place to point at the flat result with an empty second half.
// NOTE(review): the TENURED branch of the pretenure ternary (line 966),
// "result = flat;" assignments and the final "return result;" are elided
// from this listing (gaps at 965->967, 972->975, 978->981, after 983).
959 Handle<String> String::SlowFlatten(Handle<ConsString> cons,
960 PretenureFlag pretenure) {
961 DCHECK(AllowHeapAllocation::IsAllowed());
962 DCHECK(cons->second()->length() != 0);
963 Isolate* isolate = cons->GetIsolate();
964 int length = cons->length();
965 PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure
967 Handle<SeqString> result;
968 if (cons->IsOneByteRepresentation()) {
969 Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
970 length, tenure).ToHandleChecked();
971 DisallowHeapAllocation no_gc;
972 WriteToFlat(*cons, flat->GetChars(), 0, length);
975 Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString(
976 length, tenure).ToHandleChecked();
977 DisallowHeapAllocation no_gc;
978 WriteToFlat(*cons, flat->GetChars(), 0, length);
981 cons->set_first(*result);
982 cons->set_second(isolate->heap()->empty_string());
983 DCHECK(result->IsFlat());
// Converts this string in place into an external two-byte string backed by
// |resource|: picks the (short/regular, internalized-or-not, with-one-byte-
// data-or-not) external map, truncates the object with a filler for the
// freed tail, release-stores the new map (to stay safe w.r.t. the
// concurrent sweeper), installs the resource, and fixes up live-byte
// accounting. Returns false when the object is too small to morph in place.
// NOTE(review): the "Map* new_map;" declaration (line 1017), the
// is_one_byte ternary prefixes (lines 1020/1027), a "return true;" and
// closing braces are elided from this listing.
989 bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
990 // Externalizing twice leaks the external resource, so it's
991 // prohibited by the API.
992 DCHECK(!this->IsExternalString());
993 #ifdef ENABLE_SLOW_DCHECKS
994 if (FLAG_enable_slow_asserts) {
995 // Assert that the resource and the string are equivalent.
996 DCHECK(static_cast<size_t>(this->length()) == resource->length());
997 ScopedVector<uc16> smart_chars(this->length());
998 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
999 DCHECK(memcmp(smart_chars.start(),
1001 resource->length() * sizeof(smart_chars[0])) == 0);
1004 int size = this->Size();  // Byte size of the original string.
1005 // Abort if size does not allow in-place conversion.
1006 if (size < ExternalString::kShortSize) return false;
1007 Heap* heap = GetHeap();
1008 bool is_one_byte = this->IsOneByteRepresentation();
1009 bool is_internalized = this->IsInternalizedString();
1011 // Morph the string to an external string by replacing the map and
1012 // reinitializing the fields.  This won't work if the space the existing
1013 // string occupies is too small for a regular  external string.
1014 // Instead, we resort to a short external string instead, omitting
1015 // the field caching the address of the backing store.  When we encounter
1016 // short external strings in generated code, we need to bailout to runtime.
1018 if (size < ExternalString::kSize) {
1019 new_map = is_internalized
1021 ? heap->short_external_internalized_string_with_one_byte_data_map()
1022 : heap->short_external_internalized_string_map())
1023 : (is_one_byte ? heap->short_external_string_with_one_byte_data_map()
1024 : heap->short_external_string_map());
1026 new_map = is_internalized
1028 ? heap->external_internalized_string_with_one_byte_data_map()
1029 : heap->external_internalized_string_map())
1030 : (is_one_byte ? heap->external_string_with_one_byte_data_map()
1031 : heap->external_string_map());
1034 // Byte size of the external String object.
1035 int new_size = this->SizeFromMap(new_map);
1036 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1038 // We are storing the new map using release store after creating a filler for
1039 // the left-over space to avoid races with the sweeper thread.
1040 this->synchronized_set_map(new_map);
1042 ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
1043 self->set_resource(resource);
// Internalized strings must keep a valid hash after the in-place morph.
1044 if (is_internalized) self->Hash();  // Force regeneration of the hash value.
1046 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// Converts this string in place into an external one-byte string whose
// characters are afterwards backed by |resource|.  Returns false when the
// object is too small for the in-place re-shaping (size < kShortSize).
// NOTE(review): this listing appears to elide several lines (else-branches
// and closing braces); comments below describe only the visible logic.
1051 bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
1052 // Externalizing twice leaks the external resource, so it's
1053 // prohibited by the API.
1054 DCHECK(!this->IsExternalString());
1055 #ifdef ENABLE_SLOW_DCHECKS
1056 if (FLAG_enable_slow_asserts) {
1057 // Assert that the resource and the string are equivalent.
1058 DCHECK(static_cast<size_t>(this->length()) == resource->length());
1059 if (this->IsTwoByteRepresentation()) {
// A two-byte string may only back a one-byte resource when every
// character actually fits in one byte.
1060 ScopedVector<uint16_t> smart_chars(this->length());
1061 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1062 DCHECK(String::IsOneByte(smart_chars.start(), this->length()));
1064 ScopedVector<char> smart_chars(this->length());
1065 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1066 DCHECK(memcmp(smart_chars.start(),
1068 resource->length() * sizeof(smart_chars[0])) == 0);
1071 int size = this->Size(); // Byte size of the original string.
1072 // Abort if size does not allow in-place conversion.
1073 if (size < ExternalString::kShortSize) return false;
1074 Heap* heap = GetHeap();
1075 bool is_internalized = this->IsInternalizedString();
1077 // Morph the string to an external string by replacing the map and
1078 // reinitializing the fields. This won't work if the space the existing
1079 // string occupies is too small for a regular external string.
1080 // Instead, we resort to a short external string instead, omitting
1081 // the field caching the address of the backing store. When we encounter
1082 // short external strings in generated code, we need to bailout to runtime.
1084 if (size < ExternalString::kSize) {
// Too small for a regular external string: pick one of the "short" maps.
1085 new_map = is_internalized
1086 ? heap->short_external_one_byte_internalized_string_map()
1087 : heap->short_external_one_byte_string_map();
1089 new_map = is_internalized
1090 ? heap->external_one_byte_internalized_string_map()
1091 : heap->external_one_byte_string_map();
1094 // Byte size of the external String object.
1095 int new_size = this->SizeFromMap(new_map);
// Fill the now-unused tail of the old object so the heap stays iterable.
1096 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1098 // We are storing the new map using release store after creating a filler for
1099 // the left-over space to avoid races with the sweeper thread.
1100 this->synchronized_set_map(new_map);
1102 ExternalOneByteString* self = ExternalOneByteString::cast(this);
1103 self->set_resource(resource);
// Internalized strings must carry a valid hash; recompute it eagerly.
1104 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1106 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// Appends a short, length-bounded (kMaxShortPrintLength) representation of
// this string to |accumulator|.  Strings containing non-printable characters
// are emitted with C-style escapes; overly long strings are truncated with
// "..." (visible in the trailing Put('.') calls).
// NOTE(review): several lines of this listing appear elided (e.g. the
// declaration of |len| and some closing braces).
1111 void String::StringShortPrint(StringStream* accumulator) {
1113 if (len > kMaxShortPrintLength) {
1114 accumulator->Add("<Very long string[%u]>", len);
1118 if (!LooksValid()) {
1119 accumulator->Add("<Invalid String>");
1123 ConsStringIteratorOp op;
1124 StringCharacterStream stream(this, &op);
1126 bool truncated = false;
1127 if (len > kMaxShortPrintLength) {
1128 len = kMaxShortPrintLength;
// First pass: scan for characters that require escaping.
1131 bool one_byte = true;
1132 for (int i = 0; i < len; i++) {
1133 uint16_t c = stream.GetNext();
1135 if (c < 32 || c >= 127) {
// Fast path: all characters printable ASCII, copy them verbatim.
1141 accumulator->Add("<String[%u]: ", length());
1142 for (int i = 0; i < len; i++) {
1143 accumulator->Put(static_cast<char>(stream.GetNext()));
1145 accumulator->Put('>');
1147 // Backslash indicates that the string contains control
1148 // characters and that backslashes are therefore escaped.
1149 accumulator->Add("<String[%u]\\: ", length());
1150 for (int i = 0; i < len; i++) {
1151 uint16_t c = stream.GetNext();
1153 accumulator->Add("\\n");
1154 } else if (c == '\r') {
1155 accumulator->Add("\\r");
1156 } else if (c == '\\') {
1157 accumulator->Add("\\\\");
1158 } else if (c < 32 || c > 126) {
1159 accumulator->Add("\\x%02x", c);
1161 accumulator->Put(static_cast<char>(c));
// Truncation marker for strings cut at kMaxShortPrintLength.
1165 accumulator->Put('.');
1166 accumulator->Put('.');
1167 accumulator->Put('.');
1169 accumulator->Put('>');
// Prints characters [start, end) of this string to |os| as UC16.
// A negative |end| means "up to the full length of the string".
1175 void String::PrintUC16(OStream& os, int start, int end) { // NOLINT
1176 if (end < 0) end = length();
1177 ConsStringIteratorOp op;
1178 StringCharacterStream stream(this, &op, start);
1179 for (int i = start; i < end && stream.HasMore(); i++) {
1180 os << AsUC16(stream.GetNext());
// Appends a one-line, human-readable description of this JSObject to
// |accumulator|, dispatching on the object's instance type (arrays, weak
// collections, regexps, functions, generators, modules, and a generic
// fallback for all remaining JSObject-like types).
// NOTE(review): break statements and some closing braces appear elided in
// this listing.
1185 void JSObject::JSObjectShortPrint(StringStream* accumulator) {
1186 switch (map()->instance_type()) {
1187 case JS_ARRAY_TYPE: {
1188 double length = JSArray::cast(this)->length()->IsUndefined()
1190 : JSArray::cast(this)->length()->Number();
1191 accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
1194 case JS_WEAK_MAP_TYPE: {
1195 accumulator->Add("<JS WeakMap>");
1198 case JS_WEAK_SET_TYPE: {
1199 accumulator->Add("<JS WeakSet>");
1202 case JS_REGEXP_TYPE: {
1203 accumulator->Add("<JS RegExp>");
1206 case JS_FUNCTION_TYPE: {
1207 JSFunction* function = JSFunction::cast(this);
1208 Object* fun_name = function->shared()->DebugName();
1209 bool printed = false;
1210 if (fun_name->IsString()) {
1211 String* str = String::cast(fun_name);
1212 if (str->length() > 0) {
1213 accumulator->Add("<JS Function ");
1214 accumulator->Put(str);
1219 accumulator->Add("<JS Function");
1221 accumulator->Add(" (SharedFunctionInfo %p)",
1222 reinterpret_cast<void*>(function->shared()));
1223 accumulator->Put('>');
1226 case JS_GENERATOR_OBJECT_TYPE: {
1227 accumulator->Add("<JS Generator>");
1230 case JS_MODULE_TYPE: {
1231 accumulator->Add("<JS Module>");
1234 // All other JSObjects are rather similar to each other (JSObject,
1235 // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
1237 Map* map_of_this = map();
1238 Heap* heap = GetHeap();
1239 Object* constructor = map_of_this->constructor();
1240 bool printed = false;
// Defensive check: a constructor outside the heap indicates corruption.
1241 if (constructor->IsHeapObject() &&
1242 !heap->Contains(HeapObject::cast(constructor))) {
1243 accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
1245 bool global_object = IsJSGlobalProxy();
1246 if (constructor->IsJSFunction()) {
1247 if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
1248 accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
1250 Object* constructor_name =
1251 JSFunction::cast(constructor)->shared()->name();
1252 if (constructor_name->IsString()) {
1253 String* str = String::cast(constructor_name);
1254 if (str->length() > 0) {
// AnWord picks "a"/"an" based on the name's first letter.
1255 bool vowel = AnWord(str);
1256 accumulator->Add("<%sa%s ",
1257 global_object ? "Global Object: " : "",
1259 accumulator->Put(str);
1260 accumulator->Add(" with %smap %p",
1261 map_of_this->is_deprecated() ? "deprecated " : "",
1269 accumulator->Add("<JS %sObject", global_object ? "Global " : "");
1273 accumulator->Add(" value = ");
1274 JSValue::cast(this)->value()->ShortPrint(accumulator);
1276 accumulator->Put('>');
// Debug-trace helper: prints an elements-kind transition (from -> to),
// the top JavaScript frame, and the affected object plus its old and new
// elements backing stores.  No output when the kinds are equal.
// NOTE(review): the declaration of |os| appears elided in this listing.
1283 void JSObject::PrintElementsTransition(
1284 FILE* file, Handle<JSObject> object,
1285 ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
1286 ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
1287 if (from_kind != to_kind) {
1289 os << "elements transition [" << ElementsKindToString(from_kind) << " -> "
1290 << ElementsKindToString(to_kind) << "] in ";
1291 JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
1292 PrintF(file, " for ");
1293 object->ShortPrint(file);
1294 PrintF(file, " from ");
1295 from_elements->ShortPrint(file);
1296 PrintF(file, " to ");
1297 to_elements->ShortPrint(file);
// Debug-trace helper: prints a "[generalizing ...]" line describing how the
// descriptor at |modify_index| is generalized — old representation/field
// type -> new representation/field type — plus the reason and the current
// top JavaScript frame.
// NOTE(review): several parameter lines and statements of this listing
// appear elided (e.g. |modify_index|, |reason|, |descriptors|, |split|).
1303 void Map::PrintGeneralization(FILE* file,
1308 bool constant_to_field,
1309 Representation old_representation,
1310 Representation new_representation,
1311 HeapType* old_field_type,
1312 HeapType* new_field_type) {
1314 os << "[generalizing ";
1315 constructor_name()->PrintOn(file);
1317 Name* name = instance_descriptors()->GetKey(modify_index);
1318 if (name->IsString()) {
1319 String::cast(name)->PrintOn(file);
1321 os << "{symbol " << static_cast<void*>(name) << "}";
1324 if (constant_to_field) {
1327 os << old_representation.Mnemonic() << "{";
1328 old_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
1331 os << "->" << new_representation.Mnemonic() << "{";
1332 new_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
1334 if (strlen(reason) > 0) {
1337 os << "+" << (descriptors - split) << " maps";
1340 JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
// Debug-trace helper: prints, for each own descriptor, which properties
// change representation (old->new mnemonic) or degrade from CONSTANT to
// FIELD when this instance migrates from |original_map| to |new_map|.
// NOTE(review): the |original_map|/|new_map| parameter lines appear elided
// in this listing.
1345 void JSObject::PrintInstanceMigration(FILE* file,
1348 PrintF(file, "[migrating ");
1349 map()->constructor_name()->PrintOn(file);
1351 DescriptorArray* o = original_map->instance_descriptors();
1352 DescriptorArray* n = new_map->instance_descriptors();
1353 for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
1354 Representation o_r = o->GetDetails(i).representation();
1355 Representation n_r = n->GetDetails(i).representation();
1356 if (!o_r.Equals(n_r)) {
1357 String::cast(o->GetKey(i))->PrintOn(file);
1358 PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
1359 } else if (o->GetDetails(i).type() == CONSTANT &&
1360 n->GetDetails(i).type() == FIELD) {
1361 Name* name = o->GetKey(i);
1362 if (name->IsString()) {
1363 String::cast(name)->PrintOn(file);
1365 PrintF(file, "{symbol %p}", static_cast<void*>(name));
// Appends a short, human-readable description of any heap object to |os|.
// Starts with defensive checks (object or map outside the heap => corruption
// markers), then delegates strings and JSObjects to their own short-printers,
// and finally dispatches on instance type for everything else.
// NOTE(review): break/return statements, some case labels and braces appear
// elided in this listing.
1374 void HeapObject::HeapObjectShortPrint(OStream& os) { // NOLINT
1375 Heap* heap = GetHeap();
1376 if (!heap->Contains(this)) {
1377 os << "!!!INVALID POINTER!!!";
1380 if (!heap->Contains(map())) {
1381 os << "!!!INVALID MAP!!!";
// Strings render through StringShortPrint via a temporary StringStream.
1388 HeapStringAllocator allocator;
1389 StringStream accumulator(&allocator);
1390 String::cast(this)->StringShortPrint(&accumulator);
1391 os << accumulator.ToCString().get();
// JSObjects render through JSObjectShortPrint.
1395 HeapStringAllocator allocator;
1396 StringStream accumulator(&allocator);
1397 JSObject::cast(this)->JSObjectShortPrint(&accumulator);
1398 os << accumulator.ToCString().get();
1401 switch (map()->instance_type()) {
1403 os << "<Map(elements=" << Map::cast(this)->elements_kind() << ")>";
1405 case FIXED_ARRAY_TYPE:
1406 os << "<FixedArray[" << FixedArray::cast(this)->length() << "]>";
1408 case FIXED_DOUBLE_ARRAY_TYPE:
1409 os << "<FixedDoubleArray[" << FixedDoubleArray::cast(this)->length()
1412 case BYTE_ARRAY_TYPE:
1413 os << "<ByteArray[" << ByteArray::cast(this)->length() << "]>";
1415 case FREE_SPACE_TYPE:
1416 os << "<FreeSpace[" << FreeSpace::cast(this)->Size() << "]>";
// One case pair per typed-array element type, expanded via TYPED_ARRAYS.
1418 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \
1419 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1420 os << "<External" #Type "Array[" \
1421 << External##Type##Array::cast(this)->length() << "]>"; \
1423 case FIXED_##TYPE##_ARRAY_TYPE: \
1424 os << "<Fixed" #Type "Array[" << Fixed##Type##Array::cast(this)->length() \
1428 TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
1429 #undef TYPED_ARRAY_SHORT_PRINT
1431 case SHARED_FUNCTION_INFO_TYPE: {
1432 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1433 SmartArrayPointer<char> debug_name =
1434 shared->DebugName()->ToCString();
1435 if (debug_name[0] != 0) {
1436 os << "<SharedFunctionInfo " << debug_name.get() << ">";
1438 os << "<SharedFunctionInfo>";
1442 case JS_MESSAGE_OBJECT_TYPE:
1443 os << "<JSMessageObject>";
1445 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1447 os << "<" #Name ">"; \
1449 STRUCT_LIST(MAKE_STRUCT_CASE)
1450 #undef MAKE_STRUCT_CASE
1452 Code* code = Code::cast(this);
1453 os << "<Code: " << Code::Kind2String(code->kind()) << ">";
1456 case ODDBALL_TYPE: {
1457 if (IsUndefined()) {
1458 os << "<undefined>";
1459 } else if (IsTheHole()) {
1461 } else if (IsNull()) {
1463 } else if (IsTrue()) {
1465 } else if (IsFalse()) {
1468 os << "<Odd Oddball>";
1473 Symbol* symbol = Symbol::cast(this);
1474 os << "<Symbol: " << symbol->Hash();
1475 if (!symbol->name()->IsUndefined()) {
1477 HeapStringAllocator allocator;
1478 StringStream accumulator(&allocator);
1479 String::cast(symbol->name())->StringShortPrint(&accumulator);
1480 os << accumulator.ToCString().get();
1485 case HEAP_NUMBER_TYPE: {
1487 HeapNumber::cast(this)->HeapNumberPrint(os);
1491 case MUTABLE_HEAP_NUMBER_TYPE: {
1492 os << "<MutableNumber: ";
1493 HeapNumber::cast(this)->HeapNumberPrint(os);
1497 case FLOAT32x4_TYPE:
1498 os << "<Float32x4: ";
1499 Float32x4::cast(this)->Float32x4Print(os);
1502 case FLOAT64x2_TYPE:
1503 os << "<Float64x2: ";
1504 Float64x2::cast(this)->Float64x2Print(os);
1509 Int32x4::cast(this)->Int32x4Print(os);
1515 case JS_FUNCTION_PROXY_TYPE:
1516 os << "<JSFunctionProxy>";
1523 HeapStringAllocator allocator;
1524 StringStream accumulator(&allocator);
1525 Cell::cast(this)->value()->ShortPrint(&accumulator);
1526 os << accumulator.ToCString().get();
1529 case PROPERTY_CELL_TYPE: {
1530 os << "PropertyCell for ";
1531 HeapStringAllocator allocator;
1532 StringStream accumulator(&allocator);
1533 PropertyCell::cast(this)->value()->ShortPrint(&accumulator);
1534 os << accumulator.ToCString().get();
// Fallback: print the raw instance type for anything not handled above.
1538 os << "<Other heap object (" << map()->instance_type() << ")>";
// Visits the object's map slot and then its body (sized via the map) with
// the given visitor.
// NOTE(review): the declaration of |m| (presumably Map* m = map();) appears
// elided in this listing — confirm against the full source.
1544 void HeapObject::Iterate(ObjectVisitor* v) {
1546 IteratePointer(v, kMapOffset);
1547 // Handle object body
1549 IterateBody(m->instance_type(), SizeFromMap(m), v);
// Dispatches body iteration per instance type: strings by representation
// tag, then one case (or case group) per heap-object kind, each delegating
// to the matching BodyDescriptor.  Numeric/byte payload types visit nothing.
// NOTE(review): break/return statements, several case labels and the
// visitor parameter line appear elided in this listing.
1553 void HeapObject::IterateBody(InstanceType type, int object_size,
1555 // Avoiding <Type>::cast(this) because it accesses the map pointer field.
1556 // During GC, the map pointer field is encoded.
1557 if (type < FIRST_NONSTRING_TYPE) {
1558 switch (type & kStringRepresentationMask) {
1561 case kConsStringTag:
1562 ConsString::BodyDescriptor::IterateBody(this, v);
1564 case kSlicedStringTag:
1565 SlicedString::BodyDescriptor::IterateBody(this, v);
1567 case kExternalStringTag:
1568 if ((type & kStringEncodingMask) == kOneByteStringTag) {
1569 reinterpret_cast<ExternalOneByteString*>(this)
1570 ->ExternalOneByteStringIterateBody(v);
1572 reinterpret_cast<ExternalTwoByteString*>(this)->
1573 ExternalTwoByteStringIterateBody(v);
1581 case FIXED_ARRAY_TYPE:
1582 FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
1584 case CONSTANT_POOL_ARRAY_TYPE:
1585 reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
1587 case FIXED_DOUBLE_ARRAY_TYPE:
// All of the following JSObject-like types share the generic JSObject
// body descriptor.
1589 case JS_OBJECT_TYPE:
1590 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1591 case JS_GENERATOR_OBJECT_TYPE:
1592 case JS_MODULE_TYPE:
1596 case JS_ARRAY_BUFFER_TYPE:
1597 case JS_TYPED_ARRAY_TYPE:
1598 case JS_DATA_VIEW_TYPE:
1601 case JS_SET_ITERATOR_TYPE:
1602 case JS_MAP_ITERATOR_TYPE:
1603 case JS_WEAK_MAP_TYPE:
1604 case JS_WEAK_SET_TYPE:
1605 case JS_REGEXP_TYPE:
1606 case JS_GLOBAL_PROXY_TYPE:
1607 case JS_GLOBAL_OBJECT_TYPE:
1608 case JS_BUILTINS_OBJECT_TYPE:
1609 case JS_MESSAGE_OBJECT_TYPE:
1610 case FLOAT32x4_TYPE:
1611 case FLOAT64x2_TYPE:
1613 JSObject::BodyDescriptor::IterateBody(this, object_size, v);
1615 case JS_FUNCTION_TYPE:
1616 reinterpret_cast<JSFunction*>(this)
1617 ->JSFunctionIterateBody(object_size, v);
1620 Oddball::BodyDescriptor::IterateBody(this, v);
1623 JSProxy::BodyDescriptor::IterateBody(this, v);
1625 case JS_FUNCTION_PROXY_TYPE:
1626 JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
1629 reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
1632 Map::BodyDescriptor::IterateBody(this, v);
1635 reinterpret_cast<Code*>(this)->CodeIterateBody(v);
1638 Cell::BodyDescriptor::IterateBody(this, v);
1640 case PROPERTY_CELL_TYPE:
1641 PropertyCell::BodyDescriptor::IterateBody(this, v);
1644 Symbol::BodyDescriptor::IterateBody(this, v);
// Raw-data objects: nothing to visit.
1647 case HEAP_NUMBER_TYPE:
1648 case MUTABLE_HEAP_NUMBER_TYPE:
1650 case BYTE_ARRAY_TYPE:
1651 case FREE_SPACE_TYPE:
1654 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
1655 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1656 case FIXED_##TYPE##_ARRAY_TYPE: \
1659 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1660 #undef TYPED_ARRAY_CASE
1662 case SHARED_FUNCTION_INFO_TYPE: {
1663 SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
1667 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1669 STRUCT_LIST(MAKE_STRUCT_CASE)
1670 #undef MAKE_STRUCT_CASE
// AllocationSite is the one struct with a specialized body descriptor.
1671 if (type == ALLOCATION_SITE_TYPE) {
1672 AllocationSite::BodyDescriptor::IterateBody(this, v);
1674 StructBodyDescriptor::IterateBody(this, object_size, v);
1678 PrintF("Unknown type: %d\n", type);
// Returns the boolean value of this heap number, delegating entirely to
// DoubleToBoolean (presumably ECMA ToBoolean: false for +/-0 and NaN —
// confirm against DoubleToBoolean's definition).
1684 bool HeapNumber::HeapNumberBooleanValue() {
1685 return DoubleToBoolean(value());
// Prints this heap number to |os| (function body elided in this listing).
1689 void HeapNumber::HeapNumberPrint(OStream& os) { // NOLINT
// Prints the four float lanes (x, y, z, w) to |os| with %.16g precision.
1694 void Float32x4::Float32x4Print(OStream& os) {
1695 // The Windows version of vsnprintf can allocate when printing a %g string
1696 // into a buffer that may not be big enough. We don't want random memory
1697 // allocation when producing post-crash stack traces, so we print into a
1698 // buffer that is plenty big enough for any floating point number, then
1699 // print that using vsnprintf (which may truncate but never allocate if
1700 // there is no more space in the buffer).
1701 EmbeddedVector<char, 100> buffer;
1702 SNPrintF(buffer, "%.16g %.16g %.16g %.16g", x(), y(), z(), w());
1703 os << buffer.start();
// Prints the four integer lanes (x, y, z, w) to |os| as unsigned decimals.
1707 void Int32x4::Int32x4Print(OStream& os) {
1708 // The Windows version of vsnprintf can allocate when printing a %g string
1709 // into a buffer that may not be big enough. We don't want random memory
1710 // allocation when producing post-crash stack traces, so we print into a
1711 // buffer that is plenty big enough for any floating point number, then
1712 // print that using vsnprintf (which may truncate but never allocate if
1713 // there is no more space in the buffer).
// NOTE(review): the comment above is copied from the float printers; this
// function formats with %u (unsigned int), not %g, so the floating-point
// allocation concern does not apply here — the fixed buffer is still used
// for consistency with the other SIMD printers.
1714 EmbeddedVector<char, 100> buffer;
1715 SNPrintF(buffer, "%u %u %u %u", x(), y(), z(), w());
1716 os << buffer.start();
// Prints the two double lanes (x, y) to |os| with %.16g precision.
1720 void Float64x2::Float64x2Print(OStream& os) {
1721 // The Windows version of vsnprintf can allocate when printing a %g string
1722 // into a buffer that may not be big enough. We don't want random memory
1723 // allocation when producing post-crash stack traces, so we print into a
1724 // buffer that is plenty big enough for any floating point number, then
1725 // print that using vsnprintf (which may truncate but never allocate if
1726 // there is no more space in the buffer).
1727 EmbeddedVector<char, 100> buffer;
1728 SNPrintF(buffer, "%.16g %.16g", x(), y());
1729 os << buffer.start();
// Returns the receiver's class name: "Function" for functions and function
// proxies, the constructor's instance_class_name when the constructor is a
// JSFunction, otherwise "Object".
1733 String* JSReceiver::class_name() {
1734 if (IsJSFunction() || IsJSFunctionProxy()) {
1735 return GetHeap()->Function_string();
1737 if (map()->constructor()->IsJSFunction()) {
1738 JSFunction* constructor = JSFunction::cast(map()->constructor());
1739 return String::cast(constructor->shared()->instance_class_name());
1741 // If the constructor is not present, return "Object".
1742 return GetHeap()->Object_string();
// Returns the best available name for this map's constructor: the explicit
// function name, then the inferred name, then (recursively) the prototype's
// constructor name, and finally "Object" as the fallback.
1746 String* Map::constructor_name() {
1747 if (constructor()->IsJSFunction()) {
1748 JSFunction* constructor = JSFunction::cast(this->constructor());
1749 String* name = String::cast(constructor->shared()->name());
1750 if (name->length() > 0) return name;
1751 String* inferred_name = constructor->shared()->inferred_name();
1752 if (inferred_name->length() > 0) return inferred_name;
1753 Object* proto = prototype();
1754 if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
1756 // TODO(rossberg): what about proxies?
1757 // If the constructor is not present, return "Object".
1758 return GetHeap()->Object_string();
// Convenience forwarder: the receiver's constructor name is its map's.
1762 String* JSReceiver::constructor_name() {
1763 return map()->constructor_name();
// Copies |map| with an additional in-object/backing-store field descriptor
// for |name|.  Returns an empty MaybeHandle when the descriptor array is
// already at kMaxNumberOfDescriptors.  Context-extension objects always use
// tagged representation with type Any.  Decrements unused_property_fields,
// wrapping by kFieldsAdded when the backing store must grow.
// NOTE(review): the |name| parameter line and the final return appear
// elided in this listing.
1767 MaybeHandle<Map> Map::CopyWithField(Handle<Map> map,
1769 Handle<HeapType> type,
1770 PropertyAttributes attributes,
1771 Representation representation,
1772 TransitionFlag flag) {
// The property must not already exist on the map.
1773 DCHECK(DescriptorArray::kNotFound ==
1774 map->instance_descriptors()->Search(
1775 *name, map->NumberOfOwnDescriptors()));
1777 // Ensure the descriptor array does not get too big.
1778 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1779 return MaybeHandle<Map>();
1782 Isolate* isolate = map->GetIsolate();
1784 // Compute the new index for new field.
1785 int index = map->NextFreePropertyIndex();
1787 if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
1788 representation = Representation::Tagged();
1789 type = HeapType::Any(isolate);
1792 FieldDescriptor new_field_desc(name, index, type, attributes, representation);
1793 Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag);
1794 int unused_property_fields = new_map->unused_property_fields() - 1;
1795 if (unused_property_fields < 0) {
1796 unused_property_fields += JSObject::kFieldsAdded;
1798 new_map->set_unused_property_fields(unused_property_fields);
// Copies |map| with an additional (name, constant) descriptor.  Returns an
// empty MaybeHandle when the descriptor array is already at its maximum.
// NOTE(review): the |name| parameter line appears elided in this listing.
1803 MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map,
1805 Handle<Object> constant,
1806 PropertyAttributes attributes,
1807 TransitionFlag flag) {
1808 // Ensure the descriptor array does not get too big.
1809 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1810 return MaybeHandle<Map>();
1813 // Allocate new instance descriptors with (name, constant) added.
1814 ConstantDescriptor new_constant_desc(name, constant, attributes);
1815 return Map::CopyAddDescriptor(map, &new_constant_desc, flag);
// Adds a property to a dictionary-mode (slow) object.  On global objects
// values live in PropertyCells: an orphaned cell for |name| is reused if
// present, otherwise a fresh cell is allocated and the cell (not the raw
// value) is stored in the dictionary.  For ordinary objects the value is
// added to the NameDictionary directly.
// NOTE(review): the |name| parameter line and several statements (returns,
// else-branches) appear elided in this listing.
1819 void JSObject::AddSlowProperty(Handle<JSObject> object,
1821 Handle<Object> value,
1822 PropertyAttributes attributes) {
1823 DCHECK(!object->HasFastProperties());
1824 Isolate* isolate = object->GetIsolate();
1825 Handle<NameDictionary> dict(object->property_dictionary());
1826 if (object->IsGlobalObject()) {
1827 // In case name is an orphaned property reuse the cell.
1828 int entry = dict->FindEntry(name);
1829 if (entry != NameDictionary::kNotFound) {
1830 Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
1831 PropertyCell::SetValueInferType(cell, value);
1832 // Assign an enumeration index to the property and update
1833 // SetNextEnumerationIndex.
1834 int index = dict->NextEnumerationIndex();
1835 PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
1836 dict->SetNextEnumerationIndex(index + 1);
1837 dict->SetEntry(entry, name, cell, details);
1840 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
1841 PropertyCell::SetValueInferType(cell, value);
1844 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
1845 Handle<NameDictionary> result =
1846 NameDictionary::Add(dict, name, value, details);
// Add may have reallocated the dictionary; install it only if it changed.
1847 if (*dict != *result) object->set_properties(*result);
// Returns the native context in which this object was created: the context
// of its constructor, or — since functions have null as constructor — the
// object's own context when it is itself a JSFunction.
1851 Context* JSObject::GetCreationContext() {
1852 Object* constructor = this->map()->constructor();
1853 JSFunction* function;
1854 if (!constructor->IsJSFunction()) {
1855 // Functions have null as a constructor,
1856 // but any JSFunction knows its context immediately.
1857 function = JSFunction::cast(this);
1859 function = JSFunction::cast(constructor);
1862 return function->context()->native_context();
// Delivers an Object.observe-style change record {type, object, name,
// old_value} by calling the observers' notify-change function.  The
// argument count shrinks when |name| is absent (2 args) or |old_value| is
// the hole (3 args).
// NOTE(review): the |name| parameter line appears elided in this listing.
1866 void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
1867 const char* type_str,
1869 Handle<Object> old_value) {
1870 DCHECK(!object->IsJSGlobalProxy());
1871 DCHECK(!object->IsJSGlobalObject());
1872 Isolate* isolate = object->GetIsolate();
1873 HandleScope scope(isolate);
1874 Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
1875 Handle<Object> args[] = { type, object, name, old_value };
1876 int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
1878 Execution::Call(isolate,
1879 Handle<JSFunction>(isolate->observers_notify_change()),
1880 isolate->factory()->undefined_value(),
1881 argc, args).Assert();
// Returns a short mnemonic string for this representation kind, used in
// debug/trace output (e.g. PrintGeneralization, PrintInstanceMigration).
// SIMD kinds use their full lowercase type name; scalar kinds use a single
// letter.  Fixed: kInt32x4 previously returned the typo "int32x44" —
// siblings kFloat32x4/"float32x4" and kFloat64x2/"float64x2" establish the
// exact-type-name convention.
1885 const char* Representation::Mnemonic() const {
1887 case kNone: return "v";
1888 case kTagged: return "t";
1889 case kSmi: return "s";
1890 case kDouble: return "d";
1891 case kFloat32x4: return "float32x4";
1892 case kFloat64x2: return "float64x2";
1893 case kInt32x4: return "int32x4";
1894 case kInteger32: return "i";
1895 case kHeapObject: return "h";
1896 case kExternal: return "x";
// Decides whether instances must be physically rewritten when migrating to
// |target|: true when the field count changed, when any field flips between
// smi/tagged and double representation, or when the inobject area shrank
// beyond what can be absorbed.  Also reports the old field count through
// |old_number_of_fields|.
// NOTE(review): some return statements and closing braces appear elided in
// this listing.
1904 bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields,
1905 int target_inobject, int target_unused,
1906 int* old_number_of_fields) {
1907 // If fields were added (or removed), rewrite the instance.
1908 *old_number_of_fields = NumberOfFields();
1909 DCHECK(target_number_of_fields >= *old_number_of_fields);
1910 if (target_number_of_fields != *old_number_of_fields) return true;
1912 // If smi descriptors were replaced by double descriptors, rewrite.
1913 DescriptorArray* old_desc = instance_descriptors();
1914 DescriptorArray* new_desc = target->instance_descriptors();
1915 int limit = NumberOfOwnDescriptors();
1916 for (int i = 0; i < limit; i++) {
1917 if (new_desc->GetDetails(i).representation().IsDouble() !=
1918 old_desc->GetDetails(i).representation().IsDouble()) {
1923 // If no fields were added, and no inobject properties were removed, setting
1924 // the map is sufficient.
1925 if (target_inobject == inobject_properties()) return false;
1926 // In-object slack tracking may have reduced the object size of the new map.
1927 // In that case, succeed if all existing fields were inobject, and they still
1928 // fit within the new inobject size.
1929 DCHECK(target_inobject < inobject_properties());
1930 if (target_number_of_fields <= target_inobject) {
1931 DCHECK(target_number_of_fields + target_unused == target_inobject);
1934 // Otherwise, properties will need to be moved to the backing store.
// Links |child| to |parent| as a full elements-kind transition, keyed by
// the dedicated elements_transition_symbol.
1939 void Map::ConnectElementsTransition(Handle<Map> parent, Handle<Map> child) {
1940 Isolate* isolate = parent->GetIsolate();
1941 Handle<Name> name = isolate->factory()->elements_transition_symbol();
1942 ConnectTransition(parent, child, name, FULL_TRANSITION);
// Migrates |object| to |new_map|, choosing fast->fast, fast->slow, or the
// trivial slow->slow path.  Slow->fast is deliberately unsupported here
// (see the CHECK); TransformToFastProperties must be used for that.
// NOTE(review): else-branches and closing braces appear elided in this
// listing.
1946 void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
1947 if (object->map() == *new_map) return;
1948 if (object->HasFastProperties()) {
1949 if (!new_map->is_dictionary_map()) {
1950 Handle<Map> old_map(object->map());
1951 MigrateFastToFast(object, new_map);
1952 if (old_map->is_prototype_map()) {
1953 // Clear out the old descriptor array to avoid problems with sharing
1954 // the descriptor array without explicitly tracking its ownership.
1955 old_map->InitializeDescriptors(
1956 old_map->GetHeap()->empty_descriptor_array());
1957 // Ensure that no transition was inserted for prototype migrations.
1958 DCHECK(!old_map->HasTransitionArray());
1959 DCHECK(new_map->GetBackPointer()->IsUndefined());
1962 MigrateFastToSlow(object, new_map, 0);
1965 // For slow-to-fast migrations JSObject::TransformToFastProperties()
1966 // must be used instead.
1967 CHECK(new_map->is_dictionary_map());
1969 // Slow-to-slow migration is trivial.
1970 object->set_map(*new_map);
1975 // To migrate a fast instance to a fast map:
1976 // - First check whether the instance needs to be rewritten. If not, simply
1978 // - Otherwise, allocate a fixed array large enough to hold all fields, in
1979 // addition to unused space.
1980 // - Copy all existing properties in, in the following order: backing store
1981 // properties, unused fields, inobject properties.
1982 // - If all allocation succeeded, commit the state atomically:
1983 // * Copy inobject properties from the backing store back into the object.
1984 // * Trim the difference in instance size of the object. This also cleanly
1985 // frees inobject properties that moved to the backing store.
1986 // * If there are properties left in the backing store, trim off the space used
1987 // to temporarily store the inobject properties.
1988 // * If there are properties left in the backing store, install the backing
// Migrates a fast-mode |object| to fast-mode |new_map| (algorithm described
// in the comment block above this function).  Fast paths: no rewriting
// needed (just swap the map), or a single appended field that fits the
// existing unused property space / a grown backing store.  The general path
// copies every field into a fresh array, then commits atomically under
// DisallowHeapAllocation.
// NOTE(review): else-branches, returns and closing braces appear elided in
// this listing.
1990 void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
1991 Isolate* isolate = object->GetIsolate();
1992 Handle<Map> old_map(object->map());
1993 int old_number_of_fields;
1994 int number_of_fields = new_map->NumberOfFields();
1995 int inobject = new_map->inobject_properties();
1996 int unused = new_map->unused_property_fields();
1998 // Nothing to do if no functions were converted to fields and no smis were
1999 // converted to doubles.
2000 if (!old_map->InstancesNeedRewriting(*new_map, number_of_fields, inobject,
2001 unused, &old_number_of_fields)) {
2002 object->synchronized_set_map(*new_map);
2006 int total_size = number_of_fields + unused;
2007 int external = total_size - inobject;
// Fast path: new_map adds exactly one field on top of old_map.
2009 if (number_of_fields != old_number_of_fields &&
2010 new_map->GetBackPointer() == *old_map) {
2011 PropertyDetails details = new_map->GetLastDescriptorDetails();
2013 if (old_map->unused_property_fields() > 0) {
2014 if (details.representation().IsDouble()) {
// Doubles are boxed in a fresh mutable HeapNumber.
2015 Handle<Object> value = isolate->factory()->NewHeapNumber(0, MUTABLE);
2017 FieldIndex::ForDescriptor(*new_map, new_map->LastAdded());
2018 object->FastPropertyAtPut(index, *value);
2020 object->synchronized_set_map(*new_map);
2024 DCHECK(number_of_fields == old_number_of_fields + 1);
2025 // This migration is a transition from a map that has run out of property
2026 // space. Therefore it could be done by extending the backing store.
2027 Handle<FixedArray> old_storage = handle(object->properties(), isolate);
2028 Handle<FixedArray> new_storage =
2029 FixedArray::CopySize(old_storage, external);
2031 // Properly initialize newly added property.
2032 Handle<Object> value;
2033 if (details.representation().IsDouble()) {
2034 value = isolate->factory()->NewHeapNumber(0, MUTABLE);
2036 value = isolate->factory()->uninitialized_value();
2038 DCHECK(details.type() == FIELD);
2039 int target_index = details.field_index() - inobject;
2040 DCHECK(target_index >= 0); // Must be a backing store index.
2041 new_storage->set(target_index, *value);
2043 // From here on we cannot fail and we shouldn't GC anymore.
2044 DisallowHeapAllocation no_allocation;
2046 // Set the new property value and do the map transition.
2047 object->set_properties(*new_storage);
2048 object->synchronized_set_map(*new_map);
// General path: stage every field value in a temporary array first.
2051 Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
2053 Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
2054 Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
2055 int old_nof = old_map->NumberOfOwnDescriptors();
2056 int new_nof = new_map->NumberOfOwnDescriptors();
2058 // This method only supports generalizing instances to at least the same
2059 // number of properties.
2060 DCHECK(old_nof <= new_nof);
// Copy existing property values, re-boxing/unboxing doubles as required
// by the representation change.
2062 for (int i = 0; i < old_nof; i++) {
2063 PropertyDetails details = new_descriptors->GetDetails(i);
2064 if (details.type() != FIELD) continue;
2065 PropertyDetails old_details = old_descriptors->GetDetails(i);
2066 if (old_details.type() == CALLBACKS) {
2067 DCHECK(details.representation().IsTagged());
2070 DCHECK(old_details.type() == CONSTANT ||
2071 old_details.type() == FIELD);
2072 Object* raw_value = old_details.type() == CONSTANT
2073 ? old_descriptors->GetValue(i)
2074 : object->RawFastPropertyAt(FieldIndex::ForDescriptor(*old_map, i));
2075 Handle<Object> value(raw_value, isolate);
2076 if (!old_details.representation().IsDouble() &&
2077 details.representation().IsDouble()) {
2078 if (old_details.representation().IsNone()) {
2079 value = handle(Smi::FromInt(0), isolate);
2081 value = Object::NewStorageFor(isolate, value, details.representation());
2082 } else if (old_details.representation().IsDouble() &&
2083 !details.representation().IsDouble()) {
2084 value = Object::WrapForRead(isolate, value, old_details.representation());
2086 DCHECK(!(details.representation().IsDouble() && value->IsSmi()));
2087 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2088 if (target_index < 0) target_index += total_size;
2089 array->set(target_index, *value);
// Initialize the fields added by new_map beyond old_map's descriptors.
2092 for (int i = old_nof; i < new_nof; i++) {
2093 PropertyDetails details = new_descriptors->GetDetails(i);
2094 if (details.type() != FIELD) continue;
2095 Handle<Object> value;
2096 if (details.representation().IsDouble()) {
2097 value = isolate->factory()->NewHeapNumber(0, MUTABLE);
2099 value = isolate->factory()->uninitialized_value();
2101 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2102 if (target_index < 0) target_index += total_size;
2103 array->set(target_index, *value);
2106 // From here on we cannot fail and we shouldn't GC anymore.
2107 DisallowHeapAllocation no_allocation;
2109 // Copy (real) inobject properties. If necessary, stop at number_of_fields to
2110 // avoid overwriting |one_pointer_filler_map|.
2111 int limit = Min(inobject, number_of_fields);
2112 for (int i = 0; i < limit; i++) {
2113 FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
2114 object->FastPropertyAtPut(index, array->get(external + i));
2117 Heap* heap = isolate->heap();
2119 // If there are properties in the new backing store, trim it to the correct
2120 // size and install the backing store into the object.
2122 heap->RightTrimFixedArray<Heap::FROM_MUTATOR>(*array, inobject);
2123 object->set_properties(*array);
2126 // Create filler object past the new instance size.
2127 int new_instance_size = new_map->instance_size();
2128 int instance_size_delta = old_map->instance_size() - new_instance_size;
2129 DCHECK(instance_size_delta >= 0);
2131 if (instance_size_delta > 0) {
2132 Address address = object->address();
2133 heap->CreateFillerObjectAt(
2134 address + new_instance_size, instance_size_delta);
2135 heap->AdjustLiveBytes(address, -instance_size_delta, Heap::FROM_MUTATOR);
2138 // We are storing the new map using release store after creating a filler for
2139 // the left-over space to avoid races with the sweeper thread.
2140 object->synchronized_set_map(*new_map);
// Generalizes one descriptor of |object|'s map to |new_representation| /
// |new_field_type| and migrates the object to the resulting map.
// NOTE(review): this listing is elided — the |modify_index| parameter line and
// the trailing store-mode argument of the GeneralizeRepresentation call are
// referenced below but not visible here; confirm against the full source.
void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
                                             Representation new_representation,
                                             Handle<HeapType> new_field_type) {
  // Compute the generalized map (rewrites the transition tree as needed)...
  Handle<Map> new_map = Map::GeneralizeRepresentation(
      handle(object->map()), modify_index, new_representation, new_field_type,
  // ...then rewrite the object's property storage to match the new map.
  MigrateToMap(object, new_map);
// Counts this map's own descriptors of type FIELD (real stored properties,
// as opposed to CONSTANT or CALLBACKS descriptors).
// NOTE(review): the declaration of the |result| accumulator and the final
// return are elided from this listing.
int Map::NumberOfFields() {
  DescriptorArray* descriptors = instance_descriptors();
  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
    if (descriptors->GetDetails(i).type() == FIELD) result++;
// Copies |map| and pessimizes every descriptor to the most general state:
// Representation::Tagged() with field type HeapType::Any(). Used as the
// bail-out path of GeneralizeRepresentation when the transition tree cannot
// be reused (see the |reason| strings at the call sites).
// NOTE(review): the |modify_index| parameter line and several closing braces
// are elided from this listing.
Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
                                                  StoreMode store_mode,
                                                  PropertyAttributes attributes,
                                                  const char* reason) {
  Isolate* isolate = map->GetIsolate();
  Handle<Map> new_map = Copy(map);
  // Degrade every descriptor of the fresh copy to tagged/any.
  DescriptorArray* descriptors = new_map->instance_descriptors();
  int length = descriptors->number_of_descriptors();
  for (int i = 0; i < length; i++) {
    descriptors->SetRepresentation(i, Representation::Tagged());
    if (descriptors->GetDetails(i).type() == FIELD) {
      descriptors->SetValue(i, HeapType::Any());
  // Unless the instance is being migrated, ensure that modify_index is a field.
  PropertyDetails details = descriptors->GetDetails(modify_index);
  if (store_mode == FORCE_FIELD &&
      (details.type() != FIELD || details.attributes() != attributes)) {
    // Reuse the existing field slot if there is one, otherwise append a new
    // field at the end of the current field area.
    int field_index = details.type() == FIELD ? details.field_index()
                                              : new_map->NumberOfFields();
    FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
                      field_index, attributes, Representation::Tagged());
    descriptors->Replace(modify_index, &d);
    if (details.type() != FIELD) {
      // A new field consumes one unused slot; wrap around by kFieldsAdded
      // when the slack is exhausted.
      int unused_property_fields = new_map->unused_property_fields() - 1;
      if (unused_property_fields < 0) {
        unused_property_fields += JSObject::kFieldsAdded;
      new_map->set_unused_property_fields(unused_property_fields);
    DCHECK(details.attributes() == attributes);
  if (FLAG_trace_generalization) {
    HeapType* field_type = (details.type() == FIELD)
        ? map->instance_descriptors()->GetFieldType(modify_index)
    map->PrintGeneralization(stdout, reason, modify_index,
                             new_map->NumberOfOwnDescriptors(),
                             new_map->NumberOfOwnDescriptors(),
                             details.type() == CONSTANT && store_mode == FORCE_FIELD,
                             details.representation(), Representation::Tagged(),
                             field_type, HeapType::Any());
// Convenience overload: forwards to the main CopyGeneralizeAllRepresentations
// using the attributes the descriptor at |modify_index| already has.
// NOTE(review): the |modify_index| parameter line is elided from this listing.
Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
                                                  StoreMode store_mode,
                                                  const char* reason) {
  PropertyDetails details =
      map->instance_descriptors()->GetDetails(modify_index);
  return CopyGeneralizeAllRepresentations(map, modify_index, store_mode,
                                          details.attributes(), reason);
// Recursively marks this map and every map reachable through its transition
// array as deprecated, then deoptimizes code that depended on the (now stale)
// transition layout.
// NOTE(review): the line that actually sets the deprecated bit on |this| and
// the closing braces are elided from this listing.
void Map::DeprecateTransitionTree() {
  if (is_deprecated()) return;  // Already done — avoids re-walking the tree.
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    for (int i = 0; i < transitions->number_of_transitions(); i++) {
      transitions->GetTarget(i)->DeprecateTransitionTree();
  dependent_code()->DeoptimizeDependentCodeGroup(
      GetIsolate(), DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange();
// Invalidates a transition target at |key|, and installs |new_descriptors| over
// the current instance_descriptors to ensure proper sharing of descriptor
// arrays along the back-pointer chain.
void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
  // Deprecate the whole subtree hanging off the |key| transition, if any.
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    int transition = transitions->Search(key);
    if (transition != TransitionArray::kNotFound) {
      transitions->GetTarget(transition)->DeprecateTransitionTree();
  // Don't overwrite the empty descriptor array.
  if (NumberOfOwnDescriptors() == 0) return;
  DescriptorArray* to_replace = instance_descriptors();
  Map* current = this;
  // Tell the incremental marker about the pointer updates we are about to do.
  GetHeap()->incremental_marking()->RecordWrites(to_replace);
  // Walk the back-pointer chain and swap in |new_descriptors| on every map
  // that still shares the old descriptor array, resetting their enum caches.
  while (current->instance_descriptors() == to_replace) {
    current->SetEnumLength(kInvalidEnumCacheSentinel);
    current->set_instance_descriptors(new_descriptors);
    Object* next = current->GetBackPointer();
    if (next->IsUndefined()) break;  // Reached the root map.
    current = Map::cast(next);
  set_owns_descriptors(false);
// Follows the back-pointer chain to the root of this map's transition tree.
// NOTE(review): the |result| initialization and the enclosing loop header are
// elided from this listing — only the loop body is visible.
Map* Map::FindRootMap() {
    Object* back = result->GetBackPointer();
    if (back->IsUndefined()) return result;  // No back pointer => root map.
    result = Map::cast(back);
// Starting from this root map, walks the transition tree along the keys of
// |descriptors| and returns the deepest map whose descriptors still match
// (same type, attributes, representation, and compatible field types/values).
// NOTE(review): the |length| parameter line, some closing braces, and the
// final return are elided from this listing.
Map* Map::FindLastMatchMap(int verbatim,
                           DescriptorArray* descriptors) {
  DisallowHeapAllocation no_allocation;
  // This can only be called on roots of transition trees.
  DCHECK(GetBackPointer()->IsUndefined());
  Map* current = this;
  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;
    Map* next = transitions->GetTarget(transition);
    DescriptorArray* next_descriptors = next->instance_descriptors();
    // Stop at the first descriptor that disagrees in any dimension.
    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails next_details = next_descriptors->GetDetails(i);
    if (details.type() != next_details.type()) break;
    if (details.attributes() != next_details.attributes()) break;
    if (!details.representation().Equals(next_details.representation())) break;
    if (next_details.type() == FIELD) {
      // For fields the candidate's type must subsume the requested one.
      if (!descriptors->GetFieldType(i)->NowIs(
              next_descriptors->GetFieldType(i))) break;
    if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break;
// Walks the back-pointer chain to find the map that introduced (and therefore
// owns) the field at |descriptor|: the first map whose parent has fewer own
// descriptors than |descriptor| requires.
// NOTE(review): the |result| initialization, the loop header, the assignment
// of |parent| to |result|, and the return are elided from this listing.
Map* Map::FindFieldOwner(int descriptor) {
  DisallowHeapAllocation no_allocation;
  DCHECK_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type());
    Object* back = result->GetBackPointer();
    if (back->IsUndefined()) break;  // Reached the root map.
    Map* parent = Map::cast(back);
    if (parent->NumberOfOwnDescriptors() <= descriptor) break;
// Propagates a generalized field type for |descriptor| through this map and,
// recursively, through all of its transition targets, replacing the shared
// descriptor in place.
void Map::UpdateFieldType(int descriptor, Handle<Name> name,
                          Handle<HeapType> new_type) {
  DisallowHeapAllocation no_allocation;
  PropertyDetails details = instance_descriptors()->GetDetails(descriptor);
  if (details.type() != FIELD) return;  // Only FIELD descriptors carry a type.
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    for (int i = 0; i < transitions->number_of_transitions(); ++i) {
      transitions->GetTarget(i)->UpdateFieldType(descriptor, name, new_type);
  // Skip if already updated the shared descriptor.
  if (instance_descriptors()->GetFieldType(descriptor) == *new_type) return;
  FieldDescriptor d(name, instance_descriptors()->GetFieldIndex(descriptor),
                    new_type, details.attributes(), details.representation());
  instance_descriptors()->Replace(descriptor, &d);
// Computes the least general field type covering both |type1| and |type2|.
// Returns the wider of the two when one subsumes the other; otherwise unions
// them while both are stable, capping the union at kMaxClassesPerFieldType
// classes; falls back to Any() past that cap or when a type is unstable.
// NOTE(review): the |isolate| parameter line and the return of the unioned
// |type| are elided from this listing.
Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1,
                                          Handle<HeapType> type2,
  static const int kMaxClassesPerFieldType = 5;
  if (type1->NowIs(type2)) return type2;
  if (type2->NowIs(type1)) return type1;
  if (type1->NowStable() && type2->NowStable()) {
    Handle<HeapType> type = HeapType::Union(type1, type2, isolate);
    if (type->NumClasses() <= kMaxClassesPerFieldType) {
      DCHECK(type->NowStable());
      DCHECK(type1->NowIs(type));
      DCHECK(type2->NowIs(type));
  return HeapType::Any(isolate);
// Generalizes the field type of the descriptor at |modify_index| on |map|'s
// field-owning ancestor, then deoptimizes code depending on the old type.
// NOTE(review): the |modify_index| parameter line, an argument line of the
// DCHECKed GeneralizeFieldType call, and an early return in the NowIs branch
// are elided from this listing.
void Map::GeneralizeFieldType(Handle<Map> map,
                              Handle<HeapType> new_field_type) {
  Isolate* isolate = map->GetIsolate();
  // Check if we actually need to generalize the field type at all.
  Handle<HeapType> old_field_type(
      map->instance_descriptors()->GetFieldType(modify_index), isolate);
  if (new_field_type->NowIs(old_field_type)) {
    DCHECK(Map::GeneralizeFieldType(old_field_type,
                                    isolate)->NowIs(old_field_type));
  // Determine the field owner.
  Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
  Handle<DescriptorArray> descriptors(
      field_owner->instance_descriptors(), isolate);
  DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
  // Determine the generalized new field type.
  new_field_type = Map::GeneralizeFieldType(
      old_field_type, new_field_type, isolate);
  PropertyDetails details = descriptors->GetDetails(modify_index);
  Handle<Name> name(descriptors->GetKey(modify_index));
  // Install the widened type on the owner (and its transition subtree), then
  // invalidate optimized code that assumed the narrower type.
  field_owner->UpdateFieldType(modify_index, name, new_field_type);
  field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
      isolate, DependentCode::kFieldTypeGroup);
  if (FLAG_trace_generalization) {
    map->PrintGeneralization(
        stdout, "field type generalization",
        modify_index, map->NumberOfOwnDescriptors(),
        map->NumberOfOwnDescriptors(), false,
        details.representation(), details.representation(),
        *old_field_type, *new_field_type);
// Generalize the representation of the descriptor at |modify_index|.
// This method rewrites the transition tree to reflect the new change. To avoid
// high degrees of polymorphism, and to stabilize quickly, on every rewrite
// the new type is deduced by merging the current type with any potential new
// (partial) version of the type in the transition tree.
// To do this, on each rewrite:
// - Search the root of the transition tree using FindRootMap.
// - Find |target_map|, the newest matching version of this map using the keys
//   in the |old_map|'s descriptor array to walk the transition tree.
// - Merge/generalize the descriptor array of the |old_map| and |target_map|.
// - Generalize the |modify_index| descriptor using |new_representation| and
//   |new_field_type|.
// - Walk the tree again starting from the root towards |target_map|. Stop at
//   |split_map|, the first map whose descriptor array does not match the merged
//   descriptor array.
// - If |target_map| == |split_map|, |target_map| is in the expected state.
// - Otherwise, invalidate the outdated transition target from |target_map|, and
//   replace its transition tree with a new branch for the updated descriptors.
//
// NOTE(review): this listing is elided — the |modify_index| parameter line,
// several else/return/closing-brace lines, and some argument lines are not
// visible; confirm any change against the full source.
Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
                                          Representation new_representation,
                                          Handle<HeapType> new_field_type,
                                          StoreMode store_mode) {
  Isolate* isolate = old_map->GetIsolate();
  Handle<DescriptorArray> old_descriptors(
      old_map->instance_descriptors(), isolate);
  int old_nof = old_map->NumberOfOwnDescriptors();
  PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
  Representation old_representation = old_details.representation();
  // It's fine to transition from None to anything but double without any
  // modification to the object, because the default uninitialized value for
  // representation None can be overwritten by both smi and tagged values.
  // Doubles, however, would require a box allocation.
  if (old_representation.IsNone() &&
      !new_representation.IsNone() &&
      !new_representation.IsDouble()) {
    DCHECK(old_details.type() == FIELD);
    DCHECK(old_descriptors->GetFieldType(modify_index)->NowIs(
    if (FLAG_trace_generalization) {
      old_map->PrintGeneralization(
          stdout, "uninitialized field",
          modify_index, old_map->NumberOfOwnDescriptors(),
          old_map->NumberOfOwnDescriptors(), false,
          old_representation, new_representation,
          old_descriptors->GetFieldType(modify_index), *new_field_type);
    // In-place update is safe here: no instance has observed a value yet.
    old_descriptors->SetRepresentation(modify_index, new_representation);
    old_descriptors->SetValue(modify_index, *new_field_type);
  // Check the state of the root map.
  Handle<Map> root_map(old_map->FindRootMap(), isolate);
  if (!old_map->EquivalentToForTransition(*root_map)) {
    return CopyGeneralizeAllRepresentations(
        old_map, modify_index, store_mode, "not equivalent");
  int root_nof = root_map->NumberOfOwnDescriptors();
  if (modify_index < root_nof) {
    // Descriptors owned by the root cannot be rewritten along a branch; bail
    // out unless the root already accommodates the requested generalization.
    PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
    if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) ||
        (old_details.type() == FIELD &&
         (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) ||
          !new_representation.fits_into(old_details.representation())))) {
      return CopyGeneralizeAllRepresentations(
          old_map, modify_index, store_mode, "root modification");
  // Walk down from the root along |old_map|'s keys, generalizing field types
  // along the way, to find the newest compatible version of this map.
  Handle<Map> target_map = root_map;
  for (int i = root_nof; i < old_nof; ++i) {
    int j = target_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) break;
    Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
    Handle<DescriptorArray> tmp_descriptors = handle(
        tmp_map->instance_descriptors(), isolate);
    // Check if target map is incompatible.
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
    PropertyType old_type = old_details.type();
    PropertyType tmp_type = tmp_details.type();
    if (tmp_details.attributes() != old_details.attributes() ||
        ((tmp_type == CALLBACKS || old_type == CALLBACKS) &&
         (tmp_type != old_type ||
          tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
      return CopyGeneralizeAllRepresentations(
          old_map, modify_index, store_mode, "incompatible");
    Representation old_representation = old_details.representation();
    Representation tmp_representation = tmp_details.representation();
    if (!old_representation.fits_into(tmp_representation) ||
        (!new_representation.fits_into(tmp_representation) &&
         modify_index == i)) {
    if (tmp_type == FIELD) {
      // Generalize the field type as necessary.
      Handle<HeapType> old_field_type = (old_type == FIELD)
          ? handle(old_descriptors->GetFieldType(i), isolate)
          : old_descriptors->GetValue(i)->OptimalType(
              isolate, tmp_representation);
      if (modify_index == i) {
        old_field_type = GeneralizeFieldType(
            new_field_type, old_field_type, isolate);
      GeneralizeFieldType(tmp_map, i, old_field_type);
    } else if (tmp_type == CONSTANT) {
      if (old_type != CONSTANT ||
          old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) {
      DCHECK_EQ(tmp_type, old_type);
      DCHECK_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i));
    target_map = tmp_map;
  // Directly change the map if the target map is more general.
  Handle<DescriptorArray> target_descriptors(
      target_map->instance_descriptors(), isolate);
  int target_nof = target_map->NumberOfOwnDescriptors();
  if (target_nof == old_nof &&
      (store_mode != FORCE_FIELD ||
       target_descriptors->GetDetails(modify_index).type() == FIELD)) {
    DCHECK(modify_index < target_nof);
    DCHECK(new_representation.fits_into(
        target_descriptors->GetDetails(modify_index).representation()));
    DCHECK(target_descriptors->GetDetails(modify_index).type() != FIELD ||
           new_field_type->NowIs(
               target_descriptors->GetFieldType(modify_index)));
  // Find the last compatible target map in the transition tree.
  for (int i = target_nof; i < old_nof; ++i) {
    int j = target_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) break;
    Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
    Handle<DescriptorArray> tmp_descriptors(
        tmp_map->instance_descriptors(), isolate);
    // Check if target map is compatible.
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
    if (tmp_details.attributes() != old_details.attributes() ||
        ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) &&
         (tmp_details.type() != old_details.type() ||
          tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
      return CopyGeneralizeAllRepresentations(
          old_map, modify_index, store_mode, "incompatible");
    target_map = tmp_map;
  target_nof = target_map->NumberOfOwnDescriptors();
  target_descriptors = handle(target_map->instance_descriptors(), isolate);
  // Allocate a new descriptor array large enough to hold the required
  // descriptors, with minimally the exact same size as the old descriptor
  // array.
  int new_slack = Max(
      old_nof, old_descriptors->number_of_descriptors()) - old_nof;
  Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
      isolate, old_nof, new_slack);
  DCHECK(new_descriptors->length() > target_descriptors->length() ||
         new_descriptors->NumberOfSlackDescriptors() > 0 ||
         new_descriptors->number_of_descriptors() ==
         old_descriptors->number_of_descriptors());
  DCHECK(new_descriptors->number_of_descriptors() == old_nof);
  // 0 -> |root_nof|: copy the root-owned descriptors verbatim.
  int current_offset = 0;
  for (int i = 0; i < root_nof; ++i) {
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == FIELD) current_offset++;
    Descriptor d(handle(old_descriptors->GetKey(i), isolate),
                 handle(old_descriptors->GetValue(i), isolate),
    new_descriptors->Set(i, &d);
  // |root_nof| -> |target_nof|
  for (int i = root_nof; i < target_nof; ++i) {
    Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails target_details = target_descriptors->GetDetails(i);
    // Merge representations of old and target; widen further at the modified
    // descriptor with the requested |new_representation|.
    target_details = target_details.CopyWithRepresentation(
        old_details.representation().generalize(
            target_details.representation()));
    if (modify_index == i) {
      target_details = target_details.CopyWithRepresentation(
          new_representation.generalize(target_details.representation()));
    DCHECK_EQ(old_details.attributes(), target_details.attributes());
    if (old_details.type() == FIELD ||
        target_details.type() == FIELD ||
        (modify_index == i && store_mode == FORCE_FIELD) ||
        (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
      // Either side being a field (or a forced field) requires a field
      // descriptor with the union of both field types.
      Handle<HeapType> old_field_type = (old_details.type() == FIELD)
          ? handle(old_descriptors->GetFieldType(i), isolate)
          : old_descriptors->GetValue(i)->OptimalType(
              isolate, target_details.representation());
      Handle<HeapType> target_field_type = (target_details.type() == FIELD)
          ? handle(target_descriptors->GetFieldType(i), isolate)
          : target_descriptors->GetValue(i)->OptimalType(
              isolate, target_details.representation());
      target_field_type = GeneralizeFieldType(
          target_field_type, old_field_type, isolate);
      if (modify_index == i) {
        target_field_type = GeneralizeFieldType(
            target_field_type, new_field_type, isolate);
      FieldDescriptor d(target_key,
                        target_details.attributes(),
                        target_details.representation());
      new_descriptors->Set(i, &d);
      DCHECK_NE(FIELD, target_details.type());
      Descriptor d(target_key,
                   handle(target_descriptors->GetValue(i), isolate),
      new_descriptors->Set(i, &d);
  // |target_nof| -> |old_nof|
  for (int i = target_nof; i < old_nof; ++i) {
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
    if (modify_index == i) {
      old_details = old_details.CopyWithRepresentation(
          new_representation.generalize(old_details.representation()));
    if (old_details.type() == FIELD) {
      Handle<HeapType> old_field_type(
          old_descriptors->GetFieldType(i), isolate);
      if (modify_index == i) {
        old_field_type = GeneralizeFieldType(
            old_field_type, new_field_type, isolate);
      FieldDescriptor d(old_key,
                        old_details.attributes(),
                        old_details.representation());
      new_descriptors->Set(i, &d);
      DCHECK(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
      if (modify_index == i && store_mode == FORCE_FIELD) {
        // Convert the constant/callback at the modified index into a field
        // whose type covers its current optimal type and |new_field_type|.
        FieldDescriptor d(old_key,
                          GeneralizeFieldType(
                              old_descriptors->GetValue(i)->OptimalType(
                                  isolate, old_details.representation()),
                              new_field_type, isolate),
                          old_details.attributes(),
                          old_details.representation());
        new_descriptors->Set(i, &d);
        DCHECK_NE(FIELD, old_details.type());
        Descriptor d(old_key,
                     handle(old_descriptors->GetValue(i), isolate),
        new_descriptors->Set(i, &d);
  new_descriptors->Sort();
  DCHECK(store_mode != FORCE_FIELD ||
         new_descriptors->GetDetails(modify_index).type() == FIELD);
  // Find where the merged descriptors diverge from the existing tree and
  // deprecate everything below that point.
  Handle<Map> split_map(root_map->FindLastMatchMap(
      root_nof, old_nof, *new_descriptors), isolate);
  int split_nof = split_map->NumberOfOwnDescriptors();
  DCHECK_NE(old_nof, split_nof);
  split_map->DeprecateTarget(
      old_descriptors->GetKey(split_nof), *new_descriptors);
  if (FLAG_trace_generalization) {
    PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
    PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
    Handle<HeapType> old_field_type = (old_details.type() == FIELD)
        ? handle(old_descriptors->GetFieldType(modify_index), isolate)
        : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
    Handle<HeapType> new_field_type = (new_details.type() == FIELD)
        ? handle(new_descriptors->GetFieldType(modify_index), isolate)
        : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
    old_map->PrintGeneralization(
        stdout, "", modify_index, split_nof, old_nof,
        old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
        old_details.representation(), new_details.representation(),
        *old_field_type, *new_field_type);
  // Add missing transitions.
  Handle<Map> new_map = split_map;
  for (int i = split_nof; i < old_nof; ++i) {
    new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
  new_map->set_owns_descriptors(true);
// Generalize the representation of all FIELD descriptors.
// Each field is widened to Tagged/Any via GeneralizeRepresentation, which may
// replace |map| on every iteration.
// NOTE(review): the parameter line (the map handle), the trailing store-mode
// argument, and the final return are elided from this listing.
Handle<Map> Map::GeneralizeAllFieldRepresentations(
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
    if (descriptors->GetDetails(i).type() == FIELD) {
      map = GeneralizeRepresentation(map, i, Representation::Tagged(),
                                     HeapType::Any(map->GetIsolate()),
// Attempts to find a non-deprecated version of |map| without allocating.
// First opportunistically migrates any deprecated maps found along the
// prototype chain, then delegates to TryUpdateInternal for |map| itself.
MaybeHandle<Map> Map::TryUpdate(Handle<Map> map) {
  Handle<Map> proto_map(map);
  while (proto_map->prototype()->IsJSObject()) {
    Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
    proto_map = Handle<Map>(holder->map());
    if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
      // Migration changed the holder's map; continue walking from the new one.
      proto_map = Handle<Map>(holder->map());
  return TryUpdateInternal(map);
// Returns a non-deprecated version of |map|; a no-op generalization (None/None)
// forces the transition-tree walk that yields the up-to-date map.
// NOTE(review): the trailing store-mode argument line is elided here.
Handle<Map> Map::Update(Handle<Map> map) {
  if (!map->is_deprecated()) return map;  // Fast path: already current.
  return GeneralizeRepresentation(map, 0, Representation::None(),
                                  HeapType::None(map->GetIsolate()),
// Allocation-free attempt to replace a deprecated |old_map| with its current
// equivalent: re-walks the transition tree from the root along |old_map|'s
// keys and returns the target only if every descriptor is fully compatible.
// Returns an empty MaybeHandle when no compatible up-to-date map exists.
MaybeHandle<Map> Map::TryUpdateInternal(Handle<Map> old_map) {
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(old_map->GetIsolate());
  if (!old_map->is_deprecated()) return old_map;
  // Check the state of the root map.
  Map* root_map = old_map->FindRootMap();
  if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
  int root_nof = root_map->NumberOfOwnDescriptors();
  int old_nof = old_map->NumberOfOwnDescriptors();
  DescriptorArray* old_descriptors = old_map->instance_descriptors();
  Map* new_map = root_map;
  for (int i = root_nof; i < old_nof; ++i) {
    int j = new_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
    new_map = new_map->GetTransition(j);
    DescriptorArray* new_descriptors = new_map->instance_descriptors();
    PropertyDetails new_details = new_descriptors->GetDetails(i);
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.attributes() != new_details.attributes() ||
        !old_details.representation().fits_into(new_details.representation())) {
      return MaybeHandle<Map>();
    PropertyType new_type = new_details.type();
    PropertyType old_type = old_details.type();
    Object* new_value = new_descriptors->GetValue(i);
    Object* old_value = old_descriptors->GetValue(i);
    // Per-type compatibility: old field types must be subsumed, constants
    // must still be contained, callbacks must accept anything.
    if ((old_type == FIELD &&
         !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
        (old_type == CONSTANT &&
         !HeapType::cast(new_value)->NowContains(old_value)) ||
        (old_type == CALLBACKS &&
         !HeapType::Any()->Is(HeapType::cast(new_value)))) {
      return MaybeHandle<Map>();
    if (old_type != new_type || old_value != new_value) {
      return MaybeHandle<Map>();
  if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
  return handle(new_map);
// Invokes the holder's named-property setter interceptor for the store.
// Returns |value| if the interceptor handled the store, or an empty
// MaybeHandle if it did not intercept (or has no setter installed).
// NOTE(review): the profiler LOG wrapper line and an argument line of the
// PropertyCallbackArguments constructor are elided from this listing.
MaybeHandle<Object> JSObject::SetPropertyWithInterceptor(LookupIterator* it,
                                                         Handle<Object> value) {
  // TODO(rossberg): Support symbols in the API.
  if (it->name()->IsSymbol()) return value;
  Handle<String> name_string = Handle<String>::cast(it->name());
  Handle<JSObject> holder = it->GetHolder<JSObject>();
  Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
  if (interceptor->setter()->IsUndefined()) return MaybeHandle<Object>();
      ApiNamedPropertyAccess("interceptor-named-set", *holder, *name_string));
  PropertyCallbackArguments args(it->isolate(), interceptor->data(), *holder,
  v8::NamedPropertySetterCallback setter =
      v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
  v8::Handle<v8::Value> result = args.Call(
      setter, v8::Utils::ToLocal(name_string), v8::Utils::ToLocal(value));
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
  // A non-empty result means the interceptor intercepted the store.
  if (!result.IsEmpty()) return value;
  return MaybeHandle<Object>();
// Convenience overload: builds a LookupIterator for |name| on |object| and
// forwards to the LookupIterator-based SetProperty.
MaybeHandle<Object> Object::SetProperty(Handle<Object> object,
                                        Handle<Name> name, Handle<Object> value,
                                        StrictMode strict_mode,
                                        StoreFromKeyed store_mode) {
  LookupIterator it(object, name);
  return SetProperty(&it, value, strict_mode, store_mode);
// Core property-store dispatcher: walks the lookup chain and routes the store
// to the appropriate mechanism (access check, proxy handler, interceptor,
// accessor, plain data slot), falling through to AddDataProperty when the
// property is not found anywhere.
// NOTE(review): this listing is elided — UNREACHABLE()/break statements, the
// |done| flag declaration, and several closing braces are not visible.
MaybeHandle<Object> Object::SetProperty(LookupIterator* it,
                                        Handle<Object> value,
                                        StrictMode strict_mode,
                                        StoreFromKeyed store_mode) {
  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(it->isolate());
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::NOT_FOUND:
      case LookupIterator::ACCESS_CHECK:
        // TODO(verwaest): Remove the distinction. This is mostly bogus since we
        // don't know whether we'll want to fetch attributes or call a setter
        // until we find the property.
        if (it->HasAccess(v8::ACCESS_SET)) break;
        return JSObject::SetPropertyWithFailedAccessCheck(it, value,
      case LookupIterator::JSPROXY:
        if (it->HolderIsReceiverOrHiddenPrototype()) {
          return JSProxy::SetPropertyWithHandler(it->GetHolder<JSProxy>(),
                                                 it->GetReceiver(), it->name(),
                                                 value, strict_mode);
        // TODO(verwaest): Use the MaybeHandle to indicate result.
        bool has_result = false;
        MaybeHandle<Object> maybe_result =
            JSProxy::SetPropertyViaPrototypesWithHandler(
                it->GetHolder<JSProxy>(), it->GetReceiver(), it->name(),
                value, strict_mode, &has_result);
        if (has_result) return maybe_result;
      case LookupIterator::INTERCEPTOR:
        if (it->HolderIsReceiverOrHiddenPrototype()) {
          MaybeHandle<Object> maybe_result =
              JSObject::SetPropertyWithInterceptor(it, value);
          if (!maybe_result.is_null()) return maybe_result;
          if (it->isolate()->has_pending_exception()) return maybe_result;
        // Holder is a prototype: only the attributes matter here.
        Maybe<PropertyAttributes> maybe_attributes =
            JSObject::GetPropertyAttributesWithInterceptor(
                it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
        if (!maybe_attributes.has_value) return MaybeHandle<Object>();
        done = maybe_attributes.value != ABSENT;
        if (done && (maybe_attributes.value & READ_ONLY) != 0) {
          return WriteToReadOnlyProperty(it, value, strict_mode);
      case LookupIterator::ACCESSOR:
        if (it->property_details().IsReadOnly()) {
          return WriteToReadOnlyProperty(it, value, strict_mode);
        if (it->HolderIsReceiverOrHiddenPrototype() ||
            !it->GetAccessors()->IsDeclaredAccessorInfo()) {
          return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
                                         it->GetHolder<JSObject>(),
                                         it->GetAccessors(), strict_mode);
      case LookupIterator::DATA:
        if (it->property_details().IsReadOnly()) {
          return WriteToReadOnlyProperty(it, value, strict_mode);
        if (it->HolderIsReceiverOrHiddenPrototype()) {
          return SetDataProperty(it, value);
      case LookupIterator::TRANSITION:
  // If the receiver is the JSGlobalObject, the store was contextual. In case
  // the property did not exist yet on the global object itself, we have to
  // throw a reference error in strict mode.
  if (it->GetReceiver()->IsJSGlobalObject() && strict_mode == STRICT) {
    Handle<Object> args[1] = {it->name()};
    THROW_NEW_ERROR(it->isolate(),
                    NewReferenceError("not_defined", HandleVector(args, 1)),
  return AddDataProperty(it, value, NONE, strict_mode, store_mode);
// Handles a store to a read-only property: silently returns |value| in sloppy
// mode, throws a TypeError in strict mode.
// NOTE(review): the THROW_NEW_ERROR continuation (error-type argument) is
// elided from this listing.
MaybeHandle<Object> Object::WriteToReadOnlyProperty(LookupIterator* it,
                                                    Handle<Object> value,
                                                    StrictMode strict_mode) {
  if (strict_mode != STRICT) return value;  // Sloppy mode: silent failure.
  Handle<Object> args[] = {it->name(), it->GetReceiver()};
  THROW_NEW_ERROR(it->isolate(),
                  NewTypeError("strict_read_only_property",
                               HandleVector(args, arraysize(args))),
// Writes |value| into an existing data property located by |it|, migrating
// the holder's map first if the new value needs a different representation,
// and emitting an Object.observe "update" record when observers are present.
// NOTE(review): the first line of the |is_observed| initializer and the
// final return are elided from this listing.
Handle<Object> Object::SetDataProperty(LookupIterator* it,
                                       Handle<Object> value) {
  // Proxies are handled on the WithHandler path. Other non-JSObjects cannot
  // have own properties.
  Handle<JSObject> receiver = Handle<JSObject>::cast(it->GetReceiver());
  // Store on the holder which may be hidden behind the receiver.
  DCHECK(it->HolderIsReceiverOrHiddenPrototype());
  // Old value for the observation change record.
  // Fetch before transforming the object since the encoding may become
  // incompatible with what's cached in |it|.
      receiver->map()->is_observed() &&
      !it->name().is_identical_to(it->factory()->hidden_string());
  MaybeHandle<Object> maybe_old;
  if (is_observed) maybe_old = it->GetDataValue();
  // Possibly migrate to the most up-to-date map that will be able to store
  // |value| under it->name().
  it->PrepareForDataProperty(value);
  // Write the property value.
  it->WriteDataValue(value);
  // Send the change record if there are observers.
  if (is_observed && !value->SameValue(*maybe_old.ToHandleChecked())) {
    JSObject::EnqueueChangeRecord(receiver, "update", it->name(),
                                  maybe_old.ToHandleChecked());
3017 MaybeHandle<Object> Object::AddDataProperty(LookupIterator* it,
3018 Handle<Object> value,
3019 PropertyAttributes attributes,
3020 StrictMode strict_mode,
3021 StoreFromKeyed store_mode) {
3022 DCHECK(!it->GetReceiver()->IsJSProxy());
3023 if (!it->GetReceiver()->IsJSObject()) {
3024 // TODO(verwaest): Throw a TypeError with a more specific message.
3025 return WriteToReadOnlyProperty(it, value, strict_mode);
3028 Handle<JSObject> receiver = it->GetStoreTarget();
3030 // If the receiver is a JSGlobalProxy, store on the prototype (JSGlobalObject)
3031 // instead. If the prototype is Null, the proxy is detached.
3032 if (receiver->IsJSGlobalProxy()) return value;
3034 // Possibly migrate to the most up-to-date map that will be able to store
3035 // |value| under it->name() with |attributes|.
3036 it->PrepareTransitionToDataProperty(value, attributes, store_mode);
3037 if (it->state() != LookupIterator::TRANSITION) {
3038 if (strict_mode == SLOPPY) return value;
3040 Handle<Object> args[1] = {it->name()};
3041 THROW_NEW_ERROR(it->isolate(),
3042 NewTypeError("object_not_extensible",
3043 HandleVector(args, arraysize(args))),
3046 it->ApplyTransitionToDataProperty();
3048 // TODO(verwaest): Encapsulate dictionary handling better.
3049 if (receiver->map()->is_dictionary_map()) {
3050 // TODO(verwaest): Probably should ensure this is done beforehand.
3051 it->InternalizeName();
3052 JSObject::AddSlowProperty(receiver, it->name(), value, attributes);
3054 // Write the property value.
3055 it->WriteDataValue(value);
3058 // Send the change record if there are observers.
3059 if (receiver->map()->is_observed() &&
3060 !it->name().is_identical_to(it->factory()->hidden_string())) {
3061 JSObject::EnqueueChangeRecord(receiver, "add", it->name(),
3062 it->factory()->the_hole_value());
3069 MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
3070 Handle<JSObject> object,
3072 Handle<Object> value,
3074 StrictMode strict_mode) {
3075 Isolate *isolate = object->GetIsolate();
3076 for (PrototypeIterator iter(isolate, object); !iter.IsAtEnd();
3078 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
3079 return JSProxy::SetPropertyViaPrototypesWithHandler(
3080 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), object,
3081 isolate->factory()->Uint32ToString(index), // name
3082 value, strict_mode, found);
3084 Handle<JSObject> js_proto =
3085 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
3086 if (!js_proto->HasDictionaryElements()) {
3089 Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
3090 int entry = dictionary->FindEntry(index);
3091 if (entry != SeededNumberDictionary::kNotFound) {
3092 PropertyDetails details = dictionary->DetailsAt(entry);
3093 if (details.type() == CALLBACKS) {
3095 Handle<Object> structure(dictionary->ValueAt(entry), isolate);
3096 return SetElementWithCallback(object, structure, index, value, js_proto,
3102 return isolate->factory()->the_hole_value();
3106 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3107 // Only supports adding slack to owned descriptors.
3108 DCHECK(map->owns_descriptors());
3110 Handle<DescriptorArray> descriptors(map->instance_descriptors());
3111 int old_size = map->NumberOfOwnDescriptors();
3112 if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3114 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
3115 descriptors, old_size, slack);
3117 if (old_size == 0) {
3118 map->set_instance_descriptors(*new_descriptors);
3122 // If the source descriptors had an enum cache we copy it. This ensures
3123 // that the maps to which we push the new descriptor array back can rely
3124 // on a cache always being available once it is set. If the map has more
3125 // enumerated descriptors than available in the original cache, the cache
3126 // will be lazily replaced by the extended cache when needed.
3127 if (descriptors->HasEnumCache()) {
3128 new_descriptors->CopyEnumCacheFrom(*descriptors);
3131 // Replace descriptors by new_descriptors in all maps that share it.
3132 map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);
3135 for (Object* current = map->GetBackPointer();
3136 !current->IsUndefined();
3137 current = walk_map->GetBackPointer()) {
3138 walk_map = Map::cast(current);
3139 if (walk_map->instance_descriptors() != *descriptors) break;
3140 walk_map->set_instance_descriptors(*new_descriptors);
3143 map->set_instance_descriptors(*new_descriptors);
3148 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3149 Handle<typename T::Array> array,
3150 int valid_descriptors) {
3151 int nof_callbacks = callbacks->length();
3153 Isolate* isolate = array->GetIsolate();
3154 // Ensure the keys are unique names before writing them into the
3155 // instance descriptor. Since it may cause a GC, it has to be done before we
3156 // temporarily put the heap in an invalid state while appending descriptors.
3157 for (int i = 0; i < nof_callbacks; ++i) {
3158 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3159 if (entry->name()->IsUniqueName()) continue;
3160 Handle<String> key =
3161 isolate->factory()->InternalizeString(
3162 Handle<String>(String::cast(entry->name())));
3163 entry->set_name(*key);
3166 // Fill in new callback descriptors. Process the callbacks from
3167 // back to front so that the last callback with a given name takes
3168 // precedence over previously added callbacks with that name.
3169 for (int i = nof_callbacks - 1; i >= 0; i--) {
3170 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3171 Handle<Name> key(Name::cast(entry->name()));
3172 // Check if a descriptor with this name already exists before writing.
3173 if (!T::Contains(key, entry, valid_descriptors, array)) {
3174 T::Insert(key, entry, valid_descriptors, array);
3175 valid_descriptors++;
3179 return valid_descriptors;
3182 struct DescriptorArrayAppender {
3183 typedef DescriptorArray Array;
3184 static bool Contains(Handle<Name> key,
3185 Handle<AccessorInfo> entry,
3186 int valid_descriptors,
3187 Handle<DescriptorArray> array) {
3188 DisallowHeapAllocation no_gc;
3189 return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
3191 static void Insert(Handle<Name> key,
3192 Handle<AccessorInfo> entry,
3193 int valid_descriptors,
3194 Handle<DescriptorArray> array) {
3195 DisallowHeapAllocation no_gc;
3196 CallbacksDescriptor desc(key, entry, entry->property_attributes());
3197 array->Append(&desc);
3202 struct FixedArrayAppender {
3203 typedef FixedArray Array;
3204 static bool Contains(Handle<Name> key,
3205 Handle<AccessorInfo> entry,
3206 int valid_descriptors,
3207 Handle<FixedArray> array) {
3208 for (int i = 0; i < valid_descriptors; i++) {
3209 if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
3213 static void Insert(Handle<Name> key,
3214 Handle<AccessorInfo> entry,
3215 int valid_descriptors,
3216 Handle<FixedArray> array) {
3217 DisallowHeapAllocation no_gc;
3218 array->set(valid_descriptors, *entry);
3223 void Map::AppendCallbackDescriptors(Handle<Map> map,
3224 Handle<Object> descriptors) {
3225 int nof = map->NumberOfOwnDescriptors();
3226 Handle<DescriptorArray> array(map->instance_descriptors());
3227 NeanderArray callbacks(descriptors);
3228 DCHECK(array->NumberOfSlackDescriptors() >= callbacks.length());
3229 nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3230 map->SetNumberOfOwnDescriptors(nof);
3234 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3235 Handle<FixedArray> array,
3236 int valid_descriptors) {
3237 NeanderArray callbacks(descriptors);
3238 DCHECK(array->length() >= callbacks.length() + valid_descriptors);
3239 return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
3245 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3246 DCHECK(!map.is_null());
3247 for (int i = 0; i < maps->length(); ++i) {
3248 if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
3255 static Handle<T> MaybeNull(T* p) {
3256 if (p == NULL) return Handle<T>::null();
3257 return Handle<T>(p);
3261 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3262 ElementsKind kind = elements_kind();
3263 Handle<Map> transitioned_map = Handle<Map>::null();
3264 Handle<Map> current_map(this);
3265 bool packed = IsFastPackedElementsKind(kind);
3266 if (IsTransitionableFastElementsKind(kind)) {
3267 while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3268 kind = GetNextMoreGeneralFastElementsKind(kind, false);
3269 Handle<Map> maybe_transitioned_map =
3270 MaybeNull(current_map->LookupElementsTransitionMap(kind));
3271 if (maybe_transitioned_map.is_null()) break;
3272 if (ContainsMap(candidates, maybe_transitioned_map) &&
3273 (packed || !IsFastPackedElementsKind(kind))) {
3274 transitioned_map = maybe_transitioned_map;
3275 if (!IsFastPackedElementsKind(kind)) packed = false;
3277 current_map = maybe_transitioned_map;
3280 return transitioned_map;
3284 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3285 Map* current_map = map;
3287 IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3289 : TERMINAL_FAST_ELEMENTS_KIND;
3291 // Support for legacy API: SetIndexedPropertiesTo{External,Pixel}Data
3292 // allows to change elements from arbitrary kind to any ExternalArray
3293 // elements kind. Satisfy its requirements, checking whether we already
3294 // have the cached transition.
3295 if (IsExternalArrayElementsKind(to_kind) &&
3296 !IsFixedTypedArrayElementsKind(map->elements_kind())) {
3297 if (map->HasElementsTransition()) {
3298 Map* next_map = map->elements_transition_map();
3299 if (next_map->elements_kind() == to_kind) return next_map;
3304 ElementsKind kind = map->elements_kind();
3305 while (kind != target_kind) {
3306 kind = GetNextTransitionElementsKind(kind);
3307 if (!current_map->HasElementsTransition()) return current_map;
3308 current_map = current_map->elements_transition_map();
3311 if (to_kind != kind && current_map->HasElementsTransition()) {
3312 DCHECK(to_kind == DICTIONARY_ELEMENTS);
3313 Map* next_map = current_map->elements_transition_map();
3314 if (next_map->elements_kind() == to_kind) return next_map;
3317 DCHECK(current_map->elements_kind() == target_kind);
3322 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3323 Map* to_map = FindClosestElementsTransition(this, to_kind);
3324 if (to_map->elements_kind() == to_kind) return to_map;
3329 bool Map::IsMapInArrayPrototypeChain() {
3330 Isolate* isolate = GetIsolate();
3331 if (isolate->initial_array_prototype()->map() == this) {
3335 if (isolate->initial_object_prototype()->map() == this) {
3343 static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
3344 ElementsKind to_kind) {
3345 DCHECK(IsTransitionElementsKind(map->elements_kind()));
3347 Handle<Map> current_map = map;
3349 ElementsKind kind = map->elements_kind();
3350 if (!map->is_prototype_map()) {
3351 while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3352 kind = GetNextTransitionElementsKind(kind);
3354 Map::CopyAsElementsKind(current_map, kind, INSERT_TRANSITION);
3358 // In case we are exiting the fast elements kind system, just add the map in
3360 if (kind != to_kind) {
3361 current_map = Map::CopyAsElementsKind(
3362 current_map, to_kind, INSERT_TRANSITION);
3365 DCHECK(current_map->elements_kind() == to_kind);
3370 Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
3371 ElementsKind to_kind) {
3372 ElementsKind from_kind = map->elements_kind();
3373 if (from_kind == to_kind) return map;
3375 Isolate* isolate = map->GetIsolate();
3376 Context* native_context = isolate->context()->native_context();
3377 Object* maybe_array_maps = native_context->js_array_maps();
3378 if (maybe_array_maps->IsFixedArray()) {
3379 DisallowHeapAllocation no_gc;
3380 FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
3381 if (array_maps->get(from_kind) == *map) {
3382 Object* maybe_transitioned_map = array_maps->get(to_kind);
3383 if (maybe_transitioned_map->IsMap()) {
3384 return handle(Map::cast(maybe_transitioned_map));
3389 return TransitionElementsToSlow(map, to_kind);
3393 Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
3394 ElementsKind to_kind) {
3395 ElementsKind from_kind = map->elements_kind();
3397 if (from_kind == to_kind) {
3401 bool allow_store_transition =
3402 // Only remember the map transition if there is not an already existing
3403 // non-matching element transition.
3404 !map->IsUndefined() && !map->is_dictionary_map() &&
3405 IsTransitionElementsKind(from_kind);
3407 // Only store fast element maps in ascending generality.
3408 if (IsFastElementsKind(to_kind)) {
3409 allow_store_transition &=
3410 IsTransitionableFastElementsKind(from_kind) &&
3411 IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3414 if (!allow_store_transition) {
3415 return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
3418 return Map::AsElementsKind(map, to_kind);
3423 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3424 Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));
3426 if (closest_map->elements_kind() == kind) {
3430 return AddMissingElementsTransitions(closest_map, kind);
3434 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3435 ElementsKind to_kind) {
3436 Handle<Map> map(object->map());
3437 return Map::TransitionElementsTo(map, to_kind);
3441 Maybe<bool> JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy,
3442 Handle<Name> name) {
3443 Isolate* isolate = proxy->GetIsolate();
3445 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3446 if (name->IsSymbol()) return maybe(false);
3448 Handle<Object> args[] = { name };
3449 Handle<Object> result;
3450 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3451 isolate, result, CallTrap(proxy, "has", isolate->derived_has_trap(),
3452 arraysize(args), args),
3455 return maybe(result->BooleanValue());
3459 MaybeHandle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
3460 Handle<Object> receiver,
3462 Handle<Object> value,
3463 StrictMode strict_mode) {
3464 Isolate* isolate = proxy->GetIsolate();
3466 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3467 if (name->IsSymbol()) return value;
3469 Handle<Object> args[] = { receiver, name, value };
3470 RETURN_ON_EXCEPTION(
3474 isolate->derived_set_trap(),
3483 MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3484 Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name,
3485 Handle<Object> value, StrictMode strict_mode, bool* done) {
3486 Isolate* isolate = proxy->GetIsolate();
3487 Handle<Object> handler(proxy->handler(), isolate); // Trap might morph proxy.
3489 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3490 if (name->IsSymbol()) {
3492 return isolate->factory()->the_hole_value();
3495 *done = true; // except where redefined...
3496 Handle<Object> args[] = { name };
3497 Handle<Object> result;
3498 ASSIGN_RETURN_ON_EXCEPTION(
3501 "getPropertyDescriptor",
3507 if (result->IsUndefined()) {
3509 return isolate->factory()->the_hole_value();
3512 // Emulate [[GetProperty]] semantics for proxies.
3513 Handle<Object> argv[] = { result };
3514 Handle<Object> desc;
3515 ASSIGN_RETURN_ON_EXCEPTION(
3517 Execution::Call(isolate,
3518 isolate->to_complete_property_descriptor(),
3524 // [[GetProperty]] requires to check that all properties are configurable.
3525 Handle<String> configurable_name =
3526 isolate->factory()->InternalizeOneByteString(
3527 STATIC_CHAR_VECTOR("configurable_"));
3528 Handle<Object> configurable =
3529 Object::GetProperty(desc, configurable_name).ToHandleChecked();
3530 DCHECK(configurable->IsBoolean());
3531 if (configurable->IsFalse()) {
3532 Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3533 STATIC_CHAR_VECTOR("getPropertyDescriptor"));
3534 Handle<Object> args[] = { handler, trap, name };
3535 THROW_NEW_ERROR(isolate, NewTypeError("proxy_prop_not_configurable",
3536 HandleVector(args, arraysize(args))),
3539 DCHECK(configurable->IsTrue());
3541 // Check for DataDescriptor.
3542 Handle<String> hasWritable_name =
3543 isolate->factory()->InternalizeOneByteString(
3544 STATIC_CHAR_VECTOR("hasWritable_"));
3545 Handle<Object> hasWritable =
3546 Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
3547 DCHECK(hasWritable->IsBoolean());
3548 if (hasWritable->IsTrue()) {
3549 Handle<String> writable_name = isolate->factory()->InternalizeOneByteString(
3550 STATIC_CHAR_VECTOR("writable_"));
3551 Handle<Object> writable =
3552 Object::GetProperty(desc, writable_name).ToHandleChecked();
3553 DCHECK(writable->IsBoolean());
3554 *done = writable->IsFalse();
3555 if (!*done) return isolate->factory()->the_hole_value();
3556 if (strict_mode == SLOPPY) return value;
3557 Handle<Object> args[] = { name, receiver };
3558 THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
3559 HandleVector(args, arraysize(args))),
3563 // We have an AccessorDescriptor.
3564 Handle<String> set_name =
3565 isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("set_"));
3566 Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
3567 if (!setter->IsUndefined()) {
3568 // TODO(rossberg): nicer would be to cast to some JSCallable here...
3569 return SetPropertyWithDefinedSetter(
3570 receiver, Handle<JSReceiver>::cast(setter), value);
3573 if (strict_mode == SLOPPY) return value;
3574 Handle<Object> args2[] = { name, proxy };
3575 THROW_NEW_ERROR(isolate, NewTypeError("no_setter_in_callback",
3576 HandleVector(args2, arraysize(args2))),
3581 MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
3582 Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3583 Isolate* isolate = proxy->GetIsolate();
3585 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3586 if (name->IsSymbol()) return isolate->factory()->false_value();
3588 Handle<Object> args[] = { name };
3589 Handle<Object> result;
3590 ASSIGN_RETURN_ON_EXCEPTION(
3599 bool result_bool = result->BooleanValue();
3600 if (mode == STRICT_DELETION && !result_bool) {
3601 Handle<Object> handler(proxy->handler(), isolate);
3602 Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3603 STATIC_CHAR_VECTOR("delete"));
3604 Handle<Object> args[] = { handler, trap_name };
3605 THROW_NEW_ERROR(isolate, NewTypeError("handler_failed",
3606 HandleVector(args, arraysize(args))),
3609 return isolate->factory()->ToBoolean(result_bool);
3613 MaybeHandle<Object> JSProxy::DeleteElementWithHandler(
3614 Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3615 Isolate* isolate = proxy->GetIsolate();
3616 Handle<String> name = isolate->factory()->Uint32ToString(index);
3617 return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
3621 Maybe<PropertyAttributes> JSProxy::GetPropertyAttributesWithHandler(
3622 Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name) {
3623 Isolate* isolate = proxy->GetIsolate();
3624 HandleScope scope(isolate);
3626 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3627 if (name->IsSymbol()) return maybe(ABSENT);
3629 Handle<Object> args[] = { name };
3630 Handle<Object> result;
3631 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3633 proxy->CallTrap(proxy, "getPropertyDescriptor", Handle<Object>(),
3634 arraysize(args), args),
3635 Maybe<PropertyAttributes>());
3637 if (result->IsUndefined()) return maybe(ABSENT);
3639 Handle<Object> argv[] = { result };
3640 Handle<Object> desc;
3641 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3643 Execution::Call(isolate, isolate->to_complete_property_descriptor(),
3644 result, arraysize(argv), argv),
3645 Maybe<PropertyAttributes>());
3647 // Convert result to PropertyAttributes.
3648 Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
3649 STATIC_CHAR_VECTOR("enumerable_"));
3650 Handle<Object> enumerable;
3651 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, enumerable,
3652 Object::GetProperty(desc, enum_n),
3653 Maybe<PropertyAttributes>());
3654 Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
3655 STATIC_CHAR_VECTOR("configurable_"));
3656 Handle<Object> configurable;
3657 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, configurable,
3658 Object::GetProperty(desc, conf_n),
3659 Maybe<PropertyAttributes>());
3660 Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
3661 STATIC_CHAR_VECTOR("writable_"));
3662 Handle<Object> writable;
3663 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, writable,
3664 Object::GetProperty(desc, writ_n),
3665 Maybe<PropertyAttributes>());
3666 if (!writable->BooleanValue()) {
3667 Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
3668 STATIC_CHAR_VECTOR("set_"));
3669 Handle<Object> setter;
3670 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, setter,
3671 Object::GetProperty(desc, set_n),
3672 Maybe<PropertyAttributes>());
3673 writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
3676 if (configurable->IsFalse()) {
3677 Handle<Object> handler(proxy->handler(), isolate);
3678 Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3679 STATIC_CHAR_VECTOR("getPropertyDescriptor"));
3680 Handle<Object> args[] = { handler, trap, name };
3681 Handle<Object> error;
3682 MaybeHandle<Object> maybe_error = isolate->factory()->NewTypeError(
3683 "proxy_prop_not_configurable", HandleVector(args, arraysize(args)));
3684 if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
3688 int attributes = NONE;
3689 if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
3690 if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
3691 if (!writable->BooleanValue()) attributes |= READ_ONLY;
3692 return maybe(static_cast<PropertyAttributes>(attributes));
3696 Maybe<PropertyAttributes> JSProxy::GetElementAttributeWithHandler(
3697 Handle<JSProxy> proxy, Handle<JSReceiver> receiver, uint32_t index) {
3698 Isolate* isolate = proxy->GetIsolate();
3699 Handle<String> name = isolate->factory()->Uint32ToString(index);
3700 return GetPropertyAttributesWithHandler(proxy, receiver, name);
3704 void JSProxy::Fix(Handle<JSProxy> proxy) {
3705 Isolate* isolate = proxy->GetIsolate();
3707 // Save identity hash.
3708 Handle<Object> hash(proxy->GetIdentityHash(), isolate);
3710 if (proxy->IsJSFunctionProxy()) {
3711 isolate->factory()->BecomeJSFunction(proxy);
3712 // Code will be set on the JavaScript side.
3714 isolate->factory()->BecomeJSObject(proxy);
3716 DCHECK(proxy->IsJSObject());
3718 // Inherit identity, if it was present.
3719 if (hash->IsSmi()) {
3720 JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
3721 Handle<Smi>::cast(hash));
3726 MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
3728 Handle<Object> derived,
3730 Handle<Object> argv[]) {
3731 Isolate* isolate = proxy->GetIsolate();
3732 Handle<Object> handler(proxy->handler(), isolate);
3734 Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
3735 Handle<Object> trap;
3736 ASSIGN_RETURN_ON_EXCEPTION(
3738 Object::GetPropertyOrElement(handler, trap_name),
3741 if (trap->IsUndefined()) {
3742 if (derived.is_null()) {
3743 Handle<Object> args[] = { handler, trap_name };
3744 THROW_NEW_ERROR(isolate,
3745 NewTypeError("handler_trap_missing",
3746 HandleVector(args, arraysize(args))),
3749 trap = Handle<Object>(derived);
3752 return Execution::Call(isolate, trap, handler, argc, argv);
3756 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
3757 DCHECK(object->map()->inobject_properties() == map->inobject_properties());
3758 ElementsKind obj_kind = object->map()->elements_kind();
3759 ElementsKind map_kind = map->elements_kind();
3760 if (map_kind != obj_kind) {
3761 ElementsKind to_kind = map_kind;
3762 if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
3763 IsDictionaryElementsKind(obj_kind)) {
3766 if (IsDictionaryElementsKind(to_kind)) {
3767 NormalizeElements(object);
3769 TransitionElementsKind(object, to_kind);
3771 map = Map::AsElementsKind(map, to_kind);
3773 JSObject::MigrateToMap(object, map);
3777 void JSObject::MigrateInstance(Handle<JSObject> object) {
3778 Handle<Map> original_map(object->map());
3779 Handle<Map> map = Map::Update(original_map);
3780 map->set_migration_target(true);
3781 MigrateToMap(object, map);
3782 if (FLAG_trace_migration) {
3783 object->PrintInstanceMigration(stdout, *original_map, *map);
3789 bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
3790 Isolate* isolate = object->GetIsolate();
3791 DisallowDeoptimization no_deoptimization(isolate);
3792 Handle<Map> original_map(object->map(), isolate);
3793 Handle<Map> new_map;
3794 if (!Map::TryUpdate(original_map).ToHandle(&new_map)) {
3797 JSObject::MigrateToMap(object, new_map);
3798 if (FLAG_trace_migration) {
3799 object->PrintInstanceMigration(stdout, *original_map, object->map());
3805 void JSObject::MigrateToNewProperty(Handle<JSObject> object,
3807 Handle<Object> value) {
3808 JSObject::MigrateToMap(object, map);
3809 if (map->GetLastDescriptorDetails().type() != FIELD) return;
3810 object->WriteToField(map->LastAdded(), *value);
3814 void JSObject::WriteToField(int descriptor, Object* value) {
3815 DisallowHeapAllocation no_gc;
3817 DescriptorArray* desc = map()->instance_descriptors();
3818 PropertyDetails details = desc->GetDetails(descriptor);
3820 DCHECK(details.type() == FIELD);
3822 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
3823 if (details.representation().IsDouble()) {
3824 // Nothing more to be done.
3825 if (value->IsUninitialized()) return;
3826 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
3827 DCHECK(box->IsMutableHeapNumber());
3828 box->set_value(value->Number());
3830 FastPropertyAtPut(index, value);
3835 void JSObject::AddProperty(Handle<JSObject> object, Handle<Name> name,
3836 Handle<Object> value,
3837 PropertyAttributes attributes) {
3838 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
3839 CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
3842 DCHECK(!object->IsJSProxy());
3843 DCHECK(!name->AsArrayIndex(&index));
3844 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
3845 DCHECK(maybe.has_value);
3846 DCHECK(!it.IsFound());
3847 DCHECK(object->map()->is_extensible() ||
3848 name.is_identical_to(it.isolate()->factory()->hidden_string()));
3850 AddDataProperty(&it, value, attributes, STRICT,
3851 CERTAINLY_NOT_STORE_FROM_KEYED).Check();
3855 // Reconfigures a property to a data property with attributes, even if it is not
3857 MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes(
3858 Handle<JSObject> object,
3860 Handle<Object> value,
3861 PropertyAttributes attributes,
3862 ExecutableAccessorInfoHandling handling) {
3863 DCHECK(!value->IsTheHole());
3864 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
3865 bool is_observed = object->map()->is_observed() &&
3866 *name != it.isolate()->heap()->hidden_string();
3867 for (; it.IsFound(); it.Next()) {
3868 switch (it.state()) {
3869 case LookupIterator::INTERCEPTOR:
3870 case LookupIterator::JSPROXY:
3871 case LookupIterator::NOT_FOUND:
3872 case LookupIterator::TRANSITION:
3875 case LookupIterator::ACCESS_CHECK:
3876 if (!it.isolate()->MayNamedAccess(object, name, v8::ACCESS_SET)) {
3877 return SetPropertyWithFailedAccessCheck(&it, value, SLOPPY);
3881 case LookupIterator::ACCESSOR: {
3882 PropertyDetails details = it.property_details();
3883 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
3884 // Ensure the context isn't changed after calling into accessors.
3885 AssertNoContextChange ncc(it.isolate());
3887 Handle<Object> accessors = it.GetAccessors();
3889 if (is_observed && accessors->IsAccessorInfo()) {
3890 ASSIGN_RETURN_ON_EXCEPTION(
3891 it.isolate(), old_value,
3892 GetPropertyWithAccessor(it.GetReceiver(), it.name(),
3893 it.GetHolder<JSObject>(), accessors),
3897 // Special handling for ExecutableAccessorInfo, which behaves like a
3899 if (handling == DONT_FORCE_FIELD &&
3900 accessors->IsExecutableAccessorInfo()) {
3901 Handle<Object> result;
3902 ASSIGN_RETURN_ON_EXCEPTION(
3903 it.isolate(), result,
3904 JSObject::SetPropertyWithAccessor(it.GetReceiver(), it.name(),
3905 value, it.GetHolder<JSObject>(),
3908 DCHECK(result->SameValue(*value));
3910 if (details.attributes() == attributes) {
3911 // Regular property update if the attributes match.
3912 if (is_observed && !old_value->SameValue(*value)) {
3913 // If we are setting the prototype of a function and are
3914 // observed, don't send change records because the prototype
3915 // handles that itself.
3916 if (!object->IsJSFunction() ||
3917 !Name::Equals(it.isolate()->factory()->prototype_string(),
3919 !Handle<JSFunction>::cast(object)->should_have_prototype()) {
3920 EnqueueChangeRecord(object, "update", name, old_value);
3926 // Reconfigure the accessor if attributes mismatch.
3927 Handle<ExecutableAccessorInfo> new_data = Accessors::CloneAccessor(
3928 it.isolate(), Handle<ExecutableAccessorInfo>::cast(accessors));
3929 new_data->set_property_attributes(attributes);
3930 // By clearing the setter we don't have to introduce a lookup to
3931 // the setter, simply make it unavailable to reflect the
3933 if (attributes & READ_ONLY) new_data->clear_setter();
3934 SetPropertyCallback(object, name, new_data, attributes);
3936 if (old_value->SameValue(*value)) {
3937 old_value = it.isolate()->factory()->the_hole_value();
3939 EnqueueChangeRecord(object, "reconfigure", name, old_value);
3944 it.ReconfigureDataProperty(value, attributes);
3945 it.PrepareForDataProperty(value);
3946 it.WriteDataValue(value);
3949 if (old_value->SameValue(*value)) {
3950 old_value = it.isolate()->factory()->the_hole_value();
3952 EnqueueChangeRecord(object, "reconfigure", name, old_value);
3958 case LookupIterator::DATA: {
3959 PropertyDetails details = it.property_details();
3960 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
3961 // Regular property update if the attributes match.
3962 if (details.attributes() == attributes) {
3963 return SetDataProperty(&it, value);
3965 // Reconfigure the data property if the attributes mismatch.
3966 if (is_observed) old_value = it.GetDataValue();
3968 it.ReconfigureDataProperty(value, attributes);
3969 it.PrepareForDataProperty(value);
3970 it.WriteDataValue(value);
3973 if (old_value->SameValue(*value)) {
3974 old_value = it.isolate()->factory()->the_hole_value();
3976 EnqueueChangeRecord(object, "reconfigure", name, old_value);
3984 return AddDataProperty(&it, value, attributes, STRICT,
3985 CERTAINLY_NOT_STORE_FROM_KEYED);
// Asks |holder|'s named interceptor for the attributes of |name|.
// Prefers the interceptor's query callback; if absent, falls back to the
// getter callback and treats any non-empty result as DONT_ENUM. Returns
// an empty Maybe when a scheduled exception is pending, and maybe(ABSENT)
// when the interceptor reports nothing.
// NOTE(review): this excerpt elides some original lines (LOG(...) openers
// and closing braces); comments describe only the code visible here.
3989 Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor(
3990 Handle<JSObject> holder,
3991 Handle<Object> receiver,
3992 Handle<Name> name) {
3993 // TODO(rossberg): Support symbols in the API.
3994 if (name->IsSymbol()) return maybe(ABSENT);
3996 Isolate* isolate = holder->GetIsolate();
3997 HandleScope scope(isolate);
3999 // Make sure that the top context does not change when doing
4000 // callbacks or interceptor calls.
4001 AssertNoContextChange ncc(isolate);
4003 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
4004 PropertyCallbackArguments args(
4005 isolate, interceptor->data(), *receiver, *holder);
// Query callback takes precedence: it returns the attributes directly as
// an integer.
4006 if (!interceptor->query()->IsUndefined()) {
4007 v8::NamedPropertyQueryCallback query =
4008 v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
4010 ApiNamedPropertyAccess("interceptor-named-has", *holder, *name));
4011 v8::Handle<v8::Integer> result =
4012 args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
4013 if (!result.IsEmpty()) {
4014 DCHECK(result->IsInt32());
4015 return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
// Fallback: a getter that yields any value proves existence; report it as
// DONT_ENUM since the real attributes are unknown.
4017 } else if (!interceptor->getter()->IsUndefined()) {
4018 v8::NamedPropertyGetterCallback getter =
4019 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
4021 ApiNamedPropertyAccess("interceptor-named-get-has", *holder, *name));
4022 v8::Handle<v8::Value> result =
4023 args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
4024 if (!result.IsEmpty()) return maybe(DONT_ENUM);
// Propagate interceptor exceptions as an empty Maybe.
4027 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
4028 return maybe(ABSENT);
// Returns the attributes of the own property |name| on |object|.
// Array-index names are routed to the element path; everything else goes
// through a LookupIterator restricted to the receiver itself (HIDDEN).
// NOTE(review): the declaration of the local |index| (original line 4035,
// presumably `uint32_t index;`) is elided from this excerpt.
4032 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
4033 Handle<JSReceiver> object, Handle<Name> name) {
4034 // Check whether the name is an array index.
4036 if (object->IsJSObject() && name->AsArrayIndex(&index)) {
4037 return GetOwnElementAttribute(object, index);
4039 LookupIterator it(object, name, LookupIterator::HIDDEN);
4040 return GetPropertyAttributes(&it);
// Walks the lookup iterator and returns the attributes of the property it
// resolves to. Dispatches per iterator state: proxies and interceptors are
// delegated to their handlers, access checks may fail the lookup, and a
// found DATA/ACCESSOR property yields its stored attributes. Returns
// maybe(ABSENT) if the chain is exhausted without a hit.
4044 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
4045 LookupIterator* it) {
4046 for (; it->IsFound(); it->Next()) {
4047 switch (it->state()) {
// These states cannot occur while iterating a found property.
4048 case LookupIterator::NOT_FOUND:
4049 case LookupIterator::TRANSITION:
4051 case LookupIterator::JSPROXY:
4052 return JSProxy::GetPropertyAttributesWithHandler(
4053 it->GetHolder<JSProxy>(), it->GetReceiver(), it->name());
4054 case LookupIterator::INTERCEPTOR: {
4055 Maybe<PropertyAttributes> result =
4056 JSObject::GetPropertyAttributesWithInterceptor(
4057 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
// Empty Maybe means an exception is pending; ABSENT means the
// interceptor declined, so continue the lookup.
4058 if (!result.has_value) return result;
4059 if (result.value != ABSENT) return result;
4062 case LookupIterator::ACCESS_CHECK:
4063 if (it->HasAccess(v8::ACCESS_HAS)) break;
4064 return JSObject::GetPropertyAttributesWithFailedAccessCheck(it);
4065 case LookupIterator::ACCESSOR:
4066 case LookupIterator::DATA:
4067 return maybe(it->property_details().attributes());
4070 return maybe(ABSENT);
// Returns the attributes of element |index| on |object| as seen from
// |receiver|. Performs the indexed access check first, unwraps global
// proxies to their hidden prototype, and routes through the indexed
// interceptor when one is installed (except during bootstrapping).
4074 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithReceiver(
4075 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4076 bool check_prototype) {
4077 Isolate* isolate = object->GetIsolate();
4079 // Check access rights if needed.
4080 if (object->IsAccessCheckNeeded()) {
4081 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
4082 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
4083 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
4084 return maybe(ABSENT);
// A detached global proxy (no prototype) has no elements at all.
4088 if (object->IsJSGlobalProxy()) {
4089 PrototypeIterator iter(isolate, object);
4090 if (iter.IsAtEnd()) return maybe(ABSENT);
4091 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4092 return JSObject::GetElementAttributeWithReceiver(
4093 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4094 index, check_prototype);
4097 // Check for lookup interceptor except when bootstrapping.
4098 if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4099 return JSObject::GetElementAttributeWithInterceptor(
4100 object, receiver, index, check_prototype);
4103 return GetElementAttributeWithoutInterceptor(
4104 object, receiver, index, check_prototype);
// Asks |object|'s indexed interceptor for the attributes of element |index|.
// The query callback returns attributes directly; otherwise a non-empty
// getter result is reported as NONE. If the interceptor declines, falls
// back to the non-interceptor path.
// NOTE(review): LOG(...) opener lines are elided from this excerpt.
4108 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithInterceptor(
4109 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4110 bool check_prototype) {
4111 Isolate* isolate = object->GetIsolate();
4112 HandleScope scope(isolate);
4114 // Make sure that the top context does not change when doing
4115 // callbacks or interceptor calls.
4116 AssertNoContextChange ncc(isolate);
4118 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4119 PropertyCallbackArguments args(
4120 isolate, interceptor->data(), *receiver, *object);
4121 if (!interceptor->query()->IsUndefined()) {
4122 v8::IndexedPropertyQueryCallback query =
4123 v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
4125 ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4126 v8::Handle<v8::Integer> result = args.Call(query, index);
4127 if (!result.IsEmpty())
4128 return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
// Getter fallback: a produced value only proves existence, so report the
// weakest attribute set (NONE).
4129 } else if (!interceptor->getter()->IsUndefined()) {
4130 v8::IndexedPropertyGetterCallback getter =
4131 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4133 ApiIndexedPropertyAccess(
4134 "interceptor-indexed-get-has", *object, index));
4135 v8::Handle<v8::Value> result = args.Call(getter, index);
4136 if (!result.IsEmpty()) return maybe(NONE);
4139 return GetElementAttributeWithoutInterceptor(
4140 object, receiver, index, check_prototype);
// Computes element attributes from the elements backing store itself, then
// handles String-wrapper character access, and finally (when requested)
// continues the lookup up the prototype chain.
4144 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithoutInterceptor(
4145 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4146 bool check_prototype) {
4147 PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4148 receiver, object, index);
4149 if (attr != ABSENT) return maybe(attr);
4151 // Handle [] on String objects.
// Characters of a wrapped string behave as non-writable, non-configurable
// elements.
4152 if (object->IsStringObjectWithCharacterAt(index)) {
4153 return maybe(static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE));
4156 if (!check_prototype) return maybe(ABSENT);
4158 PrototypeIterator iter(object->GetIsolate(), object);
4159 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
4160 // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4161 return JSProxy::GetElementAttributeWithHandler(
4162 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4165 if (iter.IsAtEnd()) return maybe(ABSENT);
4166 return GetElementAttributeWithReceiver(
4167 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
// Allocates a fresh normalized-map cache: a tenured fixed array of
// kEntries slots, reinterpreted as a NormalizedMapCache.
4172 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
4173 Handle<FixedArray> array(
4174 isolate->factory()->NewFixedArray(kEntries, TENURED));
4175 return Handle<NormalizedMapCache>::cast(array);
// Looks up a previously normalized map equivalent to |fast_map| under the
// given normalization |mode|. Returns an empty MaybeHandle on a cache miss
// (slot empty or holding a non-equivalent map).
4179 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
4180 PropertyNormalizationMode mode) {
4181 DisallowHeapAllocation no_gc;
4182 Object* value = FixedArray::get(GetIndex(fast_map));
4183 if (!value->IsMap() ||
4184 !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
4185 return MaybeHandle<Map>();
4187 return handle(Map::cast(value));
// Caches |normalized_map| (which must be a dictionary map) in the slot
// derived from |fast_map|, overwriting any previous entry.
4191 void NormalizedMapCache::Set(Handle<Map> fast_map,
4192 Handle<Map> normalized_map) {
4193 DisallowHeapAllocation no_gc;
4194 DCHECK(normalized_map->is_dictionary_map());
4195 FixedArray::set(GetIndex(fast_map), *normalized_map);
// Empties every cache slot.
// NOTE(review): the loop body (original lines after 4201, presumably
// clearing each entry) is elided from this excerpt.
4199 void NormalizedMapCache::Clear() {
4200 int entries = length();
4201 for (int i = 0; i != entries; i++) {
// Records |code| under |name| in the code cache of |object|'s map.
4207 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4209 Handle<Code> code) {
4210 Handle<Map> map(object->map());
4211 Map::UpdateCodeCache(map, name, code);
// Converts |object| from fast (descriptor-based) to slow (dictionary)
// properties. No-op if the object is already in dictionary mode.
// |expected_additional_properties| pre-sizes the new dictionary.
4215 void JSObject::NormalizeProperties(Handle<JSObject> object,
4216 PropertyNormalizationMode mode,
4217 int expected_additional_properties) {
4218 if (!object->HasFastProperties()) return;
4220 Handle<Map> map(object->map());
4221 Handle<Map> new_map = Map::Normalize(map, mode);
4223 MigrateFastToSlow(object, new_map, expected_additional_properties);
// Moves |object|'s properties out of its descriptor array into a freshly
// built NameDictionary, shrinks the object in place (filling freed space),
// and installs |new_map|. The switch copies each descriptor kind into the
// dictionary, preserving attributes and enumeration order (i + 1).
// NOTE(review): case labels and closing braces of the switch are elided
// from this excerpt; the visible arms correspond to constant, field, and
// callbacks descriptors, in that order.
4227 void JSObject::MigrateFastToSlow(Handle<JSObject> object,
4228 Handle<Map> new_map,
4229 int expected_additional_properties) {
4230 // The global object is always normalized.
4231 DCHECK(!object->IsGlobalObject());
4232 // JSGlobalProxy must never be normalized
4233 DCHECK(!object->IsJSGlobalProxy());
4235 Isolate* isolate = object->GetIsolate();
4236 HandleScope scope(isolate);
4237 Handle<Map> map(object->map());
4239 // Allocate new content.
4240 int real_size = map->NumberOfOwnDescriptors();
4241 int property_count = real_size;
4242 if (expected_additional_properties > 0) {
4243 property_count += expected_additional_properties;
4245 property_count += 2; // Make space for two more properties.
4247 Handle<NameDictionary> dictionary =
4248 NameDictionary::New(isolate, property_count);
4250 Handle<DescriptorArray> descs(map->instance_descriptors());
4251 for (int i = 0; i < real_size; i++) {
4252 PropertyDetails details = descs->GetDetails(i);
4253 switch (details.type()) {
// Constant descriptor: value lives in the descriptor array itself.
4255 Handle<Name> key(descs->GetKey(i));
4256 Handle<Object> value(descs->GetConstant(i), isolate);
4257 PropertyDetails d = PropertyDetails(
4258 details.attributes(), NORMAL, i + 1);
4259 dictionary = NameDictionary::Add(dictionary, key, value, d);
// Field descriptor: value lives in the object; mutable heap numbers are
// boxed into fresh (immutable) HeapNumbers before storing.
4263 Handle<Name> key(descs->GetKey(i));
4264 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
4265 Handle<Object> value(
4266 object->RawFastPropertyAt(index), isolate);
4267 if (details.representation().IsDouble()) {
4268 DCHECK(value->IsMutableHeapNumber());
4269 Handle<HeapNumber> old = Handle<HeapNumber>::cast(value);
4270 value = isolate->factory()->NewHeapNumber(old->value());
4273 PropertyDetails(details.attributes(), NORMAL, i + 1);
4274 dictionary = NameDictionary::Add(dictionary, key, value, d);
// Callbacks descriptor: store the accessor object under CALLBACKS type.
4278 Handle<Name> key(descs->GetKey(i));
4279 Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4280 PropertyDetails d = PropertyDetails(
4281 details.attributes(), CALLBACKS, i + 1);
4282 dictionary = NameDictionary::Add(dictionary, key, value, d);
4291 // Copy the next enumeration index from instance descriptor.
4292 dictionary->SetNextEnumerationIndex(real_size + 1);
4294 // From here on we cannot fail and we shouldn't GC anymore.
4295 DisallowHeapAllocation no_allocation;
4297 // Resize the object in the heap if necessary.
4298 int new_instance_size = new_map->instance_size();
4299 int instance_size_delta = map->instance_size() - new_instance_size;
4300 DCHECK(instance_size_delta >= 0);
4302 if (instance_size_delta > 0) {
// The dictionary-mode object is smaller; plug the freed tail with a
// filler so the heap stays iterable, and fix up live-byte accounting.
4303 Heap* heap = isolate->heap();
4304 heap->CreateFillerObjectAt(object->address() + new_instance_size,
4305 instance_size_delta);
4306 heap->AdjustLiveBytes(object->address(), -instance_size_delta,
4307 Heap::FROM_MUTATOR);
4310 // We are storing the new map using release store after creating a filler for
4311 // the left-over space to avoid races with the sweeper thread.
4312 object->synchronized_set_map(*new_map);
4314 object->set_properties(*dictionary);
4316 isolate->counters()->props_to_dictionary()->Increment();
4319 if (FLAG_trace_normalization) {
4320 OFStream os(stdout);
4321 os << "Object properties have been normalized:\n";
// Converts a dictionary-mode |object| back to fast properties: counts the
// dictionary entries, allocates a new map plus descriptor array, and moves
// each value either into an in-object slot or the out-of-object fields
// array. JSFunction values become constant descriptors; other NORMAL
// values become fields; CALLBACKS entries keep their accessor objects.
// Bails out (stays slow) when there are too many properties.
// NOTE(review): several brace-only and statement lines are elided from
// this excerpt (e.g. the `Handle<Name> key;` declaration before line 4404
// and the field-value store around line 4424).
4328 void JSObject::MigrateSlowToFast(Handle<JSObject> object,
4329 int unused_property_fields) {
4330 if (object->HasFastProperties()) return;
4331 DCHECK(!object->IsGlobalObject());
4332 Isolate* isolate = object->GetIsolate();
4333 Factory* factory = isolate->factory();
4334 Handle<NameDictionary> dictionary(object->property_dictionary());
4336 // Make sure we preserve dictionary representation if there are too many
4338 int number_of_elements = dictionary->NumberOfElements();
4339 if (number_of_elements > kMaxNumberOfDescriptors) return;
// Compact enumeration indices so they form 1..n before using them as
// descriptor positions below.
4341 if (number_of_elements != dictionary->NextEnumerationIndex()) {
4342 NameDictionary::DoGenerateNewEnumerationIndices(dictionary);
4345 int instance_descriptor_length = 0;
4346 int number_of_fields = 0;
4348 // Compute the length of the instance descriptor.
4349 int capacity = dictionary->Capacity();
4350 for (int i = 0; i < capacity; i++) {
4351 Object* k = dictionary->KeyAt(i);
4352 if (dictionary->IsKey(k)) {
4353 Object* value = dictionary->ValueAt(i);
4354 PropertyType type = dictionary->DetailsAt(i).type();
4355 DCHECK(type != FIELD);
4356 instance_descriptor_length++;
// Only non-function NORMAL values need a field slot; functions become
// constants and callbacks live in the descriptor array.
4357 if (type == NORMAL && !value->IsJSFunction()) {
4358 number_of_fields += 1;
4363 int inobject_props = object->map()->inobject_properties();
4365 // Allocate new map.
4366 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
4367 new_map->set_dictionary_map(false);
// Fast path: no properties at all — just swap in the empty layout.
4369 if (instance_descriptor_length == 0) {
4370 DisallowHeapAllocation no_gc;
4371 DCHECK_LE(unused_property_fields, inobject_props);
4372 // Transform the object.
4373 new_map->set_unused_property_fields(inobject_props);
4374 object->synchronized_set_map(*new_map);
4375 object->set_properties(isolate->heap()->empty_fixed_array());
4376 // Check that it really works.
4377 DCHECK(object->HasFastProperties());
4381 // Allocate the instance descriptor.
4382 Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
4383 isolate, instance_descriptor_length);
4385 int number_of_allocated_fields =
4386 number_of_fields + unused_property_fields - inobject_props;
4387 if (number_of_allocated_fields < 0) {
4388 // There is enough inobject space for all fields (including unused).
4389 number_of_allocated_fields = 0;
4390 unused_property_fields = inobject_props - number_of_fields;
4393 // Allocate the fixed array for the fields.
4394 Handle<FixedArray> fields = factory->NewFixedArray(
4395 number_of_allocated_fields);
4397 // Fill in the instance descriptor and the fields.
4398 int current_offset = 0;
4399 for (int i = 0; i < capacity; i++) {
4400 Object* k = dictionary->KeyAt(i);
4401 if (dictionary->IsKey(k)) {
4402 Object* value = dictionary->ValueAt(i);
4404 if (k->IsSymbol()) {
4405 key = handle(Symbol::cast(k));
4407 // Ensure the key is a unique name before writing into the
4408 // instance descriptor.
4409 key = factory->InternalizeString(handle(String::cast(k)));
4412 PropertyDetails details = dictionary->DetailsAt(i);
// dictionary_index() is 1-based; descriptor slots are 0-based.
4413 int enumeration_index = details.dictionary_index();
4414 PropertyType type = details.type();
4416 if (value->IsJSFunction()) {
4417 ConstantDescriptor d(key,
4418 handle(value, isolate),
4419 details.attributes());
4420 descriptors->Set(enumeration_index - 1, &d);
4421 } else if (type == NORMAL) {
// Place the value in-object while slots remain, then spill to the
// out-of-object |fields| array.
4422 if (current_offset < inobject_props) {
4423 object->InObjectPropertyAtPut(current_offset,
4425 UPDATE_WRITE_BARRIER);
4427 int offset = current_offset - inobject_props;
4428 fields->set(offset, value);
4430 FieldDescriptor d(key,
4432 details.attributes(),
4433 // TODO(verwaest): value->OptimalRepresentation();
4434 Representation::Tagged());
4435 descriptors->Set(enumeration_index - 1, &d);
4436 } else if (type == CALLBACKS) {
4437 CallbacksDescriptor d(key,
4438 handle(value, isolate),
4439 details.attributes());
4440 descriptors->Set(enumeration_index - 1, &d);
4446 DCHECK(current_offset == number_of_fields);
4448 descriptors->Sort();
4450 DisallowHeapAllocation no_gc;
4451 new_map->InitializeDescriptors(*descriptors);
4452 new_map->set_unused_property_fields(unused_property_fields);
4454 // Transform the object.
4455 object->synchronized_set_map(*new_map);
4457 object->set_properties(*fields);
4458 DCHECK(object->IsJSObject());
4460 // Check that it really works.
4461 DCHECK(object->HasFastProperties());
// Replaces |object|'s elements with an empty backing store of the same
// elements kind: an empty dictionary for dictionary-elements objects,
// otherwise the map's canonical initial elements. Sloppy-arguments maps
// are explicitly disallowed.
4465 void JSObject::ResetElements(Handle<JSObject> object) {
4466 Isolate* isolate = object->GetIsolate();
4467 CHECK(object->map() != isolate->heap()->sloppy_arguments_elements_map());
4468 if (object->map()->has_dictionary_elements()) {
4469 Handle<SeededNumberDictionary> new_elements =
4470 SeededNumberDictionary::New(isolate, 0);
4471 object->set_elements(*new_elements);
4473 object->set_elements(object->map()->GetInitialElements());
// Copies the first |length| fast elements of |array| into |dictionary|,
// skipping holes. Double-array elements are boxed into fresh HeapNumbers;
// all entries get default (NONE/NORMAL) details.
// NOTE(review): the |length| parameter line and the final
// `return dictionary;` are elided from this excerpt.
4478 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
4479 Handle<FixedArrayBase> array,
4481 Handle<SeededNumberDictionary> dictionary) {
4482 Isolate* isolate = array->GetIsolate();
4483 Factory* factory = isolate->factory();
4484 bool has_double_elements = array->IsFixedDoubleArray();
4485 for (int i = 0; i < length; i++) {
4486 Handle<Object> value;
4487 if (has_double_elements) {
4488 Handle<FixedDoubleArray> double_array =
4489 Handle<FixedDoubleArray>::cast(array);
4490 if (double_array->is_the_hole(i)) {
4491 value = factory->the_hole_value();
4493 value = factory->NewHeapNumber(double_array->get_scalar(i));
4496 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
// Holes are not entered into the dictionary at all.
4498 if (!value->IsTheHole()) {
4499 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4501 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
// Converts |object|'s fast elements into a SeededNumberDictionary backing
// store and switches the object to DICTIONARY_ELEMENTS. For sloppy
// arguments, the dictionary is stored back into slot 1 of the arguments
// elements pair instead of replacing the elements array directly.
// Returns the (possibly pre-existing) dictionary.
4508 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4509 Handle<JSObject> object) {
4510 DCHECK(!object->HasExternalArrayElements() &&
4511 !object->HasFixedTypedArrayElements());
4512 Isolate* isolate = object->GetIsolate();
4514 // Find the backing store.
4515 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
// Sloppy-arguments objects keep their real backing store in slot 1.
4517 (array->map() == isolate->heap()->sloppy_arguments_elements_map());
4519 array = handle(FixedArrayBase::cast(
4520 Handle<FixedArray>::cast(array)->get(1)));
4522 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
4524 DCHECK(object->HasFastSmiOrObjectElements() ||
4525 object->HasFastDoubleElements() ||
4526 object->HasFastArgumentsElements());
4527 // Compute the effective length and allocate a new backing store.
4528 int length = object->IsJSArray()
4529 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
4531 int old_capacity = 0;
4532 int used_elements = 0;
4533 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
4534 Handle<SeededNumberDictionary> dictionary =
4535 SeededNumberDictionary::New(isolate, used_elements);
4537 dictionary = CopyFastElementsToDictionary(array, length, dictionary);
4539 // Switch to using the dictionary as the backing storage for elements.
4541 FixedArray::cast(object->elements())->set(1, *dictionary);
4543 // Set the new map first to satify the elements type assert in
4545 Handle<Map> new_map =
4546 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
4548 JSObject::MigrateToMap(object, new_map);
4549 object->set_elements(*dictionary);
4552 isolate->counters()->elements_to_dictionary()->Increment();
4555 if (FLAG_trace_normalization) {
4556 OFStream os(stdout);
4557 os << "Object elements have been normalized:\n";
4562 DCHECK(object->HasDictionaryElements() ||
4563 object->HasDictionaryArgumentsElements());
// Produces a random, non-zero Smi-range identity hash. Retries a bounded
// number of times, then forces the value to 1 so 0 is never returned.
// NOTE(review): the loop header and local declarations (hash_value,
// attempts) are elided from this excerpt.
4568 static Smi* GenerateIdentityHash(Isolate* isolate) {
4572 // Generate a random 32-bit hash value but limit range to fit
4574 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
4576 } while (hash_value == 0 && attempts < 30);
4577 hash_value = hash_value != 0 ? hash_value : 1; // never return 0
4579 return Smi::FromInt(hash_value);
// Stores |hash| as the object's identity hash, kept as the hidden
// property keyed by identity_hash_string. Not valid on global proxies,
// which store their hash internally.
4583 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
4584 DCHECK(!object->IsJSGlobalProxy());
4585 Isolate* isolate = object->GetIsolate();
4586 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// Shared helper for proxy-like objects that keep their identity hash in a
// dedicated |hash| slot: returns the existing Smi hash, or generates,
// stores, and returns a new one.
// NOTE(review): the final `return hash;` line is elided from this excerpt.
4590 template<typename ProxyType>
4591 static Handle<Smi> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) {
4592 Isolate* isolate = proxy->GetIsolate();
4594 Handle<Object> maybe_hash(proxy->hash(), isolate);
4595 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4597 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4598 proxy->set_hash(*hash);
// Reads the identity hash without allocating: global proxies use their
// internal hash slot; other objects look up the hidden identity-hash
// property. Returns undefined when no hash has been assigned yet.
4603 Object* JSObject::GetIdentityHash() {
4604 DisallowHeapAllocation no_gc;
4605 Isolate* isolate = GetIsolate();
4606 if (IsJSGlobalProxy()) {
4607 return JSGlobalProxy::cast(this)->hash();
4609 Object* stored_value =
4610 GetHiddenProperty(isolate->factory()->identity_hash_string());
4611 return stored_value->IsSmi()
4613 : isolate->heap()->undefined_value();
// Returns the object's identity hash, creating and storing one on first
// use. Global proxies delegate to the internal-slot helper; other objects
// store the hash as a hidden property.
// NOTE(review): the final `return hash;` line is elided from this excerpt.
4617 Handle<Smi> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
4618 if (object->IsJSGlobalProxy()) {
4619 return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object));
4622 Isolate* isolate = object->GetIsolate();
4624 Handle<Object> maybe_hash(object->GetIdentityHash(), isolate);
4625 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4627 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4628 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// A JSProxy stores its identity hash directly in its hash slot.
4633 Object* JSProxy::GetIdentityHash() {
4634 return this->hash();
// Lazily creates the proxy's identity hash via the shared slot helper.
4638 Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
4639 return GetOrCreateIdentityHashHelper(proxy);
// Fetches the hidden property |key| without allocating. Global proxies
// forward to their hidden prototype (the global object). If the hidden
// store is an inline Smi it can only answer the identity-hash key;
// otherwise the value comes from the hidden ObjectHashTable. Returns
// the_hole when absent.
4643 Object* JSObject::GetHiddenProperty(Handle<Name> key) {
4644 DisallowHeapAllocation no_gc;
4645 DCHECK(key->IsUniqueName());
4646 if (IsJSGlobalProxy()) {
4647 // JSGlobalProxies store their hash internally.
4648 DCHECK(*key != GetHeap()->identity_hash_string());
4649 // For a proxy, use the prototype as target object.
4650 PrototypeIterator iter(GetIsolate(), this);
4651 // If the proxy is detached, return undefined.
4652 if (iter.IsAtEnd()) return GetHeap()->the_hole_value();
4653 DCHECK(iter.GetCurrent()->IsJSGlobalObject());
4654 return JSObject::cast(iter.GetCurrent())->GetHiddenProperty(key);
4656 DCHECK(!IsJSGlobalProxy());
4657 Object* inline_value = GetHiddenPropertiesHashTable();
4659 if (inline_value->IsSmi()) {
4660 // Handle inline-stored identity hash.
4661 if (*key == GetHeap()->identity_hash_string()) {
4662 return inline_value;
4664 return GetHeap()->the_hole_value();
// Undefined means no hidden-property store has been created yet.
4668 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4670 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4671 Object* entry = hashtable->Lookup(key);
// Stores hidden property |key| = |value| on |object|. Global proxies
// forward to their hidden prototype. A bare identity hash is kept inline
// (as a Smi) until a real hidden-property table is needed; otherwise the
// pair is inserted into the hidden ObjectHashTable, storing the table
// back if insertion reallocated it.
4676 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
4678 Handle<Object> value) {
4679 Isolate* isolate = object->GetIsolate();
4681 DCHECK(key->IsUniqueName());
4682 if (object->IsJSGlobalProxy()) {
4683 // JSGlobalProxies store their hash internally.
4684 DCHECK(*key != *isolate->factory()->identity_hash_string());
4685 // For a proxy, use the prototype as target object.
4686 PrototypeIterator iter(isolate, object);
4687 // If the proxy is detached, return undefined.
4688 if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
4689 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4690 return SetHiddenProperty(
4691 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key,
4694 DCHECK(!object->IsJSGlobalProxy());
4696 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4698 // If there is no backing store yet, store the identity hash inline.
4699 if (value->IsSmi() &&
4700 *key == *isolate->factory()->identity_hash_string() &&
4701 (inline_value->IsUndefined() || inline_value->IsSmi())) {
4702 return JSObject::SetHiddenPropertiesHashTable(object, value);
4705 Handle<ObjectHashTable> hashtable =
4706 GetOrCreateHiddenPropertiesHashtable(object);
4708 // If it was found, check if the key is already in the dictionary.
4709 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
4711 if (*new_table != *hashtable) {
4712 // If adding the key expanded the dictionary (i.e., Add returned a new
4713 // dictionary), store it back to the object.
4714 SetHiddenPropertiesHashTable(object, new_table);
4717 // Return this to mark success.
// Removes hidden property |key| from |object|. Global proxies forward to
// their hidden prototype; an inline-stored value (undefined or the Smi
// identity hash) means there is nothing deletable, since identity hashes
// are never deleted.
4722 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
4723 Isolate* isolate = object->GetIsolate();
4724 DCHECK(key->IsUniqueName());
4726 if (object->IsJSGlobalProxy()) {
4727 PrototypeIterator iter(isolate, object);
4728 if (iter.IsAtEnd()) return;
4729 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4730 return DeleteHiddenProperty(
4731 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key);
4734 Object* inline_value = object->GetHiddenPropertiesHashTable();
4736 // We never delete (inline-stored) identity hashes.
4737 DCHECK(*key != *isolate->factory()->identity_hash_string());
4738 if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
4740 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
4741 bool was_present = false;
4742 ObjectHashTable::Remove(hashtable, key, &was_present);
// True when |object| owns the hidden_string property, i.e. a hidden
// property store (or inline identity hash) exists. The lookup skips
// interceptors and cannot throw because hidden_string is unreachable
// from JavaScript.
4746 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
4747 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
4748 LookupIterator it(object, hidden, LookupIterator::OWN_SKIP_INTERCEPTOR);
4749 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
4750 // Cannot get an exception since the hidden_string isn't accessible to JS.
4751 DCHECK(maybe.has_value);
4752 return maybe.value != ABSENT;
// Returns the raw value stored under hidden_string: either an
// ObjectHashTable, an inline Smi identity hash, or undefined when no
// hidden store exists. Fast-properties objects can check descriptor
// slot 0 directly because the hidden string's hash is zero, so it always
// sorts first when present.
4756 Object* JSObject::GetHiddenPropertiesHashTable() {
4757 DCHECK(!IsJSGlobalProxy());
4758 if (HasFastProperties()) {
4759 // If the object has fast properties, check whether the first slot
4760 // in the descriptor array matches the hidden string. Since the
4761 // hidden strings hash code is zero (and no other name has hash
4762 // code zero) it will always occupy the first entry if present.
4763 DescriptorArray* descriptors = this->map()->instance_descriptors();
4764 if (descriptors->number_of_descriptors() > 0) {
4765 int sorted_index = descriptors->GetSortedKeyIndex(0);
4766 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
4767 sorted_index < map()->NumberOfOwnDescriptors()) {
4768 DCHECK(descriptors->GetType(sorted_index) == FIELD);
4769 DCHECK(descriptors->GetDetails(sorted_index).representation().
4770 IsCompatibleForLoad(Representation::Tagged()));
4771 FieldIndex index = FieldIndex::ForDescriptor(this->map(),
4773 return this->RawFastPropertyAt(index);
4775 return GetHeap()->undefined_value();
4778 return GetHeap()->undefined_value();
// Slow path: dictionary-mode objects use a regular own-property lookup.
4781 Isolate* isolate = GetIsolate();
4782 LookupIterator it(handle(this), isolate->factory()->hidden_string(),
4783 LookupIterator::OWN_SKIP_INTERCEPTOR);
4784 // Access check is always skipped for the hidden string anyways.
4785 return *GetDataProperty(&it);
// Returns the hidden-properties ObjectHashTable, creating one (initial
// capacity 4) on first use. If an inline Smi identity hash was stored,
// it is migrated into the new table before the table is installed.
// NOTE(review): the final `return hashtable;` line and the migrated
// value argument of Put are elided from this excerpt.
4789 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
4790 Handle<JSObject> object) {
4791 Isolate* isolate = object->GetIsolate();
4793 static const int kInitialCapacity = 4;
4794 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4795 if (inline_value->IsHashTable()) {
4796 return Handle<ObjectHashTable>::cast(inline_value);
4799 Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
4800 isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);
4802 if (inline_value->IsSmi()) {
4803 // We were storing the identity hash inline and now allocated an actual
4804 // dictionary. Put the identity hash into the new dictionary.
4805 hashtable = ObjectHashTable::Put(hashtable,
4806 isolate->factory()->identity_hash_string(),
4810 SetHiddenPropertiesHashTable(object, hashtable);
// Installs |value| (an ObjectHashTable or an inline Smi identity hash)
// as the hidden_string own property, bypassing attribute checks.
4815 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
4816 Handle<Object> value) {
4817 DCHECK(!object->IsJSGlobalProxy());
4818 Isolate* isolate = object->GetIsolate();
4819 Handle<Name> name = isolate->factory()->hidden_string();
4820 SetOwnPropertyIgnoreAttributes(object, name, value, DONT_ENUM).Assert();
// Invokes |holder|'s named deleter interceptor for |name|. Returns an
// empty MaybeHandle when there is no deleter, the name is a symbol, or
// the interceptor did not handle the deletion; otherwise reboxes and
// returns the interceptor's boolean result.
4825 MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
4826 Handle<JSObject> holder, Handle<JSObject> receiver, Handle<Name> name) {
4827 Isolate* isolate = holder->GetIsolate();
4829 // TODO(rossberg): Support symbols in the API.
4830 if (name->IsSymbol()) return MaybeHandle<Object>();
4832 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
4833 if (interceptor->deleter()->IsUndefined()) return MaybeHandle<Object>();
4835 v8::NamedPropertyDeleterCallback deleter =
4836 v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
4838 ApiNamedPropertyAccess("interceptor-named-delete", *holder, *name));
4839 PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
4841 v8::Handle<v8::Boolean> result =
4842 args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
4843 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4844 if (result.IsEmpty()) return MaybeHandle<Object>();
4846 DCHECK(result->IsBoolean());
4847 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
4848 result_internal->VerifyApiCallResultType();
4849 // Rebox CustomArguments::kReturnValueOffset before returning.
4850 return handle(*result_internal, isolate);
// Invokes |object|'s indexed deleter interceptor for element |index|.
// With no deleter installed, deletion is reported as unsuccessful
// (false). If the interceptor declines (empty result), falls through to
// the elements accessor's Delete with NORMAL_DELETION semantics.
4854 MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
4855 Handle<JSObject> object,
4857 Isolate* isolate = object->GetIsolate();
4858 Factory* factory = isolate->factory();
4860 // Make sure that the top context does not change when doing
4861 // callbacks or interceptor calls.
4862 AssertNoContextChange ncc(isolate);
4864 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4865 if (interceptor->deleter()->IsUndefined()) return factory->false_value();
4866 v8::IndexedPropertyDeleterCallback deleter =
4867 v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
4869 ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
4870 PropertyCallbackArguments args(
4871 isolate, interceptor->data(), *object, *object);
4872 v8::Handle<v8::Boolean> result = args.Call(deleter, index);
4873 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4874 if (!result.IsEmpty()) {
4875 DCHECK(result->IsBoolean());
4876 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
4877 result_internal->VerifyApiCallResultType();
4878 // Rebox CustomArguments::kReturnValueOffset before returning.
4879 return handle(*result_internal, isolate);
4881 MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
4882 object, index, NORMAL_DELETION);
4883 return delete_result;
// Deletes element |index| from |object|. Handles the indexed access
// check, the non-configurable characters of String wrappers (throws in
// strict mode), global-proxy forwarding, and Object.observe change
// records (capturing the old value before deleting and enqueueing a
// "delete" record afterwards). The interceptor path is skipped under
// FORCE_DELETION.
// NOTE(review): the DeleteMode parameter line and several closing braces
// are elided from this excerpt.
4887 MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
4890 Isolate* isolate = object->GetIsolate();
4891 Factory* factory = isolate->factory();
4893 // Check access rights if needed.
4894 if (object->IsAccessCheckNeeded() &&
4895 !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
4896 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
4897 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4898 return factory->false_value();
// String-wrapper characters are effectively non-configurable elements.
4901 if (object->IsStringObjectWithCharacterAt(index)) {
4902 if (mode == STRICT_DELETION) {
4903 // Deleting a non-configurable property in strict mode.
4904 Handle<Object> name = factory->NewNumberFromUint(index);
4905 Handle<Object> args[2] = { name, object };
4906 THROW_NEW_ERROR(isolate, NewTypeError("strict_delete_property",
4907 HandleVector(args, 2)),
4910 return factory->false_value();
4913 if (object->IsJSGlobalProxy()) {
4914 PrototypeIterator iter(isolate, object);
4915 if (iter.IsAtEnd()) return factory->false_value();
4916 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4917 return DeleteElement(
4918 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
// Observed objects: remember the old value (the_hole for accessors) so a
// change record can be emitted after a successful delete.
4922 Handle<Object> old_value;
4923 bool should_enqueue_change_record = false;
4924 if (object->map()->is_observed()) {
4925 Maybe<bool> maybe = HasOwnElement(object, index);
4926 if (!maybe.has_value) return MaybeHandle<Object>();
4927 should_enqueue_change_record = maybe.value;
4928 if (should_enqueue_change_record) {
4929 if (!GetOwnElementAccessorPair(object, index).is_null()) {
4930 old_value = Handle<Object>::cast(factory->the_hole_value());
4932 old_value = Object::GetElement(
4933 isolate, object, index).ToHandleChecked();
4938 // Skip interceptor if forcing deletion.
4939 MaybeHandle<Object> maybe_result;
4940 if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
4941 maybe_result = DeleteElementWithInterceptor(object, index);
4943 maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
4945 Handle<Object> result;
4946 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
// Only report a change record if the element is actually gone now.
4948 if (should_enqueue_change_record) {
4949 Maybe<bool> maybe = HasOwnElement(object, index);
4950 if (!maybe.has_value) return MaybeHandle<Object>();
4952 Handle<String> name = factory->Uint32ToString(index);
4953 EnqueueChangeRecord(object, "delete", name, old_value);
// Implements [[Delete]] for a named property (ECMA-262, 3rd, 8.6.2.5).
// Names that parse as array indices are delegated to DeleteElement; other
// names are resolved via a LookupIterator so access checks, interceptors
// and configurability are handled in lookup order.  Returns a true/false
// value handle, or an empty handle when an exception is pending.
// NOTE(review): several lines are elided in this view (e.g. the index
// declaration and some case bodies) — verify against the full file.
4961 MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
4963 DeleteMode delete_mode) {
4964 // ECMA-262, 3rd, 8.6.2.5
4965 DCHECK(name->IsName());
// Fast path: array-index names are element deletions.
4968 if (name->AsArrayIndex(&index)) {
4969 return DeleteElement(object, index, delete_mode);
4972 // Skip interceptors on FORCE_DELETION.
4973 LookupIterator::Configuration config =
4974 delete_mode == FORCE_DELETION ? LookupIterator::HIDDEN_SKIP_INTERCEPTOR
4975 : LookupIterator::HIDDEN;
4977 LookupIterator it(object, name, config);
// Deletions on observed objects are recorded, except for the
// engine-internal hidden string property.
4979 bool is_observed = object->map()->is_observed() &&
4980 *name != it.isolate()->heap()->hidden_string();
4981 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
4983 for (; it.IsFound(); it.Next()) {
4984 switch (it.state()) {
4985 case LookupIterator::JSPROXY:
4986 case LookupIterator::NOT_FOUND:
4987 case LookupIterator::TRANSITION:
4989 case LookupIterator::ACCESS_CHECK:
4990 if (it.HasAccess(v8::ACCESS_DELETE)) break;
// Access denied: report, propagate a scheduled exception if any,
// otherwise answer false.
4991 it.isolate()->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
4993 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it.isolate(), Object);
4994 return it.isolate()->factory()->false_value();
4995 case LookupIterator::INTERCEPTOR: {
4996 MaybeHandle<Object> maybe_result =
4997 JSObject::DeletePropertyWithInterceptor(it.GetHolder<JSObject>(),
4999 // Delete with interceptor succeeded. Return result.
5000 if (!maybe_result.is_null()) return maybe_result;
5001 // An exception was thrown in the interceptor. Propagate.
5002 if (it.isolate()->has_pending_exception()) return maybe_result;
// Remember the old data value so an accurate change record can be
// enqueued for observed objects after the delete.
5005 case LookupIterator::DATA:
5007 old_value = it.GetDataValue();
5010 case LookupIterator::ACCESSOR: {
5011 if (delete_mode != FORCE_DELETION && !it.IsConfigurable()) {
5012 // Fail if the property is not configurable.
5013 if (delete_mode == STRICT_DELETION) {
5014 Handle<Object> args[2] = {name, object};
5015 THROW_NEW_ERROR(it.isolate(),
5016 NewTypeError("strict_delete_property",
5017 HandleVector(args, arraysize(args))),
5020 return it.isolate()->factory()->false_value();
// Prototype maps keep in-object properties on normalization so
// dependent code stays valid.
5023 PropertyNormalizationMode mode = object->map()->is_prototype_map()
5024 ? KEEP_INOBJECT_PROPERTIES
5025 : CLEAR_INOBJECT_PROPERTIES;
5026 Handle<JSObject> holder = it.GetHolder<JSObject>();
5027 // TODO(verwaest): Remove this temporary compatibility hack when blink
5028 // tests are updated.
5029 if (!holder.is_identical_to(object) &&
5030 !(object->IsJSGlobalProxy() && holder->IsJSGlobalObject())) {
5031 return it.isolate()->factory()->true_value();
// Deletion is only supported on dictionary-mode properties, so
// normalize first, delete, then try to re-optimize prototypes.
5033 NormalizeProperties(holder, mode, 0);
5034 Handle<Object> result =
5035 DeleteNormalizedProperty(holder, name, delete_mode);
5036 ReoptimizeIfPrototype(holder);
5039 EnqueueChangeRecord(object, "delete", name, old_value);
5047 return it.isolate()->factory()->true_value();
// Dispatches indexed-property deletion on any receiver: JS proxies go
// through the proxy handler trap machinery, all other receivers through
// JSObject::DeleteElement.
5051 MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5054 if (object->IsJSProxy()) {
5055 return JSProxy::DeleteElementWithHandler(
5056 Handle<JSProxy>::cast(object), index, mode);
5058 return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
// Dispatches named-property deletion on any receiver: JS proxies go
// through the proxy handler trap machinery, all other receivers through
// JSObject::DeleteProperty.
5062 MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5065 if (object->IsJSProxy()) {
5066 return JSProxy::DeletePropertyWithHandler(
5067 Handle<JSProxy>::cast(object), name, mode);
5069 return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
// Returns true if |object| occurs in the given elements backing store.
// Supports fast object elements (linear scan, holes skipped) and
// dictionary elements (reverse lookup).  For JSArrays only the indices
// below the array length are scanned.
5073 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5076 DCHECK(IsFastObjectElementsKind(kind) ||
5077 kind == DICTIONARY_ELEMENTS);
5078 if (IsFastObjectElementsKind(kind)) {
5079 int length = IsJSArray()
5080 ? Smi::cast(JSArray::cast(this)->length())->value()
5081 : elements->length();
5082 for (int i = 0; i < length; ++i) {
5083 Object* element = elements->get(i);
5084 if (!element->IsTheHole() && element == object) return true;
// Dictionary case: SlowReverseLookup returns undefined when the value
// is not present.
5088 SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5089 if (!key->IsUndefined()) return true;
5095 // Check whether this object references another object.
// Checks, in order: the constructor, the prototype, named properties,
// indexed properties, and — for functions — the context (including
// sloppy-arguments objects found in context slots and the context
// extension).  Runs under DisallowHeapAllocation, so only raw pointers
// are used.
5096 bool JSObject::ReferencesObject(Object* obj) {
5097 Map* map_of_this = map();
5098 Heap* heap = GetHeap();
5099 DisallowHeapAllocation no_allocation;
5101 // Is the object the constructor for this object?
5102 if (map_of_this->constructor() == obj) {
5106 // Is the object the prototype for this object?
5107 if (map_of_this->prototype() == obj) {
5111 // Check if the object is among the named properties.
5112 Object* key = SlowReverseLookup(obj);
5113 if (!key->IsUndefined()) {
5117 // Check if the object is among the indexed properties.
5118 ElementsKind kind = GetElementsKind();
5120 // Raw pixels and external arrays do not reference other
5122 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5123 case EXTERNAL_##TYPE##_ELEMENTS: \
5124 case TYPE##_ELEMENTS: \
5127 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5128 #undef TYPED_ARRAY_CASE
// Double and Smi elements can never hold heap-object references.
5130 case FAST_DOUBLE_ELEMENTS:
5131 case FAST_HOLEY_DOUBLE_ELEMENTS:
5133 case FAST_SMI_ELEMENTS:
5134 case FAST_HOLEY_SMI_ELEMENTS:
5137 case FAST_HOLEY_ELEMENTS:
5138 case DICTIONARY_ELEMENTS: {
5139 FixedArray* elements = FixedArray::cast(this->elements());
5140 if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5143 case SLOPPY_ARGUMENTS_ELEMENTS: {
5144 FixedArray* parameter_map = FixedArray::cast(elements());
5145 // Check the mapped parameters.
// Slots 0 and 1 of a parameter map hold the context and the arguments
// store; mapped entries start at index 2.
5146 int length = parameter_map->length();
5147 for (int i = 2; i < length; ++i) {
5148 Object* value = parameter_map->get(i);
5149 if (!value->IsTheHole() && value == obj) return true;
5151 // Check the arguments.
5152 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5153 kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5154 FAST_HOLEY_ELEMENTS;
5155 if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5160 // For functions check the context.
5161 if (IsJSFunction()) {
5162 // Get the constructor function for arguments array.
5163 Map* arguments_map =
5164 heap->isolate()->context()->native_context()->sloppy_arguments_map();
5165 JSFunction* arguments_function =
5166 JSFunction::cast(arguments_map->constructor());
5168 // Get the context and don't check if it is the native context.
5169 JSFunction* f = JSFunction::cast(this);
5170 Context* context = f->context();
5171 if (context->IsNativeContext()) {
5175 // Check the non-special context slots.
5176 for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5177 // Only check JS objects.
5178 if (context->get(i)->IsJSObject()) {
5179 JSObject* ctxobj = JSObject::cast(context->get(i));
5180 // If it is an arguments array check the content.
5181 if (ctxobj->map()->constructor() == arguments_function) {
5182 if (ctxobj->ReferencesObject(obj)) {
5185 } else if (ctxobj == obj) {
5191 // Check the context extension (if any) if it can have references.
5192 if (context->has_extension() && !context->IsCatchContext()) {
5193 // With harmony scoping, a JSFunction may have a global context.
5194 // TODO(mvstanton): walk into the ScopeInfo.
5195 if (FLAG_harmony_scoping && context->IsGlobalContext()) {
5199 return JSObject::cast(context->extension())->ReferencesObject(obj);
5203 // No references to object.
// Makes |object| non-extensible (Object.preventExtensions): normalizes
// elements to dictionary mode, marks the dictionary slow-only, and
// transitions to a copied map with is_extensible cleared.  Global proxies
// forward to their hidden global object; access-checked objects may
// answer false; typed-array-backed objects throw.
5208 MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
5209 Isolate* isolate = object->GetIsolate();
// Already non-extensible: nothing to do.
5211 if (!object->map()->is_extensible()) return object;
5213 if (object->IsAccessCheckNeeded() &&
5214 !isolate->MayNamedAccess(
5215 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5216 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5217 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5218 return isolate->factory()->false_value();
// Global proxies delegate to the actual global object behind them.
5221 if (object->IsJSGlobalProxy()) {
5222 PrototypeIterator iter(isolate, object);
5223 if (iter.IsAtEnd()) return object;
5224 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
5225 return PreventExtensions(
5226 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
5229 // It's not possible to seal objects with external array elements
5230 if (object->HasExternalArrayElements() ||
5231 object->HasFixedTypedArrayElements()) {
5232 THROW_NEW_ERROR(isolate,
5233 NewTypeError("cant_prevent_ext_external_array_elements",
5234 HandleVector(&object, 1)),
5238 // If there are fast elements we normalize.
5239 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
5240 DCHECK(object->HasDictionaryElements() ||
5241 object->HasDictionaryArgumentsElements());
5243 // Make sure that we never go back to fast case.
5244 dictionary->set_requires_slow_elements();
5246 // Do a map transition, other objects with this map may still
5248 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5249 Handle<Map> new_map = Map::Copy(handle(object->map()));
5251 new_map->set_is_extensible(false);
5252 JSObject::MigrateToMap(object, new_map);
5253 DCHECK(!object->map()->is_extensible());
// Observed objects get a "preventExtensions" change record.
5255 if (object->map()->is_observed()) {
5256 EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
5257 isolate->factory()->the_hole_value());
// Marks every entry of a (property or element) dictionary as frozen:
// DONT_DELETE on all keys, plus READ_ONLY for everything except
// accessor pairs.  Private symbols are deliberately left untouched.
5263 template<typename Dictionary>
5264 static void FreezeDictionary(Dictionary* dictionary) {
5265 int capacity = dictionary->Capacity();
5266 for (int i = 0; i < capacity; i++) {
5267 Object* k = dictionary->KeyAt(i);
5268 if (dictionary->IsKey(k) &&
5269 !(k->IsSymbol() && Symbol::cast(k)->is_private())) {
5270 PropertyDetails details = dictionary->DetailsAt(i);
5271 int attrs = DONT_DELETE;
5272 // READ_ONLY is an invalid attribute for JS setters/getters.
5273 if (details.type() == CALLBACKS) {
5274 Object* v = dictionary->ValueAt(i);
// Global-object values are boxed in PropertyCells; unwrap before
// deciding whether this is an accessor pair.
5275 if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
5276 if (!v->IsAccessorPair()) attrs |= READ_ONLY;
5280 details = details.CopyAddAttributes(
5281 static_cast<PropertyAttributes>(attrs));
5282 dictionary->DetailsAtPut(i, details);
// Implements Object.freeze: makes the object non-extensible, moves its
// elements to a slow (dictionary) store, and marks all own properties
// DONT_DELETE/READ_ONLY.  Reuses an existing "frozen" map transition
// when available; otherwise copies or normalizes the map.  Global
// proxies forward to the global object; typed-array-backed objects
// throw; access-checked objects may answer false.
5288 MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
5289 // Freezing sloppy arguments should be handled elsewhere.
5290 DCHECK(!object->HasSloppyArgumentsElements());
5291 DCHECK(!object->map()->is_observed());
// Already frozen: idempotent.
5293 if (object->map()->is_frozen()) return object;
5295 Isolate* isolate = object->GetIsolate();
5296 if (object->IsAccessCheckNeeded() &&
5297 !isolate->MayNamedAccess(
5298 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5299 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5300 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5301 return isolate->factory()->false_value();
5304 if (object->IsJSGlobalProxy()) {
5305 PrototypeIterator iter(isolate, object);
5306 if (iter.IsAtEnd()) return object;
5307 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
5308 return Freeze(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
5311 // It's not possible to freeze objects with external array elements
5312 if (object->HasExternalArrayElements() ||
5313 object->HasFixedTypedArrayElements()) {
5314 THROW_NEW_ERROR(isolate,
5315 NewTypeError("cant_prevent_ext_external_array_elements",
5316 HandleVector(&object, 1)),
// Build the slow elements backing store up front (before the map
// transition) so no allocation is needed afterwards.
5320 Handle<SeededNumberDictionary> new_element_dictionary;
5321 if (!object->elements()->IsDictionary()) {
5322 int length = object->IsJSArray()
5323 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5324 : object->elements()->length();
5328 object->GetElementsCapacityAndUsage(&capacity, &used);
5329 new_element_dictionary = SeededNumberDictionary::New(isolate, used);
5331 // Move elements to a dictionary; avoid calling NormalizeElements to avoid
5332 // unnecessary transitions.
5333 new_element_dictionary = CopyFastElementsToDictionary(
5334 handle(object->elements()), length, new_element_dictionary);
5336 // No existing elements, use a pre-allocated empty backing store
5337 new_element_dictionary =
5338 isolate->factory()->empty_slow_element_dictionary();
// Prefer an existing map transition keyed on the frozen symbol so
// frozen objects with the same origin share a map.
5342 Handle<Map> old_map(object->map(), isolate);
5343 int transition_index = old_map->SearchTransition(
5344 isolate->heap()->frozen_symbol());
5345 if (transition_index != TransitionArray::kNotFound) {
5346 Handle<Map> transition_map(old_map->GetTransition(transition_index));
5347 DCHECK(transition_map->has_dictionary_elements());
5348 DCHECK(transition_map->is_frozen());
5349 DCHECK(!transition_map->is_extensible());
5350 JSObject::MigrateToMap(object, transition_map);
5351 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5352 // Create a new descriptor array with fully-frozen properties
5353 Handle<Map> new_map = Map::CopyForFreeze(old_map);
5354 JSObject::MigrateToMap(object, new_map);
5356 DCHECK(old_map->is_dictionary_map() || !old_map->is_prototype_map());
5357 // Slow path: need to normalize properties for safety
5358 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5360 // Create a new map, since other objects with this map may be extensible.
5361 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5362 Handle<Map> new_map = Map::Copy(handle(object->map()));
5364 new_map->set_is_extensible(false);
5365 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5366 JSObject::MigrateToMap(object, new_map);
5368 // Freeze dictionary-mode properties
5369 FreezeDictionary(object->property_dictionary());
5372 DCHECK(object->map()->has_dictionary_elements());
5373 if (!new_element_dictionary.is_null()) {
5374 object->set_elements(*new_element_dictionary);
// The shared empty dictionary must never be mutated, so only mark and
// freeze a real per-object element dictionary.
5377 if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
5378 SeededNumberDictionary* dictionary = object->element_dictionary();
5379 // Make sure we never go back to the fast case
5380 dictionary->set_requires_slow_elements();
5381 // Freeze all elements in the dictionary
5382 FreezeDictionary(dictionary);
// Marks |object| as observed (Object.observe) by migrating it to an
// observed map: reuses an existing transition keyed on the observed
// symbol when present, otherwise copies the map and sets the flag.
// Must not be called on global objects or proxies.
5389 void JSObject::SetObserved(Handle<JSObject> object) {
5390 DCHECK(!object->IsJSGlobalProxy());
5391 DCHECK(!object->IsJSGlobalObject());
5392 Isolate* isolate = object->GetIsolate();
5393 Handle<Map> new_map;
5394 Handle<Map> old_map(object->map(), isolate);
5395 DCHECK(!old_map->is_observed());
5396 int transition_index = old_map->SearchTransition(
5397 isolate->heap()->observed_symbol());
5398 if (transition_index != TransitionArray::kNotFound) {
// Fast path: an observed sibling map already exists.
5399 new_map = handle(old_map->GetTransition(transition_index), isolate);
5400 DCHECK(new_map->is_observed());
5401 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5402 new_map = Map::CopyForObserved(old_map);
// Slow path: plain map copy with the observed bit set.
5404 new_map = Map::Copy(old_map);
5405 new_map->set_is_observed();
5407 JSObject::MigrateToMap(object, new_map);
// Reads a fast (in-object or properties-array) field and wraps it for
// reading according to |representation| (e.g. boxing raw doubles).
// NOTE(review): the FieldIndex parameter line is elided in this view.
5411 Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
5412 Representation representation,
5414 Isolate* isolate = object->GetIsolate();
5415 Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
5416 return Object::WrapForRead(isolate, raw_value, representation);
// Recursive walker over a JSObject graph used by DeepWalk/DeepCopy.
// ContextObject is an allocation-site context (creation or usage); when
// |copying| is true StructureWalk produces deep copies, otherwise it
// only visits (and must return the original objects).
5420 template<class ContextObject>
5421 class JSObjectWalkVisitor {
5423 JSObjectWalkVisitor(ContextObject* site_context, bool copying,
5424 JSObject::DeepCopyHints hints)
5425 : site_context_(site_context),
5429 MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);
// Visits one nested object value, bracketing the recursive walk with an
// allocation-site scope so mementos are tracked per nesting level.
5432 MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
5433 Handle<JSObject> object,
5434 Handle<JSObject> value) {
5435 Handle<AllocationSite> current_site = site_context()->EnterNewScope();
5436 MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
5437 site_context()->ExitScope(current_site, value);
5438 return copy_of_value;
5441 inline ContextObject* site_context() { return site_context_; }
5442 inline Isolate* isolate() { return site_context()->isolate(); }
5444 inline bool copying() const { return copying_; }
5447 ContextObject* site_context_;
5448 const bool copying_;
5449 const JSObject::DeepCopyHints hints_;
// Core of DeepWalk/DeepCopy: recursively visits (and, when copying,
// clones) |object|, its own fields, and its elements.  Guards against
// stack overflow, migrates deprecated maps first, and handles COW
// arrays, fast properties, slow properties, fast elements and
// dictionary elements separately.  Returns an empty handle on failure.
5453 template <class ContextObject>
5454 MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
5455 Handle<JSObject> object) {
5456 Isolate* isolate = this->isolate();
5457 bool copying = this->copying();
5458 bool shallow = hints_ == JSObject::kObjectIsShallow;
// Recursion depth is bounded by the JS stack limit, not a counter.
5461 StackLimitCheck check(isolate);
5463 if (check.HasOverflowed()) {
5464 isolate->StackOverflow();
5465 return MaybeHandle<JSObject>();
5469 if (object->map()->is_deprecated()) {
5470 JSObject::MigrateInstance(object);
5473 Handle<JSObject> copy;
5475 Handle<AllocationSite> site_to_pass;
5476 if (site_context()->ShouldCreateMemento(object)) {
5477 site_to_pass = site_context()->current();
5479 copy = isolate->factory()->CopyJSObjectWithAllocationSite(
5480 object, site_to_pass);
// When not copying, the walk must hand back the very same object.
5485 DCHECK(copying || copy.is_identical_to(object));
5487 ElementsKind kind = copy->GetElementsKind();
5488 if (copying && IsFastSmiOrObjectElementsKind(kind) &&
5489 FixedArray::cast(copy->elements())->map() ==
5490 isolate->heap()->fixed_cow_array_map()) {
5491 isolate->counters()->cow_arrays_created_runtime()->Increment();
5495 HandleScope scope(isolate);
5497 // Deep copy own properties.
5498 if (copy->HasFastProperties()) {
5499 Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
5500 int limit = copy->map()->NumberOfOwnDescriptors();
5501 for (int i = 0; i < limit; i++) {
5502 PropertyDetails details = descriptors->GetDetails(i);
// Only real in-object/backing-store fields need a deep visit.
5503 if (details.type() != FIELD) continue;
5504 FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
5505 Handle<Object> value(object->RawFastPropertyAt(index), isolate);
5506 if (value->IsJSObject()) {
5507 ASSIGN_RETURN_ON_EXCEPTION(
5509 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5512 Representation representation = details.representation();
5513 value = Object::NewStorageFor(isolate, value, representation);
5516 copy->FastPropertyAtPut(index, *value);
// Slow-properties path: enumerate own names and copy each writable,
// enumerable, configurable (attributes == NONE) property.
5520 Handle<FixedArray> names =
5521 isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties());
5522 copy->GetOwnPropertyNames(*names, 0);
5523 for (int i = 0; i < names->length(); i++) {
5524 DCHECK(names->get(i)->IsString());
5525 Handle<String> key_string(String::cast(names->get(i)));
5526 Maybe<PropertyAttributes> maybe =
5527 JSReceiver::GetOwnPropertyAttributes(copy, key_string);
5528 DCHECK(maybe.has_value);
5529 PropertyAttributes attributes = maybe.value;
5530 // Only deep copy fields from the object literal expression.
5531 // In particular, don't try to copy the length attribute of
5533 if (attributes != NONE) continue;
5534 Handle<Object> value =
5535 Object::GetProperty(copy, key_string).ToHandleChecked();
5536 if (value->IsJSObject()) {
5537 Handle<JSObject> result;
5538 ASSIGN_RETURN_ON_EXCEPTION(
5540 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5543 // Creating object copy for literals. No strict mode needed.
5544 JSObject::SetProperty(copy, key_string, result, SLOPPY).Assert();
5550 // Deep copy own elements.
5551 // Pixel elements cannot be created using an object literal.
5552 DCHECK(!copy->HasExternalArrayElements());
5554 case FAST_SMI_ELEMENTS:
5556 case FAST_HOLEY_SMI_ELEMENTS:
5557 case FAST_HOLEY_ELEMENTS: {
5558 Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
// COW element arrays contain no JSObjects (only Smis/strings), so
// they never need a deep visit — just sanity-check in debug mode.
5559 if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
5561 for (int i = 0; i < elements->length(); i++) {
5562 DCHECK(!elements->get(i)->IsJSObject());
5566 for (int i = 0; i < elements->length(); i++) {
5567 Handle<Object> value(elements->get(i), isolate);
5568 DCHECK(value->IsSmi() ||
5569 value->IsTheHole() ||
5570 (IsFastObjectElementsKind(copy->GetElementsKind())));
5571 if (value->IsJSObject()) {
5572 Handle<JSObject> result;
5573 ASSIGN_RETURN_ON_EXCEPTION(
5575 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5578 elements->set(i, *result);
5585 case DICTIONARY_ELEMENTS: {
5586 Handle<SeededNumberDictionary> element_dictionary(
5587 copy->element_dictionary());
5588 int capacity = element_dictionary->Capacity();
5589 for (int i = 0; i < capacity; i++) {
5590 Object* k = element_dictionary->KeyAt(i);
5591 if (element_dictionary->IsKey(k)) {
5592 Handle<Object> value(element_dictionary->ValueAt(i), isolate);
5593 if (value->IsJSObject()) {
5594 Handle<JSObject> result;
5595 ASSIGN_RETURN_ON_EXCEPTION(
5597 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5600 element_dictionary->ValueAtPut(i, *result);
// The kinds below cannot occur in literal-derived objects.
5607 case SLOPPY_ARGUMENTS_ELEMENTS:
5612 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5613 case EXTERNAL_##TYPE##_ELEMENTS: \
5614 case TYPE##_ELEMENTS: \
5616 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5617 #undef TYPED_ARRAY_CASE
5619 case FAST_DOUBLE_ELEMENTS:
5620 case FAST_HOLEY_DOUBLE_ELEMENTS:
5621 // No contained objects, nothing to do.
// Non-copying walk over |object| used to create allocation sites for
// literal boilerplates.  The result, if any, must be the input itself
// (copying == false), which the DCHECK asserts.
5630 MaybeHandle<JSObject> JSObject::DeepWalk(
5631 Handle<JSObject> object,
5632 AllocationSiteCreationContext* site_context) {
5633 JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
5635 MaybeHandle<JSObject> result = v.StructureWalk(object);
5636 Handle<JSObject> for_assert;
5637 DCHECK(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
// Deep-copies |object| (literal boilerplate instantiation), consulting
// recorded allocation sites via |site_context|.  The result, if any,
// must be a fresh object distinct from the input (copying == true).
5642 MaybeHandle<JSObject> JSObject::DeepCopy(
5643 Handle<JSObject> object,
5644 AllocationSiteUsageContext* site_context,
5645 DeepCopyHints hints) {
5646 JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
5647 MaybeHandle<JSObject> copy = v.StructureWalk(object);
5648 Handle<JSObject> for_assert;
5649 DCHECK(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
5654 // Tests for the fast common case for property enumeration:
5655 // - This object and all prototypes has an enum cache (which means that
5656 //   it is no proxy, has no interceptors and needs no access checks).
5657 // - This object has no elements.
5658 // - No prototype has enumerable properties/elements.
5659 bool JSReceiver::IsSimpleEnum() {
// Walk the whole prototype chain starting at the receiver itself;
// any non-JSObject (e.g. a proxy) disqualifies the fast path.
5660 for (PrototypeIterator iter(GetIsolate(), this,
5661 PrototypeIterator::START_AT_RECEIVER);
5662 !iter.IsAtEnd(); iter.Advance()) {
5663 if (!iter.GetCurrent()->IsJSObject()) return false;
5664 JSObject* curr = JSObject::cast(iter.GetCurrent());
5665 int enum_length = curr->map()->EnumLength();
5666 if (enum_length == kInvalidEnumCacheSentinel) return false;
5667 if (curr->IsAccessCheckNeeded()) return false;
5668 DCHECK(!curr->HasNamedInterceptor());
5669 DCHECK(!curr->HasIndexedInterceptor());
5670 if (curr->NumberOfEnumElements() > 0) return false;
// Prototypes (but not the receiver) must contribute no enumerable
// named properties either.
5671 if (curr != this && enum_length != 0) return false;
// Returns whether |key| should be excluded from enumeration given the
// attribute |filter|: SYMBOLIC drops all symbols, PRIVATE_SYMBOL drops
// only private symbols, STRING drops non-symbol (string) keys.
5677 static bool FilterKey(Object* key, PropertyAttributes filter) {
5678 if ((filter & SYMBOLIC) && key->IsSymbol()) {
5682 if ((filter & PRIVATE_SYMBOL) &&
5683 key->IsSymbol() && Symbol::cast(key)->is_private()) {
5687 if ((filter & STRING) && !key->IsSymbol()) {
// Counts descriptors whose attributes pass |filter| (and whose keys
// survive FilterKey).  |which| selects between all descriptors in the
// array and only this map's own descriptors.
5695 int Map::NumberOfDescribedProperties(DescriptorFlag which,
5696 PropertyAttributes filter) {
5698 DescriptorArray* descs = instance_descriptors();
5699 int limit = which == ALL_DESCRIPTORS
5700 ? descs->number_of_descriptors()
5701 : NumberOfOwnDescriptors();
5702 for (int i = 0; i < limit; i++) {
// A descriptor counts only if no filtered attribute bit is set and
// the key itself is not filtered out.
5703 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
5704 !FilterKey(descs->GetKey(i), filter)) {
// Returns the first unused field index: one past the highest field
// index among this map's own FIELD descriptors.
5712 int Map::NextFreePropertyIndex() {
5714 int number_of_own_descriptors = NumberOfOwnDescriptors();
5715 DescriptorArray* descs = instance_descriptors();
5716 for (int i = 0; i < number_of_own_descriptors; i++) {
// Only FIELD descriptors occupy property slots; callbacks/constants
// do not.
5717 if (descs->GetType(i) == FIELD) {
5718 int current_index = descs->GetFieldIndex(i);
5719 if (current_index > max_index) max_index = current_index;
5722 return max_index + 1;
// Debug helper: verifies every entry of a key array is a string or a
// number (the only valid enumeration key shapes).
5726 static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
5727 int len = array->length();
5728 for (int i = 0; i < len; i++) {
5729 Object* e = array->get(i);
5730 if (!(e->IsString() || e->IsNumber())) return false;
// Returns a prefix of |array| of exactly |length| entries: the array
// itself when it already has that length, otherwise a fresh copy of the
// first |length| elements.
5736 static Handle<FixedArray> ReduceFixedArrayTo(
5737 Handle<FixedArray> array, int length) {
5738 DCHECK(array->length() >= length);
5739 if (array->length() == length) return array;
5741 Handle<FixedArray> new_array =
5742 array->GetIsolate()->factory()->NewFixedArray(length);
5743 for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
// Collects the enumerable own property keys of |object|.  Fast-properties
// objects use (and, when |cache_result| is set, populate) the enum cache
// stored on the descriptor array; dictionary-mode objects copy their
// enumerable keys out of the property dictionary.
5748 static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
5749 bool cache_result) {
5750 Isolate* isolate = object->GetIsolate();
5751 if (object->HasFastProperties()) {
5752 int own_property_count = object->map()->EnumLength();
5753 // If the enum length of the given map is set to kInvalidEnumCache, this
5754 // means that the map itself has never used the present enum cache. The
5755 // first step to using the cache is to set the enum length of the map by
5756 // counting the number of own descriptors that are not DONT_ENUM or
5758 if (own_property_count == kInvalidEnumCacheSentinel) {
5759 own_property_count = object->map()->NumberOfDescribedProperties(
5760 OWN_DESCRIPTORS, DONT_SHOW);
5762 DCHECK(own_property_count == object->map()->NumberOfDescribedProperties(
5763 OWN_DESCRIPTORS, DONT_SHOW));
5766 if (object->map()->instance_descriptors()->HasEnumCache()) {
5767 DescriptorArray* desc = object->map()->instance_descriptors();
5768 Handle<FixedArray> keys(desc->GetEnumCache(), isolate);
5770 // In case the number of properties required in the enum are actually
5771 // present, we can reuse the enum cache. Otherwise, this means that the
5772 // enum cache was generated for a previous (smaller) version of the
5773 // Descriptor Array. In that case we regenerate the enum cache.
5774 if (own_property_count <= keys->length()) {
5775 if (cache_result) object->map()->SetEnumLength(own_property_count);
5776 isolate->counters()->enum_cache_hits()->Increment();
5777 return ReduceFixedArrayTo(keys, own_property_count);
5781 Handle<Map> map(object->map());
// No descriptors at all: the empty array is trivially the answer.
5783 if (map->instance_descriptors()->IsEmpty()) {
5784 isolate->counters()->enum_cache_hits()->Increment();
5785 if (cache_result) map->SetEnumLength(0);
5786 return isolate->factory()->empty_fixed_array();
5789 isolate->counters()->enum_cache_misses()->Increment();
// Rebuild the cache: one array of keys, and a parallel array of
// load-by-field indices (dropped if any key is not a FIELD).
5791 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
5792 own_property_count);
5793 Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
5794 own_property_count);
5796 Handle<DescriptorArray> descs =
5797 Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
5799 int size = map->NumberOfOwnDescriptors();
5802 for (int i = 0; i < size; i++) {
5803 PropertyDetails details = descs->GetDetails(i);
5804 Object* key = descs->GetKey(i);
5805 if (!(details.IsDontEnum() || key->IsSymbol())) {
5806 storage->set(index, key);
5807 if (!indices.is_null()) {
5808 if (details.type() != FIELD) {
5809 indices = Handle<FixedArray>();
5811 FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
5812 int load_by_field_index = field_index.GetLoadByFieldIndex();
5813 indices->set(index, Smi::FromInt(load_by_field_index));
5819 DCHECK(index == storage->length());
// Install the new cache on the descriptor array via a bridge storage
// FixedArray; Smi 0 stands in for "no indices array".
5821 Handle<FixedArray> bridge_storage =
5822 isolate->factory()->NewFixedArray(
5823 DescriptorArray::kEnumCacheBridgeLength);
5824 DescriptorArray* desc = object->map()->instance_descriptors();
5825 desc->SetEnumCache(*bridge_storage,
5827 indices.is_null() ? Object::cast(Smi::FromInt(0))
5828 : Object::cast(*indices));
5830 object->map()->SetEnumLength(own_property_count);
// Dictionary-mode path: copy enumerable keys out of the dictionary.
5834 Handle<NameDictionary> dictionary(object->property_dictionary());
5835 int length = dictionary->NumberOfEnumElements();
5837 return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
5839 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
5840 dictionary->CopyEnumKeysTo(*storage);
// Collects enumerable keys of |object| and (unless |type| is OWN_ONLY)
// of its prototype chain: proxies are asked via the JS proxy-enumerate
// trap; JSObjects contribute element keys, interceptor keys, and
// enum-cached property keys, all unioned into |content| in order.
// Returns an empty handle if an exception occurs along the way.
5846 MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
5847 KeyCollectionType type) {
5848 USE(ContainsOnlyValidKeys);
5849 Isolate* isolate = object->GetIsolate();
5850 Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
// Needed below to detect arguments objects, whose keys must not be
// enum-cached.
5851 Handle<JSFunction> arguments_function(
5852 JSFunction::cast(isolate->sloppy_arguments_map()->constructor()));
5854 // Only collect keys if access is permitted.
5855 for (PrototypeIterator iter(isolate, object,
5856 PrototypeIterator::START_AT_RECEIVER);
5857 !iter.IsAtEnd(); iter.Advance()) {
5858 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
5859 Handle<JSProxy> proxy(JSProxy::cast(*PrototypeIterator::GetCurrent(iter)),
// Proxies delegate key collection to the JS-level proxy_enumerate
// helper, whose result array is merged into |content|.
5861 Handle<Object> args[] = { proxy };
5862 Handle<Object> names;
5863 ASSIGN_RETURN_ON_EXCEPTION(
5865 Execution::Call(isolate,
5866 isolate->proxy_enumerate(),
5871 ASSIGN_RETURN_ON_EXCEPTION(
5873 FixedArray::AddKeysFromArrayLike(
5874 content, Handle<JSObject>::cast(names)),
5879 Handle<JSObject> current =
5880 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
5882 // Check access rights if required.
5883 if (current->IsAccessCheckNeeded() &&
5884 !isolate->MayNamedAccess(
5885 current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5886 isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
5887 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
5891 // Compute the element keys.
5892 Handle<FixedArray> element_keys =
5893 isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
5894 current->GetEnumElementKeys(*element_keys);
5895 ASSIGN_RETURN_ON_EXCEPTION(
5897 FixedArray::UnionOfKeys(content, element_keys),
5899 DCHECK(ContainsOnlyValidKeys(content));
5901 // Add the element keys from the interceptor.
5902 if (current->HasIndexedInterceptor()) {
5903 Handle<JSObject> result;
// A failed interceptor call simply contributes nothing (no throw
// here); only the merge step can propagate an exception.
5904 if (JSObject::GetKeysForIndexedInterceptor(
5905 current, object).ToHandle(&result)) {
5906 ASSIGN_RETURN_ON_EXCEPTION(
5908 FixedArray::AddKeysFromArrayLike(content, result),
5911 DCHECK(ContainsOnlyValidKeys(content));
5914 // We can cache the computed property keys if access checks are
5915 // not needed and no interceptors are involved.
5917 // We do not use the cache if the object has elements and
5918 // therefore it does not make sense to cache the property names
5919 // for arguments objects. Arguments objects will always have
5921 // Wrapped strings have elements, but don't have an elements
5922 // array or dictionary. So the fast inline test for whether to
5923 // use the cache says yes, so we should not create a cache.
5924 bool cache_enum_keys =
5925 ((current->map()->constructor() != *arguments_function) &&
5926 !current->IsJSValue() &&
5927 !current->IsAccessCheckNeeded() &&
5928 !current->HasNamedInterceptor() &&
5929 !current->HasIndexedInterceptor());
5930 // Compute the property keys and cache them if possible.
5931 ASSIGN_RETURN_ON_EXCEPTION(
5933 FixedArray::UnionOfKeys(
5934 content, GetEnumPropertyKeys(current, cache_enum_keys)),
5936 DCHECK(ContainsOnlyValidKeys(content));
5938 // Add the property keys from the interceptor.
5939 if (current->HasNamedInterceptor()) {
5940 Handle<JSObject> result;
5941 if (JSObject::GetKeysForNamedInterceptor(
5942 current, object).ToHandle(&result)) {
5943 ASSIGN_RETURN_ON_EXCEPTION(
5945 FixedArray::AddKeysFromArrayLike(content, result),
5948 DCHECK(ContainsOnlyValidKeys(content));
5951 // If we only want own properties we bail out after the first
5953 if (type == OWN_ONLY) break;
5959 // Try to update an accessor in an elements dictionary. Return true if the
5960 // update succeeded, and false otherwise.
// Succeeds only when the entry already exists as a CALLBACKS property
// holding an AccessorPair; then the pair's components (and, if changed,
// the attributes) are updated in place.
5961 static bool UpdateGetterSetterInDictionary(
5962 SeededNumberDictionary* dictionary,
5966 PropertyAttributes attributes) {
5967 int entry = dictionary->FindEntry(index);
5968 if (entry != SeededNumberDictionary::kNotFound) {
5969 Object* result = dictionary->ValueAt(entry);
5970 PropertyDetails details = dictionary->DetailsAt(entry);
5971 if (details.type() == CALLBACKS && result->IsAccessorPair()) {
5972 DCHECK(details.IsConfigurable());
5973 if (details.attributes() != attributes) {
5974 dictionary->DetailsAtPut(
5976 PropertyDetails(attributes, CALLBACKS, index));
5978 AccessorPair::cast(result)->SetComponents(getter, setter);
// Installs a getter/setter pair on element |index|.  First tries an
// in-place update of an existing accessor in the (possibly
// arguments-backed) element dictionary; otherwise allocates a fresh
// AccessorPair and installs it via SetElementCallback.  Typed-array
// element kinds silently ignore accessors.
5986 void JSObject::DefineElementAccessor(Handle<JSObject> object,
5988 Handle<Object> getter,
5989 Handle<Object> setter,
5990 PropertyAttributes attributes) {
5991 switch (object->GetElementsKind()) {
5992 case FAST_SMI_ELEMENTS:
5994 case FAST_DOUBLE_ELEMENTS:
5995 case FAST_HOLEY_SMI_ELEMENTS:
5996 case FAST_HOLEY_ELEMENTS:
5997 case FAST_HOLEY_DOUBLE_ELEMENTS:
6000 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6001 case EXTERNAL_##TYPE##_ELEMENTS: \
6002 case TYPE##_ELEMENTS: \
6004 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6005 #undef TYPED_ARRAY_CASE
6006 // Ignore getters and setters on pixel and external array elements.
6009 case DICTIONARY_ELEMENTS:
6010 if (UpdateGetterSetterInDictionary(object->element_dictionary(),
6018 case SLOPPY_ARGUMENTS_ELEMENTS: {
6019 // Ascertain whether we have read-only properties or an existing
6020 // getter/setter pair in an arguments elements dictionary backing
6022 FixedArray* parameter_map = FixedArray::cast(object->elements());
6023 uint32_t length = parameter_map->length();
// Mapped entries (slots >= 2) take precedence; only unmapped (hole)
// slots can carry accessors in the backing dictionary.
6025 index < (length - 2) ? parameter_map->get(index + 2) : NULL;
6026 if (probe == NULL || probe->IsTheHole()) {
6027 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
6028 if (arguments->IsDictionary()) {
6029 SeededNumberDictionary* dictionary =
6030 SeededNumberDictionary::cast(arguments);
6031 if (UpdateGetterSetterInDictionary(dictionary,
// Fallback: build a new AccessorPair and install it as a CALLBACKS
// element property.
6044 Isolate* isolate = object->GetIsolate();
6045 Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
6046 accessors->SetComponents(*getter, *setter);
6048 SetElementCallback(object, index, accessors, attributes);
// Walks the prototype chain of this map's holder and reports whether
// dictionary elements occur only in the prototypes (not on the receiver
// itself). Bails out conservatively when a proxy is encountered.
bool Map::DictionaryElementsInPrototypeChainOnly() {
// If the receiver itself already has dictionary elements, the answer
// cannot be "prototype chain only".
6053 if (IsDictionaryElementsKind(elements_kind())) {
6057 for (PrototypeIterator iter(this); !iter.IsAtEnd(); iter.Advance()) {
6058 if (iter.GetCurrent()->IsJSProxy()) {
6059 // Be conservative, don't walk into proxies.
6063 if (IsDictionaryElementsKind(
6064 JSObject::cast(iter.GetCurrent())->map()->elements_kind())) {
// Stores accessor |structure| for element |index| as a CALLBACKS property.
// Forces the elements into dictionary mode, marks the dictionary as
// requiring slow elements, and clears keyed-store ICs when the object was
// previously fast so stale handlers cannot be reused.
void JSObject::SetElementCallback(Handle<JSObject> object,
6075 Handle<Object> structure,
6076 PropertyAttributes attributes) {
6077 Heap* heap = object->GetHeap();
6078 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6080 // Normalize elements to make this operation simple.
6081 bool had_dictionary_elements = object->HasDictionaryElements();
6082 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
6083 DCHECK(object->HasDictionaryElements() ||
6084 object->HasDictionaryArgumentsElements());
6085 // Update the dictionary with the new CALLBACKS property.
6086 dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
6088 dictionary->set_requires_slow_elements();
6090 // Update the dictionary backing store on the object.
6091 if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
6092 // Also delete any parameter alias.
6094 // TODO(kmillikin): when deleting the last parameter alias we could
6095 // switch to a direct backing store without the parameter map. This
6096 // would allow GC of the context.
6097 FixedArray* parameter_map = FixedArray::cast(object->elements());
6098 if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
6099 parameter_map->set(index + 2, heap->the_hole_value());
// Slot 1 of the parameter map holds the (now dictionary) arguments store.
6101 parameter_map->set(1, *dictionary);
6103 object->set_elements(*dictionary);
6105 if (!had_dictionary_elements) {
6106 // KeyedStoreICs (at least the non-generic ones) need a reset.
6107 heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Stores accessor |structure| for named property |name| as a CALLBACKS
// property. Normalizes the object's properties first (keeping in-object
// slots for prototype maps) and, for global objects, swaps in a fresh map
// and deoptimizes dependent code to invalidate inlined property cells.
void JSObject::SetPropertyCallback(Handle<JSObject> object,
6115 Handle<Object> structure,
6116 PropertyAttributes attributes) {
6117 PropertyNormalizationMode mode = object->map()->is_prototype_map()
6118 ? KEEP_INOBJECT_PROPERTIES
6119 : CLEAR_INOBJECT_PROPERTIES;
6120 // Normalize object to make this operation simple.
6121 NormalizeProperties(object, mode, 0);
6123 // For the global object allocate a new map to invalidate the global inline
6124 // caches which have a global property cell reference directly in the code.
6125 if (object->IsGlobalObject()) {
6126 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
6127 DCHECK(new_map->is_dictionary_map());
6128 JSObject::MigrateToMap(object, new_map);
6130 // When running crankshaft, changing the map is not enough. We
6131 // need to deoptimize all functions that rely on this global
6133 Deoptimizer::DeoptimizeGlobalObject(*object);
6136 // Update the dictionary with the new CALLBACKS property.
6137 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6138 SetNormalizedProperty(object, name, structure, details);
// Prototype objects get re-optimized after normalization above.
6140 ReoptimizeIfPrototype(object);
// Defines a getter/setter accessor for |name| on |object| (the backing for
// Object.defineProperty-style accessor installation). Handles access
// checks, global-proxy forwarding, element vs. named dispatch, and fires
// Object.observe change records when the object is observed.
// Returns undefined on success (or after a failed access check).
MaybeHandle<Object> JSObject::DefineAccessor(Handle<JSObject> object,
6146 Handle<Object> getter,
6147 Handle<Object> setter,
6148 PropertyAttributes attributes) {
6149 Isolate* isolate = object->GetIsolate();
6150 // Check access rights if needed.
6151 if (object->IsAccessCheckNeeded() &&
6152 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6153 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6154 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Failed access checks are not exceptions here; mirror the old API by
// returning undefined.
6155 return isolate->factory()->undefined_value();
// Global proxies delegate to the real global object behind them.
6158 if (object->IsJSGlobalProxy()) {
6159 PrototypeIterator iter(isolate, object);
6160 if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
6161 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
6162 DefineAccessor(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)),
6163 name, getter, setter, attributes);
6164 return isolate->factory()->undefined_value();
6167 // Make sure that the top context does not change when doing callbacks or
6168 // interceptor calls.
6169 AssertNoContextChange ncc(isolate);
6171 // Try to flatten before operating on the string.
6172 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
6175 bool is_element = name->AsArrayIndex(&index);
6177 Handle<Object> old_value = isolate->factory()->the_hole_value();
// The hidden-string property backs hidden properties and must never be
// observable.
6178 bool is_observed = object->map()->is_observed() &&
6179 *name != isolate->heap()->hidden_string();
6180 bool preexists = false;
// Element path: record the old value for the change record, unless the
// element is already an accessor pair.
6183 Maybe<bool> maybe = HasOwnElement(object, index);
6184 // Workaround for a GCC 4.4.3 bug which leads to "‘preexists’ may be used
6185 // uninitialized in this function".
6186 if (!maybe.has_value) {
6188 return isolate->factory()->undefined_value();
6190 preexists = maybe.value;
6191 if (preexists && GetOwnElementAccessorPair(object, index).is_null()) {
6193 Object::GetElement(isolate, object, index).ToHandleChecked();
// Named path: look up the existing property to capture its old value.
6196 LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
6197 CHECK(GetPropertyAttributes(&it).has_value);
6198 preexists = it.IsFound();
6199 if (preexists && (it.state() == LookupIterator::DATA ||
6200 it.GetAccessors()->IsAccessorInfo())) {
6201 old_value = GetProperty(&it).ToHandleChecked();
6207 DefineElementAccessor(object, index, getter, setter, attributes);
6209 DCHECK(getter->IsSpecFunction() || getter->IsUndefined() ||
6211 DCHECK(setter->IsSpecFunction() || setter->IsUndefined() ||
6213 // At least one of the accessors needs to be a new value.
6214 DCHECK(!getter->IsNull() || !setter->IsNull());
6215 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
6216 if (it.state() == LookupIterator::ACCESS_CHECK) {
6217 // We already did an access check before. We do have access.
// Null accessor components mean "leave that component unchanged".
6220 if (!getter->IsNull()) {
6221 it.TransitionToAccessorProperty(ACCESSOR_GETTER, getter, attributes);
6223 if (!setter->IsNull()) {
6224 it.TransitionToAccessorProperty(ACCESSOR_SETTER, setter, attributes);
// Notify Object.observe observers of the add/reconfigure.
6229 const char* type = preexists ? "reconfigure" : "add";
6230 EnqueueChangeRecord(object, type, name, old_value);
6233 return isolate->factory()->undefined_value();
// Installs a native AccessorInfo callback on |object| under the name
// embedded in |info|. Like DefineAccessor, but for API-defined accessors:
// performs access checks, forwards through global proxies, and refuses
// ES5-invalid reconfiguration of non-configurable/read-only properties
// (by returning undefined rather than throwing).
MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
6238 Handle<AccessorInfo> info) {
6239 Isolate* isolate = object->GetIsolate();
6240 Factory* factory = isolate->factory();
6241 Handle<Name> name(Name::cast(info->name()));
6243 // Check access rights if needed.
6244 if (object->IsAccessCheckNeeded() &&
6245 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6246 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6247 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6248 return factory->undefined_value();
// Forward through a global proxy to the real global object.
6251 if (object->IsJSGlobalProxy()) {
6252 PrototypeIterator iter(isolate, object);
6253 if (iter.IsAtEnd()) return object;
6254 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
6256 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), info);
6259 // Make sure that the top context does not change when doing callbacks or
6260 // interceptor calls.
6261 AssertNoContextChange ncc(isolate);
6263 // Try to flatten before operating on the string.
6264 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
6267 bool is_element = name->AsArrayIndex(&index);
// Element accessors are not supported on JSArray instances.
6270 if (object->IsJSArray()) return factory->undefined_value();
6272 // Accessors overwrite previous callbacks (cf. with getters/setters).
6273 switch (object->GetElementsKind()) {
6274 case FAST_SMI_ELEMENTS:
6276 case FAST_DOUBLE_ELEMENTS:
6277 case FAST_HOLEY_SMI_ELEMENTS:
6278 case FAST_HOLEY_ELEMENTS:
6279 case FAST_HOLEY_DOUBLE_ELEMENTS:
6282 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6283 case EXTERNAL_##TYPE##_ELEMENTS: \
6284 case TYPE##_ELEMENTS: \
6286 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6287 #undef TYPED_ARRAY_CASE
6288 // Ignore getters and setters on pixel and external array
6290 return factory->undefined_value();
6292 case DICTIONARY_ELEMENTS:
6294 case SLOPPY_ARGUMENTS_ELEMENTS:
6299 SetElementCallback(object, index, info, info->property_attributes());
// Named-property path.
6302 LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
6303 CHECK(GetPropertyAttributes(&it).has_value);
6304 // ES5 forbids turning a property into an accessor if it's not
6305 // configurable. See 8.6.1 (Table 5).
6306 if (it.IsFound() && (it.IsReadOnly() || !it.IsConfigurable())) {
6307 return factory->undefined_value();
6310 SetPropertyCallback(object, name, info, info->property_attributes());
// Looks up the getter or setter (|component|) installed for |name| on
// |object|, walking the prototype chain. Elements are searched manually
// through each prototype's element dictionary; named properties use a
// LookupIterator. Returns undefined when no accessor is found or an
// access check fails.
MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
6319 AccessorComponent component) {
6320 Isolate* isolate = object->GetIsolate();
6322 // Make sure that the top context does not change when doing callbacks or
6323 // interceptor calls.
6324 AssertNoContextChange ncc(isolate);
6326 // Make the lookup and include prototypes.
6328 if (name->AsArrayIndex(&index)) {
// Element path: walk the prototype chain by hand, starting at the
// receiver itself.
6329 for (PrototypeIterator iter(isolate, object,
6330 PrototypeIterator::START_AT_RECEIVER);
6331 !iter.IsAtEnd(); iter.Advance()) {
6332 Handle<Object> current = PrototypeIterator::GetCurrent(iter);
6333 // Check access rights if needed.
6334 if (current->IsAccessCheckNeeded() &&
6335 !isolate->MayNamedAccess(Handle<JSObject>::cast(current), name,
6337 isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(current),
6339 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6340 return isolate->factory()->undefined_value();
// Only dictionary-mode elements can hold accessor pairs.
6343 if (current->IsJSObject() &&
6344 Handle<JSObject>::cast(current)->HasDictionaryElements()) {
6345 JSObject* js_object = JSObject::cast(*current);
6346 SeededNumberDictionary* dictionary = js_object->element_dictionary();
6347 int entry = dictionary->FindEntry(index);
6348 if (entry != SeededNumberDictionary::kNotFound) {
6349 Object* element = dictionary->ValueAt(entry);
6350 if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
6351 element->IsAccessorPair()) {
6352 return handle(AccessorPair::cast(element)->GetComponent(component),
// Named path: iterate the whole prototype chain, skipping interceptors.
6359 LookupIterator it(object, name,
6360 LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
6361 for (; it.IsFound(); it.Next()) {
6362 switch (it.state()) {
6363 case LookupIterator::INTERCEPTOR:
6364 case LookupIterator::NOT_FOUND:
6365 case LookupIterator::TRANSITION:
6368 case LookupIterator::ACCESS_CHECK:
6369 if (it.HasAccess(v8::ACCESS_HAS)) continue;
6370 isolate->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
6372 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6373 return isolate->factory()->undefined_value();
6375 case LookupIterator::JSPROXY:
6376 return isolate->factory()->undefined_value();
6378 case LookupIterator::DATA:
6380 case LookupIterator::ACCESSOR: {
6381 Handle<Object> maybe_pair = it.GetAccessors();
6382 if (maybe_pair->IsAccessorPair()) {
6384 AccessorPair::cast(*maybe_pair)->GetComponent(component),
6391 return isolate->factory()->undefined_value();
// Reverse lookup: given a property |value|, find the name of an own
// property that holds it, or undefined if none does. O(n) over the own
// descriptors (fast mode) or the property dictionary (slow mode) — debug
// and introspection helper, not a hot path.
Object* JSObject::SlowReverseLookup(Object* value) {
6396 if (HasFastProperties()) {
6397 int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
6398 DescriptorArray* descs = map()->instance_descriptors();
6399 for (int i = 0; i < number_of_own_descriptors; i++) {
6400 if (descs->GetType(i) == FIELD) {
6402 RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i));
// Double fields are boxed in mutable heap numbers, so compare by
// numeric value rather than identity.
6403 if (descs->GetDetails(i).representation().IsDouble()) {
6404 DCHECK(property->IsMutableHeapNumber());
6405 if (value->IsNumber() && property->Number() == value->Number()) {
6406 return descs->GetKey(i);
6408 } else if (property == value) {
6409 return descs->GetKey(i);
6411 } else if (descs->GetType(i) == CONSTANT) {
6412 if (descs->GetConstant(i) == value) {
6413 return descs->GetKey(i);
6417 return GetHeap()->undefined_value();
6419 return property_dictionary()->SlowReverseLookup(value);
// Allocates a fresh map that mirrors |map|'s type, prototype, constructor
// and bit fields, but with |instance_size| and with per-map bookkeeping
// reset: the copy owns its (zero) descriptors, has an invalid enum cache,
// is not deprecated, and restarts slack tracking.
Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
6425 Handle<Map> result = map->GetIsolate()->factory()->NewMap(
6426 map->instance_type(), instance_size);
6427 result->set_prototype(map->prototype());
6428 result->set_constructor(map->constructor());
6429 result->set_bit_field(map->bit_field());
6430 result->set_bit_field2(map->bit_field2());
6431 int new_bit_field3 = map->bit_field3();
6432 new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
6433 new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
6434 new_bit_field3 = EnumLengthBits::update(new_bit_field3,
6435 kInvalidEnumCacheSentinel);
6436 new_bit_field3 = Deprecated::update(new_bit_field3, false);
// Dictionary maps keep their stability bit; only fast maps are reset to
// stable here.
6437 if (!map->is_dictionary_map()) {
6438 new_bit_field3 = IsUnstable::update(new_bit_field3, false);
6440 new_bit_field3 = ConstructionCount::update(new_bit_field3,
6441 JSFunction::kNoSlackTracking);
6442 result->set_bit_field3(new_bit_field3);
// Converts |fast_map| to a normalized (dictionary) map, consulting the
// per-context NormalizedMapCache so equivalent fast maps share one
// normalized map. On a cache hit, slow-DCHECK builds verify the cached
// map is bit-identical to a freshly normalized one (modulo the code
// cache). Also notifies dependents that |fast_map|'s leaf layout changed.
Handle<Map> Map::Normalize(Handle<Map> fast_map,
6448 PropertyNormalizationMode mode) {
6449 DCHECK(!fast_map->is_dictionary_map());
6451 Isolate* isolate = fast_map->GetIsolate();
6452 Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
// The cache may be undefined during bootstrapping.
6454 bool use_cache = !maybe_cache->IsUndefined();
6455 Handle<NormalizedMapCache> cache;
6456 if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);
6458 Handle<Map> new_map;
6459 if (use_cache && cache->Get(fast_map, mode).ToHandle(&new_map)) {
6461 if (FLAG_verify_heap) new_map->DictionaryMapVerify();
6463 #ifdef ENABLE_SLOW_DCHECKS
6464 if (FLAG_enable_slow_asserts) {
6465 // The cached map should match newly created normalized map bit-by-bit,
6466 // except for the code cache, which can contain some ics which can be
6467 // applied to the shared map.
6468 Handle<Map> fresh = Map::CopyNormalized(fast_map, mode);
6470 DCHECK(memcmp(fresh->address(),
6472 Map::kCodeCacheOffset) == 0);
6473 STATIC_ASSERT(Map::kDependentCodeOffset ==
6474 Map::kCodeCacheOffset + kPointerSize);
6475 int offset = Map::kDependentCodeOffset + kPointerSize;
6476 DCHECK(memcmp(fresh->address() + offset,
6477 new_map->address() + offset,
6478 Map::kSize - offset) == 0);
// Cache miss: create the normalized map and memoize it.
6482 new_map = Map::CopyNormalized(fast_map, mode);
6484 cache->Set(fast_map, new_map);
6485 isolate->counters()->normalized_maps()->Increment();
6488 fast_map->NotifyLeafMapLayoutChange();
// Creates a dictionary-mode copy of |map|. CLEAR_INOBJECT_PROPERTIES
// shrinks the instance by dropping the in-object property slots; otherwise
// the in-object count is preserved.
Handle<Map> Map::CopyNormalized(Handle<Map> map,
6494 PropertyNormalizationMode mode) {
6495 int new_instance_size = map->instance_size();
6496 if (mode == CLEAR_INOBJECT_PROPERTIES) {
6497 new_instance_size -= map->inobject_properties() * kPointerSize;
6500 Handle<Map> result = RawCopy(map, new_instance_size);
6502 if (mode != CLEAR_INOBJECT_PROPERTIES) {
6503 result->set_inobject_properties(map->inobject_properties());
6506 result->set_dictionary_map(true);
// Normalized maps are never migration targets.
6507 result->set_migration_target(false);
6510 if (FLAG_verify_heap) result->DictionaryMapVerify();
// Copies |map| with the same instance size but without any descriptors or
// code cache; callers install descriptors afterwards. Also notifies
// dependents that |map| is no longer a leaf in the transition tree.
Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
6518 Handle<Map> result = RawCopy(map, map->instance_size());
6520 // Please note instance_type and instance_size are set when allocated.
6521 result->set_inobject_properties(map->inobject_properties());
6522 result->set_unused_property_fields(map->unused_property_fields());
6524 result->set_pre_allocated_property_fields(
6525 map->pre_allocated_property_fields());
6526 result->ClearCodeCache(map->GetHeap());
6527 map->NotifyLeafMapLayoutChange();
// Appends |descriptor| to a descriptor array shared between |map| and the
// new child map, then links the child via a SIMPLE_TRANSITION. Only valid
// when |map| owns its descriptors (asserted below), so appending in place
// cannot corrupt another map's view of the array.
Handle<Map> Map::ShareDescriptor(Handle<Map> map,
6533 Handle<DescriptorArray> descriptors,
6534 Descriptor* descriptor) {
6535 // Sanity check. This path is only to be taken if the map owns its descriptor
6536 // array, implying that its NumberOfOwnDescriptors equals the number of
6537 // descriptors in the descriptor array.
6538 DCHECK(map->NumberOfOwnDescriptors() ==
6539 map->instance_descriptors()->number_of_descriptors());
6541 Handle<Map> result = CopyDropDescriptors(map);
6542 Handle<Name> name = descriptor->GetKey();
6544 // Ensure there's space for the new descriptor in the shared descriptor array.
6545 if (descriptors->NumberOfSlackDescriptors() == 0) {
6546 int old_size = descriptors->number_of_descriptors();
6547 if (old_size == 0) {
6548 descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
// Grow by 50% (minimum 1) and re-fetch the possibly reallocated array.
6550 EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
6551 descriptors = handle(map->instance_descriptors());
6556 DisallowHeapAllocation no_gc;
6557 descriptors->Append(descriptor);
6558 result->InitializeDescriptors(*descriptors);
6561 DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
6562 ConnectTransition(map, result, name, SIMPLE_TRANSITION);
// Records |child| as the transition from |parent| under |name| and sets the
// back pointer. The parent gives up descriptor ownership because the child
// may now share (and extend) the same descriptor array.
void Map::ConnectTransition(Handle<Map> parent, Handle<Map> child,
6569 Handle<Name> name, SimpleTransitionFlag flag) {
6570 parent->set_owns_descriptors(false);
// Prototype maps only transition to other prototype maps.
6571 if (parent->is_prototype_map()) {
6572 DCHECK(child->is_prototype_map());
6574 Handle<TransitionArray> transitions =
6575 TransitionArray::CopyInsert(parent, name, child, flag);
6576 parent->set_transitions(*transitions);
6577 child->SetBackPointer(*parent);
// Copies |map| and installs |descriptors| wholesale. When a transition is
// requested and allowed, the copy is linked into the transition tree under
// |maybe_name|; otherwise the copy is free-floating and the descriptors
// are generalized (tagged representation, Any field type) since no
// transition tree will keep their specializations in sync.
Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
6583 Handle<DescriptorArray> descriptors,
6584 TransitionFlag flag,
6585 MaybeHandle<Name> maybe_name,
6586 SimpleTransitionFlag simple_flag) {
6587 DCHECK(descriptors->IsSortedNoDuplicates());
6589 Handle<Map> result = CopyDropDescriptors(map);
6590 result->InitializeDescriptors(*descriptors);
6592 if (!map->is_prototype_map()) {
6593 if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
6595 CHECK(maybe_name.ToHandle(&name));
6596 ConnectTransition(map, result, name, simple_flag);
6598 int length = descriptors->number_of_descriptors();
6599 for (int i = 0; i < length; i++) {
6600 descriptors->SetRepresentation(i, Representation::Tagged());
6601 if (descriptors->GetDetails(i).type() == FIELD) {
6602 descriptors->SetValue(i, HeapType::Any());
6612 // Since this method is used to rewrite an existing transition tree, it can
6613 // always insert transitions without checking.
// Creates the map reached after |new_descriptor| descriptors of
// |descriptors| have been added, adjusting unused_property_fields for a
// FIELD-typed descriptor (wrapping by kFieldsAdded when the in-object
// budget is exhausted), and links it with a SIMPLE_TRANSITION.
Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
6616 Handle<DescriptorArray> descriptors) {
6617 DCHECK(descriptors->IsSortedNoDuplicates());
6619 Handle<Map> result = CopyDropDescriptors(map);
6621 result->InitializeDescriptors(*descriptors);
6622 result->SetNumberOfOwnDescriptors(new_descriptor + 1);
6624 int unused_property_fields = map->unused_property_fields();
6625 if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
6626 unused_property_fields = map->unused_property_fields() - 1;
6627 if (unused_property_fields < 0) {
6628 unused_property_fields += JSObject::kFieldsAdded;
6632 result->set_unused_property_fields(unused_property_fields);
6634 Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
6635 ConnectTransition(map, result, name, SIMPLE_TRANSITION);
// Copies |map| with a different elements |kind|. If a transition should and
// can be inserted and the map owns its descriptors, the descriptors are
// shared with the new map; otherwise a full copy forces a descriptor-array
// split. The DCHECKs restrict inserted transitions to legal elements-kind
// changes (more-general fast kinds, or dictionary/external pairs).
Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind,
6642 TransitionFlag flag) {
6643 if (flag == INSERT_TRANSITION) {
6644 DCHECK(!map->HasElementsTransition() ||
6645 ((map->elements_transition_map()->elements_kind() ==
6646 DICTIONARY_ELEMENTS ||
6647 IsExternalArrayElementsKind(
6648 map->elements_transition_map()->elements_kind())) &&
6649 (kind == DICTIONARY_ELEMENTS ||
6650 IsExternalArrayElementsKind(kind))));
6651 DCHECK(!IsFastElementsKind(kind) ||
6652 IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
6653 DCHECK(kind != map->elements_kind());
6656 bool insert_transition =
6657 flag == INSERT_TRANSITION && !map->HasElementsTransition();
6659 if (insert_transition && map->owns_descriptors()) {
6660 // In case the map owned its own descriptors, share the descriptors and
6661 // transfer ownership to the new map.
6662 Handle<Map> new_map = CopyDropDescriptors(map);
6664 ConnectElementsTransition(map, new_map);
6666 new_map->set_elements_kind(kind);
6667 new_map->InitializeDescriptors(map->instance_descriptors());
6671 // In case the map did not own its own descriptors, a split is forced by
6672 // copying the map; creating a new descriptor array cell.
6673 // Create a new free-floating map only if we are not allowed to store it.
6674 Handle<Map> new_map = Copy(map);
6676 new_map->set_elements_kind(kind);
6678 if (insert_transition) {
6679 ConnectElementsTransition(map, new_map);
// Copies |map| with the is_observed bit set (Object.observe support) and
// links the copy via a transition keyed on the private observed symbol.
// Descriptor sharing mirrors CopyAsElementsKind: share when the map owns
// its descriptors, otherwise take a full copy.
Handle<Map> Map::CopyForObserved(Handle<Map> map) {
6687 DCHECK(!map->is_observed());
6689 Isolate* isolate = map->GetIsolate();
6691 // In case the map owned its own descriptors, share the descriptors and
6692 // transfer ownership to the new map.
6693 Handle<Map> new_map;
6694 if (map->owns_descriptors()) {
6695 new_map = CopyDropDescriptors(map);
6697 DCHECK(!map->is_prototype_map());
6698 new_map = Copy(map);
6701 new_map->set_is_observed();
// InitializeDescriptors must run after set_is_observed; the shared array
// is installed on the already-flagged map.
6702 if (map->owns_descriptors()) {
6703 new_map->InitializeDescriptors(map->instance_descriptors());
6706 Handle<Name> name = isolate->factory()->observed_symbol();
6707 ConnectTransition(map, new_map, name, FULL_TRANSITION);
// Produces a free-floating copy of |map| with its own copy of the first
// NumberOfOwnDescriptors descriptors and no transition back-link
// (OMIT_TRANSITION).
Handle<Map> Map::Copy(Handle<Map> map) {
6714 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6715 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
6716 Handle<DescriptorArray> new_descriptors =
6717 DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors);
6718 return CopyReplaceDescriptors(
6719 map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>());
// Creates a plain-object map with room for |inobject_properties| in-object
// slots, based on the Object function's initial map. The requested count
// is clamped so the instance size never exceeds JSObject::kMaxInstanceSize.
Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
6724 Handle<Map> copy = Copy(handle(isolate->object_function()->initial_map()));
6726 // Check that we do not overflow the instance size when adding the extra
6727 // inobject properties. If the instance size overflows, we allocate as many
6728 // properties as we can as inobject properties.
6729 int max_extra_properties =
6730 (JSObject::kMaxInstanceSize - JSObject::kHeaderSize) >> kPointerSizeLog2;
6732 if (inobject_properties > max_extra_properties) {
6733 inobject_properties = max_extra_properties;
6736 int new_instance_size =
6737 JSObject::kHeaderSize + kPointerSize * inobject_properties;
6739 // Adjust the map with the extra inobject properties.
6740 copy->set_inobject_properties(inobject_properties);
6741 copy->set_unused_property_fields(inobject_properties);
6742 copy->set_instance_size(new_instance_size);
// The visitor id depends on instance size, so recompute it last.
6743 copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
// Copies |map| for Object.freeze: every own descriptor gets FROZEN
// attributes, the map becomes non-extensible with dictionary elements, and
// the copy is reachable via a transition keyed on the frozen symbol.
Handle<Map> Map::CopyForFreeze(Handle<Map> map) {
6749 int num_descriptors = map->NumberOfOwnDescriptors();
6750 Isolate* isolate = map->GetIsolate();
6751 Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
6752 handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN);
6753 Handle<Map> new_map = CopyReplaceDescriptors(
6754 map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol());
6756 new_map->set_is_extensible(false);
6757 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
// Returns whether |value| can be stored under the existing descriptor
// without generalizing it: a FIELD must admit the value's representation
// and field type; a CONSTANT only matches the identical value.
bool DescriptorArray::CanHoldValue(int descriptor, Object* value) {
6763 PropertyDetails details = GetDetails(descriptor);
6764 switch (details.type()) {
6766 return value->FitsRepresentation(details.representation()) &&
6767 GetFieldType(descriptor)->NowContains(value);
6770 DCHECK(GetConstant(descriptor) != value ||
6771 value->FitsRepresentation(details.representation()));
6772 return GetConstant(descriptor) == value;
// Returns a map whose |descriptor| can hold |value|: |map| itself when the
// existing descriptor already admits the value, otherwise a map with that
// descriptor's representation/field type generalized for the value.
Handle<Map> Map::PrepareForDataProperty(Handle<Map> map, int descriptor,
6788 Handle<Object> value) {
6789 // Dictionaries can store any property value.
6790 if (map->is_dictionary_map()) return map;
6792 // Migrate to the newest map before storing the property.
6795 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6797 if (descriptors->CanHoldValue(descriptor, *value)) return map;
6799 Isolate* isolate = map->GetIsolate();
6800 Representation representation = value->OptimalRepresentation();
6801 Handle<HeapType> type = value->OptimalType(isolate, representation);
6803 return GeneralizeRepresentation(map, descriptor, representation, type,
// Computes the map an object gets after adding data property |name|.
// Reuses an existing transition when the attributes match; otherwise adds
// a constant (for functions) or field descriptor. Falls back to a
// normalized map when attributes mismatch, the map has too many fast
// properties, or descriptor creation fails.
Handle<Map> Map::TransitionToDataProperty(Handle<Map> map, Handle<Name> name,
6809 Handle<Object> value,
6810 PropertyAttributes attributes,
6811 StoreFromKeyed store_mode) {
6812 // Dictionary maps can always have additional data properties.
6813 if (map->is_dictionary_map()) return map;
6815 // Migrate to the newest map before storing the property.
6818 int index = map->SearchTransition(*name);
6819 if (index != TransitionArray::kNotFound) {
6820 Handle<Map> transition(map->GetTransition(index));
6821 int descriptor = transition->LastAdded();
6823 // TODO(verwaest): Handle attributes better.
6824 DescriptorArray* descriptors = transition->instance_descriptors();
6825 if (descriptors->GetDetails(descriptor).attributes() != attributes) {
6826 return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
// The transition exists and matches; only ensure the descriptor can
// hold the new value.
6829 return Map::PrepareForDataProperty(transition, descriptor, value);
6832 TransitionFlag flag = INSERT_TRANSITION;
6833 MaybeHandle<Map> maybe_map;
// Functions are stored as CONSTANT descriptors; other values as FIELDs.
6834 if (value->IsJSFunction()) {
6835 maybe_map = Map::CopyWithConstant(map, name, value, attributes, flag);
6836 } else if (!map->TooManyFastProperties(store_mode)) {
6837 Isolate* isolate = name->GetIsolate();
6838 Representation representation = value->OptimalRepresentation();
6839 Handle<HeapType> type = value->OptimalType(isolate, representation);
6841 Map::CopyWithField(map, name, type, attributes, representation, flag);
6845 if (!maybe_map.ToHandle(&result)) {
6846 return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
// Produces a map in which descriptor |descriptor| is a field with the
// given |attributes|, generalizing all representations. Fast maps only;
// dictionary maps are reconfigured in place by their callers.
Handle<Map> Map::ReconfigureDataProperty(Handle<Map> map, int descriptor,
6854 PropertyAttributes attributes) {
6855 // Dictionaries have to be reconfigured in-place.
6856 DCHECK(!map->is_dictionary_map());
6858 // For now, give up on transitioning and just create a unique map.
6859 // TODO(verwaest/ishell): Cache transitions with different attributes.
6860 return CopyGeneralizeAllRepresentations(map, descriptor, FORCE_FIELD,
6861 attributes, "attributes mismatch");
// Computes the map an object gets after installing |accessor| as the
// getter or setter (|component|) for |name|. Reuses a matching transition
// or an existing AccessorPair descriptor where possible; any mismatch in
// descriptor type, attributes, or an already-set accessor component makes
// the map drop to dictionary mode via Normalize.
Handle<Map> Map::TransitionToAccessorProperty(Handle<Map> map,
6867 AccessorComponent component,
6868 Handle<Object> accessor,
6869 PropertyAttributes attributes) {
6870 Isolate* isolate = name->GetIsolate();
6872 // Dictionary maps can always have additional data properties.
6873 if (map->is_dictionary_map()) {
6874 // For global objects, property cells are inlined. We need to change the
6876 if (map->IsGlobalObjectMap()) return Copy(map);
6880 // Migrate to the newest map before transitioning to the new property.
// Prototype maps keep their in-object properties when normalized so that
// existing instances remain compatible.
6883 PropertyNormalizationMode mode = map->is_prototype_map()
6884 ? KEEP_INOBJECT_PROPERTIES
6885 : CLEAR_INOBJECT_PROPERTIES;
6887 int index = map->SearchTransition(*name);
6888 if (index != TransitionArray::kNotFound) {
6889 Handle<Map> transition(map->GetTransition(index));
6890 DescriptorArray* descriptors = transition->instance_descriptors();
6891 // Fast path, assume that we're modifying the last added descriptor.
6892 int descriptor = transition->LastAdded();
6893 if (descriptors->GetKey(descriptor) != *name) {
6894 // If not, search for the descriptor.
6895 descriptor = descriptors->SearchWithCache(*name, *transition);
6898 if (descriptors->GetDetails(descriptor).type() != CALLBACKS) {
6899 return Map::Normalize(map, mode);
6902 // TODO(verwaest): Handle attributes better.
6903 if (descriptors->GetDetails(descriptor).attributes() != attributes) {
6904 return Map::Normalize(map, mode);
6907 Handle<Object> maybe_pair(descriptors->GetValue(descriptor), isolate);
6908 if (!maybe_pair->IsAccessorPair()) {
6909 return Map::Normalize(map, mode);
6912 Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
// The transition is only reusable if its pair holds exactly the same
// accessor for this component.
6913 if (pair->get(component) != *accessor) {
6914 return Map::Normalize(map, mode);
// No matching transition: look for an existing descriptor on |map|.
6920 Handle<AccessorPair> pair;
6921 DescriptorArray* old_descriptors = map->instance_descriptors();
6922 int descriptor = old_descriptors->SearchWithCache(*name, *map);
6923 if (descriptor != DescriptorArray::kNotFound) {
6924 PropertyDetails old_details = old_descriptors->GetDetails(descriptor);
6925 if (old_details.type() != CALLBACKS) {
6926 return Map::Normalize(map, mode);
6929 if (old_details.attributes() != attributes) {
6930 return Map::Normalize(map, mode);
6933 Handle<Object> maybe_pair(old_descriptors->GetValue(descriptor), isolate);
6934 if (!maybe_pair->IsAccessorPair()) {
6935 return Map::Normalize(map, mode);
6938 Object* current = Handle<AccessorPair>::cast(maybe_pair)->get(component);
// Same accessor already installed: nothing to do.
6939 if (current == *accessor) return map;
// A different, already-set component cannot be replaced on a fast map.
6941 if (!current->IsTheHole()) {
6942 return Map::Normalize(map, mode);
// Copy the pair so the original map's descriptor is left untouched.
6945 pair = AccessorPair::Copy(Handle<AccessorPair>::cast(maybe_pair));
6946 } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
6947 map->TooManyFastProperties(CERTAINLY_NOT_STORE_FROM_KEYED)) {
6948 return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
6950 pair = isolate->factory()->NewAccessorPair();
6953 pair->set(component, *accessor);
6954 TransitionFlag flag = INSERT_TRANSITION;
6955 CallbacksDescriptor new_desc(name, pair, attributes);
6956 return Map::CopyInsertDescriptor(map, &new_desc, flag);
// Copies |map| and appends |descriptor|. Takes the fast ShareDescriptor
// path (append into the shared array) when the map owns its descriptors
// and a transition is both requested and possible; otherwise copies the
// descriptor array with one slack slot and appends into the copy.
Handle<Map> Map::CopyAddDescriptor(Handle<Map> map,
6961 Descriptor* descriptor,
6962 TransitionFlag flag) {
6963 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6965 // Ensure the key is unique.
6966 descriptor->KeyToUniqueName();
6968 if (flag == INSERT_TRANSITION &&
6969 map->owns_descriptors() &&
6970 map->CanHaveMoreTransitions()) {
6971 return ShareDescriptor(map, descriptors, descriptor);
6974 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
6975 descriptors, map->NumberOfOwnDescriptors(), 1);
6976 new_descriptors->Append(descriptor);
6978 return CopyReplaceDescriptors(
6979 map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION);
// Copies |map| with |descriptor| inserted: replaces an existing descriptor
// with the same key, or appends when the key is new.
Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map,
6984 Descriptor* descriptor,
6985 TransitionFlag flag) {
6986 Handle<DescriptorArray> old_descriptors(map->instance_descriptors());
6988 // Ensure the key is unique.
6989 descriptor->KeyToUniqueName();
6991 // We replace the key if it is already present.
6992 int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
6993 if (index != DescriptorArray::kNotFound) {
6994 return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
6996 return CopyAddDescriptor(map, descriptor, flag);
// Copies the first |enumeration_index| descriptors without changing any
// attributes; convenience wrapper over CopyUpToAddAttributes with NONE.
Handle<DescriptorArray> DescriptorArray::CopyUpTo(
7001 Handle<DescriptorArray> desc,
7002 int enumeration_index,
7004 return DescriptorArray::CopyUpToAddAttributes(
7005 desc, enumeration_index, NONE, slack);
// Copies the first |enumeration_index| descriptors into a new array with
// |slack| extra capacity, OR-ing |attributes| into each copied descriptor.
// Private symbols are exempt from bulk attribute changes, and READ_ONLY is
// never forced onto JavaScript accessor pairs.
Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
7010 Handle<DescriptorArray> desc,
7011 int enumeration_index,
7012 PropertyAttributes attributes,
7014 if (enumeration_index + slack == 0) {
7015 return desc->GetIsolate()->factory()->empty_descriptor_array();
7018 int size = enumeration_index;
7020 Handle<DescriptorArray> descriptors =
7021 DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
7022 DescriptorArray::WhitenessWitness witness(*descriptors);
// Slow path only when attributes actually change; otherwise a plain
// element-wise CopyFrom below suffices.
7024 if (attributes != NONE) {
7025 for (int i = 0; i < size; ++i) {
7026 Object* value = desc->GetValue(i);
7027 Name* key = desc->GetKey(i);
7028 PropertyDetails details = desc->GetDetails(i);
7029 // Bulk attribute changes never affect private properties.
7030 if (!key->IsSymbol() || !Symbol::cast(key)->is_private()) {
7031 int mask = DONT_DELETE | DONT_ENUM;
7032 // READ_ONLY is an invalid attribute for JS setters/getters.
7033 if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
7036 details = details.CopyAddAttributes(
7037 static_cast<PropertyAttributes>(attributes & mask));
7039 Descriptor inner_desc(
7040 handle(key), handle(value, desc->GetIsolate()), details);
7041 descriptors->Set(i, &inner_desc, witness);
7044 for (int i = 0; i < size; ++i) {
7045 descriptors->CopyFrom(i, *desc, witness);
// A partial copy may leave descriptors out of enumeration order.
7049 if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();
// Returns a copy of |map| in which the descriptor at |insertion_index|
// (which must have the same key as |descriptor|) is replaced by |descriptor|.
Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
                                       Handle<DescriptorArray> descriptors,
                                       Descriptor* descriptor,
                                       int insertion_index,
                                       TransitionFlag flag) {
  // Ensure the key is unique.
  descriptor->KeyToUniqueName();

  Handle<Name> key = descriptor->GetKey();
  DCHECK(*key == descriptors->GetKey(insertion_index));

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      descriptors, map->NumberOfOwnDescriptors());

  new_descriptors->Replace(insertion_index, descriptor);

  // Replacing the last descriptor is the only case eligible for a simple
  // (last-added-property) transition.
  SimpleTransitionFlag simple_flag =
      (insertion_index == descriptors->number_of_descriptors() - 1)

  return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
// Inserts |code| into |map|'s code cache, allocating the CodeCache object on
// first use. An empty cache is represented by a plain FixedArray, which is
// why IsFixedArray() below means "no real cache yet".
void Map::UpdateCodeCache(Handle<Map> map,
                          Handle<Code> code) {
  Isolate* isolate = map->GetIsolate();
  HandleScope scope(isolate);
  // Allocate the code cache if not present.
  if (map->code_cache()->IsFixedArray()) {
    Handle<Object> result = isolate->factory()->NewCodeCache();
    map->set_code_cache(*result);

  // Update the code cache.
  Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
  CodeCache::Update(code_cache, name, code);
// Returns the cached Code object for (name, flags), or undefined when this
// map has no code cache (still a bare FixedArray) or no matching entry.
Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
  // Do a lookup if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->Lookup(name, flags);
  return GetHeap()->undefined_value();
// Returns the internal index of (name, code) in this map's code cache, for
// a later RemoveFromCodeCache call; only meaningful when a cache exists.
int Map::IndexInCodeCache(Object* name, Code* code) {
  // Get the internal index if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->GetIndex(name, code);
// Removes the entry at |index| (obtained from IndexInCodeCache) from this
// map's code cache.
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
  // No GC is supposed to happen between a call to IndexInCodeCache and
  // RemoveFromCodeCache so the code cache must be there.
  DCHECK(!code_cache()->IsFixedArray());
  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
// An iterator over all map transitions in a descriptor array, reusing the
// constructor field of the map while it is running. Negative values in
// the constructor field indicate an active map transition iteration. The
// original constructor is restored after iterating over all entries.
class IntrusiveMapTransitionIterator {
  IntrusiveMapTransitionIterator(
      Map* map, TransitionArray* transition_array, Object* constructor)
        transition_array_(transition_array),
        constructor_(constructor) { }

  // Begin iterating by stashing Smi(-1) in the constructor slot, unless an
  // iteration is already in progress (slot already holds a Smi).
  void StartIfNotStarted() {
    DCHECK(!(*IteratorField())->IsSmi() || IsIterating());
    if (!(*IteratorField())->IsSmi()) {
      DCHECK(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(-1);

  // True while the constructor slot holds the negative-Smi iteration cursor.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() < 0;

    DCHECK(IsIterating());
    // Cursor encoding: stored value v < 0 maps to transition index (-v - 1).
    int value = Smi::cast(*IteratorField())->value();
    int index = -value - 1;
    int number_of_transitions = transition_array_->number_of_transitions();
    while (index < number_of_transitions) {
      *IteratorField() = Smi::FromInt(value - 1);
      return transition_array_->GetTarget(index);

    // Done: restore the original constructor.
    *IteratorField() = constructor_;

  // The reused storage: the map's constructor field.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);

  TransitionArray* transition_array_;
  Object* constructor_;
// An iterator over all prototype transitions, reusing the constructor field
// of the map while it is running. Positive values in the constructor field
// indicate an active prototype transition iteration. The original constructor
// is restored after iterating over all entries.
class IntrusivePrototypeTransitionIterator {
  IntrusivePrototypeTransitionIterator(
      Map* map, HeapObject* proto_trans, Object* constructor)
      : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }

  // Begin iterating by stashing Smi(0) in the constructor slot, unless an
  // iteration is already in progress (slot already holds a Smi).
  void StartIfNotStarted() {
    if (!(*IteratorField())->IsSmi()) {
      DCHECK(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(0);

  // True while the constructor slot holds the non-negative-Smi cursor.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() >= 0;

    DCHECK(IsIterating());
    int transitionNumber = Smi::cast(*IteratorField())->value();
    if (transitionNumber < NumberOfTransitions()) {
      *IteratorField() = Smi::FromInt(transitionNumber + 1);
      return GetTransition(transitionNumber);

    // Done: restore the original constructor.
    *IteratorField() = constructor_;

  // The reused storage: the map's constructor field.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);

  // Reads the entry count out of the prototype-transitions FixedArray header.
  int NumberOfTransitions() {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
    return Smi::cast(num)->value();

  Map* GetTransition(int transitionNumber) {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));

  // Flat-array index of the map slot for entry |transitionNumber|.
  int IndexFor(int transitionNumber) {
    return Map::kProtoTransitionHeaderSize +
           Map::kProtoTransitionMapOffset +
           transitionNumber * Map::kProtoTransitionElementsPerEntry;

  HeapObject* proto_trans_;
  Object* constructor_;
// To traverse the transition tree iteratively, we have to store two kinds of
// information in a map: The parent map in the traversal and which children of a
// node have already been visited. To do this without additional memory, we
// temporarily reuse two fields with known values:
//
// (1) The map of the map temporarily holds the parent, and is restored to the
//     meta map afterwards.
//
// (2) The info which children have already been visited depends on which part
//     of the map we currently iterate. We use the constructor field of the
//     map to store the current index. We can do that because the constructor
//     is the same for all involved maps.
//
//     (a) If we currently follow normal map transitions, we temporarily store
//         the current index in the constructor field, and restore it to the
//         original constructor afterwards. Note that a single descriptor can
//         have 0, 1, or 2 transitions.
//
//     (b) If we currently follow prototype transitions, we temporarily store
//         the current index in the constructor field, and restore it to the
//         original constructor afterwards.
//
// Note that the child iterator is just a concatenation of two iterators: One
// iterating over map transitions and one iterating over prototype transitions.
class TraversableMap : public Map {
  // Record the parent in the traversal within this map. Note that this destroys
  void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }

  // Reset the current map's map, returning the parent previously stored in it.
  TraversableMap* GetAndResetParent() {
    TraversableMap* old_parent = static_cast<TraversableMap*>(map());
    set_map_no_write_barrier(GetHeap()->meta_map());

  // If we have an unvisited child map, return that one and advance. If we have
  // none, return NULL and restore the overwritten constructor field.
  TraversableMap* ChildIteratorNext(Object* constructor) {
    if (!HasTransitionArray()) return NULL;

    TransitionArray* transition_array = transitions();
    // Prototype transitions are exhausted first, then map transitions.
    if (transition_array->HasPrototypeTransitions()) {
      HeapObject* proto_transitions =
          transition_array->GetPrototypeTransitions();
      IntrusivePrototypeTransitionIterator proto_iterator(this,
      proto_iterator.StartIfNotStarted();
      if (proto_iterator.IsIterating()) {
        Map* next = proto_iterator.Next();
        if (next != NULL) return static_cast<TraversableMap*>(next);

    IntrusiveMapTransitionIterator transition_iterator(this,
    transition_iterator.StartIfNotStarted();
    if (transition_iterator.IsIterating()) {
      Map* next = transition_iterator.Next();
      if (next != NULL) return static_cast<TraversableMap*>(next);
// Traverse the transition tree in postorder without using the C++ stack by
// doing pointer reversal.
void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
  // Make sure that we do not allocate in the callback.
  DisallowHeapAllocation no_allocation;

  TraversableMap* current = static_cast<TraversableMap*>(this);
  // Get the root constructor here to restore it later when finished iterating
  Object* root_constructor = constructor();
    // Descend to the next unvisited child, recording the back-pointer in the
    // child's map field; otherwise visit this node (postorder) and pop back
    // to the parent.
    TraversableMap* child = current->ChildIteratorNext(root_constructor);
    if (child != NULL) {
      child->SetParent(current);
      TraversableMap* parent = current->GetAndResetParent();
      callback(current, data);
      if (current == this) break;
// Routes (name, code) to the right sub-cache: Code::NORMAL stubs go into the
// lazily allocated hash-table cache, everything else into the linear
// default cache.
void CodeCache::Update(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // The number of monomorphic stubs for normal load/store/call IC's can grow to
  // a large number and therefore they need to go into a hash table. They are
  // used to load global properties from cells.
  if (code->type() == Code::NORMAL) {
    // Make sure that a hash table is allocated for the normal load code cache.
    if (code_cache->normal_type_cache()->IsUndefined()) {
      Handle<Object> result =
          CodeCacheHashTable::New(code_cache->GetIsolate(),
                                  CodeCacheHashTable::kInitialSize);
      code_cache->set_normal_type_cache(*result);

    UpdateNormalTypeCache(code_cache, name, code);

    DCHECK(code_cache->default_cache()->IsFixedArray());
    UpdateDefaultCache(code_cache, name, code);
// Inserts (name, code) into the linear default cache. Reuses a deleted slot
// (null key) or an unused slot (undefined key) when possible, replaces an
// existing entry with matching name and type-less flags, and otherwise grows
// the backing FixedArray by ~1.5x.
void CodeCache::UpdateDefaultCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // When updating the default code cache we disregard the type encoded in the
  // flags. This allows call constant stubs to overwrite call field
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // First check whether we can update existing code cache without
  Handle<FixedArray> cache = handle(code_cache->default_cache());
  int length = cache->length();
    DisallowHeapAllocation no_alloc;
    int deleted_index = -1;
    // Scan the cache; entries are (name, code) pairs of kCodeCacheEntrySize.
    for (int i = 0; i < length; i += kCodeCacheEntrySize) {
      Object* key = cache->get(i);
      // Null key marks a deleted entry; remember the first one for reuse.
      if (key->IsNull()) {
        if (deleted_index < 0) deleted_index = i;
      // Undefined key marks the end of the used region.
      if (key->IsUndefined()) {
        if (deleted_index >= 0) i = deleted_index;
        cache->set(i + kCodeCacheEntryNameOffset, *name);
        cache->set(i + kCodeCacheEntryCodeOffset, *code);
      if (name->Equals(Name::cast(key))) {
            Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
        // Same name and same type-less flags: overwrite in place.
        if (Code::RemoveTypeFromFlags(found) == flags) {
          cache->set(i + kCodeCacheEntryCodeOffset, *code);

    // Reached the end of the code cache. If there were deleted
    // elements, reuse the space for the first of them.
    if (deleted_index >= 0) {
      cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
      cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);

  // Extend the code cache with some new entries (at least one). Must be a
  // multiple of the entry size.
  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
  new_length = new_length - new_length % kCodeCacheEntrySize;
  DCHECK((new_length % kCodeCacheEntrySize) == 0);
  cache = FixedArray::CopySize(cache, new_length);

  // Add the (name, code) pair to the new cache.
  cache->set(length + kCodeCacheEntryNameOffset, *name);
  cache->set(length + kCodeCacheEntryCodeOffset, *code);
  code_cache->set_default_cache(*cache);
// Inserts (name, code) into the hash-table cache for Code::NORMAL stubs.
// Put() may allocate a larger table, so the cache pointer is re-stored.
void CodeCache::UpdateNormalTypeCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // Adding a new entry can cause a new cache to be allocated.
  Handle<CodeCacheHashTable> cache(
      CodeCacheHashTable::cast(code_cache->normal_type_cache()));
  Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
  code_cache->set_normal_type_cache(*new_cache);
// Looks up (name, flags), trying the linear default cache first and falling
// back to the normal-type hash table. A default-cache hit whose full flags
// (including type) do not match yields undefined rather than a fallback.
Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
  Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
  if (result->IsCode()) {
    if (Code::cast(result)->flags() == flags) return result;
    return GetHeap()->undefined_value();
  return LookupNormalTypeCache(name, flags);
// Linear scan of the default cache for an entry with matching name and
// type-less flags. Returns undefined when the scan hits the unused region
// (undefined key) or the end of the array.
Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
  FixedArray* cache = default_cache();
  int length = cache->length();
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i + kCodeCacheEntryNameOffset);
    // Skip deleted elements.
    if (key->IsNull()) continue;
    if (key->IsUndefined()) return key;
    if (name->Equals(Name::cast(key))) {
      Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
      if (Code::RemoveTypeFromFlags(code->flags()) == flags) {

  return GetHeap()->undefined_value();
// Looks up (name, flags) in the normal-type hash table, or returns undefined
// if that cache was never allocated.
Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
  if (!normal_type_cache()->IsUndefined()) {
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->Lookup(name, flags);
  return GetHeap()->undefined_value();
// Returns the removal index of (name, code), or -1 when absent. NORMAL code
// is indexed via the hash table; other code by position in the default cache
// (offset by one so 0 can never be returned for a default-cache hit).
int CodeCache::GetIndex(Object* name, Code* code) {
  if (code->type() == Code::NORMAL) {
    if (normal_type_cache()->IsUndefined()) return -1;
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->GetIndex(Name::cast(name), code->flags());

  FixedArray* array = default_cache();
  int len = array->length();
  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
    if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
// Removes the entry previously located via GetIndex. NORMAL code is removed
// from the hash table; default-cache entries have both slots nulled out.
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
  if (code->type() == Code::NORMAL) {
    DCHECK(!normal_type_cache()->IsUndefined());
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    DCHECK(cache->GetIndex(Name::cast(name), code->flags()) == index);
    cache->RemoveByIndex(index);

    FixedArray* array = default_cache();
    DCHECK(array->length() >= index && array->get(index)->IsCode());
    // Use null instead of undefined for deleted elements to distinguish
    // deleted elements from unused elements. This distinction is used
    // when looking up in the cache and when updating the cache.
    DCHECK_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
    array->set_null(index - 1);  // Name.
    array->set_null(index);      // Code.
// The key in the code cache hash table consists of the property name and the
// code object. The actual match is on the name and the code flags. If a key
// is created using the flags and not a code object it can only be used for
// lookup not to create a new entry.
class CodeCacheHashTableKey : public HashTableKey {
  // Lookup-only key: no code object available, so AsHandle() must not be used.
  CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
      : name_(name), flags_(flags), code_() { }

  // Insertion-capable key: flags are derived from the code object itself.
  CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
      : name_(name), flags_(code->flags()), code_(code) { }

  // A stored entry is a 2-element FixedArray pair (name, code); match on
  // name equality and exact flags.
  bool IsMatch(Object* other) OVERRIDE {
    if (!other->IsFixedArray()) return false;
    FixedArray* pair = FixedArray::cast(other);
    Name* name = Name::cast(pair->get(0));
    Code::Flags flags = Code::cast(pair->get(1))->flags();
    if (flags != flags_) {
    return name_->Equals(name);

  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
    return name->Hash() ^ flags;

  uint32_t Hash() OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }

  uint32_t HashForObject(Object* obj) OVERRIDE {
    FixedArray* pair = FixedArray::cast(obj);
    Name* name = Name::cast(pair->get(0));
    Code* code = Code::cast(pair->get(1));
    return NameFlagsHashHelper(name, code->flags());

  // Materializes the stored entry form: a (name, code) FixedArray pair.
  // Requires the insertion-capable constructor (code_ must be non-empty).
  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    Handle<Code> code = code_.ToHandleChecked();
    Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
    pair->set(0, *name_);
    pair->set(1, *code);

  // TODO(jkummerow): We should be able to get by without this.
  MaybeHandle<Code> code_;
// Returns the Code stored for (name, flags), or undefined when absent.
// The value lives in the slot after the key within the entry.
Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
  DisallowHeapAllocation no_alloc;
  CodeCacheHashTableKey key(handle(name), flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
// Inserts (name, code), growing the table if needed; returns the (possibly
// reallocated) table, which the caller must store back.
Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
    Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
  CodeCacheHashTableKey key(name, code);

  Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);

  int entry = new_cache->FindInsertionEntry(key.Hash());
  Handle<Object> k = key.AsHandle(cache->GetIsolate());

  // Entry layout: key pair at EntryToIndex, code in the following slot.
  new_cache->set(EntryToIndex(entry), *k);
  new_cache->set(EntryToIndex(entry) + 1, *code);
  new_cache->ElementAdded();
// Returns the table entry index for (name, flags), or -1 when absent;
// used to pair with RemoveByIndex.
int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
  DisallowHeapAllocation no_alloc;
  CodeCacheHashTableKey key(handle(name), flags);
  int entry = FindEntry(&key);
  return (entry == kNotFound) ? -1 : entry;
// Deletes the entry at |index| by overwriting both its key and value slots
// with the hole, so probing still works across the removed entry.
void CodeCacheHashTable::RemoveByIndex(int index) {
  Heap* heap = GetHeap();
  set(EntryToIndex(index), heap->the_hole_value());
  set(EntryToIndex(index) + 1, heap->the_hole_value());
// Inserts |code| keyed by the map list and code flags, allocating the
// backing hash table on first use and re-storing it after a possible grow.
void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
                                  MapHandleList* maps,
                                  Handle<Code> code) {
  Isolate* isolate = code_cache->GetIsolate();
  if (code_cache->cache()->IsUndefined()) {
    Handle<PolymorphicCodeCacheHashTable> result =
        PolymorphicCodeCacheHashTable::New(
            PolymorphicCodeCacheHashTable::kInitialSize);
    code_cache->set_cache(*result);
    // This entry shouldn't be contained in the cache yet.
    DCHECK(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
               ->Lookup(maps, flags)->IsUndefined());
  Handle<PolymorphicCodeCacheHashTable> hash_table =
      handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
  Handle<PolymorphicCodeCacheHashTable> new_cache =
      PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
  code_cache->set_cache(*new_cache);
// Returns the cached code for (maps, flags) as a handle, or undefined when
// the cache was never allocated or holds no matching entry.
Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
                                            Code::Flags flags) {
  if (!cache()->IsUndefined()) {
    PolymorphicCodeCacheHashTable* hash_table =
        PolymorphicCodeCacheHashTable::cast(cache());
    return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
  return GetIsolate()->factory()->undefined_value();
// Despite their name, objects of this class are not stored in the actual
// hash table; instead they're temporarily used for lookups. It is therefore
// safe to have a weak (non-owning) pointer to a MapList as a member field.
class PolymorphicCodeCacheHashTableKey : public HashTableKey {
  // Callers must ensure that |maps| outlives the newly constructed object.
  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
        code_flags_(code_flags) {}

  // Matches on code flags plus set-equality of the map lists (order does
  // not matter).
  bool IsMatch(Object* other) OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    FromObject(other, &other_flags, &other_maps);
    if (code_flags_ != other_flags) return false;
    if (maps_->length() != other_maps.length()) return false;
    // Compare just the hashes first because it's faster.
    int this_hash = MapsHashHelper(maps_, code_flags_);
    int other_hash = MapsHashHelper(&other_maps, other_flags);
    if (this_hash != other_hash) return false;

    // Full comparison: for each map in maps_, look for an equivalent map in
    // other_maps. This implementation is slow, but probably good enough for
    // now because the lists are short (<= 4 elements currently).
    for (int i = 0; i < maps_->length(); ++i) {
      bool match_found = false;
      for (int j = 0; j < other_maps.length(); ++j) {
        if (*(maps_->at(i)) == *(other_maps.at(j))) {
      if (!match_found) return false;

  // XOR of map hashes keeps the hash order-independent, matching IsMatch.
  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
    uint32_t hash = code_flags;
    for (int i = 0; i < maps->length(); ++i) {
      hash ^= maps->at(i)->Hash();

  uint32_t Hash() OVERRIDE {
    return MapsHashHelper(maps_, code_flags_);

  uint32_t HashForObject(Object* obj) OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    FromObject(obj, &other_flags, &other_maps);
    return MapsHashHelper(&other_maps, other_flags);

  // Stored-entry form: FixedArray [flags, map0, map1, ...].
  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // The maps in |maps_| must be copied to a newly allocated FixedArray,
    // both because the referenced MapList is short-lived, and because C++
    // objects can't be stored in the heap anyway.
    Handle<FixedArray> list =
        isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
    list->set(0, Smi::FromInt(code_flags_));
    for (int i = 0; i < maps_->length(); ++i) {
      list->set(i + 1, *maps_->at(i));

  // Decodes a stored entry back into (code_flags, maps).
  static MapHandleList* FromObject(Object* obj,
                                   MapHandleList* maps) {
    FixedArray* list = FixedArray::cast(obj);
    *code_flags = Smi::cast(list->get(0))->value();
    for (int i = 1; i < list->length(); ++i) {
      maps->Add(Handle<Map>(Map::cast(list->get(i))));

  MapHandleList* maps_;  // weak.
  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
// Returns the code stored for (maps, code_kind), or undefined when absent.
Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
  DisallowHeapAllocation no_alloc;
  PolymorphicCodeCacheHashTableKey key(maps, code_kind);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
// Inserts |code| keyed by (maps, code_kind), growing the table if needed;
// returns the (possibly reallocated) table for the caller to store back.
Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
    Handle<PolymorphicCodeCacheHashTable> hash_table,
    MapHandleList* maps,
    Handle<Code> code) {
  PolymorphicCodeCacheHashTableKey key(maps, code_kind);
  Handle<PolymorphicCodeCacheHashTable> cache =
      EnsureCapacity(hash_table, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());

  Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());

  // Entry layout: serialized key at EntryToIndex, code in the next slot.
  cache->set(EntryToIndex(entry), *obj);
  cache->set(EntryToIndex(entry) + 1, *code);
  cache->ElementAdded();
// Shrinks this array in place to |new_length| by right-trimming the unused
// tail via the heap; no-op when the length is unchanged.
void FixedArray::Shrink(int new_length) {
  DCHECK(0 <= new_length && new_length <= length());
  if (new_length < length()) {
    GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(
        this, length() - new_length);
// Returns |content| extended with the element keys of |array| (a JSArray or
// sloppy-arguments object), delegating to the array's ElementsAccessor.
// Propagates exceptions from the accessor.
MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
    Handle<FixedArray> content,
    Handle<JSObject> array) {
  DCHECK(array->IsJSArray() || array->HasSloppyArgumentsElements());
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      array->GetIsolate(), result,
      accessor->AddElementsToFixedArray(array, array, content),

#ifdef ENABLE_SLOW_DCHECKS
  // Sanity check: every produced key must be a number or a name.
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      DCHECK(current->IsNumber() || current->IsName());
// Returns the union of the keys in |first| and |second|, using |second|'s
// elements accessor with no receiver/holder (null handles) since both inputs
// are bare FixedArrays. Propagates exceptions from the accessor.
MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
                                                Handle<FixedArray> second) {
  ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      first->GetIsolate(), result,
      accessor->AddElementsToFixedArray(
          Handle<Object>::null(),      // receiver
          Handle<JSObject>::null(),    // holder
          Handle<FixedArrayBase>::cast(second)),

#ifdef ENABLE_SLOW_DCHECKS
  // Sanity check: every produced key must be a number or a name.
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      DCHECK(current->IsNumber() || current->IsName());
// Returns a new FixedArray of |new_length| containing a prefix copy of
// |array| (truncating or leaving a tail of holes/undefined as allocated by
// the factory). Preserves the source's map so specialized fixed-array
// subtypes keep their identity.
Handle<FixedArray> FixedArray::CopySize(
    Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
  Isolate* isolate = array->GetIsolate();
  if (new_length == 0) return isolate->factory()->empty_fixed_array();
  Handle<FixedArray> result =
      isolate->factory()->NewFixedArray(new_length, pretenure);
  // Copy the content.
  DisallowHeapAllocation no_gc;
  int len = array->length();
  if (new_length < len) len = new_length;
  // We are taking the map from the old fixed array so the map is sure to
  // be an immortal immutable object.
  result->set_map_no_write_barrier(array->map());
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, array->get(i), mode);
// Copies |len| elements starting at |pos| into |dest| at |dest_pos|,
// honoring |dest|'s write-barrier requirements.
void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
  for (int index = 0; index < len; index++) {
    dest->set(dest_pos+index, get(pos+index), mode);
// Element-wise identity comparison (same pointers, not deep equality).
bool FixedArray::IsEqualTo(FixedArray* other) {
  if (length() != other->length()) return false;
  for (int i = 0 ; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
// Allocates a DescriptorArray with room for |number_of_descriptors| plus
// |slack| unused entries; returns the canonical empty array for size 0.
// The enum cache starts out cleared (Smi 0).
Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
                                                  int number_of_descriptors,
  DCHECK(0 <= number_of_descriptors);
  Factory* factory = isolate->factory();
  // Do not use DescriptorArray::cast on incomplete object.
  int size = number_of_descriptors + slack;
  if (size == 0) return factory->empty_descriptor_array();
  // Allocate the array of keys.
  Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));

  result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
  result->set(kEnumCacheIndex, Smi::FromInt(0));
  return Handle<DescriptorArray>::cast(result);
// Drops any cached enumeration data; Smi 0 means "no enum cache".
void DescriptorArray::ClearEnumCache() {
  set(kEnumCacheIndex, Smi::FromInt(0));
// Overwrites the descriptor at |index|, preserving the existing sorted-key
// index so the array's sort order bookkeeping stays valid.
void DescriptorArray::Replace(int index, Descriptor* descriptor) {
  descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
  Set(index, descriptor);
// Installs an enum cache via the pre-allocated |bridge_storage| (so this
// never allocates): the bridge holds the key cache and the index cache, and
// the bridge itself is stored in the enum-cache slot.
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
                                   FixedArray* new_cache,
                                   Object* new_index_cache) {
  DCHECK(bridge_storage->length() >= kEnumCacheBridgeLength);
  DCHECK(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
  // A new cache must be strictly larger than a previously installed one.
  DCHECK(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeCacheIndex, new_cache);
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
  set(kEnumCacheIndex, bridge_storage);
// Copies the descriptor at |index| from |src| into the same slot of this
// array; |witness| certifies this array is white so Set() may skip the
// incremental-marking write barrier.
void DescriptorArray::CopyFrom(int index,
                               DescriptorArray* src,
                               const WhitenessWitness& witness) {
  Object* value = src->GetValue(index);
  PropertyDetails details = src->GetDetails(index);
  Descriptor desc(handle(src->GetKey(index)),
                  handle(value, src->GetIsolate()),
  Set(index, &desc, witness);
// We need the whiteness witness since sort will reshuffle the entries in the
// descriptor array. If the descriptor array were to be black, the shuffling
// would move a slot that was already recorded as pointing into an evacuation
// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::Sort() {
  // In-place heap sort over the sorted-key index table, ordered by key hash.
  int len = number_of_descriptors();
  // Reset sorting since the descriptor array might contain invalid pointers.
  for (int i = 0; i < len; ++i) SetSortedKey(i, i);
  // Bottom-up max-heap construction.
  // Index of the last node with children
  const int max_parent_index = (len / 2) - 1;
  for (int i = max_parent_index; i >= 0; --i) {
    int parent_index = i;
    const uint32_t parent_hash = GetSortedKey(i)->Hash();
    // Sift the node down until the max-heap property holds below it.
    while (parent_index <= max_parent_index) {
      int child_index = 2 * parent_index + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < len) {
        // Pick the larger of the two children.
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_hash = right_child_hash;
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      // Now element at child_index could be < its children.
      parent_index = child_index;  // parent_hash remains correct.

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    SwapSortedKeys(0, i);
    // Shift down the new top element.
    int parent_index = 0;
    const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
    // The heap now covers only indices [0, i); sift within that range.
    const int max_parent_index = (i / 2) - 1;
    while (parent_index <= max_parent_index) {
      int child_index = parent_index * 2 + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < i) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_hash = right_child_hash;
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      parent_index = child_index;

  DCHECK(IsSortedNoDuplicates());
// Returns a fresh AccessorPair with the same getter and setter as |pair|.
Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
  Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
  copy->set_getter(pair->getter());
  copy->set_setter(pair->setter());
// Returns the requested getter/setter, translating the internal "hole"
// placeholder (component not set) into undefined for callers.
Object* AccessorPair::GetComponent(AccessorComponent component) {
  Object* accessor = get(component);
  return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
// Allocates input data for |deopt_entry_count| (> 0) deoptimization points,
// sized via LengthFor().
Handle<DeoptimizationInputData> DeoptimizationInputData::New(
    Isolate* isolate, int deopt_entry_count, PretenureFlag pretenure) {
  DCHECK(deopt_entry_count > 0);
  return Handle<DeoptimizationInputData>::cast(
      isolate->factory()->NewFixedArray(LengthFor(deopt_entry_count),
// Allocates output data for |number_of_deopt_points| deoptimization points;
// zero points yields the shared empty fixed array.
Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
    int number_of_deopt_points,
    PretenureFlag pretenure) {
  Handle<FixedArray> result;
  if (number_of_deopt_points == 0) {
    result = isolate->factory()->empty_fixed_array();
    result = isolate->factory()->NewFixedArray(
        LengthOfFixedArray(number_of_deopt_points), pretenure);
  return Handle<DeoptimizationOutputData>::cast(result);
// Slot-wise identity comparison of two descriptor arrays; empty arrays are
// handled first because the canonical empty array has a special layout.
bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
  if (IsEmpty()) return other->IsEmpty();
  if (other->IsEmpty()) return false;
  if (length() != other->length()) return false;
  for (int i = 0; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
// Heuristic sanity check used by robust string traversal: the object must at
// least live inside this isolate's heap.
bool String::LooksValid() {
  if (!GetIsolate()->heap()->Contains(this)) return false;
// Returns a FlatContent describing this string's characters as a contiguous
// one-byte or two-byte vector, unwrapping flattened cons strings and sliced
// strings. Returns a non-flat FlatContent for unflattened cons strings
// (non-empty second part). Must be called with heap allocation disallowed,
// since the returned raw pointers would be invalidated by GC.
String::FlatContent String::GetFlatContent() {
  DCHECK(!AllowHeapAllocation::IsAllowed());
  int length = this->length();
  StringShape shape(this);
  String* string = this;
  // A flattened cons string has an empty second part; follow the first part.
  if (shape.representation_tag() == kConsStringTag) {
    ConsString* cons = ConsString::cast(string);
    if (cons->second()->length() != 0) {
      return FlatContent();
    string = cons->first();
    shape = StringShape(string);
  // A sliced string is a (parent, offset) view; slices never nest.
  if (shape.representation_tag() == kSlicedStringTag) {
    SlicedString* slice = SlicedString::cast(string);
    offset = slice->offset();
    string = slice->parent();
    shape = StringShape(string);
    DCHECK(shape.representation_tag() != kConsStringTag &&
           shape.representation_tag() != kSlicedStringTag);
  if (shape.encoding_tag() == kOneByteStringTag) {
    const uint8_t* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqOneByteString::cast(string)->GetChars();
      start = ExternalOneByteString::cast(string)->GetChars();
    return FlatContent(start + offset, length);
  DCHECK(shape.encoding_tag() == kTwoByteStringTag);
  if (shape.representation_tag() == kSeqStringTag) {
    start = SeqTwoByteString::cast(string)->GetChars();
    start = ExternalTwoByteString::cast(string)->GetChars();
  return FlatContent(start + offset, length);
8053 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8054 RobustnessFlag robust_flag,
8057 int* length_return) {
8058 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8059 return SmartArrayPointer<char>(NULL);
8061 Heap* heap = GetHeap();
8063 // Negative length means the to the end of the string.
8064 if (length < 0) length = kMaxInt - offset;
8066 // Compute the size of the UTF-8 string. Start at the specified offset.
8067 Access<ConsStringIteratorOp> op(
8068 heap->isolate()->objects_string_iterator());
8069 StringCharacterStream stream(this, op.value(), offset);
8070 int character_position = offset;
8072 int last = unibrow::Utf16::kNoPreviousCharacter;
8073 while (stream.HasMore() && character_position++ < offset + length) {
8074 uint16_t character = stream.GetNext();
8075 utf8_bytes += unibrow::Utf8::Length(character, last);
8079 if (length_return) {
8080 *length_return = utf8_bytes;
8083 char* result = NewArray<char>(utf8_bytes + 1);
8085 // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
8086 stream.Reset(this, offset);
8087 character_position = offset;
8088 int utf8_byte_position = 0;
8089 last = unibrow::Utf16::kNoPreviousCharacter;
8090 while (stream.HasMore() && character_position++ < offset + length) {
8091 uint16_t character = stream.GetNext();
8092 if (allow_nulls == DISALLOW_NULLS && character == 0) {
8095 utf8_byte_position +=
8096 unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
8099 result[utf8_byte_position] = 0;
8100 return SmartArrayPointer<char>(result);
8104 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8105 RobustnessFlag robust_flag,
8106 int* length_return) {
8107 return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
8111 const uc16* String::GetTwoByteData(unsigned start) {
8112 DCHECK(!IsOneByteRepresentationUnderneath());
8113 switch (StringShape(this).representation_tag()) {
8115 return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8116 case kExternalStringTag:
8117 return ExternalTwoByteString::cast(this)->
8118 ExternalTwoByteStringGetData(start);
8119 case kSlicedStringTag: {
8120 SlicedString* slice = SlicedString::cast(this);
8121 return slice->parent()->GetTwoByteData(start + slice->offset());
8123 case kConsStringTag:
8132 SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
8133 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8134 return SmartArrayPointer<uc16>();
8136 Heap* heap = GetHeap();
8138 Access<ConsStringIteratorOp> op(
8139 heap->isolate()->objects_string_iterator());
8140 StringCharacterStream stream(this, op.value());
8142 uc16* result = NewArray<uc16>(length() + 1);
8145 while (stream.HasMore()) {
8146 uint16_t character = stream.GetNext();
8147 result[i++] = character;
8150 return SmartArrayPointer<uc16>(result);
8154 const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
8155 return reinterpret_cast<uc16*>(
8156 reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
8160 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8161 Relocatable* current = isolate->relocatable_top();
8162 while (current != NULL) {
8163 current->PostGarbageCollection();
8164 current = current->prev_;
8169 // Reserve space for statics needing saving and restoring.
8170 int Relocatable::ArchiveSpacePerThread() {
8171 return sizeof(Relocatable*); // NOLINT
8175 // Archive statics that are thread-local.
8176 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8177 *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8178 isolate->set_relocatable_top(NULL);
8179 return to + ArchiveSpacePerThread();
8183 // Restore statics that are thread-local.
8184 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8185 isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8186 return from + ArchiveSpacePerThread();
8190 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8191 Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8193 return thread_storage + ArchiveSpacePerThread();
8197 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8198 Iterate(v, isolate->relocatable_top());
8202 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8203 Relocatable* current = top;
8204 while (current != NULL) {
8205 current->IterateInstance(v);
8206 current = current->prev_;
8211 FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
8212 : Relocatable(isolate),
8213 str_(str.location()),
8214 length_(str->length()) {
8215 PostGarbageCollection();
8219 FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
8220 : Relocatable(isolate),
8223 length_(input.length()),
8224 start_(input.start()) {}
8227 void FlatStringReader::PostGarbageCollection() {
8228 if (str_ == NULL) return;
8229 Handle<String> str(str_);
8230 DCHECK(str->IsFlat());
8231 DisallowHeapAllocation no_gc;
8232 // This does not actually prevent the vector from being relocated later.
8233 String::FlatContent content = str->GetFlatContent();
8234 DCHECK(content.IsFlat());
8235 is_one_byte_ = content.IsOneByte();
8237 start_ = content.ToOneByteVector().start();
8239 start_ = content.ToUC16Vector().start();
8244 void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) {
8245 DCHECK(cons_string != NULL);
8246 root_ = cons_string;
8248 // Force stack blown condition to trigger restart.
8250 maximum_depth_ = kStackSize + depth_;
8251 DCHECK(StackBlown());
8255 String* ConsStringIteratorOp::Continue(int* offset_out) {
8256 DCHECK(depth_ != 0);
8257 DCHECK_EQ(0, *offset_out);
8258 bool blew_stack = StackBlown();
8259 String* string = NULL;
8260 // Get the next leaf if there is one.
8261 if (!blew_stack) string = NextLeaf(&blew_stack);
8262 // Restart search from root.
8264 DCHECK(string == NULL);
8265 string = Search(offset_out);
8267 // Ensure future calls return null immediately.
8268 if (string == NULL) Reset(NULL);
8273 String* ConsStringIteratorOp::Search(int* offset_out) {
8274 ConsString* cons_string = root_;
8275 // Reset the stack, pushing the root string.
8278 frames_[0] = cons_string;
8279 const int consumed = consumed_;
8282 // Loop until the string is found which contains the target offset.
8283 String* string = cons_string->first();
8284 int length = string->length();
8286 if (consumed < offset + length) {
8287 // Target offset is in the left branch.
8288 // Keep going if we're still in a ConString.
8289 type = string->map()->instance_type();
8290 if ((type & kStringRepresentationMask) == kConsStringTag) {
8291 cons_string = ConsString::cast(string);
8292 PushLeft(cons_string);
8295 // Tell the stack we're done descending.
8296 AdjustMaximumDepth();
8299 // Update progress through the string.
8301 // Keep going if we're still in a ConString.
8302 string = cons_string->second();
8303 type = string->map()->instance_type();
8304 if ((type & kStringRepresentationMask) == kConsStringTag) {
8305 cons_string = ConsString::cast(string);
8306 PushRight(cons_string);
8309 // Need this to be updated for the current string.
8310 length = string->length();
8311 // Account for the possibility of an empty right leaf.
8312 // This happens only if we have asked for an offset outside the string.
8314 // Reset so future operations will return null immediately.
8318 // Tell the stack we're done descending.
8319 AdjustMaximumDepth();
8320 // Pop stack so next iteration is in correct place.
8323 DCHECK(length != 0);
8324 // Adjust return values and exit.
8325 consumed_ = offset + length;
8326 *offset_out = consumed - offset;
8334 String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) {
8336 // Tree traversal complete.
8338 *blew_stack = false;
8341 // We've lost track of higher nodes.
8347 ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
8348 String* string = cons_string->second();
8349 int32_t type = string->map()->instance_type();
8350 if ((type & kStringRepresentationMask) != kConsStringTag) {
8351 // Pop stack so next iteration is in correct place.
8353 int length = string->length();
8354 // Could be a flattened ConsString.
8355 if (length == 0) continue;
8356 consumed_ += length;
8359 cons_string = ConsString::cast(string);
8360 PushRight(cons_string);
8361 // Need to traverse all the way left.
8364 string = cons_string->first();
8365 type = string->map()->instance_type();
8366 if ((type & kStringRepresentationMask) != kConsStringTag) {
8367 AdjustMaximumDepth();
8368 int length = string->length();
8369 DCHECK(length != 0);
8370 consumed_ += length;
8373 cons_string = ConsString::cast(string);
8374 PushLeft(cons_string);
8382 uint16_t ConsString::ConsStringGet(int index) {
8383 DCHECK(index >= 0 && index < this->length());
8385 // Check for a flattened cons string
8386 if (second()->length() == 0) {
8387 String* left = first();
8388 return left->Get(index);
8391 String* string = String::cast(this);
8394 if (StringShape(string).IsCons()) {
8395 ConsString* cons_string = ConsString::cast(string);
8396 String* left = cons_string->first();
8397 if (left->length() > index) {
8400 index -= left->length();
8401 string = cons_string->second();
8404 return string->Get(index);
8413 uint16_t SlicedString::SlicedStringGet(int index) {
8414 return parent()->Get(offset() + index);
8418 template <typename sinkchar>
8419 void String::WriteToFlat(String* src,
8423 String* source = src;
8427 DCHECK(0 <= from && from <= to && to <= source->length());
8428 switch (StringShape(source).full_representation_tag()) {
8429 case kOneByteStringTag | kExternalStringTag: {
8430 CopyChars(sink, ExternalOneByteString::cast(source)->GetChars() + from,
8434 case kTwoByteStringTag | kExternalStringTag: {
8436 ExternalTwoByteString::cast(source)->GetChars();
8442 case kOneByteStringTag | kSeqStringTag: {
8444 SeqOneByteString::cast(source)->GetChars() + from,
8448 case kTwoByteStringTag | kSeqStringTag: {
8450 SeqTwoByteString::cast(source)->GetChars() + from,
8454 case kOneByteStringTag | kConsStringTag:
8455 case kTwoByteStringTag | kConsStringTag: {
8456 ConsString* cons_string = ConsString::cast(source);
8457 String* first = cons_string->first();
8458 int boundary = first->length();
8459 if (to - boundary >= boundary - from) {
8460 // Right hand side is longer. Recurse over left.
8461 if (from < boundary) {
8462 WriteToFlat(first, sink, from, boundary);
8463 sink += boundary - from;
8469 source = cons_string->second();
8471 // Left hand side is longer. Recurse over right.
8472 if (to > boundary) {
8473 String* second = cons_string->second();
8474 // When repeatedly appending to a string, we get a cons string that
8475 // is unbalanced to the left, a list, essentially. We inline the
8476 // common case of sequential one-byte right child.
8477 if (to - boundary == 1) {
8478 sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
8479 } else if (second->IsSeqOneByteString()) {
8480 CopyChars(sink + boundary - from,
8481 SeqOneByteString::cast(second)->GetChars(),
8485 sink + boundary - from,
8495 case kOneByteStringTag | kSlicedStringTag:
8496 case kTwoByteStringTag | kSlicedStringTag: {
8497 SlicedString* slice = SlicedString::cast(source);
8498 unsigned offset = slice->offset();
8499 WriteToFlat(slice->parent(), sink, from + offset, to + offset);
8508 template <typename SourceChar>
8509 static void CalculateLineEndsImpl(Isolate* isolate,
8510 List<int>* line_ends,
8511 Vector<const SourceChar> src,
8512 bool include_ending_line) {
8513 const int src_len = src.length();
8514 StringSearch<uint8_t, SourceChar> search(isolate, STATIC_CHAR_VECTOR("\n"));
8516 // Find and record line ends.
8518 while (position != -1 && position < src_len) {
8519 position = search.Search(src, position);
8520 if (position != -1) {
8521 line_ends->Add(position);
8523 } else if (include_ending_line) {
8524 // Even if the last line misses a line end, it is counted.
8525 line_ends->Add(src_len);
8532 Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
8533 bool include_ending_line) {
8535 // Rough estimate of line count based on a roughly estimated average
8536 // length of (unpacked) code.
8537 int line_count_estimate = src->length() >> 4;
8538 List<int> line_ends(line_count_estimate);
8539 Isolate* isolate = src->GetIsolate();
8540 { DisallowHeapAllocation no_allocation; // ensure vectors stay valid.
8541 // Dispatch on type of strings.
8542 String::FlatContent content = src->GetFlatContent();
8543 DCHECK(content.IsFlat());
8544 if (content.IsOneByte()) {
8545 CalculateLineEndsImpl(isolate,
8547 content.ToOneByteVector(),
8548 include_ending_line);
8550 CalculateLineEndsImpl(isolate,
8552 content.ToUC16Vector(),
8553 include_ending_line);
8556 int line_count = line_ends.length();
8557 Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
8558 for (int i = 0; i < line_count; i++) {
8559 array->set(i, Smi::FromInt(line_ends[i]));
// Compares the contents of two equally-sized character buffers.
// Thin wrapper that expresses "equal" in terms of the three-way CompareChars.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
                                            const Char* const b,
                                            int length) {
  int diff = CompareChars(a, b, length);
  return diff == 0;
}
8575 template<typename Chars1, typename Chars2>
8576 class RawStringComparator : public AllStatic {
8578 static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8579 DCHECK(sizeof(Chars1) != sizeof(Chars2));
8580 for (int i = 0; i < len; i++) {
8591 class RawStringComparator<uint16_t, uint16_t> {
8593 static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
8594 return CompareRawStringContents(a, b, len);
8600 class RawStringComparator<uint8_t, uint8_t> {
8602 static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
8603 return CompareRawStringContents(a, b, len);
8608 class StringComparator {
8611 explicit inline State(ConsStringIteratorOp* op)
8612 : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}
8614 inline void Init(String* string) {
8615 ConsString* cons_string = String::VisitFlat(this, string);
8616 op_->Reset(cons_string);
8617 if (cons_string != NULL) {
8619 string = op_->Next(&offset);
8620 String::VisitFlat(this, string, offset);
8624 inline void VisitOneByteString(const uint8_t* chars, int length) {
8625 is_one_byte_ = true;
8630 inline void VisitTwoByteString(const uint16_t* chars, int length) {
8631 is_one_byte_ = false;
8636 void Advance(int consumed) {
8637 DCHECK(consumed <= length_);
8639 if (length_ != consumed) {
8641 buffer8_ += consumed;
8643 buffer16_ += consumed;
8645 length_ -= consumed;
8650 String* next = op_->Next(&offset);
8651 DCHECK_EQ(0, offset);
8652 DCHECK(next != NULL);
8653 String::VisitFlat(this, next);
8656 ConsStringIteratorOp* const op_;
8660 const uint8_t* buffer8_;
8661 const uint16_t* buffer16_;
8665 DISALLOW_IMPLICIT_CONSTRUCTORS(State);
8669 inline StringComparator(ConsStringIteratorOp* op_1,
8670 ConsStringIteratorOp* op_2)
8675 template<typename Chars1, typename Chars2>
8676 static inline bool Equals(State* state_1, State* state_2, int to_check) {
8677 const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
8678 const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
8679 return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
8682 bool Equals(String* string_1, String* string_2) {
8683 int length = string_1->length();
8684 state_1_.Init(string_1);
8685 state_2_.Init(string_2);
8687 int to_check = Min(state_1_.length_, state_2_.length_);
8688 DCHECK(to_check > 0 && to_check <= length);
8690 if (state_1_.is_one_byte_) {
8691 if (state_2_.is_one_byte_) {
8692 is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
8694 is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
8697 if (state_2_.is_one_byte_) {
8698 is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
8700 is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
8704 if (!is_equal) return false;
8706 // Exit condition. Strings are equal.
8707 if (length == 0) return true;
8708 state_1_.Advance(to_check);
8709 state_2_.Advance(to_check);
8716 DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
8720 bool String::SlowEquals(String* other) {
8721 DisallowHeapAllocation no_gc;
8722 // Fast check: negative check with lengths.
8724 if (len != other->length()) return false;
8725 if (len == 0) return true;
8727 // Fast check: if hash code is computed for both strings
8728 // a fast negative check can be performed.
8729 if (HasHashCode() && other->HasHashCode()) {
8730 #ifdef ENABLE_SLOW_DCHECKS
8731 if (FLAG_enable_slow_asserts) {
8732 if (Hash() != other->Hash()) {
8733 bool found_difference = false;
8734 for (int i = 0; i < len; i++) {
8735 if (Get(i) != other->Get(i)) {
8736 found_difference = true;
8740 DCHECK(found_difference);
8744 if (Hash() != other->Hash()) return false;
8747 // We know the strings are both non-empty. Compare the first chars
8748 // before we try to flatten the strings.
8749 if (this->Get(0) != other->Get(0)) return false;
8751 if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
8752 const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
8753 const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
8754 return CompareRawStringContents(str1, str2, len);
8757 Isolate* isolate = GetIsolate();
8758 StringComparator comparator(isolate->objects_string_compare_iterator_a(),
8759 isolate->objects_string_compare_iterator_b());
8761 return comparator.Equals(this, other);
8765 bool String::SlowEquals(Handle<String> one, Handle<String> two) {
8766 // Fast check: negative check with lengths.
8767 int one_length = one->length();
8768 if (one_length != two->length()) return false;
8769 if (one_length == 0) return true;
8771 // Fast check: if hash code is computed for both strings
8772 // a fast negative check can be performed.
8773 if (one->HasHashCode() && two->HasHashCode()) {
8774 #ifdef ENABLE_SLOW_DCHECKS
8775 if (FLAG_enable_slow_asserts) {
8776 if (one->Hash() != two->Hash()) {
8777 bool found_difference = false;
8778 for (int i = 0; i < one_length; i++) {
8779 if (one->Get(i) != two->Get(i)) {
8780 found_difference = true;
8784 DCHECK(found_difference);
8788 if (one->Hash() != two->Hash()) return false;
8791 // We know the strings are both non-empty. Compare the first chars
8792 // before we try to flatten the strings.
8793 if (one->Get(0) != two->Get(0)) return false;
8795 one = String::Flatten(one);
8796 two = String::Flatten(two);
8798 DisallowHeapAllocation no_gc;
8799 String::FlatContent flat1 = one->GetFlatContent();
8800 String::FlatContent flat2 = two->GetFlatContent();
8802 if (flat1.IsOneByte() && flat2.IsOneByte()) {
8803 return CompareRawStringContents(flat1.ToOneByteVector().start(),
8804 flat2.ToOneByteVector().start(),
8807 for (int i = 0; i < one_length; i++) {
8808 if (flat1.Get(i) != flat2.Get(i)) return false;
8815 bool String::MarkAsUndetectable() {
8816 if (StringShape(this).IsInternalized()) return false;
8818 Map* map = this->map();
8819 Heap* heap = GetHeap();
8820 if (map == heap->string_map()) {
8821 this->set_map(heap->undetectable_string_map());
8823 } else if (map == heap->one_byte_string_map()) {
8824 this->set_map(heap->undetectable_one_byte_string_map());
8827 // Rest cannot be marked as undetectable
8832 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
8833 int slen = length();
8834 // Can't check exact length equality, but we can check bounds.
8835 int str_len = str.length();
8836 if (!allow_prefix_match &&
8838 str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
8842 unsigned remaining_in_str = static_cast<unsigned>(str_len);
8843 const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
8844 for (i = 0; i < slen && remaining_in_str > 0; i++) {
8845 unsigned cursor = 0;
8846 uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
8847 DCHECK(cursor > 0 && cursor <= remaining_in_str);
8848 if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
8849 if (i > slen - 1) return false;
8850 if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
8851 if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
8853 if (Get(i) != r) return false;
8855 utf8_data += cursor;
8856 remaining_in_str -= cursor;
8858 return (allow_prefix_match || i == slen) && remaining_in_str == 0;
8862 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
8863 int slen = length();
8864 if (str.length() != slen) return false;
8865 DisallowHeapAllocation no_gc;
8866 FlatContent content = GetFlatContent();
8867 if (content.IsOneByte()) {
8868 return CompareChars(content.ToOneByteVector().start(),
8869 str.start(), slen) == 0;
8871 for (int i = 0; i < slen; i++) {
8872 if (Get(i) != static_cast<uint16_t>(str[i])) return false;
8878 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
8879 int slen = length();
8880 if (str.length() != slen) return false;
8881 DisallowHeapAllocation no_gc;
8882 FlatContent content = GetFlatContent();
8883 if (content.IsTwoByte()) {
8884 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
8886 for (int i = 0; i < slen; i++) {
8887 if (Get(i) != str[i]) return false;
8893 uint32_t String::ComputeAndSetHash() {
8894 // Should only be called if hash code has not yet been computed.
8895 DCHECK(!HasHashCode());
8897 // Store the hash code in the object.
8898 uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
8899 set_hash_field(field);
8901 // Check the hash code is there.
8902 DCHECK(HasHashCode());
8903 uint32_t result = field >> kHashShift;
8904 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
8909 bool String::ComputeArrayIndex(uint32_t* index) {
8910 int length = this->length();
8911 if (length == 0 || length > kMaxArrayIndexSize) return false;
8912 ConsStringIteratorOp op;
8913 StringCharacterStream stream(this, &op);
8914 return StringToArrayIndex(&stream, index);
8918 bool String::SlowAsArrayIndex(uint32_t* index) {
8919 if (length() <= kMaxCachedArrayIndexLength) {
8920 Hash(); // force computation of hash code
8921 uint32_t field = hash_field();
8922 if ((field & kIsNotArrayIndexMask) != 0) return false;
8923 // Isolate the array index form the full hash field.
8924 *index = ArrayIndexValueBits::decode(field);
8927 return ComputeArrayIndex(index);
8932 Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
8933 int new_size, old_size;
8934 int old_length = string->length();
8935 if (old_length <= new_length) return string;
8937 if (string->IsSeqOneByteString()) {
8938 old_size = SeqOneByteString::SizeFor(old_length);
8939 new_size = SeqOneByteString::SizeFor(new_length);
8941 DCHECK(string->IsSeqTwoByteString());
8942 old_size = SeqTwoByteString::SizeFor(old_length);
8943 new_size = SeqTwoByteString::SizeFor(new_length);
8946 int delta = old_size - new_size;
8948 Address start_of_string = string->address();
8949 DCHECK_OBJECT_ALIGNED(start_of_string);
8950 DCHECK_OBJECT_ALIGNED(start_of_string + new_size);
8952 Heap* heap = string->GetHeap();
8953 NewSpace* newspace = heap->new_space();
8954 if (newspace->Contains(start_of_string) &&
8955 newspace->top() == start_of_string + old_size) {
8956 // Last allocated object in new space. Simply lower allocation top.
8957 newspace->set_top(start_of_string + new_size);
8959 // Sizes are pointer size aligned, so that we can use filler objects
8960 // that are a multiple of pointer size.
8961 heap->CreateFillerObjectAt(start_of_string + new_size, delta);
8963 heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
8965 // We are storing the new length using release store after creating a filler
8966 // for the left-over space to avoid races with the sweeper thread.
8967 string->synchronized_set_length(new_length);
8969 if (new_length == 0) return heap->isolate()->factory()->empty_string();
8974 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
8975 // For array indexes mix the length into the hash as an array index could
8978 DCHECK(length <= String::kMaxArrayIndexSize);
8979 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
8980 (1 << String::kArrayIndexValueBits));
8982 value <<= String::ArrayIndexValueBits::kShift;
8983 value |= length << String::ArrayIndexLengthBits::kShift;
8985 DCHECK((value & String::kIsNotArrayIndexMask) == 0);
8986 DCHECK((length > String::kMaxCachedArrayIndexLength) ||
8987 (value & String::kContainsCachedArrayIndexMask) == 0);
8992 uint32_t StringHasher::GetHashField() {
8993 if (length_ <= String::kMaxHashCalcLength) {
8994 if (is_array_index_) {
8995 return MakeArrayIndexHash(array_index_, length_);
8997 return (GetHashCore(raw_running_hash_) << String::kHashShift) |
8998 String::kIsNotArrayIndexMask;
9000 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
9005 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
9007 int* utf16_length_out) {
9008 int vector_length = chars.length();
9009 // Handle some edge cases
9010 if (vector_length <= 1) {
9011 DCHECK(vector_length == 0 ||
9012 static_cast<uint8_t>(chars.start()[0]) <=
9013 unibrow::Utf8::kMaxOneByteChar);
9014 *utf16_length_out = vector_length;
9015 return HashSequentialString(chars.start(), vector_length, seed);
9017 // Start with a fake length which won't affect computation.
9018 // It will be updated later.
9019 StringHasher hasher(String::kMaxArrayIndexSize, seed);
9020 unsigned remaining = static_cast<unsigned>(vector_length);
9021 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
9022 int utf16_length = 0;
9023 bool is_index = true;
9024 DCHECK(hasher.is_array_index_);
9025 while (remaining > 0) {
9026 unsigned consumed = 0;
9027 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
9028 DCHECK(consumed > 0 && consumed <= remaining);
9030 remaining -= consumed;
9031 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
9032 utf16_length += is_two_characters ? 2 : 1;
9033 // No need to keep hashing. But we do need to calculate utf16_length.
9034 if (utf16_length > String::kMaxHashCalcLength) continue;
9035 if (is_two_characters) {
9036 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
9037 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
9038 hasher.AddCharacter(c1);
9039 hasher.AddCharacter(c2);
9040 if (is_index) is_index = hasher.UpdateIndex(c1);
9041 if (is_index) is_index = hasher.UpdateIndex(c2);
9043 hasher.AddCharacter(c);
9044 if (is_index) is_index = hasher.UpdateIndex(c);
9047 *utf16_length_out = static_cast<int>(utf16_length);
9048 // Must set length here so that hash computation is correct.
9049 hasher.length_ = utf16_length;
9050 return hasher.GetHashField();
9054 void String::PrintOn(FILE* file) {
9055 int length = this->length();
9056 for (int i = 0; i < length; i++) {
9057 PrintF(file, "%c", Get(i));
9063 // For performance reasons we only hash the 3 most variable fields of a map:
9064 // constructor, prototype and bit_field2.
9066 // Shift away the tag.
9067 int hash = (static_cast<uint32_t>(
9068 reinterpret_cast<uintptr_t>(constructor())) >> 2);
9070 // XOR-ing the prototype and constructor directly yields too many zero bits
9071 // when the two pointers are close (which is fairly common).
9072 // To avoid this we shift the prototype 4 bits relatively to the constructor.
9073 hash ^= (static_cast<uint32_t>(
9074 reinterpret_cast<uintptr_t>(prototype())) << 2);
9076 return hash ^ (hash >> 16) ^ bit_field2();
9080 static bool CheckEquivalent(Map* first, Map* second) {
9082 first->constructor() == second->constructor() &&
9083 first->prototype() == second->prototype() &&
9084 first->instance_type() == second->instance_type() &&
9085 first->bit_field() == second->bit_field() &&
9086 first->bit_field2() == second->bit_field2() &&
9087 first->is_frozen() == second->is_frozen() &&
9088 first->has_instance_call_handler() == second->has_instance_call_handler();
9092 bool Map::EquivalentToForTransition(Map* other) {
9093 return CheckEquivalent(this, other);
9097 bool Map::EquivalentToForNormalization(Map* other,
9098 PropertyNormalizationMode mode) {
9099 int properties = mode == CLEAR_INOBJECT_PROPERTIES
9100 ? 0 : other->inobject_properties();
9101 return CheckEquivalent(this, other) && inobject_properties() == properties;
9105 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9106 // Unfortunately the serializer relies on pointers within an object being
9107 // visited in-order, so we have to iterate both the code and heap pointers in
9108 // the small section before doing so in the extended section.
9109 for (int s = 0; s <= final_section(); ++s) {
9110 LayoutSection section = static_cast<LayoutSection>(s);
9111 ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR,
9113 while (!code_iter.is_finished()) {
9114 v->VisitCodeEntry(reinterpret_cast<Address>(
9115 RawFieldOfElementAt(code_iter.next_index())));
9118 ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR,
9120 while (!heap_iter.is_finished()) {
9121 v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
9127 void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
9128 Type type[] = { CODE_PTR, HEAP_PTR };
9129 Address default_value[] = {
9130 isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
9131 reinterpret_cast<Address>(isolate->heap()->undefined_value()) };
9133 for (int i = 0; i < 2; ++i) {
9134 for (int s = 0; s <= final_section(); ++s) {
9135 LayoutSection section = static_cast<LayoutSection>(s);
9136 if (number_of_entries(type[i], section) > 0) {
9137 int offset = OffsetOfElementAt(first_index(type[i], section));
9139 reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
9141 number_of_entries(type[i], section));
9148 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9149 // Iterate over all fields in the body but take care in dealing with
9151 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9152 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9153 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
9157 void JSFunction::MarkForOptimization() {
9158 DCHECK(!IsOptimized());
9159 DCHECK(shared()->allows_lazy_compilation() ||
9160 code()->optimizable());
9161 DCHECK(!shared()->is_generator());
9162 set_code_no_write_barrier(
9163 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9164 // No write barrier required, since the builtin is part of the root set.
// Marks this function for concurrent (background-thread) optimized
// recompilation by installing the CompileOptimizedConcurrent builtin.
// Requires concurrent recompilation to be enabled on the isolate, and the
// same non-optimized/non-generator preconditions as MarkForOptimization.
9168 void JSFunction::MarkForConcurrentOptimization() {
9169 DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9170 DCHECK(!IsOptimized());
9171 DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
9172 DCHECK(!shared()->is_generator());
9173 DCHECK(GetIsolate()->concurrent_recompilation_enabled());
// Optional tracing for --trace-concurrent-recompilation.
9174 if (FLAG_trace_concurrent_recompilation) {
9175 PrintF(" ** Marking ");
9177 PrintF(" for concurrent recompilation.\n");
9179 set_code_no_write_barrier(
9180 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9181 // No write barrier required, since the builtin is part of the root set.
// Marks this function as queued for concurrent optimization by installing
// the InOptimizationQueue builtin. Only reachable from the
// concurrent-recompilation builtin, hence the debugger/state DCHECKs below.
9185 void JSFunction::MarkInOptimizationQueue() {
9186 // We can only arrive here via the concurrent-recompilation builtin. If
9187 // break points were set, the code would point to the lazy-compile builtin.
9188 DCHECK(!GetIsolate()->DebuggerHasBreakPoints());
9189 DCHECK(IsMarkedForConcurrentOptimization() && !IsOptimized());
9190 DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
9191 DCHECK(GetIsolate()->concurrent_recompilation_enabled());
// Optional tracing for --trace-concurrent-recompilation.
9192 if (FLAG_trace_concurrent_recompilation) {
9193 PrintF(" ** Queueing ");
9195 PrintF(" for concurrent recompilation.\n");
9197 set_code_no_write_barrier(
9198 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9199 // No write barrier required, since the builtin is part of the root set.
// Creates a new closure from |function|'s SharedFunctionInfo and context,
// copying over function bindings (for bound functions) and, when needed,
// the prototype of the original function's map.
9203 Handle<JSFunction> JSFunction::CloneClosure(Handle<JSFunction> function) {
9204 Isolate* isolate = function->GetIsolate();
9205 Handle<Map> map(function->map());
9206 Handle<SharedFunctionInfo> shared(function->shared());
9207 Handle<Context> context(function->context());
9208 Handle<JSFunction> clone =
9209 isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);
// Bound functions carry their bindings array; copy it to the clone.
9211 if (shared->bound()) {
9212 clone->set_function_bindings(function->function_bindings());
9215 // In the typical case, __proto__ of ``function`` is the default Function
9216 // prototype, which means that SetPrototype below is a no-op.
9217 // In rare cases when that is not true, we mutate the clone's __proto__.
9218 Handle<Object> original_prototype(map->prototype(), isolate);
9219 if (*original_prototype != clone->map()->prototype()) {
9220 JSObject::SetPrototype(clone, original_prototype, false).Assert();
// Appends an entry {native_context, code, literals, osr_ast_id} to the
// shared function info's optimized code map, creating the map on first use
// (optimized_code_map() holds Smi 0 when empty). Each entry occupies
// kEntryLength (4) slots; duplicates for the same context/osr id are
// disallowed (see the SearchOptimizedCodeMap DCHECK below).
9227 void SharedFunctionInfo::AddToOptimizedCodeMap(
9228 Handle<SharedFunctionInfo> shared,
9229 Handle<Context> native_context,
9231 Handle<FixedArray> literals,
9232 BailoutId osr_ast_id) {
9233 Isolate* isolate = shared->GetIsolate();
9234 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
9235 DCHECK(native_context->IsNativeContext());
9236 STATIC_ASSERT(kEntryLength == 4);
9237 Handle<FixedArray> new_code_map;
9238 Handle<Object> value(shared->optimized_code_map(), isolate);
// A Smi means "no map yet": allocate a fresh array of kInitialLength.
9240 if (value->IsSmi()) {
9241 // No optimized code map.
9242 DCHECK_EQ(0, Smi::cast(*value)->value());
9243 // Create 3 entries per context {context, code, literals}.
9244 new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
9245 old_length = kEntriesStart;
9247 // Copy old map and append one new entry.
9248 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
9249 DCHECK_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
9250 old_length = old_code_map->length();
9251 new_code_map = FixedArray::CopySize(
9252 old_code_map, old_length + kEntryLength);
9253 // Zap the old map for the sake of the heap verifier.
9254 if (Heap::ShouldZapGarbage()) {
9255 Object** data = old_code_map->data_start();
9256 MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
// Write the new entry's four fields at the end of the (possibly new) map.
9259 new_code_map->set(old_length + kContextOffset, *native_context);
9260 new_code_map->set(old_length + kCachedCodeOffset, *code);
9261 new_code_map->set(old_length + kLiteralsOffset, *literals);
9262 new_code_map->set(old_length + kOsrAstIdOffset,
9263 Smi::FromInt(osr_ast_id.ToInt()));
// Debug-mode sanity pass over the whole map: every entry must be
// {native context, optimized code, literals array, Smi osr id}.
9266 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9267 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
9268 DCHECK(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9269 DCHECK(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9270 Code::OPTIMIZED_FUNCTION);
9271 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9272 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9275 shared->set_optimized_code_map(*new_code_map);
// Returns the cached literals array stored one slot past |index| in the
// optimized code map. |index| is the code-slot index as returned by
// SearchOptimizedCodeMap, so index + 1 is the literals slot.
9279 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9280 DCHECK(index > kEntriesStart);
9281 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9283 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9284 DCHECK_NE(NULL, cached_literals);
9285 return cached_literals;
// Returns the cached optimized Code object stored at |index| in the
// optimized code map (|index| as returned by SearchOptimizedCodeMap).
9291 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9292 DCHECK(index > kEntriesStart);
9293 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9294 Code* code = Code::cast(code_map->get(index));
9295 DCHECK_NE(NULL, code);
// Drops the optimized code map entirely, resetting the field to Smi 0.
// If the map is currently enqueued with the code flusher (its next-map
// link slot is in use), it is evicted from the flusher first.
9300 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9301 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9303 // If the next map link slot is already used then the function was
9304 // enqueued with code flushing and we remove it now.
9305 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9306 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9307 flusher->EvictOptimizedCodeMap(this);
9310 DCHECK(code_map->get(kNextMapIndex)->IsUndefined());
9311 set_optimized_code_map(Smi::FromInt(0));
// Removes every entry whose cached code equals |optimized_code| from the
// optimized code map, compacting the surviving entries to the front and
// right-trimming the array. |reason| is only used for --trace-opt output.
// No-op when there is no map (field holds a Smi). Runs without allocation.
9315 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
9316 const char* reason) {
9317 DisallowHeapAllocation no_gc;
9318 if (optimized_code_map()->IsSmi()) return;
9320 FixedArray* code_map = FixedArray::cast(optimized_code_map());
// Classic compaction scan: |src| walks all entries, |dst| tracks where
// the next surviving entry is written.
9321 int dst = kEntriesStart;
9322 int length = code_map->length();
9323 for (int src = kEntriesStart; src < length; src += kEntryLength) {
9324 DCHECK(code_map->get(src)->IsNativeContext());
9325 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
9326 // Evict the src entry by not copying it to the dst entry.
9327 if (FLAG_trace_opt) {
9328 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9330 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
9334 PrintF(" (osr ast id %d)]\n", osr.ToInt());
9338 // Keep the src entry by copying it to the dst entry.
9340 code_map->set(dst + kContextOffset,
9341 code_map->get(src + kContextOffset));
9342 code_map->set(dst + kCachedCodeOffset,
9343 code_map->get(src + kCachedCodeOffset));
9344 code_map->set(dst + kLiteralsOffset,
9345 code_map->get(src + kLiteralsOffset));
9346 code_map->set(dst + kOsrAstIdOffset,
9347 code_map->get(src + kOsrAstIdOffset));
9349 dst += kEntryLength;
// If anything was evicted, trim the tail; if the map became empty
// (only the header entries remain), clear it altogether.
9352 if (dst != length) {
9353 // Always trim even when array is cleared because of heap verifier.
9354 GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(code_map, length - dst);
9355 if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
// Shrinks the optimized code map by |shrink_by| slots (a whole number of
// entries) from the end, called from GC. Clears the map completely when
// only the header slots remain.
9360 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9361 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9362 DCHECK(shrink_by % kEntryLength == 0);
9363 DCHECK(shrink_by <= code_map->length() - kEntriesStart);
9364 // Always trim even when array is cleared because of heap verifier.
9365 GetHeap()->RightTrimFixedArray<Heap::FROM_GC>(code_map, shrink_by);
9366 if (code_map->length() == kEntriesStart) {
9367 ClearOptimizedCodeMap();
// Prepares |object| for use as a prototype. Global objects and global
// proxies are left untouched. In FAST_PROTOTYPE mode, not-yet-optimized
// objects are first normalized (making JSFunction properties CONSTANT),
// migrated back to fast properties, and given a fresh map that is flagged
// as a prototype map.
9372 void JSObject::OptimizeAsPrototype(Handle<JSObject> object,
9373 PrototypeOptimizationMode mode) {
9374 if (object->IsGlobalObject()) return;
9375 if (object->IsJSGlobalProxy()) return;
9376 if (mode == FAST_PROTOTYPE && !object->map()->is_prototype_map()) {
9377 // First normalize to ensure all JSFunctions are CONSTANT.
9378 JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, 0);
9380 if (!object->HasFastProperties()) {
9381 JSObject::MigrateSlowToFast(object, 0);
// Give the object its own map (copied) so marking it as a prototype map
// does not affect other objects sharing the original map.
9383 if (mode == FAST_PROTOTYPE && object->HasFastProperties() &&
9384 !object->map()->is_prototype_map()) {
9385 Handle<Map> new_map = Map::Copy(handle(object->map()));
9386 JSObject::MigrateToMap(object, new_map);
9387 object->map()->set_is_prototype_map(true);
// Re-runs prototype optimization, but only for objects already marked as
// prototypes; all other objects are left alone.
9392 void JSObject::ReoptimizeIfPrototype(Handle<JSObject> object) {
9393 if (!object->map()->is_prototype_map()) return;
9394 OptimizeAsPrototype(object, FAST_PROTOTYPE);
// Builds and caches, in the native context, one initial JSArray map per
// fast elements kind. Starting from |initial_map| (which must have the
// initial fast elements kind), each subsequent map is either reused from
// an existing elements transition or created as a new transitioned copy.
9398 Handle<Object> CacheInitialJSArrayMaps(
9399 Handle<Context> native_context, Handle<Map> initial_map) {
9400 // Replace all of the cached initial array maps in the native context with
9401 // the appropriate transitioned elements kind maps.
9402 Factory* factory = native_context->GetIsolate()->factory();
9403 Handle<FixedArray> maps = factory->NewFixedArrayWithHoles(
9404 kElementsKindCount, TENURED);
9406 Handle<Map> current_map = initial_map;
9407 ElementsKind kind = current_map->elements_kind();
9408 DCHECK(kind == GetInitialFastElementsKind());
9409 maps->set(kind, *current_map);
// Walk the fast elements kind sequence, chaining transitions.
9410 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
9411 i < kFastElementsKindCount; ++i) {
9412 Handle<Map> new_map;
9413 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
9414 if (current_map->HasElementsTransition()) {
// Reuse an already-existing transition map for the next kind.
9415 new_map = handle(current_map->elements_transition_map());
9416 DCHECK(new_map->elements_kind() == next_kind);
9418 new_map = Map::CopyAsElementsKind(
9419 current_map, next_kind, INSERT_TRANSITION);
9421 maps->set(next_kind, *new_map);
9422 current_map = new_map;
9424 native_context->set_js_array_maps(*maps);
// Installs |value| as the prototype used for instances constructed by
// |function|. If the function already has an initial map, that map (or a
// copy of it) is updated; otherwise the value is parked in the
// prototype_or_initial_map field until an initial map is needed.
// Finishes by clearing the isolate's instanceof cache, since prototype
// identity feeds instanceof results.
9429 void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
9430 Handle<Object> value) {
9431 Isolate* isolate = function->GetIsolate();
9433 DCHECK(value->IsJSReceiver());
9435 // Now some logic for the maps of the objects that are created by using this
9436 // function as a constructor.
9437 if (function->has_initial_map()) {
9438 // If the function has allocated the initial map replace it with a
9439 // copy containing the new prototype. Also complete any in-object
9440 // slack tracking that is in progress at this point because it is
9441 // still tracking the old copy.
9442 if (function->IsInobjectSlackTrackingInProgress()) {
9443 function->CompleteInobjectSlackTracking();
9446 Handle<Map> initial_map(function->initial_map(), isolate);
9448 if (!initial_map->GetIsolate()->bootstrapper()->IsActive() &&
9449 initial_map->instance_type() == JS_OBJECT_TYPE) {
9450 // Put the value in the initial map field until an initial map is needed.
9451 // At that point, a new initial map is created and the prototype is put
9452 // into the initial map where it belongs.
9453 function->set_prototype_or_initial_map(*value);
9455 Handle<Map> new_map = Map::Copy(initial_map);
9456 JSFunction::SetInitialMap(function, new_map, value);
9458 // If the function is used as the global Array function, cache the
9459 // initial map (and transitioned versions) in the native context.
9460 Context* native_context = function->context()->native_context();
9461 Object* array_function =
9462 native_context->get(Context::ARRAY_FUNCTION_INDEX);
9463 if (array_function->IsJSFunction() &&
9464 *function == JSFunction::cast(array_function)) {
9465 CacheInitialJSArrayMaps(handle(native_context, isolate), new_map);
9469 // Deoptimize all code that embeds the previous initial map.
9470 initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
9471 isolate, DependentCode::kInitialMapChangedGroup);
9473 // Put the value in the initial map field until an initial map is
9474 // needed. At that point, a new initial map is created and the
9475 // prototype is put into the initial map where it belongs.
9476 function->set_prototype_or_initial_map(*value);
// Prototype changes invalidate cached instanceof results.
9478 isolate->heap()->ClearInstanceofCache();
// Sets the "prototype" property of |function|. Per ECMA-262 13.2.2, a
// non-JSReceiver value is stored on a copied map (flagged as a
// non-instance prototype) and construction falls back to the initial
// object prototype; a JSReceiver value is used directly.
9482 void JSFunction::SetPrototype(Handle<JSFunction> function,
9483 Handle<Object> value) {
9484 DCHECK(function->should_have_prototype());
9485 Handle<Object> construct_prototype = value;
9487 // If the value is not a JSReceiver, store the value in the map's
9488 // constructor field so it can be accessed. Also, set the prototype
9489 // used for constructing objects to the original object prototype.
9490 // See ECMA-262 13.2.2.
9491 if (!value->IsJSReceiver()) {
9492 // Copy the map so this does not affect unrelated functions.
9493 // Remove map transitions because they point to maps with a
9494 // different prototype.
9495 Handle<Map> new_map = Map::Copy(handle(function->map()));
9497 JSObject::MigrateToMap(function, new_map);
9498 new_map->set_constructor(*value);
9499 new_map->set_non_instance_prototype(true);
9500 Isolate* isolate = new_map->GetIsolate();
9501 construct_prototype = handle(
9502 isolate->context()->native_context()->initial_object_prototype(),
9505 function->map()->set_non_instance_prototype(false);
// Delegate the actual instance-prototype bookkeeping.
9508 return SetInstancePrototype(function, construct_prototype);
// Switches this function to the strict-mode-appropriate
// "function without prototype" map and clears the prototype slot (sets it
// to the hole). Returns true if the function already had the no-prototype
// map; otherwise requires the function to still be on the plain
// sloppy/strict function map.
9512 bool JSFunction::RemovePrototype() {
9513 Context* native_context = context()->native_context();
9514 Map* no_prototype_map = shared()->strict_mode() == SLOPPY
9515 ? native_context->sloppy_function_without_prototype_map()
9516 : native_context->strict_function_without_prototype_map();
9518 if (map() == no_prototype_map) return true;
// Only functions on the default function map may drop their prototype.
9521 if (map() != (shared()->strict_mode() == SLOPPY
9522 ? native_context->sloppy_function_map()
9523 : native_context->strict_function_map())) {
9528 set_map(no_prototype_map);
9529 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
// Wires up |map| as |function|'s initial map: the prototype (optimized as
// a prototype object if it is a JSObject) is installed on the map, the map
// on the function, and the function as the map's constructor.
9534 void JSFunction::SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
9535 Handle<Object> prototype) {
9536 if (prototype->IsJSObject()) {
9537 Handle<JSObject> js_proto = Handle<JSObject>::cast(prototype);
9538 JSObject::OptimizeAsPrototype(js_proto, FAST_PROTOTYPE);
9540 map->set_prototype(*prototype);
9541 function->set_prototype_or_initial_map(*map);
9542 map->set_constructor(*function);
// Lazily creates the function's initial map if it does not have one yet.
// Generators get a fixed-size JSGeneratorObject map; ordinary functions
// get a JS_OBJECT_TYPE map sized from the shared info's expected property
// count. The prototype is fetched or allocated, the map linked via
// SetInitialMap, and in-object slack tracking started for non-generators.
9546 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
9547 if (function->has_initial_map()) return;
9548 Isolate* isolate = function->GetIsolate();
9550 // First create a new map with the size and number of in-object properties
9551 // suggested by the function.
9552 InstanceType instance_type;
9554 int in_object_properties;
9555 if (function->shared()->is_generator()) {
9556 instance_type = JS_GENERATOR_OBJECT_TYPE;
9557 instance_size = JSGeneratorObject::kSize;
9558 in_object_properties = 0;
9560 instance_type = JS_OBJECT_TYPE;
9561 instance_size = function->shared()->CalculateInstanceSize();
9562 in_object_properties = function->shared()->CalculateInObjectProperties();
9564 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
9566 // Fetch or allocate prototype.
9567 Handle<Object> prototype;
9568 if (function->has_instance_prototype()) {
9569 prototype = handle(function->instance_prototype(), isolate);
9571 prototype = isolate->factory()->NewFunctionPrototype(function);
// All in-object slots start out unused; slack tracking may shrink them.
9573 map->set_inobject_properties(in_object_properties);
9574 map->set_unused_property_fields(in_object_properties);
9575 DCHECK(map->has_fast_object_elements());
9577 // Finally link initial map and constructor function.
9578 JSFunction::SetInitialMap(function, map, Handle<JSReceiver>::cast(prototype));
9580 if (!function->shared()->is_generator()) {
9581 function->StartInobjectSlackTracking();
// Forwards the instance class name to the shared function info.
9586 void JSFunction::SetInstanceClassName(String* name) {
9587 shared()->set_instance_class_name(name);
// Prints the function's debug name (UTF-8) to |out|.
9591 void JSFunction::PrintName(FILE* out) {
9592 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9593 PrintF(out, "%s", name.get());
// Extracts the native context stored at a fixed slot of a literals array.
9597 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
9598 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
9602 // The filter is a pattern that matches function names in this way:
9603 //   "*"      all; the default
9604 //   "-"      all but the top-level function
9605 //   "-name"  all but the function "name"
9606 //   ""       only the top-level function
9607 //   "name"   only the function "name"
9608 //   "name*"  only functions starting with "name"
9609 //   "~"      none; the tilde is not an identifier
// Returns true when this function's debug name matches |raw_filter|
// according to the table above. A leading '-' negates the match; a
// trailing '*' makes it a prefix match.
9610 bool JSFunction::PassesFilter(const char* raw_filter) {
9611 if (*raw_filter == '*') return true;
9612 String* name = shared()->DebugName();
9613 Vector<const char> filter = CStrVector(raw_filter);
// Empty filter matches only the (nameless) top-level function.
9614 if (filter.length() == 0) return name->length() == 0;
9615 if (filter[0] == '-') {
9617 if (filter.length() == 1) {
9618 return (name->length() != 0);
9619 } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
// "-name*": negated prefix match (skip the '-', drop the '*').
9622 if (filter[filter.length() - 1] == '*' &&
9623 name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
9628 } else if (name->IsUtf8EqualTo(filter)) {
// "name*": positive prefix match (drop the trailing '*').
9631 if (filter[filter.length() - 1] == '*' &&
9632 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
// Initializes an Oddball (undefined, null, true, false, ...) with its
// internalized string representation, numeric conversion value, and kind.
9639 void Oddball::Initialize(Isolate* isolate,
9640 Handle<Oddball> oddball,
9641 const char* to_string,
9642 Handle<Object> to_number,
// Internalize the string so identity comparisons work for oddball names.
9644 Handle<String> internalized_to_string =
9645 isolate->factory()->InternalizeUtf8String(to_string);
9646 oddball->set_to_string(*internalized_to_string);
9647 oddball->set_to_number(*to_number);
9648 oddball->set_kind(kind);
// Lazily computes and caches the script's line-ends array. Idempotent:
// returns immediately if line_ends() is already set. Scripts with no
// string source get an empty FixedArray. The computed array is marked
// copy-on-write (fixed_cow_array_map) before caching.
9652 void Script::InitLineEnds(Handle<Script> script) {
9653 if (!script->line_ends()->IsUndefined()) return;
9655 Isolate* isolate = script->GetIsolate();
9657 if (!script->source()->IsString()) {
9658 DCHECK(script->source()->IsUndefined());
9659 Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
9660 script->set_line_ends(*empty);
9661 DCHECK(script->line_ends()->IsFixedArray());
9665 Handle<String> src(String::cast(script->source()), isolate);
9667 Handle<FixedArray> array = String::CalculateLineEnds(src, true);
// The shared empty array already has the right map; only freshly
// allocated arrays need the COW map installed.
9669 if (*array != isolate->heap()->empty_fixed_array()) {
9670 array->set_map(isolate->heap()->fixed_cow_array_map());
9673 script->set_line_ends(*array);
9674 DCHECK(script->line_ends()->IsFixedArray());
// Returns the 0-based column of |code_pos| within its line, adjusted by
// the script's column offset on the first line. Returns -1 when the line
// number cannot be determined.
9678 int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
9679 int line_number = GetLineNumber(script, code_pos);
9680 if (line_number == -1) return -1;
9682 DisallowHeapAllocation no_allocation;
9683 FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
9684 line_number = line_number - script->line_offset()->value();
// First line of the script: account for the embedding column offset.
9685 if (line_number == 0) return code_pos + script->column_offset()->value();
// Otherwise the column is the distance past the previous line's end.
9686 int prev_line_end_pos =
9687 Smi::cast(line_ends_array->get(line_number - 1))->value();
9688 return code_pos - (prev_line_end_pos + 1);
// Maps |code_pos| to a line number by binary-searching the cached
// line-ends array. Returns -1 when the array is empty; the result
// includes the script's line offset. Requires line_ends() to be a
// FixedArray (i.e. InitLineEnds has run).
9692 int Script::GetLineNumberWithArray(int code_pos) {
9693 DisallowHeapAllocation no_allocation;
9694 DCHECK(line_ends()->IsFixedArray());
9695 FixedArray* line_ends_array = FixedArray::cast(line_ends());
9696 int line_ends_len = line_ends_array->length();
9697 if (line_ends_len == 0) return -1;
// Fast path: position is on (or before the end of) the first line.
9699 if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
9700 return line_offset()->value();
// Binary search: narrow [left, right) until the half size reaches zero.
9704 int right = line_ends_len;
9705 while (int half = (right - left) / 2) {
9706 if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
9712 return right + line_offset()->value();
// Handle-based line lookup: ensures line ends are computed, then searches.
9716 int Script::GetLineNumber(Handle<Script> script, int code_pos) {
9717 InitLineEnds(script);
9718 return script->GetLineNumberWithArray(code_pos);
// Allocation-free line lookup. Uses the cached line-ends array when
// available; otherwise falls back to counting '\n' characters in the
// source up to |code_pos|. Returns -1 when there is no string source.
9722 int Script::GetLineNumber(int code_pos) {
9723 DisallowHeapAllocation no_allocation;
9724 if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);
9726 // Slow mode: we do not have line_ends. We have to iterate through source.
9727 if (!source()->IsString()) return -1;
9729 String* source_string = String::cast(source());
9731 int len = source_string->length();
9732 for (int pos = 0; pos < len; pos++) {
9733 if (pos == code_pos) break;
9734 if (source_string->Get(pos) == '\n') line++;
// Invokes the JS "nameOrSourceURL" method on the script's wrapper object
// and returns its result. Returns undefined if the call throws, without
// inspecting or clearing any already-pending exception.
9740 Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
9741 Isolate* isolate = script->GetIsolate();
9742 Handle<String> name_or_source_url_key =
9743 isolate->factory()->InternalizeOneByteString(
9744 STATIC_CHAR_VECTOR("nameOrSourceURL"));
9745 Handle<JSObject> script_wrapper = Script::GetWrapper(script);
9746 Handle<Object> property = Object::GetProperty(
9747 script_wrapper, name_or_source_url_key).ToHandleChecked();
9748 DCHECK(property->IsJSFunction());
9749 Handle<JSFunction> method = Handle<JSFunction>::cast(property);
9750 Handle<Object> result;
9751 // Do not check against pending exception, since this function may be called
9752 // when an exception has already been pending.
9753 if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
9754 return isolate->factory()->undefined_value();
9760 // Wrappers for scripts are kept alive and cached in weak global
9761 // handles referred from foreign objects held by the scripts as long as
9762 // they are used. When they are not used anymore, the garbage
9763 // collector will call the weak callback on the global handle
9764 // associated with the wrapper and get rid of both the wrapper and the
// handle itself.
// Weak callback: the GC parameter is the global-handle location; follow it
// to the JSValue wrapper and clear the owning script's wrapper cache.
9766 static void ClearWrapperCacheWeakCallback(
9767 const v8::WeakCallbackData<v8::Value, void>& data) {
9768 Object** location = reinterpret_cast<Object**>(data.GetParameter());
9769 JSValue* wrapper = JSValue::cast(*location);
9770 Script::cast(wrapper->value())->ClearWrapperCache();
// Drops the cached wrapper: zeroes the foreign address that pointed at the
// weak global handle, destroys that handle, and decrements the
// script-wrappers counter.
9774 void Script::ClearWrapperCache() {
9775 Foreign* foreign = wrapper();
9776 Object** location = reinterpret_cast<Object**>(foreign->foreign_address());
9777 DCHECK_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location));
9778 foreign->set_foreign_address(0);
9779 GlobalHandles::Destroy(location);
9780 GetIsolate()->counters()->script_wrappers()->Decrement();
// Returns the JSValue wrapper for |script|, reusing the cached one when
// present. Otherwise constructs a new wrapper, stores the script as its
// value, and caches it through a weak global handle whose location is
// recorded in the script's foreign wrapper field; the weak callback clears
// the cache when the wrapper dies.
9784 Handle<JSObject> Script::GetWrapper(Handle<Script> script) {
9785 if (script->wrapper()->foreign_address() != NULL) {
9786 // Return a handle for the existing script wrapper from the cache.
9787 return Handle<JSValue>(
9788 *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address()));
9790 Isolate* isolate = script->GetIsolate();
9791 // Construct a new script wrapper.
9792 isolate->counters()->script_wrappers()->Increment();
9793 Handle<JSFunction> constructor = isolate->script_function();
9794 Handle<JSValue> result =
9795 Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));
9797 result->set_value(*script);
9799 // Create a new weak global handle and use it to cache the wrapper
9800 // for future use. The cache will automatically be cleared by the
9801 // garbage collector when it is not used anymore.
9802 Handle<Object> handle = isolate->global_handles()->Create(*result);
9803 GlobalHandles::MakeWeak(handle.location(),
9804 reinterpret_cast<void*>(handle.location()),
9805 &ClearWrapperCacheWeakCallback);
9806 script->wrapper()->set_foreign_address(
9807 reinterpret_cast<Address>(handle.location()));
// Returns the best available name for debugging: the function's own name
// if it is a non-empty string, otherwise the inferred name.
9812 String* SharedFunctionInfo::DebugName() {
9814 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
9815 return String::cast(n);
// True when the function belongs to a script whose source is available.
// reinterpret_cast (not Script::cast) avoids assertions on this hot path.
9819 bool SharedFunctionInfo::HasSourceCode() const {
9820 return !script()->IsUndefined() &&
9821 !reinterpret_cast<Script*>(script())->source()->IsUndefined();
// Returns this function's source as a substring of the script source
// (from start_position to end_position), or undefined when no source.
9825 Handle<Object> SharedFunctionInfo::GetSourceCode() {
9826 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
9827 Handle<String> source(String::cast(Script::cast(script())->source()));
9828 return GetIsolate()->factory()->NewSubString(
9829 source, start_position(), end_position());
// Decides whether this function may be inlined: it must have a script,
// optimization must not be disabled, and if unoptimized code exists it
// must still be flagged optimizable. Functions never compiled yet
// (code kind != FUNCTION) are optimistically considered inlineable.
9833 bool SharedFunctionInfo::IsInlineable() {
9834 // Check that the function has a script associated with it.
9835 if (!script()->IsScript()) return false;
9836 if (optimization_disabled()) return false;
9837 // If we never ran this (unlikely) then lets try to optimize it.
9838 if (code()->kind() != Code::FUNCTION) return true;
9839 return code()->optimizable();
// Length of this function's source extent in characters.
9843 int SharedFunctionInfo::SourceSize() {
9844 return end_position() - start_position();
// Computes the byte size for instances of this function: header plus one
// pointer per expected property, capped at JSObject::kMaxInstanceSize.
9848 int SharedFunctionInfo::CalculateInstanceSize() {
9850 JSObject::kHeaderSize +
9851 expected_nof_properties() * kPointerSize;
9852 if (instance_size > JSObject::kMaxInstanceSize) {
9853 instance_size = JSObject::kMaxInstanceSize;
9855 return instance_size;
// Number of in-object property slots implied by the instance size.
9859 int SharedFunctionInfo::CalculateInObjectProperties() {
9860 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
9864 // Output the source code without any allocation in the heap.
// Streams the function's source (optionally truncated to max_length, with
// a trailing "...") to |os|. Designed to be safe during stack dumps:
// avoids checked casts and heap allocation.
9865 OStream& operator<<(OStream& os, const SourceCodeOf& v) {
9866 const SharedFunctionInfo* s = v.value;
9867 // For some native functions there is no source.
9868 if (!s->HasSourceCode()) return os << "<No Source>";
9870 // Get the source for the script which this function came from.
9871 // Don't use String::cast because we don't want more assertion errors while
9872 // we are already creating a stack dump.
9873 String* script_source =
9874 reinterpret_cast<String*>(Script::cast(s->script())->source());
9876 if (!script_source->LooksValid()) return os << "<Invalid Source>";
9878 if (!s->is_toplevel()) {
// Print the function's own name (if any) before its body.
9880 Object* name = s->name();
9881 if (name->IsString() && String::cast(name)->length() > 0) {
9882 String::cast(name)->PrintUC16(os);
// Print the full body if it fits the limit (or no limit); otherwise
// truncate at max_length and append an ellipsis.
9886 int len = s->end_position() - s->start_position();
9887 if (len <= v.max_length || v.max_length < 0) {
9888 script_source->PrintUC16(os, s->start_position(), s->end_position());
9891 script_source->PrintUC16(os, s->start_position(),
9892 s->start_position() + v.max_length);
9893 return os << "...\n";
// Two Code objects are considered equivalent when their instruction sizes
// match and their relocation info byte arrays are identical (same length,
// memcmp-equal contents).
9898 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
9899 if (code->instruction_size() != recompiled->instruction_size()) return false;
9900 ByteArray* code_relocation = code->relocation_info();
9901 ByteArray* recompiled_relocation = recompiled->relocation_info();
9902 int length = code_relocation->length();
9903 if (length != recompiled_relocation->length()) return false;
9904 int compare = memcmp(code_relocation->GetDataStartAddress(),
9905 recompiled_relocation->GetDataStartAddress(),
9907 return compare == 0;
// Adds deoptimization support to this function's current code. If the
// recompiled code is byte-equivalent to the current code, only the deopt
// data is copied over (preserving IC state); otherwise the current code is
// replaced wholesale by the recompiled version.
9911 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
9912 DCHECK(!has_deoptimization_support());
9913 DisallowHeapAllocation no_allocation;
9914 Code* code = this->code();
9915 if (IsCodeEquivalent(code, recompiled)) {
9916 // Copy the deoptimization data from the recompiled code.
9917 code->set_deoptimization_data(recompiled->deoptimization_data());
9918 code->set_has_deoptimization_support(true);
9920 // TODO(3025757): In case the recompiled isn't equivalent to the
9921 // old code, we have to replace it. We should try to avoid this
9922 // altogether because it flushes valuable type feedback by
9923 // effectively resetting all IC state.
9924 ReplaceCode(recompiled);
9926 DCHECK(has_deoptimization_support());
9930 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
9931 // Disable optimization for the shared function info and mark the
9932 // code as non-optimizable. The marker on the shared function info
9933 // is there because we flush non-optimized code thereby losing the
9934 // non-optimizable information for the code. When the code is
9935 // regenerated and set on the shared function info it is marked as
9936 // non-optimizable if optimization is disabled for the shared
// function info.
9938 set_optimization_disabled(true);
9939 set_bailout_reason(reason);
9940 // Code should be the lazy compilation stub or else unoptimized. If the
9941 // latter, disable optimization for the code too.
9942 DCHECK(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
9943 if (code()->kind() == Code::FUNCTION) {
9944 code()->set_optimizable(false);
// Report to the profiler and optionally trace the bailout reason.
9946 PROFILE(GetIsolate(), CodeDisableOptEvent(code(), this));
9947 if (FLAG_trace_opt) {
9948 PrintF("[disabled optimization for ");
9950 PrintF(", reason: %s]\n", GetBailoutReason(reason));
// Debug helper: verifies that |id| exists in the unoptimized code's
// deoptimization output data (GetOutputInfo asserts internally).
// Always returns true so it can be used inside DCHECK.
9955 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
9956 DCHECK(!id.IsNone());
9957 Code* unoptimized = code();
9958 DeoptimizationOutputData* data =
9959 DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
9960 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
9962 return true; // Return true if there was no DCHECK.
// Begins in-object slack tracking on the function's initial map, giving
// the map kGenerousAllocationCount constructions before unused in-object
// slots are reclaimed. Runs at most once per map, is disabled when
// --clever-optimizations is off or the serializer is active, and is
// skipped when the map has no unused property fields to reclaim.
9966 void JSFunction::StartInobjectSlackTracking() {
9967 DCHECK(has_initial_map() && !IsInobjectSlackTrackingInProgress());
9969 if (!FLAG_clever_optimizations) return;
9970 Map* map = initial_map();
9972 // Only initiate the tracking the first time.
9973 if (map->done_inobject_slack_tracking()) return;
9974 map->set_done_inobject_slack_tracking(true);
9976 // No tracking during the snapshot construction phase.
9977 Isolate* isolate = GetIsolate();
9978 if (isolate->serializer_enabled()) return;
9980 if (map->unused_property_fields() == 0) return;
9982 map->set_construction_count(kGenerousAllocationCount);
// Resets per-context state when this function is reused in a new context:
// clears inline caches (and the type feedback vector they are synced
// with), stamps the new IC age, resets profiler ticks and the deopt
// count, and re-enables optimization if it was only disabled because the
// opt-count limit was hit.
9986 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
9987 code()->ClearInlineCaches();
9988 // If we clear ICs, we need to clear the type feedback vector too, since
9989 // CallICs are synced with a feedback vector slot.
9990 ClearTypeFeedbackInfo();
9991 set_ic_age(new_ic_age);
9992 if (code()->kind() == Code::FUNCTION) {
9993 code()->set_profiler_ticks(0);
9994 if (optimization_disabled() &&
9995 opt_count() >= FLAG_max_opt_count) {
9996 // Re-enable optimizations if they were disabled due to opt_count limit.
9997 set_optimization_disabled(false);
9998 code()->set_optimizable(true);
10001 set_deopt_count(0);
// Transition-tree visitor: |data| points to a running minimum; lower it
// to this map's unused-property-field count when that is smaller.
10006 static void GetMinInobjectSlack(Map* map, void* data) {
10007 int slack = map->unused_property_fields();
10008 if (*reinterpret_cast<int*>(data) > slack) {
10009 *reinterpret_cast<int*>(data) = slack;
// Transition-tree visitor: shrinks a map by |*data| property slots,
// reducing in-object properties, unused fields, and instance size alike.
10014 static void ShrinkInstanceSize(Map* map, void* data) {
10015 int slack = *reinterpret_cast<int*>(data);
10016 map->set_inobject_properties(map->inobject_properties() - slack);
10017 map->set_unused_property_fields(map->unused_property_fields() - slack);
10018 map->set_instance_size(map->instance_size() - slack * kPointerSize);
10020 // Visitor id might depend on the instance size, recalculate it.
10021 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
// Finishes in-object slack tracking: computes the minimum slack over the
// initial map's whole transition tree, then shrinks every map in the tree
// by that amount so no instance wastes unused in-object slots.
10025 void JSFunction::CompleteInobjectSlackTracking() {
10026 DCHECK(has_initial_map());
10027 Map* map = initial_map();
10029 DCHECK(map->done_inobject_slack_tracking());
10030 map->set_construction_count(kNoSlackTracking);
// First pass finds the smallest reclaimable slack across all maps.
10032 int slack = map->unused_property_fields();
10033 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
10035 // Resize the initial map and all maps in its transition tree.
10036 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
// Looks up cached optimized code for {native_context, osr_ast_id} in the
// optimized code map. Returns the index of the code slot on a hit, or -1
// when caching is disabled, the map is empty, or no entry matches.
// Runs without allocation.
10041 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10042 BailoutId osr_ast_id) {
10043 DisallowHeapAllocation no_gc;
10044 DCHECK(native_context->IsNativeContext());
10045 if (!FLAG_cache_optimized_code) return -1;
10046 Object* value = optimized_code_map();
10047 if (!value->IsSmi()) {
10048 FixedArray* optimized_code_map = FixedArray::cast(value);
10049 int length = optimized_code_map->length();
10050 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
// Linear scan over fixed-size entries; both context and OSR id must match.
10051 for (int i = kEntriesStart; i < length; i += kEntryLength) {
10052 if (optimized_code_map->get(i + kContextOffset) == native_context &&
10053 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10054 return i + kCachedCodeOffset;
10057 if (FLAG_trace_opt) {
10058 PrintF("[didn't find optimized code in optimized code map for ");
// Tables of visitor-synchronization tag values and their printable names,
// both generated from VISITOR_SYNCHRONIZATION_TAGS_LIST via a temporary
// DECLARE_TAG macro (redefined between the two tables).
10067 #define DECLARE_TAG(ignore1, name, ignore2) name,
10068 const char* const VisitorSynchronization::kTags[
10069 VisitorSynchronization::kNumberOfSyncTags] = {
10070 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
10075 #define DECLARE_TAG(ignore1, ignore2, name) name,
10076 const char* const VisitorSynchronization::kTagNames[
10077 VisitorSynchronization::kNumberOfSyncTags] = {
10078 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Default handling of a code-target reloc entry: visit the target Code
// object as an ordinary pointer. The CHECK enforces that the visitor did
// not move it, since this default path cannot write the change back.
10083 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10084 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
10085 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10086 Object* old_target = target;
10087 VisitPointer(&target);
10088 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Default handling of a code-age sequence: visit its code-age stub as an
// ordinary pointer.
10092 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10093 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10094 Object* stub = rinfo->code_age_stub();
10096 VisitPointer(&stub);
10101 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10102 Object* code = Code::GetObjectFromEntryAddress(entry_address);
10103 Object* old_code = code;
10104 VisitPointer(&code);
10105 if (code != old_code) {
10106 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
10111 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10112 DCHECK(rinfo->rmode() == RelocInfo::CELL);
10113 Object* cell = rinfo->target_cell();
10114 Object* old_cell = cell;
10115 VisitPointer(&cell);
10116 if (cell != old_cell) {
10117 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
10122 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10123 DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10124 rinfo->IsPatchedReturnSequence()) ||
10125 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10126 rinfo->IsPatchedDebugBreakSlotSequence()));
10127 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10128 Object* old_target = target;
10129 VisitPointer(&target);
10130 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
10134 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10135 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10136 Object* p = rinfo->target_object();
10141 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10142 Address p = rinfo->target_reference();
10143 VisitExternalReference(&p);
// Drops this Code object's relocation information entirely after first
// clearing the heap references it contains.
10147 void Code::InvalidateRelocation() {
10148 InvalidateEmbeddedObjects();
10149 set_relocation_info(GetHeap()->empty_byte_array());
// Overwrites every embedded object with undefined and every embedded cell
// with the undefined cell, so the code no longer keeps them alive.
// SKIP_WRITE_BARRIER is safe because the targets are immortal roots.
10153 void Code::InvalidateEmbeddedObjects() {
10154 Object* undefined = GetHeap()->undefined_value();
10155 Cell* undefined_cell = GetHeap()->undefined_cell();
10156 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10157 RelocInfo::ModeMask(RelocInfo::CELL);
10158 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10159 RelocInfo::Mode mode = it.rinfo()->rmode();
10160 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10161 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
10162 } else if (mode == RelocInfo::CELL) {
10163 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
// Applies a relocation delta to every pc-relative entry, then flushes the
// instruction cache once at the end (individual writes skip the flush).
10169 void Code::Relocate(intptr_t delta) {
10170 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10171 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10173 CpuFeatures::FlushICache(instruction_start(), instruction_size());
// Copies freshly assembled code (instructions + reloc info) from a CodeDesc
// into this Code object, then patches every relocation entry so handles are
// unboxed into direct pointers and pc-relative data is shifted by the move.
// The icache is flushed once at the end; all individual patches skip it.
10177 void Code::CopyFrom(const CodeDesc& desc) {
10178 DCHECK(Marking::Color(this) == Marking::WHITE_OBJECT);
// Instructions sit at the front of the buffer ...
10181 CopyBytes(instruction_start(), desc.buffer,
10182 static_cast<size_t>(desc.instr_size));
// ... while reloc info grows backwards from the end of the buffer.
10185 CopyBytes(relocation_start(),
10186 desc.buffer + desc.buffer_size - desc.reloc_size,
10187 static_cast<size_t>(desc.reloc_size));
10189 // unbox handles and relocate
10190 intptr_t delta = instruction_start() - desc.buffer;
10191 int mode_mask = RelocInfo::kCodeTargetMask |
10192 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10193 RelocInfo::ModeMask(RelocInfo::CELL) |
10194 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
10195 RelocInfo::kApplyMask;
10196 // Needed to find target_object and runtime_entry on X64
10197 Assembler* origin = desc.origin;
10198 AllowDeferredHandleDereference embedding_raw_address;
10199 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10200 RelocInfo::Mode mode = it.rinfo()->rmode();
10201 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10202 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10203 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10204 } else if (mode == RelocInfo::CELL) {
10205 Handle<Cell> cell = it.rinfo()->target_cell_handle();
10206 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10207 } else if (RelocInfo::IsCodeTarget(mode)) {
10208 // rewrite code handles in inline cache targets to direct
10209 // pointers to the first instruction in the code object
10210 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10211 Code* code = Code::cast(*p);
10212 it.rinfo()->set_target_address(code->instruction_start(),
10213 SKIP_WRITE_BARRIER,
10214 SKIP_ICACHE_FLUSH);
10215 } else if (RelocInfo::IsRuntimeEntry(mode)) {
10216 Address p = it.rinfo()->target_runtime_entry(origin);
10217 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER,
10218 SKIP_ICACHE_FLUSH);
10219 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
10220 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
10221 Code* code = Code::cast(*p);
10222 it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH);
// Remaining kApplyMask entries are pc-relative; shift them by delta.
10224 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10227 CpuFeatures::FlushICache(instruction_start(), instruction_size());
10231 // Locate the source position which is closest to the address in the code. This
10232 // is using the source position information embedded in the relocation info.
10233 // The position returned is relative to the beginning of the script where the
10234 // source for this function is found.
10235 int Code::SourcePosition(Address pc) {
10236 int distance = kMaxInt;
10237 int position = RelocInfo::kNoPosition; // Initially no position found.
10238 // Run through all the relocation info to find the best matching source
10239 // position. All the code needs to be considered as the sequence of the
10240 // instructions in the code does not necessarily follow the same order as the
10242 RelocIterator it(this, RelocInfo::kPositionMask);
10243 while (!it.done()) {
10244 // Only look at positions after the current pc.
10245 if (it.rinfo()->pc() < pc) {
10246 // Get position and distance.
10248 int dist = static_cast<int>(pc - it.rinfo()->pc());
10249 int pos = static_cast<int>(it.rinfo()->data());
10250 // If this position is closer than the current candidate or if it has the
10251 // same distance as the current candidate and the position is higher then
10252 // this position is the new candidate.
10253 if ((dist < distance) ||
10254 (dist == distance && pos > position)) {
10265 // Same as Code::SourcePosition above except it only looks for statement
// positions; returns the closest statement position at or before the
// plain source position found for pc (0 when none precedes it).
10267 int Code::SourceStatementPosition(Address pc) {
10268 // First find the position as close as possible using all position
10270 int position = SourcePosition(pc);
10271 // Now find the closest statement position before the position.
10272 int statement_position = 0;
10273 RelocIterator it(this, RelocInfo::kPositionMask);
10274 while (!it.done()) {
10275 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10276 int p = static_cast<int>(it.rinfo()->data());
10277 if (statement_position < p && p <= position) {
10278 statement_position = p;
10283 return statement_position;
// Returns the safepoint entry covering pc in this code's safepoint table.
10287 SafepointEntry Code::GetSafepointEntry(Address pc) {
10288 SafepointTable table(this);
10289 return table.FindEntry(pc);
// Scans this IC stub's embedded objects and returns the n-th one whose map
// equals match_map (counting down via --n).
10293 Object* Code::FindNthObject(int n, Map* match_map) {
10294 DCHECK(is_inline_cache_stub());
10295 DisallowHeapAllocation no_allocation;
10296 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10297 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10298 RelocInfo* info = it.rinfo();
10299 Object* object = info->target_object();
10300 if (object->IsHeapObject()) {
10301 if (HeapObject::cast(object)->map() == match_map) {
10302 if (--n == 0) return object;
// First embedded AllocationSite, or NULL if none is embedded.
10310 AllocationSite* Code::FindFirstAllocationSite() {
10311 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10312 return (result != NULL) ? AllocationSite::cast(result) : NULL;
// First embedded Map (objects whose map is the meta map), or NULL.
10316 Map* Code::FindFirstMap() {
10317 Object* result = FindNthObject(1, GetHeap()->meta_map());
10318 return (result != NULL) ? Map::cast(result) : NULL;
// Walks embedded objects and, each time one's map matches the current
// pattern entry, replaces it; returns once all pattern entries are used.
// The patterns must occur in the code in find_[] order.
10322 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
10323 DCHECK(is_inline_cache_stub() || is_handler());
10324 DisallowHeapAllocation no_allocation;
10325 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10326 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
10327 int current_pattern = 0;
10328 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10329 RelocInfo* info = it.rinfo();
10330 Object* object = info->target_object();
10331 if (object->IsHeapObject()) {
10332 Map* map = HeapObject::cast(object)->map();
10333 if (map == *pattern.find_[current_pattern]) {
10334 info->set_target_object(*pattern.replace_[current_pattern]);
10335 if (++current_pattern == pattern.count_) return;
// Collects every Map embedded in this IC stub into |maps|.
10343 void Code::FindAllMaps(MapHandleList* maps) {
10344 DCHECK(is_inline_cache_stub());
10345 DisallowHeapAllocation no_allocation;
10346 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10347 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10348 RelocInfo* info = it.rinfo();
10349 Object* object = info->target_object();
10350 if (object->IsMap()) maps->Add(handle(Map::cast(object)));
// Returns the first code target of kind HANDLER referenced by this IC stub.
10355 Code* Code::FindFirstHandler() {
10356 DCHECK(is_inline_cache_stub());
10357 DisallowHeapAllocation no_allocation;
10358 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10359 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10360 RelocInfo* info = it.rinfo();
10361 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10362 if (code->kind() == Code::HANDLER) return code;
// Collects up to |length| handler code targets into |code_list|; returns
// true only when exactly |length| handlers were found.
10368 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
10369 DCHECK(is_inline_cache_stub());
10370 DisallowHeapAllocation no_allocation;
10371 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10373 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10374 if (i == length) return true;
10375 RelocInfo* info = it.rinfo();
10376 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10377 // IC stubs with handlers never contain non-handler code objects before
10378 // handler targets.
10379 if (code->kind() != Code::HANDLER) break;
10380 code_list->Add(Handle<Code>(code));
10383 return i == length;
// Finds the handler paired with |map|: the reloc stream interleaves
// embedded maps with their handler code targets, so the first code target
// after the matching map is the handler.  Empty handle when not found.
10387 MaybeHandle<Code> Code::FindHandlerForMap(Map* map) {
10388 DCHECK(is_inline_cache_stub());
10389 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10390 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10391 bool return_next = false;
10392 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10393 RelocInfo* info = it.rinfo();
10394 if (info->rmode() == RelocInfo::EMBEDDED_OBJECT) {
10395 Object* object = info->target_object();
10396 if (object == map) return_next = true;
10397 } else if (return_next) {
10398 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10399 DCHECK(code->kind() == Code::HANDLER);
10400 return handle(code);
10403 return MaybeHandle<Code>();
// Returns the first Name embedded in this IC stub.
10407 Name* Code::FindFirstName() {
10408 DCHECK(is_inline_cache_stub());
10409 DisallowHeapAllocation no_allocation;
10410 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10411 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10412 RelocInfo* info = it.rinfo();
10413 Object* object = info->target_object();
10414 if (object->IsName()) return Name::cast(object);
// Clears all inline caches in this code (no kind filter).
10420 void Code::ClearInlineCaches() {
10421 ClearInlineCaches(NULL);
// Clears only inline caches of the given kind.
10425 void Code::ClearInlineCaches(Code::Kind kind) {
10426 ClearInlineCaches(&kind);
// Shared implementation: walks all call/construct targets and resets each
// IC stub back to its initial state; |kind| == NULL means "all kinds".
10430 void Code::ClearInlineCaches(Code::Kind* kind) {
10431 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10432 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10433 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
10434 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10435 RelocInfo* info = it.rinfo();
10436 Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10437 if (target->is_inline_cache_stub()) {
10438 if (kind == NULL || *kind == target->kind()) {
10439 IC::Clear(this->GetIsolate(), info->pc(),
10440 info->host()->constant_pool());
// Resets this function's type feedback vector slots to the uninitialized
// sentinel, except AllocationSites which are deliberately preserved.
10447 void SharedFunctionInfo::ClearTypeFeedbackInfo() {
10448 TypeFeedbackVector* vector = feedback_vector();
10449 Heap* heap = GetHeap();
10450 int length = vector->length();
10452 for (int i = 0; i < length; i++) {
10453 Object* obj = vector->get(i);
10454 if (obj->IsHeapObject()) {
10455 InstanceType instance_type =
10456 HeapObject::cast(obj)->map()->instance_type();
10457 switch (instance_type) {
10458 case ALLOCATION_SITE_TYPE:
10459 // AllocationSites are not cleared because they do not store
10460 // information that leaks.
// Other heap objects are replaced with the sentinel; the sentinel is a
// root, so the write barrier can be skipped.
10464 vector->set(i, TypeFeedbackVector::RawUninitializedSentinel(heap),
10465 SKIP_WRITE_BARRIER);
// Maps a back-edge pc offset to its ast id via the back-edge table of a
// full-codegen FUNCTION; BailoutId::None() when the offset has no entry.
10472 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
10473 DisallowHeapAllocation no_gc;
10474 DCHECK(kind() == FUNCTION);
10475 BackEdgeTable back_edges(this, &no_gc);
10476 for (uint32_t i = 0; i < back_edges.length(); i++) {
10477 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10479 return BailoutId::None();
// Inverse lookup: ast id -> back-edge pc offset.  Unlike the forward
// direction, a missing entry is a bug (UNREACHABLE).
10483 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10484 DisallowHeapAllocation no_gc;
10485 DCHECK(kind() == FUNCTION);
10486 BackEdgeTable back_edges(this, &no_gc);
10487 for (uint32_t i = 0; i < back_edges.length(); i++) {
10488 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10490 UNREACHABLE(); // We expect to find the back edge.
// Code-aging helpers.  Ages are recorded by patching the platform-specific
// code-age sequence at the start of the code object.

// Rejuvenates a code-age sequence back to "no age".
10495 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10496 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
// Marks a sequence as having been executed (at least) once.
10500 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
10501 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
10502 NO_MARKING_PARITY);
// Maps the special execution-count ages onto the regular age scale.
10506 static Code::Age EffectiveAge(Code::Age age) {
10507 if (age == Code::kNotExecutedCodeAge) {
10508 // Treat that's never been executed as old immediately.
10509 age = Code::kIsOldCodeAge;
10510 } else if (age == Code::kExecutedOnceCodeAge) {
10511 // Pre-age code that has only been executed once.
10512 age = Code::kPreAgedCodeAge;
// Advances this code's age by one step, but only when the stored marking
// parity differs from the current GC's parity (so a code object ages at
// most once per GC cycle) and the maximum age has not been reached.
10518 void Code::MakeOlder(MarkingParity current_parity) {
10519 byte* sequence = FindCodeAgeSequence();
10520 if (sequence != NULL) {
10522 MarkingParity code_parity;
10523 Isolate* isolate = GetIsolate();
10524 GetCodeAgeAndParity(isolate, sequence, &age, &code_parity);
10525 age = EffectiveAge(age);
10526 if (age != kLastCodeAge && code_parity != current_parity) {
10527 PatchPlatformCodeAge(isolate,
10529 static_cast<Age>(age + 1),
// True when the effective age has reached the "old" threshold.
10536 bool Code::IsOld() {
10537 return GetAge() >= kIsOldCodeAge;
// Returns the address of the code-age sequence, or NULL when aging is
// disabled, the prologue offset is unset, or the code kind is not aged
// (only optimized code and debug-slot-free full code are aged).
10541 byte* Code::FindCodeAgeSequence() {
10542 return FLAG_age_code &&
10543 prologue_offset() != Code::kPrologueOffsetNotSet &&
10544 (kind() == OPTIMIZED_FUNCTION ||
10545 (kind() == FUNCTION && !has_debug_break_slots()))
10546 ? instruction_start() + prologue_offset()
// Age after folding the execution-count pseudo-ages (see EffectiveAge).
10551 Code::Age Code::GetAge() {
10552 return EffectiveAge(GetRawAge());
// Raw stored age; kNoAgeCodeAge when this code has no age sequence.
10556 Code::Age Code::GetRawAge() {
10557 byte* sequence = FindCodeAgeSequence();
10558 if (sequence == NULL) {
10559 return kNoAgeCodeAge;
10562 MarkingParity parity;
10563 GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity);
// Decodes age and marking parity from the age stub a code object points at:
// each (age, parity) combination has its own builtin stub, so identity
// comparison against every candidate recovers both values.
10568 void Code::GetCodeAgeAndParity(Code* code, Age* age,
10569 MarkingParity* parity) {
10570 Isolate* isolate = code->GetIsolate();
10571 Builtins* builtins = isolate->builtins();
// Expands to an identity test against the even- and odd-parity stubs for
// each age in CODE_AGE_LIST.
10573 #define HANDLE_CODE_AGE(AGE) \
10574 stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
10575 if (code == stub) { \
10576 *age = k##AGE##CodeAge; \
10577 *parity = EVEN_MARKING_PARITY; \
10580 stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10581 if (code == stub) { \
10582 *age = k##AGE##CodeAge; \
10583 *parity = ODD_MARKING_PARITY; \
10586 CODE_AGE_LIST(HANDLE_CODE_AGE)
10587 #undef HANDLE_CODE_AGE
// The execution-count pseudo-ages use dedicated stubs with no parity.
10588 stub = *builtins->MarkCodeAsExecutedOnce();
10589 if (code == stub) {
10590 *age = kNotExecutedCodeAge;
10591 *parity = NO_MARKING_PARITY;
10594 stub = *builtins->MarkCodeAsExecutedTwice();
10595 if (code == stub) {
10596 *age = kExecutedOnceCodeAge;
10597 *parity = NO_MARKING_PARITY;
// Inverse of GetCodeAgeAndParity: returns the builtin stub that encodes
// the given (age, parity) combination.
10604 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
10605 Builtins* builtins = isolate->builtins();
10607 #define HANDLE_CODE_AGE(AGE) \
10608 case k##AGE##CodeAge: { \
10609 Code* stub = parity == EVEN_MARKING_PARITY \
10610 ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
10611 : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10614 CODE_AGE_LIST(HANDLE_CODE_AGE)
10615 #undef HANDLE_CODE_AGE
10616 case kNotExecutedCodeAge: {
10617 DCHECK(parity == NO_MARKING_PARITY);
10618 return *builtins->MarkCodeAsExecutedOnce();
10620 case kExecutedOnceCodeAge: {
10621 DCHECK(parity == NO_MARKING_PARITY);
10622 return *builtins->MarkCodeAsExecutedTwice();
// Prints the assembler comment preceding the runtime entry whose
// deoptimization id (under any of EAGER/SOFT/LAZY) equals bailout_id.
// Relies on comments appearing in the reloc stream before their entries.
10632 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
10633 const char* last_comment = NULL;
10634 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
10635 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
10636 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10637 RelocInfo* info = it.rinfo();
10638 if (info->rmode() == RelocInfo::COMMENT) {
10639 last_comment = reinterpret_cast<const char*>(info->data());
10640 } else if (last_comment != NULL) {
10641 if ((bailout_id == Deoptimizer::GetDeoptimizationId(
10642 GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
10643 (bailout_id == Deoptimizer::GetDeoptimizationId(
10644 GetIsolate(), info->target_address(), Deoptimizer::SOFT)) ||
10645 (bailout_id == Deoptimizer::GetDeoptimizationId(
10646 GetIsolate(), info->target_address(), Deoptimizer::LAZY))) {
10647 CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
10648 PrintF(out, " %s\n", last_comment);
// True when pc is a recorded deopt point of this optimized code
// (entries with Pc == -1 are skipped as "no pc recorded").
10656 bool Code::CanDeoptAt(Address pc) {
10657 DeoptimizationInputData* deopt_data =
10658 DeoptimizationInputData::cast(deoptimization_data());
10659 Address code_start_address = instruction_start();
10660 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
10661 if (deopt_data->Pc(i)->value() == -1) continue;
10662 Address address = code_start_address + deopt_data->Pc(i)->value();
10663 if (address == pc) return true;
10669 // Identify kind of code.
10670 const char* Code::Kind2String(Kind kind) {
10672 #define CASE(name) case name: return #name;
10673 CODE_KIND_LIST(CASE)
10675 case NUMBER_OF_KINDS: break;
10682 #ifdef ENABLE_DISASSEMBLER
// Pretty-prints a DeoptimizationInputData table: one row per deopt point
// (index, ast id, argc, pc) and — with --print-code-verbose — the decoded
// frame-translation commands for each point.
10684 void DeoptimizationInputData::DeoptimizationInputDataPrint(
10685 OStream& os) { // NOLINT
10686 disasm::NameConverter converter;
10687 int deopt_count = DeoptCount();
10688 os << "Deoptimization Input Data (deopt points = " << deopt_count << ")\n";
10689 if (0 != deopt_count) {
10690 os << " index ast id argc pc";
10691 if (FLAG_print_code_verbose) os << " commands";
10694 for (int i = 0; i < deopt_count; i++) {
10695 // TODO(svenpanne) Add some basic formatting to our streams.
10696 Vector<char> buf1 = Vector<char>::New(128);
10697 SNPrintF(buf1, "%6d %6d %6d %6d", i, AstId(i).ToInt(),
10698 ArgumentsStackHeight(i)->value(), Pc(i)->value());
10699 os << buf1.start();
10701 if (!FLAG_print_code_verbose) {
10705 // Print details of the frame translation.
10706 int translation_index = TranslationIndex(i)->value();
10707 TranslationIterator iterator(TranslationByteArray(), translation_index);
10708 Translation::Opcode opcode =
10709 static_cast<Translation::Opcode>(iterator.Next());
10710 DCHECK(Translation::BEGIN == opcode);
10711 int frame_count = iterator.Next();
10712 int jsframe_count = iterator.Next();
10713 os << " " << Translation::StringFor(opcode)
10714 << " {frame count=" << frame_count
10715 << ", js frame count=" << jsframe_count << "}\n";
// Decode each translation command until the next BEGIN (or the end of
// the byte array) and print its operands in a command-specific format.
10717 while (iterator.HasNext() &&
10718 Translation::BEGIN !=
10719 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
10720 Vector<char> buf2 = Vector<char>::New(128);
10721 SNPrintF(buf2, "%27s %s ", "", Translation::StringFor(opcode));
10722 os << buf2.start();
10725 case Translation::BEGIN:
10729 case Translation::JS_FRAME: {
10730 int ast_id = iterator.Next();
10731 int function_id = iterator.Next();
10732 unsigned height = iterator.Next();
10733 os << "{ast_id=" << ast_id << ", function=";
// kSelfLiteralId marks the function being deoptimized itself, so only
// other literal ids are resolved through the literal array.
10734 if (function_id != Translation::kSelfLiteralId) {
10735 Object* function = LiteralArray()->get(function_id);
10736 os << Brief(JSFunction::cast(function)->shared()->DebugName());
10740 os << ", height=" << height << "}";
10744 case Translation::COMPILED_STUB_FRAME: {
10745 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
10746 os << "{kind=" << stub_kind << "}";
10750 case Translation::ARGUMENTS_ADAPTOR_FRAME:
10751 case Translation::CONSTRUCT_STUB_FRAME: {
10752 int function_id = iterator.Next();
10753 JSFunction* function =
10754 JSFunction::cast(LiteralArray()->get(function_id));
10755 unsigned height = iterator.Next();
10756 os << "{function=" << Brief(function->shared()->DebugName())
10757 << ", height=" << height << "}";
10761 case Translation::GETTER_STUB_FRAME:
10762 case Translation::SETTER_STUB_FRAME: {
10763 int function_id = iterator.Next();
10764 JSFunction* function =
10765 JSFunction::cast(LiteralArray()->get(function_id));
10766 os << "{function=" << Brief(function->shared()->DebugName()) << "}";
// Register-valued inputs print the architecture register name.
10770 case Translation::REGISTER: {
10771 int reg_code = iterator.Next();
10772 os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
10776 case Translation::INT32_REGISTER: {
10777 int reg_code = iterator.Next();
10778 os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
10782 case Translation::UINT32_REGISTER: {
10783 int reg_code = iterator.Next();
10784 os << "{input=" << converter.NameOfCPURegister(reg_code)
10789 case Translation::DOUBLE_REGISTER: {
10790 int reg_code = iterator.Next();
10791 os << "{input=" << DoubleRegister::AllocationIndexToString(reg_code)
10796 case Translation::FLOAT32x4_REGISTER: {
10797 int reg_code = iterator.Next();
10798 os << "{input=" << SIMD128Register::AllocationIndexToString(reg_code)
10803 case Translation::FLOAT64x2_REGISTER: {
10804 int reg_code = iterator.Next();
10805 os << "{input=" << SIMD128Register::AllocationIndexToString(reg_code)
10810 case Translation::INT32x4_REGISTER: {
10811 int reg_code = iterator.Next();
10812 os << "{input=" << SIMD128Register::AllocationIndexToString(reg_code)
// Stack-slot-valued inputs print the slot index.
10817 case Translation::STACK_SLOT: {
10818 int input_slot_index = iterator.Next();
10819 os << "{input=" << input_slot_index << "}";
10823 case Translation::INT32_STACK_SLOT: {
10824 int input_slot_index = iterator.Next();
10825 os << "{input=" << input_slot_index << "}";
10829 case Translation::UINT32_STACK_SLOT: {
10830 int input_slot_index = iterator.Next();
10831 os << "{input=" << input_slot_index << " (unsigned)}";
10835 case Translation::DOUBLE_STACK_SLOT: {
10836 int input_slot_index = iterator.Next();
10837 os << "{input=" << input_slot_index << "}";
10841 case Translation::FLOAT32x4_STACK_SLOT: {
10842 int input_slot_index = iterator.Next();
10843 os << "{input=" << input_slot_index << "}";
10847 case Translation::FLOAT64x2_STACK_SLOT: {
10848 int input_slot_index = iterator.Next();
10849 os << "{input=" << input_slot_index << "}";
10853 case Translation::INT32x4_STACK_SLOT: {
10854 int input_slot_index = iterator.Next();
10855 os << "{input=" << input_slot_index << "}";
10859 case Translation::LITERAL: {
10860 unsigned literal_index = iterator.Next();
10861 os << "{literal_id=" << literal_index << "}";
10865 case Translation::DUPLICATED_OBJECT: {
10866 int object_index = iterator.Next();
10867 os << "{object_index=" << object_index << "}";
10871 case Translation::ARGUMENTS_OBJECT:
10872 case Translation::CAPTURED_OBJECT: {
10873 int args_length = iterator.Next();
10874 os << "{length=" << args_length << "}";
// Pretty-prints a DeoptimizationOutputData table: one row per deopt point
// with ast id, pc, and the decoded full-codegen state.
10884 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(
10885 OStream& os) { // NOLINT
10886 os << "Deoptimization Output Data (deopt points = " << this->DeoptPoints()
10888 if (this->DeoptPoints() == 0) return;
10890 os << "ast id pc state\n";
10891 for (int i = 0; i < this->DeoptPoints(); i++) {
// pc and state are bit-packed into one Smi; decode both fields.
10892 int pc_and_state = this->PcAndState(i)->value();
10893 // TODO(svenpanne) Add some basic formatting to our streams.
10894 Vector<char> buf = Vector<char>::New(100);
10895 SNPrintF(buf, "%6d %8d %s\n", this->AstId(i).ToInt(),
10896 FullCodeGenerator::PcField::decode(pc_and_state),
10897 FullCodeGenerator::State2String(
10898 FullCodeGenerator::StateField::decode(pc_and_state)));
// Returns the printable name of an inline-cache state.
10904 const char* Code::ICState2String(InlineCacheState state) {
10906 case UNINITIALIZED: return "UNINITIALIZED";
10907 case PREMONOMORPHIC: return "PREMONOMORPHIC";
10908 case MONOMORPHIC: return "MONOMORPHIC";
10909 case PROTOTYPE_FAILURE:
10910 return "PROTOTYPE_FAILURE";
10911 case POLYMORPHIC: return "POLYMORPHIC";
10912 case MEGAMORPHIC: return "MEGAMORPHIC";
10913 case GENERIC: return "GENERIC";
10914 case DEBUG_STUB: return "DEBUG_STUB";
// Returns the printable name of a stub type.
10923 const char* Code::StubType2String(StubType type) {
10925 case NORMAL: return "NORMAL";
10926 case FAST: return "FAST";
10928 UNREACHABLE(); // keep the compiler happy
// Prints the extra IC state; STRICT store ICs get special treatment
// (the numeric fallback below handles everything else).
10933 void Code::PrintExtraICState(OStream& os, // NOLINT
10934 Kind kind, ExtraICState extra) {
10935 os << "extra_ic_state = ";
10936 if ((kind == STORE_IC || kind == KEYED_STORE_IC) && (extra == STRICT)) {
10939 os << extra << "\n";
// Dumps a full human-readable description of this Code object: kind and
// IC metadata, disassembled instructions, deoptimization data, and either
// the safepoint table (crankshafted code) or the back-edge table
// (full-codegen code), followed by the relocation info.
10944 void Code::Disassemble(const char* name, OStream& os) { // NOLINT
10945 os << "kind = " << Kind2String(kind()) << "\n";
10946 if (IsCodeStubOrIC()) {
10947 const char* n = CodeStub::MajorName(CodeStub::GetMajorKey(this), true);
10948 os << "major_key = " << (n == NULL ? "null" : n) << "\n";
10950 if (is_inline_cache_stub()) {
10951 os << "ic_state = " << ICState2String(ic_state()) << "\n";
10952 PrintExtraICState(os, kind(), extra_ic_state());
10953 if (ic_state() == MONOMORPHIC) {
10954 os << "type = " << StubType2String(type()) << "\n";
10956 if (is_compare_ic_stub()) {
10957 DCHECK(CodeStub::GetMajorKey(this) == CodeStub::CompareIC);
10958 CompareICStub stub(stub_key(), GetIsolate());
10959 os << "compare_state = " << CompareICState::GetStateName(stub.left())
10960 << "*" << CompareICState::GetStateName(stub.right()) << " -> "
10961 << CompareICState::GetStateName(stub.state()) << "\n";
10962 os << "compare_operation = " << Token::Name(stub.op()) << "\n";
10965 if ((name != NULL) && (name[0] != '\0')) {
10966 os << "name = " << name << "\n";
10968 if (kind() == OPTIMIZED_FUNCTION) {
10969 os << "stack_slots = " << stack_slots() << "\n";
10972 os << "Instructions (size = " << instruction_size() << ")\n";
10973 // TODO(svenpanne) The Disassembler should use streams, too!
10975 CodeTracer::Scope trace_scope(GetIsolate()->GetCodeTracer());
10976 Disassembler::Decode(trace_scope.file(), this);
// Deopt data: output data for full code, input data for optimized code.
10980 if (kind() == FUNCTION) {
10981 DeoptimizationOutputData* data =
10982 DeoptimizationOutputData::cast(this->deoptimization_data());
10983 data->DeoptimizationOutputDataPrint(os);
10984 } else if (kind() == OPTIMIZED_FUNCTION) {
10985 DeoptimizationInputData* data =
10986 DeoptimizationInputData::cast(this->deoptimization_data());
10987 data->DeoptimizationInputDataPrint(os);
10991 if (is_crankshafted()) {
10992 SafepointTable table(this);
10993 os << "Safepoints (size = " << table.size() << ")\n";
10994 for (unsigned i = 0; i < table.length(); i++) {
10995 unsigned pc_offset = table.GetPcOffset(i);
10996 os << (instruction_start() + pc_offset) << " ";
10997 // TODO(svenpanne) Add some basic formatting to our streams.
10998 Vector<char> buf1 = Vector<char>::New(30);
10999 SNPrintF(buf1, "%4d", pc_offset);
11000 os << buf1.start() << " ";
11001 table.PrintEntry(i, os);
11002 os << " (sp -> fp) ";
11003 SafepointEntry entry = table.GetEntry(i);
11004 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
11005 Vector<char> buf2 = Vector<char>::New(30);
11006 SNPrintF(buf2, "%6d", entry.deoptimization_index());
11007 os << buf2.start();
11011 if (entry.argument_count() > 0) {
11012 os << " argc: " << entry.argument_count();
11017 } else if (kind() == FUNCTION) {
11018 unsigned offset = back_edge_table_offset();
11019 // If there is no back edge table, the "table start" will be at or after
11020 // (due to alignment) the end of the instruction stream.
11021 if (static_cast<int>(offset) < instruction_size()) {
11022 DisallowHeapAllocation no_gc;
11023 BackEdgeTable back_edges(this, &no_gc);
11025 os << "Back edges (size = " << back_edges.length() << ")\n";
11026 os << "ast_id pc_offset loop_depth\n";
11028 for (uint32_t i = 0; i < back_edges.length(); i++) {
11029 Vector<char> buf = Vector<char>::New(100);
11030 SNPrintF(buf, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
11031 back_edges.pc_offset(i), back_edges.loop_depth(i));
11037 #ifdef OBJECT_PRINT
11038 if (!type_feedback_info()->IsUndefined()) {
11039 OFStream os(stdout);
11040 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(os);
11046 os << "RelocInfo (size = " << relocation_size() << ")\n";
11047 for (RelocIterator it(this); !it.done(); it.next()) {
11048 it.rinfo()->Print(GetIsolate(), os);
11052 #endif // ENABLE_DISASSEMBLER
// Grows/shrinks an object's fast elements backing store to |capacity|,
// choosing the new elements kind (SMI vs. object, holey vs. packed) from
// smi_mode and the current kind, copying the old elements over, and
// updating the array length for JSArrays.  Returns the new backing store.
11055 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
11056 Handle<JSObject> object,
11059 SetFastElementsCapacitySmiMode smi_mode) {
11060 // We should never end in here with a pixel or external array.
11061 DCHECK(!object->HasExternalArrayElements());
11063 // Allocate a new fast elements backing store.
11064 Handle<FixedArray> new_elements =
11065 object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
11067 ElementsKind elements_kind = object->GetElementsKind();
11068 ElementsKind new_elements_kind;
11069 // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
11070 // or if it's allowed and the old elements array contained only SMIs.
11071 bool has_fast_smi_elements =
11072 (smi_mode == kForceSmiElements) ||
11073 ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
11074 if (has_fast_smi_elements) {
11075 if (IsHoleyElementsKind(elements_kind)) {
11076 new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
11078 new_elements_kind = FAST_SMI_ELEMENTS;
11081 if (IsHoleyElementsKind(elements_kind)) {
11082 new_elements_kind = FAST_HOLEY_ELEMENTS;
11084 new_elements_kind = FAST_ELEMENTS;
11087 Handle<FixedArrayBase> old_elements(object->elements());
11088 ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
11089 accessor->CopyElements(object, new_elements, elements_kind);
// Sloppy-arguments elements keep their parameter map and only swap the
// backing store (slot 1); all other kinds install the new map+elements.
11091 if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
11092 Handle<Map> new_map = (new_elements_kind != elements_kind)
11093 ? GetElementsTransitionMap(object, new_elements_kind)
11094 : handle(object->map());
11095 JSObject::ValidateElements(object);
11096 JSObject::SetMapAndElements(object, new_map, new_elements);
11098 // Transition through the allocation site as well if present.
11099 JSObject::UpdateAllocationSite(object, new_elements_kind);
11101 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11102 parameter_map->set(1, *new_elements);
11105 if (FLAG_trace_elements_transitions) {
11106 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11107 object->GetElementsKind(), new_elements);
11110 if (object->IsJSArray()) {
11111 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11113 return new_elements;
// Double-elements counterpart of SetFastElementsCapacityAndLength:
// allocates a FixedDoubleArray of |capacity|, picks the (holey or packed)
// fast-double kind, copies the elements, installs the transitioned map,
// and updates the JSArray length.
11117 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11120 // We should never end in here with a pixel or external array.
11121 DCHECK(!object->HasExternalArrayElements());
11123 Handle<FixedArrayBase> elems =
11124 object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11126 ElementsKind elements_kind = object->GetElementsKind();
11127 CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11128 ElementsKind new_elements_kind = elements_kind;
11129 if (IsHoleyElementsKind(elements_kind)) {
11130 new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11132 new_elements_kind = FAST_DOUBLE_ELEMENTS;
11135 Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11137 Handle<FixedArrayBase> old_elements(object->elements());
11138 ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11139 accessor->CopyElements(object, elems, elements_kind);
11141 JSObject::ValidateElements(object);
11142 JSObject::SetMapAndElements(object, new_map, elems);
11144 if (FLAG_trace_elements_transitions) {
11145 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11146 object->GetElementsKind(), elems);
11149 if (object->IsJSArray()) {
11150 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11156 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
11157 DCHECK(capacity >= 0);
11158 array->GetIsolate()->factory()->NewJSArrayStorage(
11159 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
11163 void JSArray::Expand(Handle<JSArray> array, int required_size) {
11164 ElementsAccessor* accessor = array->GetElementsAccessor();
11165 accessor->SetCapacityAndLength(array, required_size, required_size);
11169 // Returns false if the passed-in index is marked non-configurable,
11170 // which will cause the ES5 truncation operation to halt, and thus
11171 // no further old values need be collected.
// Records the element's previous value (the hole for accessor-backed
// elements, so "oldValue" is elided later) together with its index.
// NOTE(review): the |uint32_t index| parameter line and the trailing
// "return true;" of the original are not visible in this excerpt.
11172 static bool GetOldValue(Isolate* isolate,
11173 Handle<JSObject> object,
11175 List<Handle<Object> >* old_values,
11176 List<uint32_t>* indices) {
11177 Maybe<PropertyAttributes> maybe =
11178 JSReceiver::GetOwnElementAttribute(object, index);
11179 DCHECK(maybe.has_value);
11180 DCHECK(maybe.value != ABSENT);
// A non-configurable element stops the truncation walk.
11181 if (maybe.value == DONT_DELETE) return false;
11182 Handle<Object> value;
// Accessor-backed elements are recorded as the hole.
11183 if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) {
11184 value = Handle<Object>::cast(isolate->factory()->the_hole_value());
11186 value = Object::GetElement(isolate, object, index).ToHandleChecked();
11188 old_values->Add(value);
11189 indices->Add(index);
// Delivers a splice record to Object.observe observers by invoking the
// isolate's observers_enqueue_splice JS builtin with the splice parameters.
// NOTE(review): the |uint32_t index| parameter line and the tail of the
// Execution::Call argument list are not visible in this excerpt.
11193 static void EnqueueSpliceRecord(Handle<JSArray> object,
11195 Handle<JSArray> deleted,
11196 uint32_t add_count) {
11197 Isolate* isolate = object->GetIsolate();
11198 HandleScope scope(isolate);
11199 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
11200 Handle<Object> add_count_object =
11201 isolate->factory()->NewNumberFromUint(add_count);
11203 Handle<Object> args[] =
11204 { object, index_object, deleted, add_count_object };
// Called with an undefined receiver; the builtin lives on the isolate.
11206 Execution::Call(isolate,
11207 Handle<JSFunction>(isolate->observers_enqueue_splice()),
11208 isolate->factory()->undefined_value(),
// Signals to Object.observe machinery that a compound splice operation on
// |object| is starting (paired with EndPerformSplice below).
// NOTE(review): trailing Execution::Call arguments are missing here.
11214 static void BeginPerformSplice(Handle<JSArray> object) {
11215 Isolate* isolate = object->GetIsolate();
11216 HandleScope scope(isolate);
11217 Handle<Object> args[] = { object };
11219 Execution::Call(isolate,
11220 Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11221 isolate->factory()->undefined_value(),
// Signals to Object.observe machinery that the compound splice operation on
// |object| has finished (paired with BeginPerformSplice above).
// NOTE(review): trailing Execution::Call arguments are missing here.
11227 static void EndPerformSplice(Handle<JSArray> object) {
11228 Isolate* isolate = object->GetIsolate();
11229 HandleScope scope(isolate);
11230 Handle<Object> args[] = { object };
11232 Execution::Call(isolate,
11233 Handle<JSFunction>(isolate->observers_end_perform_splice()),
11234 isolate->factory()->undefined_value(),
// Implements assignment to a JSArray's "length": normalizes huge fast
// arrays to dictionary mode, delegates to the elements accessor, and — when
// the array is observed — collects old values and emits delete/update/splice
// change records for Object.observe.
// NOTE(review): several interleaved original lines (closing braces, else
// keywords, ASSIGN_RETURN_ON_EXCEPTION arguments, the final return) are not
// visible in this excerpt.
11240 MaybeHandle<Object> JSArray::SetElementsLength(
11241 Handle<JSArray> array,
11242 Handle<Object> new_length_handle) {
11243 if (array->HasFastElements()) {
11244 // If the new array won't fit in a some non-trivial fraction of the max old
11245 // space size, then force it to go dictionary mode.
11246 int max_fast_array_size = static_cast<int>(
11247 (array->GetHeap()->MaxOldGenerationSize() / kDoubleSize) / 4);
11248 if (new_length_handle->IsNumber() &&
11249 NumberToInt32(*new_length_handle) >= max_fast_array_size) {
11250 NormalizeElements(array);
11254 // We should never end in here with a pixel or external array.
11255 DCHECK(array->AllowsSetElementsLength());
// Fast path: unobserved arrays need no change records.
11256 if (!array->map()->is_observed()) {
11257 return array->GetElementsAccessor()->SetLength(array, new_length_handle);
11260 Isolate* isolate = array->GetIsolate();
11261 List<uint32_t> indices;
11262 List<Handle<Object> > old_values;
11263 Handle<Object> old_length_handle(array->length(), isolate);
11264 uint32_t old_length = 0;
11265 CHECK(old_length_handle->ToArrayIndex(&old_length));
11266 uint32_t new_length = 0;
11267 CHECK(new_length_handle->ToArrayIndex(&new_length));
11269 static const PropertyAttributes kNoAttrFilter = NONE;
11270 int num_elements = array->NumberOfOwnElements(kNoAttrFilter);
// Collect the values about to be truncated away so observers can see them.
11271 if (num_elements > 0) {
11272 if (old_length == static_cast<uint32_t>(num_elements)) {
11273 // Simple case for arrays without holes.
11274 for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
11275 if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
11278 // For sparse arrays, only iterate over existing elements.
11279 // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
11280 // the to-be-removed indices twice.
11281 Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
11282 array->GetOwnElementKeys(*keys, kNoAttrFilter);
11283 while (num_elements-- > 0) {
11284 uint32_t index = NumberToUint32(keys->get(num_elements));
11285 if (index < new_length) break;
11286 if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
// Perform the actual length change via the elements accessor.
11291 Handle<Object> hresult;
11292 ASSIGN_RETURN_ON_EXCEPTION(
11294 array->GetElementsAccessor()->SetLength(array, new_length_handle),
// Re-read the length: the accessor may have clamped it.
11297 CHECK(array->length()->ToArrayIndex(&new_length));
11298 if (old_length == new_length) return hresult;
11300 BeginPerformSplice(array);
11302 for (int i = 0; i < indices.length(); ++i) {
11303 // For deletions where the property was an accessor, old_values[i]
11304 // will be the hole, which instructs EnqueueChangeRecord to elide
11305 // the "oldValue" property.
11306 JSObject::EnqueueChangeRecord(
11307 array, "delete", isolate->factory()->Uint32ToString(indices[i]),
11310 JSObject::EnqueueChangeRecord(
11311 array, "update", isolate->factory()->length_string(),
11312 old_length_handle);
11314 EndPerformSplice(array);
// Build the "deleted" array handed to splice observers.
11316 uint32_t index = Min(old_length, new_length);
11317 uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
11318 uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
11319 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
11320 if (delete_count > 0) {
11321 for (int i = indices.length() - 1; i >= 0; i--) {
11322 // Skip deletions where the property was an accessor, leaving holes
11323 // in the array of old values.
11324 if (old_values[i]->IsTheHole()) continue;
11325 JSObject::SetElement(
11326 deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert();
11329 SetProperty(deleted, isolate->factory()->length_string(),
11330 isolate->factory()->NewNumberFromUint(delete_count),
11334 EnqueueSpliceRecord(array, index, deleted, add_count);
11340 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11341 Handle<Object> prototype) {
11342 FixedArray* cache = map->GetPrototypeTransitions();
11343 int number_of_transitions = map->NumberOfProtoTransitions();
11344 const int proto_offset =
11345 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11346 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11347 const int step = kProtoTransitionElementsPerEntry;
11348 for (int i = 0; i < number_of_transitions; i++) {
11349 if (cache->get(proto_offset + i * step) == *prototype) {
11350 Object* result = cache->get(map_offset + i * step);
11351 return Handle<Map>(Map::cast(result));
11354 return Handle<Map>();
// Records |target_map| in |map|'s prototype-transition cache under
// |prototype|, growing the cache (capped at kMaxCachedPrototypeTransitions)
// when needed. Dictionary maps and prototype maps are never cached.
// NOTE(review): interleaved original lines (comment continuation, closing
// braces, the final "return map;") are not visible in this excerpt.
11358 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
11359 Handle<Object> prototype,
11360 Handle<Map> target_map) {
11361 DCHECK(target_map->IsMap());
11362 DCHECK(HeapObject::cast(*prototype)->map()->IsMap());
11363 // Don't cache prototype transition if this map is either shared, or a map of
11365 if (map->is_prototype_map()) return map;
11366 if (map->is_dictionary_map() || !FLAG_cache_prototype_transitions) return map;
11368 const int step = kProtoTransitionElementsPerEntry;
11369 const int header = kProtoTransitionHeaderSize;
11371 Handle<FixedArray> cache(map->GetPrototypeTransitions());
11372 int capacity = (cache->length() - header) / step;
11373 int transitions = map->NumberOfProtoTransitions() + 1;
// Grow the cache when full, bailing out past the size cap.
11375 if (transitions > capacity) {
11376 if (capacity > kMaxCachedPrototypeTransitions) return map;
11378 // Grow array by factor 2 over and above what we need.
11379 cache = FixedArray::CopySize(cache, transitions * 2 * step + header);
11381 SetPrototypeTransitions(map, cache);
11384 // Reload number of transitions as GC might shrink them.
11385 int last = map->NumberOfProtoTransitions();
11386 int entry = header + last * step;
// Append the (prototype, target map) pair and bump the count.
11388 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
11389 cache->set(entry + kProtoTransitionMapOffset, *target_map);
11390 map->SetNumberOfProtoTransitions(last + 1);
11396 void Map::ZapTransitions() {
11397 TransitionArray* transition_array = transitions();
11398 // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11399 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11400 Object** data = transition_array->data_start();
11401 Object* the_hole = GetHeap()->the_hole_value();
11402 int length = transition_array->length();
11403 for (int i = 0; i < length; i++) {
11404 data[i] = the_hole;
11409 void Map::ZapPrototypeTransitions() {
11410 FixedArray* proto_transitions = GetPrototypeTransitions();
11411 MemsetPointer(proto_transitions->data_start(),
11412 GetHeap()->the_hole_value(),
11413 proto_transitions->length());
11418 void Map::AddDependentCompilationInfo(Handle<Map> map,
11419 DependentCode::DependencyGroup group,
11420 CompilationInfo* info) {
11421 Handle<DependentCode> codes =
11422 DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
11423 group, info->object_wrapper());
11424 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
11425 info->dependencies(group)->Add(map, info->zone());
11430 void Map::AddDependentCode(Handle<Map> map,
11431 DependentCode::DependencyGroup group,
11432 Handle<Code> code) {
11433 Handle<DependentCode> codes = DependentCode::Insert(
11434 Handle<DependentCode>(map->dependent_code()), group, code);
11435 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
// Links |stub| into the weak-IC dependent-code list of |map|: allocates a
// list head when none exists (slow path), otherwise threads the stub into
// the existing linked list without allocating (fast path).
// NOTE(review): the original condition that selects between the two paths
// (presumably testing n == 0) and closing braces are missing here.
11440 void Map::AddDependentIC(Handle<Map> map,
11441 Handle<Code> stub) {
11442 DCHECK(stub->next_code_link()->IsUndefined());
11443 int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
11445 // Slow path: insert the head of the list with possible heap allocation.
11446 Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
11448 // Fast path: link the stub to the existing head of the list without any
11449 // heap allocation.
11451 map->dependent_code()->AddToDependentICList(stub);
11456 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
11457 Recompute(entries);
11461 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11462 start_indexes_[0] = 0;
11463 for (int g = 1; g <= kGroupCount; g++) {
11464 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11465 start_indexes_[g] = start_indexes_[g - 1] + count;
11470 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11471 DependencyGroup group) {
11472 AllowDeferredHandleDereference dependencies_are_safe;
11473 if (group == DependentCode::kPropertyCellChangedGroup) {
11474 return Handle<PropertyCell>::cast(object)->dependent_code();
11475 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
11476 group == DependentCode::kAllocationSiteTransitionChangedGroup) {
11477 return Handle<AllocationSite>::cast(object)->dependent_code();
11479 return Handle<Map>::cast(object)->dependent_code();
// Inserts |object| (a Code or a CompilationInfo wrapper) into the entries
// array under |group|, growing the array when full. Duplicate entries are
// skipped. Returns the (possibly reallocated) array.
// NOTE(review): several original lines (closing braces, a comment
// continuation, the final "return entries;") are missing in this excerpt.
11483 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
11484 DependencyGroup group,
11485 Handle<Object> object) {
11486 GroupStartIndexes starts(*entries);
11487 int start = starts.at(group);
11488 int end = starts.at(group + 1);
11489 int number_of_entries = starts.number_of_entries();
11490 // Check for existing entry to avoid duplicates.
11491 for (int i = start; i < end; i++) {
11492 if (entries->object_at(i) == *object) return entries;
// Grow when the array has no free slot for one more entry.
11494 if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
11495 int capacity = kCodesStartIndex + number_of_entries + 1;
// Over-allocate by 25% once past a small threshold.
11496 if (capacity > 5) capacity = capacity * 5 / 4;
11497 Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
11498 FixedArray::CopySize(entries, capacity, TENURED));
11499 // The number of codes can change after GC.
11500 starts.Recompute(*entries);
11501 start = starts.at(group);
11502 end = starts.at(group + 1);
11503 number_of_entries = starts.number_of_entries();
// Clear the old array so stale strong references do not keep code alive.
11504 for (int i = 0; i < number_of_entries; i++) {
11505 entries->clear_at(i);
11507 // If the old fixed array was empty, we need to reset counters of the
11509 if (number_of_entries == 0) {
11510 for (int g = 0; g < kGroupCount; g++) {
11511 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
11514 entries = new_entries;
// Shift later groups right by one and place the new entry at |end|.
11516 entries->ExtendGroup(group);
11517 entries->set_object_at(end, *object);
11518 entries->set_number_of_entries(group, end + 1 - start);
// Replaces the CompilationInfo wrapper entry of |info| in |group| with the
// finished code object; the trailing loop is a debug-only consistency check
// that no wrapper for |info| remains.
// NOTE(review): the |Code* code| parameter line, a loop break/braces, and
// the #ifdef DEBUG markers of the original are missing in this excerpt.
11523 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11524 CompilationInfo* info,
11526 DisallowHeapAllocation no_gc;
11527 AllowDeferredHandleDereference get_object_wrapper;
11528 Foreign* info_wrapper = *info->object_wrapper();
11529 GroupStartIndexes starts(this);
11530 int start = starts.at(group);
11531 int end = starts.at(group + 1);
// Find the wrapper for |info| and swap in the finished code.
11532 for (int i = start; i < end; i++) {
11533 if (object_at(i) == info_wrapper) {
11534 set_object_at(i, code);
// Debug check: no remaining entry may still reference |info|.
11540 for (int i = start; i < end; i++) {
11541 DCHECK(is_code_at(i) || compilation_info_at(i) != info);
// Removes the entry wrapping |info| from |group|, compacting the array by
// moving the last element of each subsequent group into the hole.
// NOTE(review): original lines declaring/assigning info_pos, loop breaks,
// closing braces, and the #ifdef DEBUG markers are missing in this excerpt.
11547 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
11548 CompilationInfo* info) {
11549 DisallowHeapAllocation no_allocation;
11550 AllowDeferredHandleDereference get_object_wrapper;
11551 Foreign* info_wrapper = *info->object_wrapper();
11552 GroupStartIndexes starts(this);
11553 int start = starts.at(group);
11554 int end = starts.at(group + 1);
11555 // Find compilation info wrapper.
11557 for (int i = start; i < end; i++) {
11558 if (object_at(i) == info_wrapper) {
11563 if (info_pos == -1) return; // Not found.
11564 int gap = info_pos;
11565 // Use the last of each group to fill the gap in the previous group.
11566 for (int i = group; i < kGroupCount; i++) {
11567 int last_of_group = starts.at(i + 1) - 1;
11568 DCHECK(last_of_group >= gap);
11569 if (last_of_group == gap) continue;
11570 copy(last_of_group, gap);
11571 gap = last_of_group;
// After compaction the only hole left is the array's very last entry.
11573 DCHECK(gap == starts.number_of_entries() - 1);
11574 clear_at(gap); // Clear last gap.
11575 set_number_of_entries(group, end - start - 1);
// Debug check: |info| must no longer appear in this group.
11578 for (int i = start; i < end - 1; i++) {
11579 DCHECK(is_code_at(i) || compilation_info_at(i) != info);
11585 static bool CodeListContains(Object* head, Code* code) {
11586 while (!head->IsUndefined()) {
11587 if (head == code) return true;
11588 head = Code::cast(head)->next_code_link();
11594 bool DependentCode::Contains(DependencyGroup group, Code* code) {
11595 GroupStartIndexes starts(this);
11596 int start = starts.at(group);
11597 int end = starts.at(group + 1);
11598 if (group == kWeakICGroup) {
11599 return CodeListContains(object_at(start), code);
11601 for (int i = start; i < end; i++) {
11602 if (object_at(i) == code) return true;
// Marks every code object in |group| for deoptimization (aborting any
// in-flight compilations), then compacts the array and clears the group.
// Returns whether any code was newly marked.
// NOTE(review): original lines (the Isolate parameter, "marked = true;",
// the copy/clear statements in the compaction loops, the final return and
// braces) are missing in this excerpt.
11608 bool DependentCode::MarkCodeForDeoptimization(
11610 DependentCode::DependencyGroup group) {
11611 DisallowHeapAllocation no_allocation_scope;
11612 DependentCode::GroupStartIndexes starts(this);
11613 int start = starts.at(group);
11614 int end = starts.at(group + 1);
11615 int code_entries = starts.number_of_entries();
11616 if (start == end) return false;
11618 // Mark all the code that needs to be deoptimized.
11619 bool marked = false;
11620 for (int i = start; i < end; i++) {
11621 if (is_code_at(i)) {
11622 Code* code = code_at(i);
// Skip code that is already marked; only fresh marks count.
11623 if (!code->marked_for_deoptimization()) {
11624 SetMarkedForDeoptimization(code, group);
// Non-code entries are pending compilations; abort them.
11628 CompilationInfo* info = compilation_info_at(i);
11629 info->AbortDueToDependencyChange();
11632 // Compact the array by moving all subsequent groups to fill in the new holes.
11633 for (int src = end, dst = start; src < code_entries; src++, dst++) {
11636 // Now the holes are at the end of the array, zap them for heap-verifier.
11637 int removed = end - start;
11638 for (int i = code_entries - removed; i < code_entries; i++) {
11641 set_number_of_entries(group, 0);
// Marks all code in |group| for deoptimization and, if anything was newly
// marked, triggers deoptimization of the marked code.
// NOTE(review): the |Isolate* isolate| parameter line is missing here.
11646 void DependentCode::DeoptimizeDependentCodeGroup(
11648 DependentCode::DependencyGroup group) {
11649 DCHECK(AllowCodeDependencyChange::IsAllowed());
11650 DisallowHeapAllocation no_allocation_scope;
11651 bool marked = MarkCodeForDeoptimization(isolate, group);
11653 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
// Threads |stub| into the weak-IC linked list stored in a single slot of
// this array, preferring to splice after the existing head so the array
// itself is not written.
// NOTE(review): the else-branch keyword and closing braces of the original
// are missing in this excerpt.
11657 void DependentCode::AddToDependentICList(Handle<Code> stub) {
11658 DisallowHeapAllocation no_heap_allocation;
11659 GroupStartIndexes starts(this);
11660 int i = starts.at(kWeakICGroup);
11661 Object* head = object_at(i);
11662 // Try to insert the stub after the head of the list to minimize number of
11663 // writes to the DependentCode array, since a write to the array can make it
11664 // strong if it was already marked by incremental marker.
11665 if (head->IsCode()) {
11666 stub->set_next_code_link(Code::cast(head)->next_code_link());
11667 Code::cast(head)->set_next_code_link(*stub);
// Empty list: the stub becomes the new head stored in the array slot.
11669 stub->set_next_code_link(head);
11670 set_object_at(i, *stub);
// Flags |code| as marked-for-deoptimization and, under --trace-deopt,
// prints the code address, optimization id and dependency-group reason.
// NOTE(review): the closing braces of the original are missing here.
11675 void DependentCode::SetMarkedForDeoptimization(Code* code,
11676 DependencyGroup group) {
11677 code->set_marked_for_deoptimization(true);
// Only optimized code (with deopt data) produces a trace line.
11678 if (FLAG_trace_deopt &&
11679 (code->deoptimization_data() != code->GetHeap()->empty_fixed_array())) {
11680 DeoptimizationInputData* deopt_data =
11681 DeoptimizationInputData::cast(code->deoptimization_data());
11682 CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
11683 PrintF(scope.file(), "[marking dependent code 0x%08" V8PRIxPTR
11684 " (opt #%d) for deoptimization, reason: %s]\n",
11685 reinterpret_cast<intptr_t>(code),
11686 deopt_data->OptimizationId()->value(), DependencyGroupName(group));
// Returns a human-readable name for |group|, used by deoptimization tracing.
// NOTE(review): the opening "switch (group)" line, at least one leading case
// (presumably kWeakICGroup), and the unreachable-default tail of the
// original are missing in this excerpt.
11691 const char* DependentCode::DependencyGroupName(DependencyGroup group) {
11695 case kWeakCodeGroup:
11696 return "weak-code";
11697 case kTransitionGroup:
11698 return "transition";
11699 case kPrototypeCheckGroup:
11700 return "prototype-check";
11701 case kElementsCantBeAddedGroup:
11702 return "elements-cant-be-added";
11703 case kPropertyCellChangedGroup:
11704 return "property-cell-changed";
11705 case kFieldTypeGroup:
11706 return "field-type";
11707 case kInitialMapChangedGroup:
11708 return "initial-map-changed";
11709 case kAllocationSiteTenuringChangedGroup:
11710 return "allocation-site-tenuring-changed";
11711 case kAllocationSiteTransitionChangedGroup:
11712 return "allocation-site-transition-changed";
11719 Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
11720 Handle<Object> prototype) {
11721 Handle<Map> new_map = GetPrototypeTransition(map, prototype);
11722 if (new_map.is_null()) {
11723 new_map = Copy(map);
11724 PutPrototypeTransition(map, prototype, new_map);
11725 new_map->set_prototype(*prototype);
// Sets the [[Prototype]] of |object| to |value|, enforcing extensibility and
// cycle checks, skipping hidden prototypes for JS-initiated changes, and
// migrating the receiver to a prototype-transition map. Clears KeyedStoreICs
// when the new chain introduces dictionary elements, and always clears the
// instanceof cache.
// NOTE(review): interleaved original lines (#ifdef DEBUG markers, the
// comment tail about hidden/shared maps, real_receiver reassignment,
// iter.Advance(), the final "return value;") are missing in this excerpt.
11731 MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
11732 Handle<Object> value,
11733 bool from_javascript) {
// Debug-only: remember the object size to assert it is unchanged below.
11735 int size = object->Size();
11738 Isolate* isolate = object->GetIsolate();
11739 Heap* heap = isolate->heap();
11740 // Silently ignore the change if value is not a JSObject or null.
11741 // SpiderMonkey behaves this way.
11742 if (!value->IsJSReceiver() && !value->IsNull()) return value;
11744 // From 8.6.2 Object Internal Methods
11746 // In addition, if [[Extensible]] is false the value of the [[Class]] and
11747 // [[Prototype]] internal properties of the object may not be modified.
11749 // Implementation specific extensions that modify [[Class]], [[Prototype]]
11750 // or [[Extensible]] must not violate the invariants defined in the preceding
11752 if (!object->map()->is_extensible()) {
11753 Handle<Object> args[] = { object };
11754 THROW_NEW_ERROR(isolate, NewTypeError("non_extensible_proto",
11755 HandleVector(args, arraysize(args))),
11759 // Before we can set the prototype we need to be sure
11760 // prototype cycles are prevented.
11761 // It is sufficient to validate that the receiver is not in the new prototype
11763 for (PrototypeIterator iter(isolate, *value,
11764 PrototypeIterator::START_AT_RECEIVER);
11765 !iter.IsAtEnd(); iter.Advance()) {
11766 if (JSReceiver::cast(iter.GetCurrent()) == *object) {
// Cycle detected: setting this prototype would make the chain circular.
11768 THROW_NEW_ERROR(isolate,
11769 NewError("cyclic_proto", HandleVector<Object>(NULL, 0)),
// Remember whether the old chain already had dictionary elements, to decide
// below whether KeyedStoreICs must be flushed.
11774 bool dictionary_elements_in_chain =
11775 object->map()->DictionaryElementsInPrototypeChainOnly();
11776 Handle<JSObject> real_receiver = object;
11778 if (from_javascript) {
11779 // Find the first object in the chain whose prototype object is not
11780 // hidden and set the new prototype on that object.
11781 PrototypeIterator iter(isolate, real_receiver);
11782 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
11784 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
11789 // Set the new prototype of the object.
11790 Handle<Map> map(real_receiver->map());
11792 // Nothing to do if prototype is already set.
11793 if (map->prototype() == *value) return value;
11795 if (value->IsJSObject()) {
11796 PrototypeOptimizationMode mode =
11797 from_javascript ? REGULAR_PROTOTYPE : FAST_PROTOTYPE;
11798 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value), mode);
11801 Handle<Map> new_map = Map::TransitionToPrototype(map, value);
11802 DCHECK(new_map->prototype() == *value);
11803 JSObject::MigrateToMap(real_receiver, new_map);
11805 if (!dictionary_elements_in_chain &&
11806 new_map->DictionaryElementsInPrototypeChainOnly()) {
11807 // If the prototype chain didn't previously have element callbacks, then
11808 // KeyedStoreICs need to be cleared to ensure any that involve this
11810 object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
// The cached instanceof results may be invalidated by the new prototype.
11813 heap->ClearInstanceofCache();
11814 DCHECK(size == object->Size());
// Adapter for stack-ordered |Arguments|: computes the address of the first
// argument slot (arguments grow downwards on the stack) and forwards to the
// pointer-based EnsureCanContainElements overload.
// NOTE(review): the |Arguments* args| parameter line and a comment
// continuation are missing in this excerpt.
11819 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
11821 uint32_t first_arg,
11822 uint32_t arg_count,
11823 EnsureElementsMode mode) {
11824 // Elements in |Arguments| are ordered backwards (because they're on the
11825 // stack), but the method that's called here iterates over them in forward
11827 return EnsureCanContainElements(
11828 object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
// Returns the AccessorPair for the element at |index| if one is installed,
// forwarding through global proxies and bailing out for interceptors.
// NOTE(review): the |uint32_t index| parameter line and closing braces of
// the original are missing in this excerpt.
11832 MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair(
11833 Handle<JSObject> object,
// Global proxies delegate to the (global) object behind them.
11835 if (object->IsJSGlobalProxy()) {
11836 PrototypeIterator iter(object->GetIsolate(), object);
11837 if (iter.IsAtEnd()) return MaybeHandle<AccessorPair>();
11838 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
11839 return GetOwnElementAccessorPair(
11840 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
11843 // Check for lookup interceptor.
11844 if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();
11846 return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
// Stores an element through the object's indexed interceptor; if the
// interceptor's setter declines (empty result) or is absent, falls through
// to SetElementWithoutInterceptor.
// NOTE(review): the |uint32_t index| parameter line, a LOG(...) line head,
// and tail arguments of the original are missing in this excerpt.
11850 MaybeHandle<Object> JSObject::SetElementWithInterceptor(
11851 Handle<JSObject> object,
11853 Handle<Object> value,
11854 PropertyAttributes attributes,
11855 StrictMode strict_mode,
11856 bool check_prototype,
11857 SetPropertyMode set_mode) {
11858 Isolate* isolate = object->GetIsolate();
11860 // Make sure that the top context does not change when doing
11861 // callbacks or interceptor calls.
11862 AssertNoContextChange ncc(isolate);
11864 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
11865 if (!interceptor->setter()->IsUndefined()) {
11866 v8::IndexedPropertySetterCallback setter =
11867 v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
11869 ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
11870 PropertyCallbackArguments args(isolate, interceptor->data(), *object,
11872 v8::Handle<v8::Value> result =
11873 args.Call(setter, index, v8::Utils::ToLocal(value));
11874 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor handled the store.
11875 if (!result.IsEmpty()) return value;
11878 return SetElementWithoutInterceptor(object, index, value, attributes,
// Reads an element whose slot holds a callback |structure|: dispatches to
// an ExecutableAccessorInfo getter (API callback), an AccessorPair getter
// (__defineGetter__ style), or a DeclaredAccessorInfo.
// NOTE(review): the |uint32_t index| parameter line, closing braces and the
// original unreachable tail are missing in this excerpt.
11885 MaybeHandle<Object> JSObject::GetElementWithCallback(
11886 Handle<JSObject> object,
11887 Handle<Object> receiver,
11888 Handle<Object> structure,
11890 Handle<Object> holder) {
11891 Isolate* isolate = object->GetIsolate();
11892 DCHECK(!structure->IsForeign());
11893 // api style callbacks.
11894 if (structure->IsExecutableAccessorInfo()) {
11895 Handle<ExecutableAccessorInfo> data =
11896 Handle<ExecutableAccessorInfo>::cast(structure);
11897 Object* fun_obj = data->getter();
11898 v8::AccessorNameGetterCallback call_fun =
11899 v8::ToCData<v8::AccessorNameGetterCallback>(fun_obj);
// A null getter behaves as if the property were undefined.
11900 if (call_fun == NULL) return isolate->factory()->undefined_value();
11901 Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
// API getters are keyed by name, so the index is stringified first.
11902 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11903 Handle<String> key = isolate->factory()->NumberToString(number);
11904 LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
11905 PropertyCallbackArguments
11906 args(isolate, data->data(), *receiver, *holder_handle);
11907 v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
11908 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
11909 if (result.IsEmpty()) return isolate->factory()->undefined_value();
11910 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
11911 result_internal->VerifyApiCallResultType();
11912 // Rebox handle before return.
11913 return handle(*result_internal, isolate);
11916 // __defineGetter__ callback
11917 if (structure->IsAccessorPair()) {
11918 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
11920 if (getter->IsSpecFunction()) {
11921 // TODO(rossberg): nicer would be to cast to some JSCallable here...
11922 return GetPropertyWithDefinedGetter(
11923 receiver, Handle<JSReceiver>::cast(getter));
11925 // Getter is not a function.
11926 return isolate->factory()->undefined_value();
11929 if (structure->IsDeclaredAccessorInfo()) {
11930 return GetDeclaredAccessorProperty(
11931 receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
11935 return MaybeHandle<Object>();
// Stores an element through a callback |structure|: API setter
// (ExecutableAccessorInfo), AccessorPair setter, or DeclaredAccessorInfo.
// A missing setter is a no-op in sloppy mode and a TypeError in strict mode.
// NOTE(review): the |uint32_t index| parameter line, "return value;" lines,
// the THROW_NEW_ERROR head and closing braces are missing in this excerpt.
11939 MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
11940 Handle<Object> structure,
11942 Handle<Object> value,
11943 Handle<JSObject> holder,
11944 StrictMode strict_mode) {
11945 Isolate* isolate = object->GetIsolate();
11947 // We should never get here to initialize a const with the hole
11948 // value since a const declaration would conflict with the setter.
11949 DCHECK(!value->IsTheHole());
11950 DCHECK(!structure->IsForeign());
11951 if (structure->IsExecutableAccessorInfo()) {
11952 // api style callbacks
11953 Handle<ExecutableAccessorInfo> data =
11954 Handle<ExecutableAccessorInfo>::cast(structure);
11955 Object* call_obj = data->setter();
11956 v8::AccessorNameSetterCallback call_fun =
11957 v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
// A null setter silently succeeds with the written value.
11958 if (call_fun == NULL) return value;
// API setters are keyed by name, so the index is stringified first.
11959 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11960 Handle<String> key(isolate->factory()->NumberToString(number));
11961 LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
11962 PropertyCallbackArguments
11963 args(isolate, data->data(), *object, *holder);
11964 args.Call(call_fun,
11965 v8::Utils::ToLocal(key),
11966 v8::Utils::ToLocal(value));
11967 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
11971 if (structure->IsAccessorPair()) {
11972 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
11973 if (setter->IsSpecFunction()) {
11974 // TODO(rossberg): nicer would be to cast to some JSCallable here...
11975 return SetPropertyWithDefinedSetter(
11976 object, Handle<JSReceiver>::cast(setter), value);
// No setter present: sloppy mode ignores the store, strict mode throws.
11978 if (strict_mode == SLOPPY) return value;
11979 Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
11980 Handle<Object> args[2] = { key, holder };
11982 isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
11987 // TODO(dcarney): Handle correctly.
11988 if (structure->IsDeclaredAccessorInfo()) return value;
11991 return MaybeHandle<Object>();
11995 bool JSObject::HasFastArgumentsElements() {
11996 Heap* heap = GetHeap();
11997 if (!elements()->IsFixedArray()) return false;
11998 FixedArray* elements = FixedArray::cast(this->elements());
11999 if (elements->map() != heap->sloppy_arguments_elements_map()) {
12002 FixedArray* arguments = FixedArray::cast(elements->get(1));
12003 return !arguments->IsDictionary();
12007 bool JSObject::HasDictionaryArgumentsElements() {
12008 Heap* heap = GetHeap();
12009 if (!elements()->IsFixedArray()) return false;
12010 FixedArray* elements = FixedArray::cast(this->elements());
12011 if (elements->map() != heap->sloppy_arguments_elements_map()) {
12014 FixedArray* arguments = FixedArray::cast(elements->get(1));
12015 return arguments->IsDictionary();
12019 // Adding n elements in fast case is O(n*n).
12020 // Note: revisit design to have dual undefined values to capture absent
12022 MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
12024 Handle<Object> value,
12025 StrictMode strict_mode,
12026 bool check_prototype) {
12027 DCHECK(object->HasFastSmiOrObjectElements() ||
12028 object->HasFastArgumentsElements());
12030 Isolate* isolate = object->GetIsolate();
12032 // Array optimizations rely on the prototype lookups of Array objects always
12033 // returning undefined. If there is a store to the initial prototype object,
12034 // make sure all of these optimizations are invalidated.
12035 if (isolate->is_initial_object_prototype(*object) ||
12036 isolate->is_initial_array_prototype(*object)) {
12037 object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
12038 DependentCode::kElementsCantBeAddedGroup);
12041 Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
12042 if (backing_store->map() ==
12043 isolate->heap()->sloppy_arguments_elements_map()) {
12044 backing_store = handle(FixedArray::cast(backing_store->get(1)));
12046 backing_store = EnsureWritableFastElements(object);
12048 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
12050 if (check_prototype &&
12051 (index >= capacity || backing_store->get(index)->IsTheHole())) {
12053 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12054 object, index, value, &found, strict_mode);
12055 if (found) return result;
12058 uint32_t new_capacity = capacity;
12059 // Check if the length property of this object needs to be updated.
12060 uint32_t array_length = 0;
12061 bool must_update_array_length = false;
12062 bool introduces_holes = true;
12063 if (object->IsJSArray()) {
12064 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12065 introduces_holes = index > array_length;
12066 if (index >= array_length) {
12067 must_update_array_length = true;
12068 array_length = index + 1;
12071 introduces_holes = index >= capacity;
12074 // If the array is growing, and it's not growth by a single element at the
12075 // end, make sure that the ElementsKind is HOLEY.
12076 ElementsKind elements_kind = object->GetElementsKind();
12077 if (introduces_holes &&
12078 IsFastElementsKind(elements_kind) &&
12079 !IsFastHoleyElementsKind(elements_kind)) {
12080 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12081 TransitionElementsKind(object, transitioned_kind);
12084 // Check if the capacity of the backing store needs to be increased, or if
12085 // a transition to slow elements is necessary.
12086 if (index >= capacity) {
12087 bool convert_to_slow = true;
12088 if ((index - capacity) < kMaxGap) {
12089 new_capacity = NewElementsCapacity(index + 1);
12090 DCHECK(new_capacity > index);
12091 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12092 convert_to_slow = false;
12095 if (convert_to_slow) {
12096 NormalizeElements(object);
12097 return SetDictionaryElement(object, index, value, NONE, strict_mode,
12101 // Convert to fast double elements if appropriate.
12102 if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12103 // Consider fixing the boilerplate as well if we have one.
12104 ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12105 ? FAST_HOLEY_DOUBLE_ELEMENTS
12106 : FAST_DOUBLE_ELEMENTS;
12108 UpdateAllocationSite(object, to_kind);
12110 SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12111 FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12112 JSObject::ValidateElements(object);
12115 // Change elements kind from Smi-only to generic FAST if necessary.
12116 if (object->HasFastSmiElements() && !value->IsSmi()) {
12117 ElementsKind kind = object->HasFastHoleyElements()
12118 ? FAST_HOLEY_ELEMENTS
12121 UpdateAllocationSite(object, kind);
12122 Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12123 JSObject::MigrateToMap(object, new_map);
12124 DCHECK(IsFastObjectElementsKind(object->GetElementsKind()));
12126 // Increase backing store capacity if that's been decided previously.
12127 if (new_capacity != capacity) {
12128 SetFastElementsCapacitySmiMode smi_mode =
12129 value->IsSmi() && object->HasFastSmiElements()
12130 ? kAllowSmiElements
12131 : kDontAllowSmiElements;
12132 Handle<FixedArray> new_elements =
12133 SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12135 new_elements->set(index, *value);
12136 JSObject::ValidateElements(object);
12140 // Finally, set the new element and length.
12141 DCHECK(object->elements()->IsFixedArray());
12142 backing_store->set(index, *value);
12143 if (must_update_array_length) {
12144 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
// Stores |value| at |index| on an object whose elements live in a
// SeededNumberDictionary (slow elements), including the dictionary that backs
// a sloppy-arguments object. Handles accessor callbacks, read-only checks,
// aliased arguments slots, non-extensible receivers, and a possible
// transition back to fast elements afterwards.
// NOTE(review): this listing embeds original line numbers and the jumps in
// the numbering show elided lines (mostly closing braces and else-arms).
12150 MaybeHandle<Object> JSObject::SetDictionaryElement(
12151 Handle<JSObject> object,
12153 Handle<Object> value,
12154 PropertyAttributes attributes,
12155 StrictMode strict_mode,
12156 bool check_prototype,
12157 SetPropertyMode set_mode) {
12158 DCHECK(object->HasDictionaryElements() ||
12159 object->HasDictionaryArgumentsElements());
12160 Isolate* isolate = object->GetIsolate();
12162 // Insert element in the dictionary.
12163 Handle<FixedArray> elements(FixedArray::cast(object->elements()));
// A sloppy-arguments backing store keeps its dictionary at slot 1
// (slot 0 holds the context; see the aliasing code below).
12164 bool is_arguments =
12165 (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
12166 Handle<SeededNumberDictionary> dictionary(is_arguments
12167 ? SeededNumberDictionary::cast(elements->get(1))
12168 : SeededNumberDictionary::cast(*elements));
12170 int entry = dictionary->FindEntry(index);
12171 if (entry != SeededNumberDictionary::kNotFound) {
12172 Handle<Object> element(dictionary->ValueAt(entry), isolate);
12173 PropertyDetails details = dictionary->DetailsAt(entry);
// An existing accessor (CALLBACKS) wins over a plain store unless the caller
// is (re)defining the property.
12174 if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
12175 return SetElementWithCallback(object, element, index, value, object,
12178 dictionary->UpdateMaxNumberKey(index);
12179 // If a value has not been initialized we allow writing to it even if it
12180 // is read-only (a declared const that has not been initialized). If a
12181 // value is being defined we skip attribute checks completely.
12182 if (set_mode == DEFINE_PROPERTY) {
12183 details = PropertyDetails(
12184 attributes, NORMAL, details.dictionary_index());
12185 dictionary->DetailsAtPut(entry, details);
12186 } else if (details.IsReadOnly() && !element->IsTheHole()) {
// Read-only store: silent no-op in sloppy mode, TypeError in strict mode.
12187 if (strict_mode == SLOPPY) {
12188 return isolate->factory()->undefined_value();
12190 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12191 Handle<Object> args[2] = { number, object };
12192 THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
12193 HandleVector(args, 2)),
12197 // Elements of the arguments object in slow mode might be slow aliases.
12198 if (is_arguments && element->IsAliasedArgumentsEntry()) {
12199 Handle<AliasedArgumentsEntry> entry =
12200 Handle<AliasedArgumentsEntry>::cast(element);
// The aliased slot lives in the function context stored at slot 0.
12201 Handle<Context> context(Context::cast(elements->get(0)));
12202 int context_index = entry->aliased_context_slot();
12203 DCHECK(!context->get(context_index)->IsTheHole());
12204 context->set(context_index, *value);
12205 // For elements that are still writable we keep slow aliasing.
12206 if (!details.IsReadOnly()) value = element;
12208 dictionary->ValueAtPut(entry, *value);
12211 // Index not already used. Look for an accessor in the prototype chain.
12213 if (check_prototype) {
12215 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12216 object, index, value, &found, strict_mode);
12217 if (found) return result;
12220 // When we set the is_extensible flag to false we always force the
12221 // element into dictionary mode (and force them to stay there).
12222 if (!object->map()->is_extensible()) {
12223 if (strict_mode == SLOPPY) {
12224 return isolate->factory()->undefined_value();
12226 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12227 Handle<String> name = isolate->factory()->NumberToString(number);
12228 Handle<Object> args[1] = { name };
12229 THROW_NEW_ERROR(isolate, NewTypeError("object_not_extensible",
12230 HandleVector(args, 1)),
// Fresh entry: insert it with the requested attributes.
12235 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
12236 Handle<SeededNumberDictionary> new_dictionary =
12237 SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
// AddNumberEntry may reallocate the dictionary; rewire the backing store.
12239 if (*dictionary != *new_dictionary) {
12240 if (is_arguments) {
12241 elements->set(1, *new_dictionary);
12243 object->set_elements(*new_dictionary);
12245 dictionary = new_dictionary;
12249 // Update the array length if this JSObject is an array.
12250 if (object->IsJSArray()) {
12251 JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
12255 // Attempt to put this object back in fast case.
12256 if (object->ShouldConvertToFastElements()) {
12257 uint32_t new_length = 0;
12258 if (object->IsJSArray()) {
12259 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
12261 new_length = dictionary->max_number_key() + 1;
12263 bool has_smi_only_elements = false;
12264 bool should_convert_to_fast_double_elements =
12265 object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
12266 SetFastElementsCapacitySmiMode smi_mode =
12267 has_smi_only_elements ? kForceSmiElements : kAllowSmiElements;
12269 if (should_convert_to_fast_double_elements) {
12270 SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
12272 SetFastElementsCapacityAndLength(object, new_length, new_length,
12275 JSObject::ValidateElements(object);
12277 if (FLAG_trace_normalization) {
12278 OFStream os(stdout);
12279 os << "Object elements are fast case again:\n";
// Stores |value| at |index| on an object with FAST_(HOLEY_)DOUBLE_ELEMENTS.
// Non-numbers force a transition to generic fast elements; otherwise the raw
// double is written, growing the backing store (up to kMaxGap) or falling
// back to dictionary elements when growth would be too sparse.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (closing braces / else-arms) in this listing.
12287 MaybeHandle<Object> JSObject::SetFastDoubleElement(
12288 Handle<JSObject> object,
12290 Handle<Object> value,
12291 StrictMode strict_mode,
12292 bool check_prototype) {
12293 DCHECK(object->HasFastDoubleElements());
12295 Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
12296 uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
12298 // If storing to an element that isn't in the array, pass the store request
12299 // up the prototype chain before storing in the receiver's elements.
12300 if (check_prototype &&
12301 (index >= elms_length ||
12302 Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
12304 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12305 object, index, value, &found, strict_mode);
12306 if (found) return result;
12309 // If the value object is not a heap number, switch to fast elements and try
12311 bool value_is_smi = value->IsSmi();
12312 bool introduces_holes = true;
12313 uint32_t length = elms_length;
// For JSArrays holes are measured against the JS-visible length; for other
// receivers against the backing-store capacity.
12314 if (object->IsJSArray()) {
12315 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
12316 introduces_holes = index > length;
12318 introduces_holes = index >= elms_length;
// Double arrays can only hold numbers; anything else requires a transition
// to object elements and a retry through SetFastElement.
12321 if (!value->IsNumber()) {
12322 SetFastElementsCapacityAndLength(object, elms_length, length,
12323 kDontAllowSmiElements);
12324 Handle<Object> result;
12325 ASSIGN_RETURN_ON_EXCEPTION(
12326 object->GetIsolate(), result,
12327 SetFastElement(object, index, value, strict_mode, check_prototype),
12329 JSObject::ValidateElements(object);
12333 double double_value = value_is_smi
12334 ? static_cast<double>(Handle<Smi>::cast(value)->value())
12335 : Handle<HeapNumber>::cast(value)->value();
12337 // If the array is growing, and it's not growth by a single element at the
12338 // end, make sure that the ElementsKind is HOLEY.
12339 ElementsKind elements_kind = object->GetElementsKind();
12340 if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
12341 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12342 TransitionElementsKind(object, transitioned_kind);
12345 // Check whether there is extra space in the fixed array.
12346 if (index < elms_length) {
12347 Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
12348 elms->set(index, double_value);
12349 if (object->IsJSArray()) {
12350 // Update the length of the array if needed.
12351 uint32_t array_length = 0;
12353 Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12354 if (index >= array_length) {
12355 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
12361 // Allow gap in fast case.
12362 if ((index - elms_length) < kMaxGap) {
12363 // Try allocating extra space.
12364 int new_capacity = NewElementsCapacity(index+1);
12365 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12366 DCHECK(static_cast<uint32_t>(new_capacity) > index);
12367 SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
12368 FixedDoubleArray::cast(object->elements())->set(index, double_value);
12369 JSObject::ValidateElements(object);
12374 // Otherwise default to slow case.
12375 DCHECK(object->HasFastDoubleElements());
12376 DCHECK(object->map()->has_fast_double_elements());
12377 DCHECK(object->elements()->IsFixedDoubleArray() ||
12378 object->elements()->length() == 0);
// The gap would be too large: normalize to dictionary elements and retry.
12380 NormalizeElements(object);
12381 DCHECK(object->HasDictionaryElements());
12382 return SetElement(object, index, value, NONE, strict_mode, check_prototype);
12386 MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
12388 Handle<Object> value,
12389 PropertyAttributes attributes,
12390 StrictMode strict_mode) {
12391 if (object->IsJSProxy()) {
12392 return JSProxy::SetElementWithHandler(
12393 Handle<JSProxy>::cast(object), object, index, value, strict_mode);
12395 return JSObject::SetElement(
12396 Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
12400 MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
12402 Handle<Object> value,
12403 StrictMode strict_mode) {
12404 DCHECK(!object->HasExternalArrayElements());
12405 return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
// Top-level element store on a JSObject: coerces values for typed arrays,
// performs access checks, unwraps the global proxy, normalizes when
// attributes are requested, and - for observed objects - records
// Object.observe change/splice records around the actual store.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (closing braces / else-arms) in this listing.
12409 MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
12411 Handle<Object> value,
12412 PropertyAttributes attributes,
12413 StrictMode strict_mode,
12414 bool check_prototype,
12415 SetPropertyMode set_mode) {
12416 Isolate* isolate = object->GetIsolate();
// Typed-array stores coerce the incoming value to a number first (SIMD
// values and undefined are passed through as-is).
12418 if (object->HasExternalArrayElements() ||
12419 object->HasFixedTypedArrayElements()) {
12420 if (!value->IsNumber() && !value->IsFloat32x4() && !value->IsFloat64x2() &&
12421 !value->IsInt32x4() && !value->IsUndefined()) {
12422 ASSIGN_RETURN_ON_EXCEPTION(
12424 Execution::ToNumber(isolate, value), Object);
12428 // Check access rights if needed.
12429 if (object->IsAccessCheckNeeded()) {
12430 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) {
12431 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
12432 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// The global proxy forwards stores to the actual global object behind it.
12437 if (object->IsJSGlobalProxy()) {
12438 PrototypeIterator iter(isolate, object);
12439 if (iter.IsAtEnd()) return value;
12440 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
12442 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
12443 value, attributes, strict_mode, check_prototype, set_mode);
12446 // Don't allow element properties to be redefined for external arrays.
12447 if ((object->HasExternalArrayElements() ||
12448 object->HasFixedTypedArrayElements()) &&
12449 set_mode == DEFINE_PROPERTY) {
12450 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12451 Handle<Object> args[] = { object, number };
12452 THROW_NEW_ERROR(isolate, NewTypeError("redef_external_array_element",
12453 HandleVector(args, arraysize(args))),
12457 // Normalize the elements to enable attributes on the property.
12458 if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
12459 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
12460 // Make sure that we never go back to fast case.
12461 dictionary->set_requires_slow_elements();
// Fast path: unobserved objects skip all change-record bookkeeping below.
12464 if (!object->map()->is_observed()) {
12465 return object->HasIndexedInterceptor()
12466 ? SetElementWithInterceptor(object, index, value, attributes,
12467 strict_mode, check_prototype, set_mode)
12468 : SetElementWithoutInterceptor(object, index, value, attributes,
12469 strict_mode, check_prototype, set_mode);
// Observed object: capture the pre-store state (attributes, old value,
// old array length) so change records can be computed afterwards.
12472 Maybe<PropertyAttributes> maybe =
12473 JSReceiver::GetOwnElementAttribute(object, index);
12474 if (!maybe.has_value) return MaybeHandle<Object>();
12475 PropertyAttributes old_attributes = maybe.value;
12477 Handle<Object> old_value = isolate->factory()->the_hole_value();
12478 Handle<Object> old_length_handle;
12479 Handle<Object> new_length_handle;
12481 if (old_attributes != ABSENT) {
12482 if (GetOwnElementAccessorPair(object, index).is_null()) {
12483 old_value = Object::GetElement(isolate, object, index).ToHandleChecked();
12485 } else if (object->IsJSArray()) {
12486 // Store old array length in case adding an element grows the array.
12487 old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12491 // Check for lookup interceptor
12492 Handle<Object> result;
12493 ASSIGN_RETURN_ON_EXCEPTION(
12495 object->HasIndexedInterceptor()
12496 ? SetElementWithInterceptor(
12497 object, index, value, attributes,
12498 strict_mode, check_prototype, set_mode)
12499 : SetElementWithoutInterceptor(
12500 object, index, value, attributes,
12501 strict_mode, check_prototype, set_mode),
// Compare post-store attributes against the captured state to decide which
// Object.observe records ("add"/"update"/"reconfigure"/splice) to enqueue.
12504 Handle<String> name = isolate->factory()->Uint32ToString(index);
12505 maybe = GetOwnElementAttribute(object, index);
12506 if (!maybe.has_value) return MaybeHandle<Object>();
12507 PropertyAttributes new_attributes = maybe.value;
12509 if (old_attributes == ABSENT) {
12510 if (object->IsJSArray() &&
12511 !old_length_handle->SameValue(
12512 Handle<JSArray>::cast(object)->length())) {
12513 new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12515 uint32_t old_length = 0;
12516 uint32_t new_length = 0;
12517 CHECK(old_length_handle->ToArrayIndex(&old_length));
12518 CHECK(new_length_handle->ToArrayIndex(&new_length));
// Adding an element that grew the array is reported as a single splice
// (the individual add/update records are folded into it).
12520 BeginPerformSplice(Handle<JSArray>::cast(object));
12521 EnqueueChangeRecord(object, "add", name, old_value);
12522 EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
12523 old_length_handle);
12524 EndPerformSplice(Handle<JSArray>::cast(object));
12525 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
12526 EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
12527 new_length - old_length);
12529 EnqueueChangeRecord(object, "add", name, old_value);
12531 } else if (old_value->IsTheHole()) {
12532 EnqueueChangeRecord(object, "reconfigure", name, old_value);
12534 Handle<Object> new_value =
12535 Object::GetElement(isolate, object, index).ToHandleChecked();
12536 bool value_changed = !old_value->SameValue(*new_value);
12537 if (old_attributes != new_attributes) {
12538 if (!value_changed) old_value = isolate->factory()->the_hole_value();
12539 EnqueueChangeRecord(object, "reconfigure", name, old_value);
12540 } else if (value_changed) {
12541 EnqueueChangeRecord(object, "update", name, old_value);
// Dispatches an element store to the handler for the receiver's current
// ElementsKind (fast smi/object/double, typed arrays, dictionary, or
// sloppy-arguments), after optional abuse tracing and the read-only
// array-length check.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (closing braces / else-arms) in this listing.
12549 MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
12550 Handle<JSObject> object,
12552 Handle<Object> value,
12553 PropertyAttributes attributes,
12554 StrictMode strict_mode,
12555 bool check_prototype,
12556 SetPropertyMode set_mode) {
// Non-default attributes are only representable in dictionary mode; callers
// must have normalized first (see JSObject::SetElement).
12557 DCHECK(object->HasDictionaryElements() ||
12558 object->HasDictionaryArgumentsElements() ||
12559 (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
12560 Isolate* isolate = object->GetIsolate();
12561 if (FLAG_trace_external_array_abuse &&
12562 IsExternalArrayElementsKind(object->GetElementsKind())) {
12563 CheckArrayAbuse(object, "external elements write", index);
12565 if (FLAG_trace_js_array_abuse &&
12566 !IsExternalArrayElementsKind(object->GetElementsKind())) {
12567 if (object->IsJSArray()) {
12568 CheckArrayAbuse(object, "elements write", index, true);
// A store past a read-only "length" is a sloppy-mode no-op / strict-mode
// TypeError (the sloppy branch body is elided in this listing).
12571 if (object->IsJSArray() && JSArray::WouldChangeReadOnlyLength(
12572 Handle<JSArray>::cast(object), index)) {
12573 if (strict_mode == SLOPPY) {
12576 return JSArray::ReadOnlyLengthError(Handle<JSArray>::cast(object));
12579 switch (object->GetElementsKind()) {
12580 case FAST_SMI_ELEMENTS:
12581 case FAST_ELEMENTS:
12582 case FAST_HOLEY_SMI_ELEMENTS:
12583 case FAST_HOLEY_ELEMENTS:
12584 return SetFastElement(object, index, value, strict_mode, check_prototype);
12585 case FAST_DOUBLE_ELEMENTS:
12586 case FAST_HOLEY_DOUBLE_ELEMENTS:
12587 return SetFastDoubleElement(object, index, value, strict_mode,
// Expands to one pair of cases per typed-array element type (external and
// on-heap variants), each delegating to the array class's SetValue.
12590 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
12591 case EXTERNAL_##TYPE##_ELEMENTS: { \
12592 Handle<External##Type##Array> array( \
12593 External##Type##Array::cast(object->elements())); \
12594 return External##Type##Array::SetValue(array, index, value); \
12596 case TYPE##_ELEMENTS: { \
12597 Handle<Fixed##Type##Array> array( \
12598 Fixed##Type##Array::cast(object->elements())); \
12599 return Fixed##Type##Array::SetValue(array, index, value); \
12602 TYPED_ARRAYS(TYPED_ARRAY_CASE)
12604 #undef TYPED_ARRAY_CASE
12606 case DICTIONARY_ELEMENTS:
12607 return SetDictionaryElement(object, index, value, attributes, strict_mode,
12610 case SLOPPY_ARGUMENTS_ELEMENTS: {
// parameter_map layout: slot 0 = context, slot 1 = backing store,
// slots 2+ = per-parameter alias entries (hence the index + 2 below).
12611 Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
12612 uint32_t length = parameter_map->length();
12613 Handle<Object> probe = index < length - 2 ?
12614 Handle<Object>(parameter_map->get(index + 2), isolate) :
12616 if (!probe.is_null() && !probe->IsTheHole()) {
12617 Handle<Context> context(Context::cast(parameter_map->get(0)));
12618 int context_index = Handle<Smi>::cast(probe)->value();
12619 DCHECK(!context->get(context_index)->IsTheHole());
12620 context->set(context_index, *value);
12621 // Redefining attributes of an aliased element destroys fast aliasing.
12622 if (set_mode == SET_PROPERTY || attributes == NONE) return value;
12623 parameter_map->set_the_hole(index + 2);
12624 // For elements that are still writable we re-establish slow aliasing.
12625 if ((attributes & READ_ONLY) == 0) {
12626 value = Handle<Object>::cast(
12627 isolate->factory()->NewAliasedArgumentsEntry(context_index));
12630 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
12631 if (arguments->IsDictionary()) {
12632 return SetDictionaryElement(object, index, value, attributes,
12637 return SetFastElement(object, index, value, strict_mode,
12642 // All possible cases have been handled above. Add a return to avoid the
12643 // complaints from the compiler.
12645 return isolate->factory()->null_value();
// Threshold used by the pretenuring heuristics - presumably the fraction of
// mementos that must survive before a site is tenured.
// NOTE(review): the code that consumes this constant is outside this chunk;
// confirm the exact semantics against the pretenuring decision logic.
12649 const double AllocationSite::kPretenureRatio = 0.85;
12652 void AllocationSite::ResetPretenureDecision() {
12653 set_pretenure_decision(kUndecided);
12654 set_memento_found_count(0);
12655 set_memento_create_count(0);
12659 PretenureFlag AllocationSite::GetPretenureMode() {
12660 PretenureDecision mode = pretenure_decision();
12661 // Zombie objects "decide" to be untenured.
12662 return mode == kTenure ? TENURED : NOT_TENURED;
// Walks the heap's weak list of allocation sites and reports whether any
// site records |this| as its nested site. Only used when tracing allocation
// sites (enforced by the DCHECK).
// NOTE(review): this listing elides lines 12672-12673 and 12675-12679 -
// presumably the 'return true;' inside the if and the final 'return false;'.
12666 bool AllocationSite::IsNestedSite() {
12667 DCHECK(FLAG_trace_track_allocation_sites);
12668 Object* current = GetHeap()->allocation_sites_list();
12669 while (current->IsAllocationSite()) {
12670 AllocationSite* current_site = AllocationSite::cast(current);
12671 if (current_site->nested_site() == this) {
12674 current = current_site->weak_next();
// Records an observed elements-kind transition on |site|. If the site holds
// a boilerplate JSArray literal, the boilerplate itself is transitioned
// (for small arrays only); otherwise just the site's recorded kind is
// updated. Either way, dependent optimized code is deoptimized.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (closing braces / else-arms) in this listing.
12680 void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
12681 ElementsKind to_kind) {
12682 Isolate* isolate = site->GetIsolate();
12684 if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
12685 Handle<JSArray> transition_info =
12686 handle(JSArray::cast(site->transition_info()));
12687 ElementsKind kind = transition_info->GetElementsKind();
12688 // if kind is holey ensure that to_kind is as well.
12689 if (IsHoleyElementsKind(kind)) {
12690 to_kind = GetHoleyElementsKind(to_kind);
12692 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
12693 // If the array is huge, it's not likely to be defined in a local
12694 // function, so we shouldn't make new instances of it very often.
12695 uint32_t length = 0;
12696 CHECK(transition_info->length()->ToArrayIndex(&length));
12697 if (length <= kMaximumArrayBytesToPretransition) {
12698 if (FLAG_trace_track_allocation_sites) {
12699 bool is_nested = site->IsNestedSite();
12701 "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
12702 reinterpret_cast<void*>(*site),
12703 is_nested ? "(nested)" : "",
12704 ElementsKindToString(kind),
12705 ElementsKindToString(to_kind));
12707 JSObject::TransitionElementsKind(transition_info, to_kind);
// Optimized code baked the old kind into allocations; invalidate it.
12708 site->dependent_code()->DeoptimizeDependentCodeGroup(
12709 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Non-literal site: only the kind stored on the site itself is updated.
12713 ElementsKind kind = site->GetElementsKind();
12714 // if kind is holey ensure that to_kind is as well.
12715 if (IsHoleyElementsKind(kind)) {
12716 to_kind = GetHoleyElementsKind(to_kind);
12718 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
12719 if (FLAG_trace_track_allocation_sites) {
12720 PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
12721 reinterpret_cast<void*>(*site),
12722 ElementsKindToString(kind),
12723 ElementsKindToString(to_kind));
12725 site->SetElementsKind(to_kind);
12726 site->dependent_code()->DeoptimizeDependentCodeGroup(
12727 isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Registers the code being compiled (|info|) as dependent on this site for
// the dependency group derived from |reason|, so it can be deoptimized if
// the site's feedback later changes. Also records the site on the
// compilation's own dependency list.
// NOTE(review): line 12735 (presumably the 'reason' parameter declaration)
// is elided in this listing; 'reason' is used below.
12734 void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
12736 CompilationInfo* info) {
12737 DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
12738 Handle<DependentCode> dep(site->dependent_code());
12739 Handle<DependentCode> codes =
12740 DependentCode::Insert(dep, group, info->object_wrapper());
// Insert may have reallocated the DependentCode array; rewire if so.
12741 if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
12742 info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
// Human-readable name for a PretenureDecision value, used in tracing output.
// NOTE(review): the listing elides the lines after 'default:' (presumably
// the closing braces and a fall-back return after UNREACHABLE()).
12746 const char* AllocationSite::PretenureDecisionName(PretenureDecision decision) {
12747 switch (decision) {
12748 case kUndecided: return "undecided";
12749 case kDontTenure: return "don't tenure";
12750 case kMaybeTenure: return "maybe tenure";
12751 case kTenure: return "tenure";
12752 case kZombie: return "zombie";
12753 default: UNREACHABLE();
// Feeds an elements-kind transition back to the AllocationSite that created
// |object|, if one can be found. Only new-space JSArrays can still carry an
// AllocationMemento; everything else returns early.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (scope braces) in this listing.
12759 void JSObject::UpdateAllocationSite(Handle<JSObject> object,
12760 ElementsKind to_kind) {
12761 if (!object->IsJSArray()) return;
12763 Heap* heap = object->GetHeap();
// Objects promoted out of new space have lost their memento.
12764 if (!heap->InNewSpace(*object)) return;
12766 Handle<AllocationSite> site;
// FindAllocationMemento inspects raw heap layout; no allocation may happen
// while the raw memento pointer is live.
12768 DisallowHeapAllocation no_allocation;
12770 AllocationMemento* memento = heap->FindAllocationMemento(*object);
12771 if (memento == NULL) return;
12773 // Walk through to the Allocation Site
12774 site = handle(memento->GetAllocationSite());
12776 AllocationSite::DigestTransitionFeedback(site, to_kind);
// Transitions |object| from its current elements kind to |to_kind|.
// Map-only transitions (same representation) just swap the map; smi->double
// and double->object transitions also rewrite the backing store.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (closing braces / else-arms and the trailing UNREACHABLE()) in this
// listing.
12780 void JSObject::TransitionElementsKind(Handle<JSObject> object,
12781 ElementsKind to_kind) {
12782 ElementsKind from_kind = object->map()->elements_kind();
// Holeyness is sticky: once holey, the target kind must be holey too.
12784 if (IsFastHoleyElementsKind(from_kind)) {
12785 to_kind = GetHoleyElementsKind(to_kind);
12788 if (from_kind == to_kind) return;
12789 // Don't update the site if to_kind isn't fast
12790 if (IsFastElementsKind(to_kind)) {
12791 UpdateAllocationSite(object, to_kind);
12794 Isolate* isolate = object->GetIsolate();
// These combinations share a backing-store representation, so only the map
// needs to change.
12795 if (object->elements() == isolate->heap()->empty_fixed_array() ||
12796 (IsFastSmiOrObjectElementsKind(from_kind) &&
12797 IsFastSmiOrObjectElementsKind(to_kind)) ||
12798 (from_kind == FAST_DOUBLE_ELEMENTS &&
12799 to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
12800 DCHECK(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
12801 // No change is needed to the elements() buffer, the transition
12802 // only requires a map change.
12803 Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
12804 MigrateToMap(object, new_map);
12805 if (FLAG_trace_elements_transitions) {
12806 Handle<FixedArrayBase> elms(object->elements());
12807 PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
12812 Handle<FixedArrayBase> elms(object->elements());
12813 uint32_t capacity = static_cast<uint32_t>(elms->length());
12814 uint32_t length = capacity;
12816 if (object->IsJSArray()) {
12817 Object* raw_length = Handle<JSArray>::cast(object)->length();
12818 if (raw_length->IsUndefined()) {
12819 // If length is undefined, then JSArray is being initialized and has no
12820 // elements, assume a length of zero.
12823 CHECK(raw_length->ToArrayIndex(&length));
// Representation change: allocate a new backing store of the target type
// and copy the elements across.
12827 if (IsFastSmiElementsKind(from_kind) &&
12828 IsFastDoubleElementsKind(to_kind)) {
12829 SetFastDoubleElementsCapacityAndLength(object, capacity, length);
12830 JSObject::ValidateElements(object);
12834 if (IsFastDoubleElementsKind(from_kind) &&
12835 IsFastObjectElementsKind(to_kind)) {
12836 SetFastElementsCapacityAndLength(object, capacity, length,
12837 kDontAllowSmiElements);
12838 JSObject::ValidateElements(object);
12842 // This method should never be called for any other case than the ones
// Checks that an elements-kind transition is legal: it must move towards a
// more general kind, and a holey kind may never become packed again.
// NOTE(review): lines 12853-12855 are elided in this listing - presumably
// 'return false;' and the closing brace of the if.
12849 bool Map::IsValidElementsTransition(ElementsKind from_kind,
12850 ElementsKind to_kind) {
12851 // Transitions can't go backwards.
12852 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
12856 // Transitions from HOLEY -> PACKED are not allowed.
12857 return !IsFastHoleyElementsKind(from_kind) ||
12858 IsFastHoleyElementsKind(to_kind);
// Grows the array's "length" property to index + 1 after a store at |index|,
// if the store went past the current length. 0xffffffff is excluded because
// it is not a valid array index (length would exceed 2^32 - 1).
// NOTE(review): line 12863 (presumably the 'uint32_t index' parameter) is
// elided in this listing.
12862 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
12864 Handle<Object> value) {
12865 uint32_t old_len = 0;
12866 CHECK(array->length()->ToArrayIndex(&old_len));
12867 // Check to see if we need to update the length. For now, we make
12868 // sure that the length stays within 32-bits (unsigned).
12869 if (index >= old_len && index != 0xffffffff) {
// index + 1 may not fit a Smi, so the length is stored as a heap number.
12870 Handle<Object> len = array->GetIsolate()->factory()->NewNumber(
12871 static_cast<double>(index) + 1);
12872 array->set_length(*len);
12877 bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) {
12878 Isolate* isolate = jsarray_map->GetIsolate();
12879 DCHECK(!jsarray_map->is_dictionary_map());
12880 LookupResult lookup(isolate);
12881 Handle<Name> length_string = isolate->factory()->length_string();
12882 jsarray_map->LookupDescriptor(NULL, *length_string, &lookup);
12883 return lookup.IsReadOnly();
// Returns whether storing at |index| would have to grow the array's length
// while that length is read-only. Only stores at or past the current length
// require the lookup; the CHECKs pin down the invariant that "length" is
// always found as an accessor on a JSArray.
// NOTE(review): line 12888 (presumably 'uint32_t index') and the trailing
// 'return false;' / closing braces (12898+) are elided in this listing.
12887 bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array,
12889 uint32_t length = 0;
12890 CHECK(array->length()->ToArrayIndex(&length));
12891 if (length <= index) {
12892 LookupIterator it(array, array->GetIsolate()->factory()->length_string(),
12893 LookupIterator::OWN_SKIP_INTERCEPTOR);
12894 CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
12895 CHECK(it.IsFound());
12896 CHECK_EQ(LookupIterator::ACCESSOR, it.state());
12897 return it.IsReadOnly();
// Throws the strict-mode TypeError for an attempted write that would change
// a read-only array "length".
// NOTE(review): the continuation of THROW_NEW_ERROR (line 12909+, the
// 'Object);' argument and closing brace) is elided in this listing.
12903 MaybeHandle<Object> JSArray::ReadOnlyLengthError(Handle<JSArray> array) {
12904 Isolate* isolate = array->GetIsolate();
12905 Handle<Name> length = isolate->factory()->length_string();
12906 Handle<Object> args[2] = { length, array };
12907 THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
12908 HandleVector(args, arraysize(args))),
// Reads element |index| through the object's indexed interceptor. If the
// interceptor's getter does not produce a result, falls back to the normal
// elements accessor and then to the prototype chain.
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (the 'uint32_t index' parameter, braces, LOG call header) in this listing.
12913 MaybeHandle<Object> JSObject::GetElementWithInterceptor(
12914 Handle<JSObject> object,
12915 Handle<Object> receiver,
12917 Isolate* isolate = object->GetIsolate();
12919 // Make sure that the top context does not change when doing
12920 // callbacks or interceptor calls.
12921 AssertNoContextChange ncc(isolate);
12923 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
12924 if (!interceptor->getter()->IsUndefined()) {
12925 v8::IndexedPropertyGetterCallback getter =
12926 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
12928 ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
12929 PropertyCallbackArguments
12930 args(isolate, interceptor->data(), *receiver, *object);
12931 v8::Handle<v8::Value> result = args.Call(getter, index);
// The embedder callback may have scheduled an exception.
12932 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
12933 if (!result.IsEmpty()) {
12934 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
12935 result_internal->VerifyApiCallResultType();
12936 // Rebox handle before return.
12937 return handle(*result_internal, isolate);
// Interceptor declined: read the element the ordinary way.
12941 ElementsAccessor* handler = object->GetElementsAccessor();
12942 Handle<Object> result;
12943 ASSIGN_RETURN_ON_EXCEPTION(
12944 isolate, result, handler->Get(receiver, object, index),
12946 if (!result->IsTheHole()) return result;
// Not present on the object itself: continue the lookup on the prototype.
12948 PrototypeIterator iter(isolate, object);
12949 if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
12950 return Object::GetElementWithReceiver(
12951 isolate, PrototypeIterator::GetCurrent(iter), receiver, index);
// Reports whether more than half of the element capacity is in use (or the
// object has no elements at all) - the density test used when deciding to
// convert back to fast elements.
// NOTE(review): lines 12956-12957 (presumably the declarations of 'capacity'
// and 'used') are elided in this listing.
12955 bool JSObject::HasDenseElements() {
12958 GetElementsCapacityAndUsage(&capacity, &used);
12959 return (capacity == 0) || (used > (capacity / 2));
// Computes, for every elements kind, the backing-store capacity and the
// number of slots actually occupied. Used by the fast/slow conversion
// heuristics (HasDenseElements, ShouldConvertToSlowElements).
// NOTE(review): jumps in the embedded line numbers indicate elided lines
// (break statements, case labels, braces) in this listing.
12963 void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
12967 FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
12968 FixedArray* backing_store = NULL;
12969 switch (GetElementsKind()) {
12970 case SLOPPY_ARGUMENTS_ELEMENTS:
// Unwrap the parameter map (backing store at slot 1); it may itself be a
// dictionary, in which case fall through to dictionary-style accounting.
12971 backing_store_base =
12972 FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
12973 backing_store = FixedArray::cast(backing_store_base);
12974 if (backing_store->IsDictionary()) {
12975 SeededNumberDictionary* dictionary =
12976 SeededNumberDictionary::cast(backing_store);
12977 *capacity = dictionary->Capacity();
12978 *used = dictionary->NumberOfElements();
// Packed fast elements on a JSArray: usage equals the JS length.
12982 case FAST_SMI_ELEMENTS:
12983 case FAST_ELEMENTS:
12985 *capacity = backing_store_base->length();
12986 *used = Smi::cast(JSArray::cast(this)->length())->value();
12989 // Fall through if packing is not guaranteed.
12990 case FAST_HOLEY_SMI_ELEMENTS:
12991 case FAST_HOLEY_ELEMENTS:
// Holey elements: count the non-hole slots one by one.
12992 backing_store = FixedArray::cast(backing_store_base);
12993 *capacity = backing_store->length();
12994 for (int i = 0; i < *capacity; ++i) {
12995 if (!backing_store->get(i)->IsTheHole()) ++(*used);
12998 case DICTIONARY_ELEMENTS: {
12999 SeededNumberDictionary* dictionary = element_dictionary();
13000 *capacity = dictionary->Capacity();
13001 *used = dictionary->NumberOfElements();
13004 case FAST_DOUBLE_ELEMENTS:
13006 *capacity = backing_store_base->length();
13007 *used = Smi::cast(JSArray::cast(this)->length())->value();
13010 // Fall through if packing is not guaranteed.
13011 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13012 *capacity = elements()->length();
13013 if (*capacity == 0) break;
13014 FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
13015 for (int i = 0; i < *capacity; i++) {
13016 if (!elms->is_the_hole(i)) ++(*used);
13021 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13022 case EXTERNAL_##TYPE##_ELEMENTS: \
13023 case TYPE##_ELEMENTS: \
13025 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13026 #undef TYPED_ARRAY_CASE
13028 // External arrays are considered 100% used.
13029 FixedArrayBase* external_array = FixedArrayBase::cast(elements());
13030 *capacity = external_array->length();
13031 *used = external_array->length();
// Predicts whether storing at |key| would force a transition from fast to
// slow (dictionary) elements: either the index is more than kMaxGap beyond
// the current capacity, or the grown capacity would trip the fast/slow
// space heuristic in ShouldConvertToSlowElements.
13038 bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
13040 if (HasFastElements() && key->ToArrayIndex(&index)) {
13041 Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
13042 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
13043 if (index >= capacity) {
// A write far past the end would leave a huge run of holes; go slow.
13044 if ((index - capacity) >= kMaxGap) return true;
13045 uint32_t new_capacity = NewElementsCapacity(index + 1);
13046 return ShouldConvertToSlowElements(new_capacity);
// Heuristic for switching from fast to dictionary elements: small capacities
// (and somewhat larger ones for new-space objects) always stay fast;
// otherwise go slow once the fast backing store would be >= 3x the size a
// dictionary would need for the currently used elements.
13053 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
13054 STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
13055 kMaxUncheckedFastElementsLength);
13056 if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
13057 (new_capacity <= kMaxUncheckedFastElementsLength &&
13058 GetHeap()->InNewSpace(this))) {
13061 // If the fast-case backing storage takes up roughly three times as
13062 // much space (in machine words) as a dictionary backing storage
13063 // would, the object should have slow elements.
13064 int old_capacity = 0;
13065 int used_elements = 0;
13066 GetElementsCapacityAndUsage(&old_capacity, &used_elements);
13067 int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
13068 SeededNumberDictionary::kEntrySize;
13069 return 3 * dictionary_size <= new_capacity;
// Heuristic for converting dictionary (or dictionary-backed sloppy
// arguments) elements back to fast elements. Refuses when elements are
// sparse, access checks are required, the object is observed, or the
// dictionary demands slow elements; otherwise compares dictionary size
// against the array size a fast store would need.
13073 bool JSObject::ShouldConvertToFastElements() {
13074 DCHECK(HasDictionaryElements() || HasDictionaryArgumentsElements());
13075 // If the elements are sparse, we should not go back to fast case.
13076 if (!HasDenseElements()) return false;
13077 // An object requiring access checks is never allowed to have fast
13078 // elements. If it had fast elements we would skip security checks.
13079 if (IsAccessCheckNeeded()) return false;
13080 // Observed objects may not go to fast mode because they rely on map checks,
13081 // and for fast element accesses we sometimes check element kinds only.
13082 if (map()->is_observed()) return false;
// For sloppy arguments the dictionary lives in slot 1 of the parameter map.
13084 FixedArray* elements = FixedArray::cast(this->elements());
13085 SeededNumberDictionary* dictionary = NULL;
13086 if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
13087 dictionary = SeededNumberDictionary::cast(elements->get(1));
13089 dictionary = SeededNumberDictionary::cast(elements);
13091 // If an element has been added at a very high index in the elements
13092 // dictionary, we cannot go back to fast case.
13093 if (dictionary->requires_slow_elements()) return false;
13094 // If the dictionary backing storage takes up roughly half as much
13095 // space (in machine words) as a fast-case backing storage would,
13096 // the object should have fast elements.
13097 uint32_t array_size = 0;
13099 CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
13101 array_size = dictionary->max_number_key();
13103 uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
13104 SeededNumberDictionary::kEntrySize;
13105 return 2 * dictionary_size >= array_size;
// Decides whether dictionary elements should become FAST_DOUBLE_ELEMENTS:
// true only if unboxing is enabled, every numeric-keyed value is a number,
// and at least one of them is a non-Smi (i.e. a heap double). Also reports
// via |has_smi_only_elements| whether all values were Smis instead.
13109 bool JSObject::ShouldConvertToFastDoubleElements(
13110 bool* has_smi_only_elements) {
13111 *has_smi_only_elements = false;
// Sloppy arguments keep an aliasing parameter map, so never convert them.
13112 if (HasSloppyArgumentsElements()) return false;
13113 if (FLAG_unbox_double_arrays) {
13114 DCHECK(HasDictionaryElements());
13115 SeededNumberDictionary* dictionary = element_dictionary();
13116 bool found_double = false;
13117 for (int i = 0; i < dictionary->Capacity(); i++) {
13118 Object* key = dictionary->KeyAt(i);
13119 if (key->IsNumber()) {
13120 Object* value = dictionary->ValueAt(i);
// Any non-number value rules out a pure-double representation.
13121 if (!value->IsNumber()) return false;
13122 if (!value->IsSmi()) {
13123 found_double = true;
13127 *has_smi_only_elements = !found_double;
13128 return found_double;
13135 // Certain compilers request function template instantiation when they
13136 // see the definition of the other template functions in the
13137 // class. This requires us to have the template functions put
13138 // together, so even though this function belongs in objects-debug.cc,
13139 // we keep it here instead to satisfy certain compilers.
13140 #ifdef OBJECT_PRINT
// Debug-only dump of every live key/value pair in the dictionary to |os|.
// String keys are printed as strings; values via Brief().
13141 template <typename Derived, typename Shape, typename Key>
13142 void Dictionary<Derived, Shape, Key>::Print(OStream& os) { // NOLINT
13143 int capacity = DerivedHashTable::Capacity();
13144 for (int i = 0; i < capacity; i++) {
13145 Object* k = DerivedHashTable::KeyAt(i);
// Skip empty and deleted entries; IsKey() filters both.
13146 if (DerivedHashTable::IsKey(k)) {
13148 if (k->IsString()) {
13149 String::cast(k)->StringPrint(os);
13153 os << ": " << Brief(ValueAt(i)) << "\n";
// Copies every live value out of the dictionary into |elements|, packed
// from index 0. The caller must size |elements| to exactly the number of
// live entries (checked by the trailing DCHECK).
13160 template<typename Derived, typename Shape, typename Key>
13161 void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
13163 int capacity = DerivedHashTable::Capacity();
// No-GC scope lets us cache the write barrier mode for the whole copy.
13164 DisallowHeapAllocation no_gc;
13165 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13166 for (int i = 0; i < capacity; i++) {
13167 Object* k = Dictionary::KeyAt(i);
13168 if (Dictionary::IsKey(k)) {
13169 elements->set(pos++, ValueAt(i), mode);
13172 DCHECK(pos == elements->length());
// Returns the named-property interceptor installed via the API on this
// object's constructor. Only valid when map()->has_named_interceptor().
13176 InterceptorInfo* JSObject::GetNamedInterceptor() {
13177 DCHECK(map()->has_named_interceptor());
13178 JSFunction* constructor = JSFunction::cast(map()->constructor());
13179 DCHECK(constructor->shared()->IsApiFunction());
13181 constructor->shared()->get_api_func_data()->named_property_handler();
13182 return InterceptorInfo::cast(result);
// Returns the indexed-property interceptor installed via the API on this
// object's constructor. Only valid when map()->has_indexed_interceptor().
13186 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13187 DCHECK(map()->has_indexed_interceptor());
13188 JSFunction* constructor = JSFunction::cast(map()->constructor());
13189 DCHECK(constructor->shared()->IsApiFunction());
13191 constructor->shared()->get_api_func_data()->indexed_property_handler();
13192 return InterceptorInfo::cast(result);
// Invokes |holder|'s named-property interceptor getter for |name| on behalf
// of |receiver|. Returns an empty MaybeHandle when the interceptor has no
// getter or declines to handle the property; propagates any scheduled
// exception raised by the callback.
13196 MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
13197 Handle<JSObject> holder,
13198 Handle<Object> receiver,
13199 Handle<Name> name) {
13200 Isolate* isolate = holder->GetIsolate();
13202 // TODO(rossberg): Support symbols in the API.
13203 if (name->IsSymbol()) return isolate->factory()->undefined_value();
13205 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor(), isolate);
13206 Handle<String> name_string = Handle<String>::cast(name);
13208 if (interceptor->getter()->IsUndefined()) return MaybeHandle<Object>();
13210 v8::NamedPropertyGetterCallback getter =
13211 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
13213 ApiNamedPropertyAccess("interceptor-named-get", *holder, *name));
13214 PropertyCallbackArguments
13215 args(isolate, interceptor->data(), *receiver, *holder);
13216 v8::Handle<v8::Value> result =
13217 args.Call(getter, v8::Utils::ToLocal(name_string));
13218 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Empty result means "not intercepted" — let the caller continue lookup.
13219 if (result.IsEmpty()) return MaybeHandle<Object>();
13221 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13222 result_internal->VerifyApiCallResultType();
13223 // Rebox handle before return
13224 return handle(*result_internal, isolate);
13228 // Compute the property keys from the interceptor.
13229 // TODO(rossberg): support symbols in API, and filter here if needed.
// Calls the named-property enumerator callback (if any) and returns the
// resulting key array. Empty MaybeHandle when there is no enumerator or it
// produced no result.
13230 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor(
13231 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13232 Isolate* isolate = receiver->GetIsolate();
13233 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
13234 PropertyCallbackArguments
13235 args(isolate, interceptor->data(), *receiver, *object);
13236 v8::Handle<v8::Object> result;
13237 if (!interceptor->enumerator()->IsUndefined()) {
13238 v8::NamedPropertyEnumeratorCallback enum_fun =
13239 v8::ToCData<v8::NamedPropertyEnumeratorCallback>(
13240 interceptor->enumerator());
13241 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
13242 result = args.Call(enum_fun);
13244 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13245 #if ENABLE_EXTRA_CHECKS
13246 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13247 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13249 // Rebox before returning.
13250 return handle(*v8::Utils::OpenHandle(*result), isolate);
13254 // Compute the element keys from the interceptor.
// Indexed-property twin of GetKeysForNamedInterceptor: invokes the indexed
// enumerator callback and returns its key array, or an empty MaybeHandle.
13255 MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor(
13256 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13257 Isolate* isolate = receiver->GetIsolate();
13258 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
13259 PropertyCallbackArguments
13260 args(isolate, interceptor->data(), *receiver, *object);
13261 v8::Handle<v8::Object> result;
13262 if (!interceptor->enumerator()->IsUndefined()) {
13263 v8::IndexedPropertyEnumeratorCallback enum_fun =
13264 v8::ToCData<v8::IndexedPropertyEnumeratorCallback>(
13265 interceptor->enumerator());
13266 LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
13267 result = args.Call(enum_fun);
13269 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13270 #if ENABLE_EXTRA_CHECKS
13271 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13272 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13274 // Rebox before returning.
13275 return handle(*v8::Utils::OpenHandle(*result), isolate);
// True if |object| itself (interceptors skipped) has property |key|.
// Returns an empty Maybe if the attribute lookup failed (e.g. exception).
13279 Maybe<bool> JSObject::HasRealNamedProperty(Handle<JSObject> object,
13280 Handle<Name> key) {
13281 LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
13282 Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
13283 if (!maybe_result.has_value) return Maybe<bool>();
13284 return maybe(it.IsFound());
// True if |object| has a real (non-interceptor) element at |index|.
// Performs an access check first, and for a global proxy forwards the query
// to the underlying global object found through the prototype chain.
13288 Maybe<bool> JSObject::HasRealElementProperty(Handle<JSObject> object,
13290 Isolate* isolate = object->GetIsolate();
13291 HandleScope scope(isolate);
13292 // Check access rights if needed.
13293 if (object->IsAccessCheckNeeded()) {
13294 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
13295 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
13296 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<bool>());
13297 return maybe(false);
13301 if (object->IsJSGlobalProxy()) {
13302 HandleScope scope(isolate);
13303 PrototypeIterator iter(isolate, object);
// A detached global proxy has no prototype — report "not found".
13304 if (iter.IsAtEnd()) return maybe(false);
13305 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
13306 return HasRealElementProperty(
13307 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
13310 Maybe<PropertyAttributes> result =
13311 GetElementAttributeWithoutInterceptor(object, object, index, false);
13312 if (!result.has_value) return Maybe<bool>();
13313 return maybe(result.value != ABSENT);
// True if |object| itself has |key| defined as an accessor (callback)
// property, skipping interceptors. Empty Maybe on lookup failure.
13317 Maybe<bool> JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
13318 Handle<Name> key) {
13319 LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
13320 Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
13321 if (!maybe_result.has_value) return Maybe<bool>();
13322 return maybe(it.state() == LookupIterator::ACCESSOR);
// Counts own properties matching |filter|. Fast-properties objects can use
// the map's descriptor count directly (or the cached enum length when only
// enumerable properties are wanted); otherwise the dictionary is scanned.
13326 int JSObject::NumberOfOwnProperties(PropertyAttributes filter) {
13327 if (HasFastProperties()) {
13328 Map* map = this->map();
13329 if (filter == NONE) return map->NumberOfOwnDescriptors();
13330 if (filter & DONT_ENUM) {
13331 int result = map->EnumLength();
13332 if (result != kInvalidEnumCacheSentinel) return result;
13334 return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
13336 return property_dictionary()->NumberOfElementsFilterAttributes(filter);
// Swaps entries i and j in this array and, when |numbers| is a distinct
// array, the corresponding Smi entries in |numbers| as well — keeping the
// content/number pairs aligned for the pair-sorting routines below.
13340 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
13341 Object* temp = get(i);
13344 if (this != numbers) {
13345 temp = numbers->get(i);
13346 numbers->set(i, Smi::cast(numbers->get(j)));
13347 numbers->set(j, Smi::cast(temp));
// Insertion sort of (content, numbers) pairs by the numeric keys in
// |numbers|. Used for small arrays where insertion sort beats heap sort.
13352 static void InsertionSortPairs(FixedArray* content,
13353 FixedArray* numbers,
13355 for (int i = 1; i < len; i++) {
13358 (NumberToUint32(numbers->get(j - 1)) >
13359 NumberToUint32(numbers->get(j)))) {
13360 content->SwapPairs(numbers, j - 1, j);
// In-place heap sort of (content, numbers) pairs keyed by the uint32 values
// in |numbers|: build a max-heap bottom-up, then repeatedly move the max to
// the back and sift the new root down.
13367 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
13368 // In-place heap sort.
13369 DCHECK(content->length() == numbers->length());
13371 // Bottom-up max-heap construction.
13372 for (int i = 1; i < len; ++i) {
13373 int child_index = i;
// Bubble the new element up until the heap property holds.
13374 while (child_index > 0) {
13375 int parent_index = ((child_index + 1) >> 1) - 1;
13376 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13377 uint32_t child_value = NumberToUint32(numbers->get(child_index));
13378 if (parent_value < child_value) {
13379 content->SwapPairs(numbers, parent_index, child_index);
13383 child_index = parent_index;
13387 // Extract elements and create sorted array.
13388 for (int i = len - 1; i > 0; --i) {
13389 // Put max element at the back of the array.
13390 content->SwapPairs(numbers, 0, i);
13391 // Sift down the new top element.
13392 int parent_index = 0;
13394 int child_index = ((parent_index + 1) << 1) - 1;
13395 if (child_index >= i) break;
13396 uint32_t child1_value = NumberToUint32(numbers->get(child_index));
13397 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
13398 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
// Pick the larger child (or the only child) to compare against.
13399 if (child_index + 1 >= i || child1_value > child2_value) {
13400 if (parent_value > child1_value) break;
13401 content->SwapPairs(numbers, parent_index, child_index);
13402 parent_index = child_index;
13404 if (parent_value > child2_value) break;
13405 content->SwapPairs(numbers, parent_index, child_index + 1);
13406 parent_index = child_index + 1;
13413 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
// Strategy: insertion sort for tiny arrays; a linear-time cycle sort when
// the keys form a contiguous range (with a guard so duplicates cannot make
// it loop forever); heap sort otherwise.
13414 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13415 DCHECK(this->length() == numbers->length());
13416 // For small arrays, simply use insertion sort.
13418 InsertionSortPairs(this, numbers, len);
13421 // Check the range of indices.
13422 uint32_t min_index = NumberToUint32(numbers->get(0));
13423 uint32_t max_index = min_index;
13425 for (i = 1; i < len; i++) {
13426 if (NumberToUint32(numbers->get(i)) < min_index) {
13427 min_index = NumberToUint32(numbers->get(i));
13428 } else if (NumberToUint32(numbers->get(i)) > max_index) {
13429 max_index = NumberToUint32(numbers->get(i));
13432 if (max_index - min_index + 1 == len) {
13433 // Indices form a contiguous range, unless there are duplicates.
13434 // Do an in-place linear time sort assuming distinct numbers, but
13435 // avoid hanging in case they are not.
13436 for (i = 0; i < len; i++) {
13439 // While the current element at i is not at its correct position p,
13440 // swap the elements at these two positions.
13441 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13443 SwapPairs(numbers, i, p);
13447 HeapSortPairs(this, numbers, len);
13453 // Fill in the names of own properties into the supplied storage. The main
13454 // purpose of this function is to provide reflection information for the object
// Writes own property names matching |filter| into |storage| starting at
// |index|. Fast-properties objects read the map's descriptors in order;
// dictionary-mode objects copy keys from the property dictionary.
13456 void JSObject::GetOwnPropertyNames(
13457 FixedArray* storage, int index, PropertyAttributes filter) {
13458 DCHECK(storage->length() >= (NumberOfOwnProperties(filter) - index));
13459 if (HasFastProperties()) {
13460 int real_size = map()->NumberOfOwnDescriptors();
13461 DescriptorArray* descs = map()->instance_descriptors();
13462 for (int i = 0; i < real_size; i++) {
13463 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
13464 !FilterKey(descs->GetKey(i), filter)) {
13465 storage->set(index++, descs->GetKey(i));
13469 property_dictionary()->CopyKeysTo(storage,
13472 NameDictionary::UNSORTED);
// Counts own elements matching |filter| by running the key walk with a
// NULL storage array (count-only mode of GetOwnElementKeys).
13477 int JSObject::NumberOfOwnElements(PropertyAttributes filter) {
13478 return GetOwnElementKeys(NULL, filter);
// Counts enumerable own elements, with a shortcut for empty fast-elements
// objects (JSValue excluded because its wrapped string contributes indices).
13482 int JSObject::NumberOfEnumElements() {
13483 // Fast case for objects with no elements.
13484 if (!IsJSValue() && HasFastObjectElements()) {
13485 uint32_t length = IsJSArray() ?
13486 static_cast<uint32_t>(
13487 Smi::cast(JSArray::cast(this)->length())->value()) :
13488 static_cast<uint32_t>(FixedArray::cast(elements())->length());
13489 if (length == 0) return 0;
13491 // Compute the number of enumerable elements.
13492 return NumberOfOwnElements(static_cast<PropertyAttributes>(DONT_ENUM));
// Collects the indices of own elements matching |filter| into |storage|
// (as Smis) and returns the count. A NULL |storage| means count only.
// Handles every elements kind, plus the indices contributed by a wrapped
// string when this is a JSValue.
13496 int JSObject::GetOwnElementKeys(FixedArray* storage,
13497 PropertyAttributes filter) {
13499 switch (GetElementsKind()) {
13500 case FAST_SMI_ELEMENTS:
13501 case FAST_ELEMENTS:
13502 case FAST_HOLEY_SMI_ELEMENTS:
13503 case FAST_HOLEY_ELEMENTS: {
13504 int length = IsJSArray() ?
13505 Smi::cast(JSArray::cast(this)->length())->value() :
13506 FixedArray::cast(elements())->length();
13507 for (int i = 0; i < length; i++) {
13508 if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
13509 if (storage != NULL) {
13510 storage->set(counter, Smi::FromInt(i));
13515 DCHECK(!storage || storage->length() >= counter);
13518 case FAST_DOUBLE_ELEMENTS:
13519 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13520 int length = IsJSArray() ?
13521 Smi::cast(JSArray::cast(this)->length())->value() :
13522 FixedArrayBase::cast(elements())->length();
13523 for (int i = 0; i < length; i++) {
13524 if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
13525 if (storage != NULL) {
13526 storage->set(counter, Smi::FromInt(i));
13531 DCHECK(!storage || storage->length() >= counter);
13535 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13536 case EXTERNAL_##TYPE##_ELEMENTS: \
13537 case TYPE##_ELEMENTS: \
13539 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13540 #undef TYPED_ARRAY_CASE
// Typed/external arrays: every index [0, length) exists.
13542 int length = FixedArrayBase::cast(elements())->length();
13543 while (counter < length) {
13544 if (storage != NULL) {
13545 storage->set(counter, Smi::FromInt(counter));
13549 DCHECK(!storage || storage->length() >= counter);
13553 case DICTIONARY_ELEMENTS: {
13554 if (storage != NULL) {
13555 element_dictionary()->CopyKeysTo(storage,
13557 SeededNumberDictionary::SORTED);
13559 counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
13562 case SLOPPY_ARGUMENTS_ELEMENTS: {
13563 FixedArray* parameter_map = FixedArray::cast(elements());
// Slots [2..] of the parameter map are the mapped (aliased) parameters.
13564 int mapped_length = parameter_map->length() - 2;
13565 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
13566 if (arguments->IsDictionary()) {
13567 // Copy the keys from arguments first, because Dictionary::CopyKeysTo
13568 // will insert in storage starting at index 0.
13569 SeededNumberDictionary* dictionary =
13570 SeededNumberDictionary::cast(arguments);
13571 if (storage != NULL) {
13572 dictionary->CopyKeysTo(
13573 storage, filter, SeededNumberDictionary::UNSORTED);
13575 counter += dictionary->NumberOfElementsFilterAttributes(filter);
13576 for (int i = 0; i < mapped_length; ++i) {
13577 if (!parameter_map->get(i + 2)->IsTheHole()) {
13578 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13582 if (storage != NULL) storage->SortPairs(storage, counter);
13585 int backing_length = arguments->length();
13587 for (; i < mapped_length; ++i) {
13588 if (!parameter_map->get(i + 2)->IsTheHole()) {
13589 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13591 } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
13592 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13596 for (; i < backing_length; ++i) {
13597 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// A wrapped string (e.g. new String("abc")) exposes one index per char.
13605 if (this->IsJSValue()) {
13606 Object* val = JSValue::cast(this)->value();
13607 if (val->IsString()) {
13608 String* str = String::cast(val);
13610 for (int i = 0; i < str->length(); i++) {
13611 storage->set(counter + i, Smi::FromInt(i));
13614 counter += str->length();
13617 DCHECK(!storage || storage->length() == counter);
// Convenience wrapper: collect only the enumerable element keys.
13622 int JSObject::GetEnumElementKeys(FixedArray* storage) {
13623 return GetOwnElementKeys(storage, static_cast<PropertyAttributes>(DONT_ENUM));
13627 // StringSharedKeys are used as keys in the eval cache.
// Hash-table key combining (source, shared-function-info, strict mode,
// scope position); the stored table entry is a 4-element FixedArray in
// that field order (shared, source, strict mode, scope position).
13628 class StringSharedKey : public HashTableKey {
13630 StringSharedKey(Handle<String> source,
13631 Handle<SharedFunctionInfo> shared,
13632 StrictMode strict_mode,
13633 int scope_position)
13636 strict_mode_(strict_mode),
13637 scope_position_(scope_position) { }
// Compares this key against a stored 4-element FixedArray entry,
// checking the cheap fields before the string comparison.
13639 bool IsMatch(Object* other) OVERRIDE {
13640 DisallowHeapAllocation no_allocation;
13641 if (!other->IsFixedArray()) return false;
13642 FixedArray* other_array = FixedArray::cast(other);
13643 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13644 if (shared != *shared_) return false;
13645 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13646 DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13647 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13648 if (strict_mode != strict_mode_) return false;
13649 int scope_position = Smi::cast(other_array->get(3))->value();
13650 if (scope_position != scope_position_) return false;
13651 String* source = String::cast(other_array->get(1));
13652 return source->Equals(*source_);
13655 static uint32_t StringSharedHashHelper(String* source,
13656 SharedFunctionInfo* shared,
13657 StrictMode strict_mode,
13658 int scope_position) {
13659 uint32_t hash = source->Hash();
13660 if (shared->HasSourceCode()) {
13661 // Instead of using the SharedFunctionInfo pointer in the hash
13662 // code computation, we use a combination of the hash of the
13663 // script source code and the start position of the calling scope.
13664 // We do this to ensure that the cache entries can survive garbage
13666 Script* script(Script::cast(shared->script()));
13667 hash ^= String::cast(script->source())->Hash();
13668 if (strict_mode == STRICT) hash ^= 0x8000;
13669 hash += scope_position;
13674 uint32_t Hash() OVERRIDE {
13675 return StringSharedHashHelper(*source_, *shared_, strict_mode_,
// Hashes a stored entry by unpacking the same 4 fields and reusing
// the shared helper, so Hash() and HashForObject() agree.
13679 uint32_t HashForObject(Object* obj) OVERRIDE {
13680 DisallowHeapAllocation no_allocation;
13681 FixedArray* other_array = FixedArray::cast(obj);
13682 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13683 String* source = String::cast(other_array->get(1));
13684 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13685 DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13686 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13687 int scope_position = Smi::cast(other_array->get(3))->value();
13688 return StringSharedHashHelper(
13689 source, shared, strict_mode, scope_position);
// Materializes the key as the 4-element FixedArray stored in the table.
13693 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13694 Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
13695 array->set(0, *shared_);
13696 array->set(1, *source_);
13697 array->set(2, Smi::FromInt(strict_mode_));
13698 array->set(3, Smi::FromInt(scope_position_));
13703 Handle<String> source_;
13704 Handle<SharedFunctionInfo> shared_;
13705 StrictMode strict_mode_;
13706 int scope_position_;
13710 // RegExpKey carries the source and flags of a regular expression as key.
13711 class RegExpKey : public HashTableKey {
13713 RegExpKey(Handle<String> string, JSRegExp::Flags flags)
13715 flags_(Smi::FromInt(flags.value())) { }
13717 // Rather than storing the key in the hash table, a pointer to the
13718 // stored value is stored where the key should be. IsMatch then
13719 // compares the search key to the found object, rather than comparing
// Matches against a stored regexp data FixedArray by source and flags.
13721 bool IsMatch(Object* obj) OVERRIDE {
13722 FixedArray* val = FixedArray::cast(obj);
13723 return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
13724 && (flags_ == val->get(JSRegExp::kFlagsIndex));
13727 uint32_t Hash() OVERRIDE { return RegExpHash(*string_, flags_); }
13729 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13730 // Plain hash maps, which is where regexp keys are used, don't
13731 // use this function.
// Unreachable by design: forces a crash if a caller ever asks for it.
13733 return MaybeHandle<Object>().ToHandleChecked();
13736 uint32_t HashForObject(Object* obj) OVERRIDE {
13737 FixedArray* val = FixedArray::cast(obj);
13738 return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
13739 Smi::cast(val->get(JSRegExp::kFlagsIndex)));
// Shared hash: source hash plus the flags value, so key and stored
// entry hash identically.
13742 static uint32_t RegExpHash(String* string, Smi* flags) {
13743 return string->Hash() + flags->value();
13746 Handle<String> string_;
// Materializes this key as a new one-byte internalized string, computing
// the hash field lazily if it has not been set yet.
13751 Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
13752 if (hash_field_ == 0) Hash();
13753 return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
// Two-byte twin of OneByteStringKey::AsHandle: lazily hash, then allocate
// the internalized string.
13757 Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
13758 if (hash_field_ == 0) Hash();
13759 return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
// Materializes the [from_, from_+length_) slice of the underlying sequential
// one-byte string as an internalized string.
13763 Handle<Object> SeqOneByteSubStringKey::AsHandle(Isolate* isolate) {
13764 if (hash_field_ == 0) Hash();
13765 return isolate->factory()->NewOneByteInternalizedSubString(
13766 string_, from_, length_, hash_field_);
// Compares a candidate table entry against this key's character slice.
13770 bool SeqOneByteSubStringKey::IsMatch(Object* string) {
13771 Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
13772 return String::cast(string)->IsOneByteEqualTo(chars);
13776 // InternalizedStringKey carries a string/internalized-string object as key.
13777 class InternalizedStringKey : public HashTableKey {
13779 explicit InternalizedStringKey(Handle<String> string)
13780 : string_(string) { }
13782 virtual bool IsMatch(Object* string) OVERRIDE {
13783 return String::cast(string)->Equals(*string_);
13786 virtual uint32_t Hash() OVERRIDE { return string_->Hash(); }
13788 virtual uint32_t HashForObject(Object* other) OVERRIDE {
13789 return String::cast(other)->Hash();
// Produces the object to store in the string table: internalize the
// held string in place when a suitable map exists, otherwise allocate
// a fresh internalized copy.
13792 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13793 // Internalize the string if possible.
13794 MaybeHandle<Map> maybe_map =
13795 isolate->factory()->InternalizedStringMapForString(string_);
13797 if (maybe_map.ToHandle(&map)) {
13798 string_->set_map_no_write_barrier(*map);
13799 DCHECK(string_->IsInternalizedString());
13802 // Otherwise allocate a new internalized string.
13803 return isolate->factory()->NewInternalizedStringImpl(
13804 string_, string_->length(), string_->hash_field());
13807 static uint32_t StringHash(Object* obj) {
13808 return String::cast(obj)->Hash();
13811 Handle<String> string_;
// GC support: visits the pointer fields in the table's prefix (the region
// before the first entry).
13815 template<typename Derived, typename Shape, typename Key>
13816 void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
13817 IteratePointers(v, 0, kElementsStartOffset);
// GC support: visits the pointer fields of the entry region, from the start
// of the elements to the end of the table.
13821 template<typename Derived, typename Shape, typename Key>
13822 void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
13824 kElementsStartOffset,
13825 kHeaderSize + length() * kPointerSize);
// Allocates a new hash table with room for |at_least_space_for| elements
// (or exactly that capacity under USE_CUSTOM_MINIMUM_CAPACITY, which must
// then be a power of two). Aborts the process on an over-large request.
13829 template<typename Derived, typename Shape, typename Key>
13830 Handle<Derived> HashTable<Derived, Shape, Key>::New(
13832 int at_least_space_for,
13833 MinimumCapacity capacity_option,
13834 PretenureFlag pretenure) {
13835 DCHECK(0 <= at_least_space_for);
13836 DCHECK(!capacity_option || base::bits::IsPowerOfTwo32(at_least_space_for));
13837 int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
13838 ? at_least_space_for
13839 : ComputeCapacity(at_least_space_for);
13840 if (capacity > HashTable::kMaxCapacity) {
13841 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
// The table is a FixedArray whose map tags it as a hash table.
13844 Factory* factory = isolate->factory();
13845 int length = EntryToIndex(capacity);
13846 Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
13847 array->set_map_no_write_barrier(*factory->hash_table_map());
13848 Handle<Derived> table = Handle<Derived>::cast(array);
13850 table->SetNumberOfElements(0);
13851 table->SetNumberOfDeletedElements(0);
13852 table->SetCapacity(capacity);
13857 // Find entry for key otherwise return kNotFound.
// Specialized probe loop for unique-name keys: pointer comparison suffices
// for unique-to-unique matches, and a non-unique stored key equal to a
// unique probe key gets upgraded to the internalized form.
13858 int NameDictionary::FindEntry(Handle<Name> key) {
13859 if (!key->IsUniqueName()) {
13860 return DerivedHashTable::FindEntry(key);
13863 // Optimized for unique names. Knowledge of the key type allows:
13864 // 1. Move the check if the key is unique out of the loop.
13865 // 2. Avoid comparing hash codes in unique-to-unique comparison.
13866 // 3. Detect a case when a dictionary key is not unique but the key is.
13867 // In case of positive result the dictionary key may be replaced by the
13868 // internalized string with minimal performance penalty. It gives a chance
13869 // to perform further lookups in code stubs (and significant performance
13870 // boost a certain style of code).
13872 // EnsureCapacity will guarantee the hash table is never full.
13873 uint32_t capacity = Capacity();
13874 uint32_t entry = FirstProbe(key->Hash(), capacity);
13875 uint32_t count = 1;
13878 int index = EntryToIndex(entry);
13879 Object* element = get(index);
13880 if (element->IsUndefined()) break; // Empty entry.
// Identity match — unique names are canonical, so == is equality.
13881 if (*key == element) return entry;
13882 if (!element->IsUniqueName() &&
13883 !element->IsTheHole() &&
13884 Name::cast(element)->Equals(*key)) {
13885 // Replace a key that is a non-internalized string by the equivalent
13886 // internalized string for faster further lookups.
13890 DCHECK(element->IsTheHole() || !Name::cast(element)->Equals(*key));
13891 entry = NextProbe(entry, count++, capacity);
// Copies this table's prefix and all live entries into |new_table|,
// re-probing each entry for its position in the new capacity. Deleted
// entries are dropped (new table starts with zero deleted elements).
13897 template<typename Derived, typename Shape, typename Key>
13898 void HashTable<Derived, Shape, Key>::Rehash(
13899 Handle<Derived> new_table,
13901 DCHECK(NumberOfElements() < new_table->Capacity());
13903 DisallowHeapAllocation no_gc;
13904 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
13906 // Copy prefix to new array.
13907 for (int i = kPrefixStartIndex;
13908 i < kPrefixStartIndex + Shape::kPrefixSize;
13910 new_table->set(i, get(i), mode);
13913 // Rehash the elements.
13914 int capacity = Capacity();
13915 for (int i = 0; i < capacity; i++) {
13916 uint32_t from_index = EntryToIndex(i);
13917 Object* k = get(from_index);
13919 uint32_t hash = HashTable::HashForObject(key, k);
13920 uint32_t insertion_index =
13921 EntryToIndex(new_table->FindInsertionEntry(hash));
// Copy the whole multi-slot entry, not just the key.
13922 for (int j = 0; j < Shape::kEntrySize; j++) {
13923 new_table->set(insertion_index + j, get(from_index + j), mode);
13927 new_table->SetNumberOfElements(NumberOfElements());
13928 new_table->SetNumberOfDeletedElements(0);
// Returns the entry the element with key |k| should occupy when considering
// only the first |probe| probes; if |expected| is reached along the probe
// sequence the element is already well-placed and |expected| is returned.
13932 template<typename Derived, typename Shape, typename Key>
13933 uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
13937 uint32_t expected) {
13938 uint32_t hash = HashTable::HashForObject(key, k);
13939 uint32_t capacity = Capacity();
13940 uint32_t entry = FirstProbe(hash, capacity);
13941 for (int i = 1; i < probe; i++) {
13942 if (entry == expected) return expected;
13943 entry = NextProbe(entry, i, capacity);
// Swaps two whole entries (all kEntrySize slots each) via a stack-local
// temporary, using the supplied write barrier mode.
13949 template<typename Derived, typename Shape, typename Key>
13950 void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
13952 WriteBarrierMode mode) {
13953 int index1 = EntryToIndex(entry1);
13954 int index2 = EntryToIndex(entry2);
13955 Object* temp[Shape::kEntrySize];
13956 for (int j = 0; j < Shape::kEntrySize; j++) {
13957 temp[j] = get(index1 + j);
13959 for (int j = 0; j < Shape::kEntrySize; j++) {
13960 set(index1 + j, get(index2 + j), mode);
13962 for (int j = 0; j < Shape::kEntrySize; j++) {
13963 set(index2 + j, temp[j], mode);
// In-place rehash: iteratively moves entries toward their ideal probe
// positions without allocating. Each outer pass fixes placement with
// respect to the first |probe| probes until no moves remain.
13968 template<typename Derived, typename Shape, typename Key>
13969 void HashTable<Derived, Shape, Key>::Rehash(Key key) {
13970 DisallowHeapAllocation no_gc;
13971 WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
13972 uint32_t capacity = Capacity();
13974 for (int probe = 1; !done; probe++) {
13975 // All elements at entries given by one of the first _probe_ probes
13976 // are placed correctly. Other elements might need to be moved.
13978 for (uint32_t current = 0; current < capacity; current++) {
13979 Object* current_key = get(EntryToIndex(current));
13980 if (IsKey(current_key)) {
13981 uint32_t target = EntryForProbe(key, current_key, probe, current);
13982 if (current == target) continue;
13983 Object* target_key = get(EntryToIndex(target));
// Move only when the target slot is free or its occupant is itself
// misplaced; otherwise defer to a later pass.
13984 if (!IsKey(target_key) ||
13985 EntryForProbe(key, target_key, probe, target) != target) {
13986 // Put the current element into the correct position.
13987 Swap(current, target, mode);
13988 // The other element will be processed on the next iteration.
13991 // The place for the current element is occupied. Leave the element
13992 // for the next probe.
// Ensures there is room for |n| more elements, growing (and rehashing into a
// new backing store) when the table would become too full. Returns either the
// original table or a freshly allocated, larger one.
// NOTE(review): the |n|/|key| parameter lines, the New() capacity argument and
// the trailing return were lost in this extract; reconstructed — confirm.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
    Handle<Derived> table,
    int n,
    Key key,
    PretenureFlag pretenure) {
  Isolate* isolate = table->GetIsolate();
  int capacity = table->Capacity();
  int nof = table->NumberOfElements() + n;
  int nod = table->NumberOfDeletedElements();
  // Return if:
  //   50% is still free after adding n elements and
  //   at most 50% of the free elements are deleted elements.
  if (nod <= (capacity - nof) >> 1) {
    int needed_free = nof >> 1;
    if (nof + needed_free <= capacity) return table;
  }

  const int kMinCapacityForPretenure = 256;
  // Pretenure large or already-tenured tables: they are long-lived.
  bool should_pretenure = pretenure == TENURED ||
      ((capacity > kMinCapacityForPretenure) &&
      !isolate->heap()->InNewSpace(*table));
  Handle<Derived> new_table = HashTable::New(
      isolate,
      nof * 2,
      USE_DEFAULT_MINIMUM_CAPACITY,
      should_pretenure ? TENURED : NOT_TENURED);

  table->Rehash(new_table, key);
  return new_table;
}
// Shrinks the table when it is at most a quarter full, rehashing into a new,
// smaller backing store. Returns the original table when no shrink happens.
// NOTE(review): the |key| parameter line, the pretenure declaration, the New()
// capacity argument and the trailing return were lost in this extract;
// reconstructed — confirm against upstream.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
                                                       Key key) {
  int capacity = table->Capacity();
  int nof = table->NumberOfElements();

  // Shrink to fit the number of elements if only a quarter of the
  // capacity is filled with elements.
  if (nof > (capacity >> 2)) return table;
  // Allocate a new dictionary with room for at least the current
  // number of elements. The allocation method will make sure that
  // there is extra room in the dictionary for additions. Don't go
  // lower than room for 16 elements.
  int at_least_room_for = nof;
  if (at_least_room_for < 16) return table;

  Isolate* isolate = table->GetIsolate();
  const int kMinCapacityForPretenure = 256;
  bool pretenure =
      (at_least_room_for > kMinCapacityForPretenure) &&
      !isolate->heap()->InNewSpace(*table);
  Handle<Derived> new_table = HashTable::New(
      isolate,
      at_least_room_for,
      USE_DEFAULT_MINIMUM_CAPACITY,
      pretenure ? TENURED : NOT_TENURED);

  table->Rehash(new_table, key);
  return new_table;
}
14066 template<typename Derived, typename Shape, typename Key>
14067 uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
14068 uint32_t capacity = Capacity();
14069 uint32_t entry = FirstProbe(hash, capacity);
14070 uint32_t count = 1;
14071 // EnsureCapacity will guarantee the hash table is never full.
14073 Object* element = KeyAt(entry);
14074 if (element->IsUndefined() || element->IsTheHole()) break;
14075 entry = NextProbe(entry, count++, capacity);
14081 // Force instantiation of template instances class.
14082 // Please note this list is compiler dependent.
14084 template class HashTable<StringTable, StringTableShape, HashTableKey*>;
14086 template class HashTable<CompilationCacheTable,
14087 CompilationCacheShape,
14090 template class HashTable<MapCache, MapCacheShape, HashTableKey*>;
14092 template class HashTable<ObjectHashTable,
14093 ObjectHashTableShape,
14096 template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;
14098 template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;
14100 template class Dictionary<SeededNumberDictionary,
14101 SeededNumberDictionaryShape,
14104 template class Dictionary<UnseededNumberDictionary,
14105 UnseededNumberDictionaryShape,
14108 template Handle<SeededNumberDictionary>
14109 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14110 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14112 template Handle<UnseededNumberDictionary>
14113 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14114 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14116 template Handle<NameDictionary>
14117 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14118 New(Isolate*, int n, PretenureFlag pretenure);
14120 template Handle<SeededNumberDictionary>
14121 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14122 AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);
14124 template Handle<UnseededNumberDictionary>
14125 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14126 AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);
14129 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14130 SlowReverseLookup(Object* value);
14133 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14134 SlowReverseLookup(Object* value);
14137 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14140 PropertyAttributes,
14141 Dictionary<SeededNumberDictionary,
14142 SeededNumberDictionaryShape,
14143 uint32_t>::SortMode);
14145 template Handle<Object>
14146 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
14147 Handle<NameDictionary>, int, JSObject::DeleteMode);
14149 template Handle<Object>
14150 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14151 DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);
14153 template Handle<NameDictionary>
14154 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14155 New(Isolate*, int, MinimumCapacity, PretenureFlag);
14157 template Handle<NameDictionary>
14158 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14159 Shrink(Handle<NameDictionary>, Handle<Name>);
14161 template Handle<SeededNumberDictionary>
14162 HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14163 Shrink(Handle<SeededNumberDictionary>, uint32_t);
14165 template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14169 PropertyAttributes,
14171 NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);
14174 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14175 NumberOfElementsFilterAttributes(PropertyAttributes);
14177 template Handle<NameDictionary>
14178 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
14179 Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);
14182 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14183 GenerateNewEnumerationIndices(Handle<NameDictionary>);
14186 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14187 NumberOfElementsFilterAttributes(PropertyAttributes);
14189 template Handle<SeededNumberDictionary>
14190 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14191 Add(Handle<SeededNumberDictionary>,
14196 template Handle<UnseededNumberDictionary>
14197 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14198 Add(Handle<UnseededNumberDictionary>,
14203 template Handle<SeededNumberDictionary>
14204 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14205 EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);
14207 template Handle<UnseededNumberDictionary>
14208 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14209 EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);
14211 template Handle<NameDictionary>
14212 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14213 EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);
14216 int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14217 NumberOfEnumElements();
14220 int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14221 NumberOfEnumElements();
14224 int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14225 FindEntry(uint32_t);
// Compacts a dictionary-elements object for sorting: moves all defined
// elements with keys below |limit| to consecutive indices starting at 0,
// followed by the undefined values, and installs the compacted dictionary.
// Returns the number of defined, non-undefined elements, or Smi -1 ("bailout")
// when sorting must be completed in JS (accessors, read-only properties, or
// keys/positions beyond Smi range that would require allocation).
// NOTE(review): several control-flow lines (|pos| init, `if (key < limit)`,
// `return bailout;`, USE()/increment lines, closing braces) were lost in this
// extract; reconstructed from the surviving structure — confirm upstream.
Handle<Object> JSObject::PrepareSlowElementsForSort(
    Handle<JSObject> object, uint32_t limit) {
  DCHECK(object->HasDictionaryElements());
  Isolate* isolate = object->GetIsolate();
  // Must stay in dictionary mode, either because of requires_slow_elements,
  // or because we are not going to sort (and therefore compact) all of the
  // elements.
  Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
  Handle<SeededNumberDictionary> new_dict =
      SeededNumberDictionary::New(isolate, dict->NumberOfElements());

  uint32_t pos = 0;
  uint32_t undefs = 0;
  int capacity = dict->Capacity();
  Handle<Smi> bailout(Smi::FromInt(-1), isolate);
  // Entry to the new dictionary does not cause it to grow, as we have
  // allocated one that is large enough for all entries.
  DisallowHeapAllocation no_gc;
  for (int i = 0; i < capacity; i++) {
    Object* k = dict->KeyAt(i);
    if (!dict->IsKey(k)) continue;

    DCHECK(k->IsNumber());
    DCHECK(!k->IsSmi() || Smi::cast(k)->value() >= 0);
    DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
    DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);

    HandleScope scope(isolate);
    Handle<Object> value(dict->ValueAt(i), isolate);
    PropertyDetails details = dict->DetailsAt(i);
    if (details.type() == CALLBACKS || details.IsReadOnly()) {
      // Bail out and do the sorting of undefineds and array holes in JS.
      // Also bail out if the element is not supposed to be moved.
      return bailout;
    }

    uint32_t key = NumberToUint32(k);
    if (key < limit) {
      if (value->IsUndefined()) {
        // Undefined values are appended after the defined range below.
        undefs++;
      } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
        // Adding an entry with the key beyond smi-range requires
        // allocation. Bailout.
        return bailout;
      } else {
        Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
            new_dict, pos, value, details);
        DCHECK(result.is_identical_to(new_dict));
        USE(result);
        pos++;
      }
    } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return bailout;
    } else {
      // Keys at or beyond |limit| keep their original index.
      Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
          new_dict, key, value, details);
      DCHECK(result.is_identical_to(new_dict));
      USE(result);
    }
  }

  uint32_t result = pos;
  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
  while (undefs > 0) {
    if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return bailout;
    }
    HandleScope scope(isolate);
    Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
        new_dict, pos, isolate->factory()->undefined_value(), no_details);
    DCHECK(result.is_identical_to(new_dict));
    USE(result);
    pos++;
    undefs--;
  }

  object->set_elements(*new_dict);

  AllowHeapAllocation allocate_return_value;
  return isolate->factory()->NewNumberFromUint(result);
}
// Collects all defined (non-hole) and non-undefined (array) elements at
// the start of the elements array.
// If the object is in dictionary mode, it is converted to fast elements
// or compacted via PrepareSlowElementsForSort. Returns the number of
// defined, non-undefined elements (as a Number), or Smi -1 when the
// object cannot be prepared here (sloppy arguments, observed objects).
// NOTE(review): many interior lines (branch bodies, counters, closing braces)
// were lost in this extract; reconstructed — confirm against upstream.
Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
                                                uint32_t limit) {
  Isolate* isolate = object->GetIsolate();
  if (object->HasSloppyArgumentsElements() ||
      object->map()->is_observed()) {
    return handle(Smi::FromInt(-1), isolate);
  }

  if (object->HasDictionaryElements()) {
    // Convert to fast elements containing only the existing properties.
    // Ordering is irrelevant, since we are going to sort anyway.
    Handle<SeededNumberDictionary> dict(object->element_dictionary());
    if (object->IsJSArray() || dict->requires_slow_elements() ||
        dict->max_number_key() >= limit) {
      return JSObject::PrepareSlowElementsForSort(object, limit);
    }
    // Convert to fast elements.
    Handle<Map> new_map =
        JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);

    PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
        NOT_TENURED: TENURED;
    Handle<FixedArray> fast_elements =
        isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
    dict->CopyValuesTo(*fast_elements);
    JSObject::ValidateElements(object);

    JSObject::SetMapAndElements(object, new_map, fast_elements);
  } else if (object->HasExternalArrayElements() ||
             object->HasFixedTypedArrayElements()) {
    // Typed arrays cannot have holes or undefined elements.
    return handle(Smi::FromInt(
        FixedArrayBase::cast(object->elements())->length()), isolate);
  } else if (!object->HasFastDoubleElements()) {
    EnsureWritableFastElements(object);
  }
  DCHECK(object->HasFastSmiOrObjectElements() ||
         object->HasFastDoubleElements());

  // Collect holes at the end, undefined before that and the rest at the
  // start, and return the number of non-hole, non-undefined values.

  Handle<FixedArrayBase> elements_base(object->elements());
  uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
  if (limit > elements_length) {
    limit = elements_length ;
  }
  if (limit == 0) {
    return handle(Smi::FromInt(0), isolate);
  }

  uint32_t result = 0;
  if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
    FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
    // Split elements into defined and the_hole, in that order.
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < holes; i++) {
      if (elements->is_the_hole(i)) {
        holes--;
      } else {
        continue;
      }
      // Position i needs to be filled.
      while (holes > i) {
        if (elements->is_the_hole(holes)) {
          holes--;
        } else {
          elements->set(i, elements->get_scalar(holes));
          break;
        }
      }
    }
    result = holes;
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  } else {
    FixedArray* elements = FixedArray::cast(*elements_base);
    DisallowHeapAllocation no_gc;

    // Split elements into defined, undefined and the_hole, in that order. Only
    // count locations for undefined and the hole, and fill them afterwards.
    WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
    unsigned int undefs = limit;
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < undefs; i++) {
      Object* current = elements->get(i);
      if (current->IsTheHole()) {
        holes--;
        undefs--;
      } else if (current->IsUndefined()) {
        undefs--;
      } else {
        continue;
      }
      // Position i needs to be filled.
      while (undefs > i) {
        current = elements->get(undefs);
        if (current->IsTheHole()) {
          holes--;
          undefs--;
        } else if (current->IsUndefined()) {
          undefs--;
        } else {
          elements->set(i, current, write_barrier);
          break;
        }
      }
    }
    result = undefs;
    while (undefs < holes) {
      elements->set_undefined(undefs);
      undefs++;
    }
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  }

  return isolate->factory()->NewNumberFromUint(result);
}
// Maps the backing store's instance type (external or fixed typed-array) to
// the corresponding ExternalArrayType enum value.
// NOTE(review): the switch's default arm was lost in this extract;
// reconstructed with the conventional UNREACHABLE() — confirm upstream.
ExternalArrayType JSTypedArray::type() {
  switch (elements()->map()->instance_type()) {
#define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size)            \
    case EXTERNAL_##TYPE##_ARRAY_TYPE:                                        \
    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
      return kExternal##Type##Array;

    TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
#undef INSTANCE_TYPE_TO_ARRAY_TYPE

    default:
      UNREACHABLE();
      return static_cast<ExternalArrayType>(-1);
  }
}
// Returns the per-element size in bytes for this typed array's backing store,
// derived from its instance type via the TYPED_ARRAYS macro table.
// NOTE(review): the macro's return line and the default arm were lost in this
// extract; reconstructed — confirm against upstream.
size_t JSTypedArray::element_size() {
  switch (elements()->map()->instance_type()) {
#define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size)          \
    case EXTERNAL_##TYPE##_ARRAY_TYPE:                                        \
      return size;

    TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
#undef INSTANCE_TYPE_TO_ELEMENT_SIZE

    default:
      UNREACHABLE();
      return 0;
  }
}
// Stores |value| at |index| with uint8 clamping semantics (ES6 Uint8Clamped):
// negatives and NaN clamp to 0, values above 255 clamp to 255, other doubles
// round to nearest via lrint. Out-of-bounds writes are silently ignored.
// Returns the clamped value as a Smi.
Handle<Object> ExternalUint8ClampedArray::SetValue(
    Handle<ExternalUint8ClampedArray> array,
    uint32_t index,
    Handle<Object> value) {
  uint8_t clamped_value = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      if (int_value < 0) {
        clamped_value = 0;
      } else if (int_value > 255) {
        clamped_value = 255;
      } else {
        clamped_value = static_cast<uint8_t>(int_value);
      }
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      if (!(double_value > 0)) {
        // NaN and less than zero clamp to zero.
        clamped_value = 0;
      } else if (double_value > 255) {
        // Greater than 255 clamp to 255.
        clamped_value = 255;
      } else {
        // Other doubles are rounded to the nearest integer.
        clamped_value = static_cast<uint8_t>(lrint(double_value));
      }
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, clamped_value);
  }
  return handle(Smi::FromInt(clamped_value), array->GetIsolate());
}
// Shared implementation for the integer external-array setters: converts
// |value| (Smi, HeapNumber, or undefined) to ValueType via C++ truncation
// semantics and stores it when |index| is in bounds. Returns the stored
// value boxed as a Number.
template<typename ExternalArrayClass, typename ValueType>
static Handle<Object> ExternalArrayIntSetter(
    Isolate* isolate,
    Handle<ExternalArrayClass> receiver,
    uint32_t index,
    Handle<Object> value) {
  ValueType cast_value = 0;
  if (index < static_cast<uint32_t>(receiver->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<ValueType>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      // DoubleToInt32 implements the ToInt32 modular truncation.
      cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    receiver->set(index, cast_value);
  }
  return isolate->factory()->NewNumberFromInt(cast_value);
}
// Stores |value| at |index| as int8_t; see ExternalArrayIntSetter.
Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
                                           uint32_t index,
                                           Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
      array->GetIsolate(), array, index, value);
}
// Stores |value| at |index| as uint8_t; see ExternalArrayIntSetter.
Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
      array->GetIsolate(), array, index, value);
}
// Stores |value| at |index| as int16_t; see ExternalArrayIntSetter.
Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
      array->GetIsolate(), array, index, value);
}
// Stores |value| at |index| as uint16_t; see ExternalArrayIntSetter.
Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
                                             uint32_t index,
                                             Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
      array->GetIsolate(), array, index, value);
}
// Stores |value| at |index| as int32_t; see ExternalArrayIntSetter.
Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
      array->GetIsolate(), array, index, value);
}
// Stores |value| at |index| as uint32_t (handled separately from the signed
// setters because DoubleToUint32 is needed and the result may not fit a Smi).
// Out-of-bounds writes are ignored; returns the stored value as a Number.
Handle<Object> ExternalUint32Array::SetValue(
    Handle<ExternalUint32Array> array,
    uint32_t index,
    Handle<Object> value) {
  uint32_t cast_value = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<uint32_t>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
}
// Stores |value| at |index| as float; undefined stores NaN. Out-of-bounds
// writes are ignored; returns the stored value as a Number.
Handle<Object> ExternalFloat32Array::SetValue(
    Handle<ExternalFloat32Array> array,
    uint32_t index,
    Handle<Object> value) {
  float cast_value = static_cast<float>(base::OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<float>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<float>(double_value);
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewNumber(cast_value);
}
// Stores |value| at |index| as double; undefined stores NaN. Out-of-bounds
// writes are ignored; returns the stored value as a Number.
Handle<Object> ExternalFloat64Array::SetValue(
    Handle<ExternalFloat64Array> array,
    uint32_t index,
    Handle<Object> value) {
  double double_value = base::OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsNumber()) {
      double_value = value->Number();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, double_value);
  }
  return array->GetIsolate()->factory()->NewNumber(double_value);
}
// SIMD extension: stores a Float32x4 |value| at |index|; undefined stores a
// lane-wise NaN vector. Out-of-bounds writes are ignored; returns the stored
// value as a new Float32x4 object.
Handle<Object> ExternalFloat32x4Array::SetValue(
    Handle<ExternalFloat32x4Array> array,
    uint32_t index,
    Handle<Object> value) {
  float32x4_value_t cast_value;
  cast_value.storage[0] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[1] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[2] = static_cast<float>(base::OS::nan_value());
  cast_value.storage[3] = static_cast<float>(base::OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat32x4()) {
      cast_value = Handle<Float32x4>::cast(value)->get();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewFloat32x4(cast_value);
}
// SIMD extension: stores an Int32x4 |value| at |index|; undefined stores a
// zero vector. Out-of-bounds writes are ignored; returns the stored value as
// a new Int32x4 object.
Handle<Object> ExternalInt32x4Array::SetValue(
    Handle<ExternalInt32x4Array> array, uint32_t index, Handle<Object> value) {
  int32x4_value_t cast_value;
  cast_value.storage[0] = 0;
  cast_value.storage[1] = 0;
  cast_value.storage[2] = 0;
  cast_value.storage[3] = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsInt32x4()) {
      cast_value = Handle<Int32x4>::cast(value)->get();
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewInt32x4(cast_value);
}
// SIMD extension: stores a Float64x2 |value| at |index|; undefined stores a
// lane-wise NaN vector. Out-of-bounds writes are ignored; returns the stored
// value as a new Float64x2 object.
Handle<Object> ExternalFloat64x2Array::SetValue(
    Handle<ExternalFloat64x2Array> array,
    uint32_t index,
    Handle<Object> value) {
  float64x2_value_t cast_value;
  cast_value.storage[0] = base::OS::nan_value();
  cast_value.storage[1] = base::OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsFloat64x2()) {
      cast_value = Handle<Float64x2>::cast(value)->get();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewFloat64x2(cast_value);
}
// Returns the PropertyCell for |name| on a global object in dictionary mode,
// creating a hole-valued cell marked deleted (and installing it into the
// property dictionary) when the name is absent.
// NOTE(review): the `return cell;` / `else` lines were lost in this extract;
// reconstructed — confirm against upstream.
Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
    Handle<JSGlobalObject> global,
    Handle<Name> name) {
  DCHECK(!global->HasFastProperties());
  int entry = global->property_dictionary()->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    Isolate* isolate = global->GetIsolate();
    // New cell starts out holding the hole and marked deleted until a real
    // value is stored.
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
        isolate->factory()->the_hole_value());
    PropertyDetails details(NONE, NORMAL, 0);
    details = details.AsDeleted();
    Handle<NameDictionary> dictionary = NameDictionary::Add(
        handle(global->property_dictionary()), name, cell, details);
    global->set_properties(*dictionary);
    return cell;
  } else {
    Object* value = global->property_dictionary()->ValueAt(entry);
    DCHECK(value->IsPropertyCell());
    return handle(PropertyCell::cast(value));
  }
}
// This class is used for looking up two character strings in the string table.
// If we don't have a hit we don't want to waste much time so we unroll the
// string hash calculation loop here for speed. Doesn't work if the two
// characters form a decimal integer, since such strings have a different hash
// algorithm.
// NOTE(review): several hash-mixing lines, the DEBUG guard, UNREACHABLE() and
// the member declarations were lost in this extract; reconstructed to mirror
// StringHasher's one-at-a-time algorithm — confirm against upstream.
class TwoCharHashTableKey : public HashTableKey {
 public:
  TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
      : c1_(c1), c2_(c2) {
    // Char 1.
    uint32_t hash = seed;
    hash += c1;
    hash += hash << 10;
    hash ^= hash >> 6;
    // Char 2.
    hash += c2;
    hash += hash << 10;
    hash ^= hash >> 6;
    // GetHash.
    hash += hash << 3;
    hash ^= hash >> 11;
    hash += hash << 15;
    // Zero hashes are reserved; substitute the canonical non-zero value.
    if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
    hash_ = hash;
#ifdef DEBUG
    // If this assert fails then we failed to reproduce the two-character
    // version of the string hashing algorithm above. One reason could be
    // that we were passed two digits as characters, since the hash
    // algorithm is different in that case.
    uint16_t chars[2] = {c1, c2};
    uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
    hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
    DCHECK_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
#endif
  }

  // Matches only two-character strings with exactly our characters.
  bool IsMatch(Object* o) OVERRIDE {
    if (!o->IsString()) return false;
    String* other = String::cast(o);
    if (other->length() != 2) return false;
    if (other->Get(0) != c1_) return false;
    return other->Get(1) == c2_;
  }

  uint32_t Hash() OVERRIDE { return hash_; }
  uint32_t HashForObject(Object* key) OVERRIDE {
    if (!key->IsString()) return 0;
    return String::cast(key)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // The TwoCharHashTableKey is only used for looking in the string
    // table, not for adding to it.
    UNREACHABLE();
    return MaybeHandle<Object>().ToHandleChecked();
  }

 private:
  uint16_t c1_;
  uint16_t c2_;
  uint32_t hash_;
};
// Returns the internalized version of |string| if one already exists in the
// string table; an already-internalized string is returned as-is. Never adds
// to the table.
MaybeHandle<String> StringTable::InternalizeStringIfExists(
    Isolate* isolate,
    Handle<String> string) {
  if (string->IsInternalizedString()) {
    return string;
  }
  return LookupStringIfExists(isolate, string);
}
// Looks |string| up in the isolate's string table without inserting. Returns
// an empty MaybeHandle on miss; on hit returns the (internalized) table entry.
MaybeHandle<String> StringTable::LookupStringIfExists(
    Isolate* isolate,
    Handle<String> string) {
  Handle<StringTable> string_table = isolate->factory()->string_table();
  InternalizedStringKey key(string);
  int entry = string_table->FindEntry(&key);
  if (entry == kNotFound) {
    return MaybeHandle<String>();
  } else {
    Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
    DCHECK(StringShape(*result).IsInternalized());
    return result;
  }
}
// Fast-path lookup for a two-character string (c1, c2) without inserting.
// Uses TwoCharHashTableKey to avoid allocating the candidate string.
MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
    Isolate* isolate,
    uint16_t c1,
    uint16_t c2) {
  Handle<StringTable> string_table = isolate->factory()->string_table();
  TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
  int entry = string_table->FindEntry(&key);
  if (entry == kNotFound) {
    return MaybeHandle<String>();
  } else {
    Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
    DCHECK(StringShape(*result).IsInternalized());
    return result;
  }
}
// Internalizes |string|: returns the canonical table entry, inserting the
// string if it is not yet present (see LookupKey).
Handle<String> StringTable::LookupString(Isolate* isolate,
                                         Handle<String> string) {
  InternalizedStringKey key(string);
  return LookupKey(isolate, &key);
}
// Core string-table insertion: returns the existing entry for |key| or
// creates the string via key->AsHandle(), inserts it, and publishes the
// (possibly grown) table back to the factory.
Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
  Handle<StringTable> table = isolate->factory()->string_table();
  int entry = table->FindEntry(key);

  // String already in table.
  if (entry != kNotFound) {
    return handle(String::cast(table->KeyAt(entry)), isolate);
  }

  // Adding new string. Grow table if needed.
  table = StringTable::EnsureCapacity(table, 1, key);

  // Create string object.
  Handle<Object> string = key->AsHandle(isolate);
  // There must be no attempts to internalize strings that could throw
  // InvalidStringLength error.
  CHECK(!string.is_null());

  // Add the new string and return it along with the string table.
  entry = table->FindInsertionEntry(key->Hash());
  table->set(EntryToIndex(entry), *string);
  table->ElementAdded();

  isolate->factory()->set_string_table(table);
  return Handle<String>::cast(string);
}
// Looks up a cached script compilation for |src| in |context|. The key pairs
// the source with the context's closure's SharedFunctionInfo; returns
// undefined on a miss, otherwise the cached value slot (entry + 1).
Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
                                             Handle<Context> context) {
  Isolate* isolate = GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
                      RelocInfo::kNoPosition);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}
// Looks up a cached eval() compilation. Strict mode and the eval's scope
// position participate in the key, so the same source caches separately per
// mode/position. Returns undefined on a miss.
Handle<Object> CompilationCacheTable::LookupEval(Handle<String> src,
                                                 Handle<Context> context,
                                                 StrictMode strict_mode,
                                                 int scope_position) {
  Isolate* isolate = GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, strict_mode, scope_position);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}
// Looks up cached compiled-regexp data for (source, flags). Returns undefined
// on a miss.
Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
                                                   JSRegExp::Flags flags) {
  Isolate* isolate = GetIsolate();
  DisallowHeapAllocation no_allocation;
  RegExpKey key(src, flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}
// Inserts a script compilation result keyed by (source, context closure's
// shared info). Returns the (possibly grown) cache table, which the caller
// must treat as the new canonical table.
Handle<CompilationCacheTable> CompilationCacheTable::Put(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    Handle<Context> context, Handle<Object> value) {
  Isolate* isolate = cache->GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
                      RelocInfo::kNoPosition);
  cache = EnsureCapacity(cache, 1, &key);
  Handle<Object> k = key.AsHandle(isolate);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), *k);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}
// Inserts an eval() compilation result; the value's own strict mode and the
// eval scope position are folded into the key. Returns the (possibly grown)
// cache table.
Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    Handle<Context> context, Handle<SharedFunctionInfo> value,
    int scope_position) {
  Isolate* isolate = cache->GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, value->strict_mode(), scope_position);
  cache = EnsureCapacity(cache, 1, &key);
  Handle<Object> k = key.AsHandle(isolate);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), *k);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}
// Inserts compiled-regexp data for (source, flags). Returns the (possibly
// grown) cache table.
Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    JSRegExp::Flags flags, Handle<FixedArray> value) {
  RegExpKey key(src, flags);
  cache = EnsureCapacity(cache, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());
  // We store the value in the key slot, and compare the search key
  // to the stored value with a custom IsMatch function during lookups.
  cache->set(EntryToIndex(entry), *value);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}
// Removes every cache entry whose value slot equals |value| by overwriting
// key and value with the hole. Uses NoWriteBarrierSet because the hole is
// immortal and needs no barrier.
// NOTE(review): the ElementRemoved() bookkeeping call and closing braces were
// lost in this extract; reconstructed — confirm against upstream.
void CompilationCacheTable::Remove(Object* value) {
  DisallowHeapAllocation no_allocation;
  Object* the_hole_value = GetHeap()->the_hole_value();
  for (int entry = 0, size = Capacity(); entry < size; entry++) {
    int entry_index = EntryToIndex(entry);
    int value_index = entry_index + 1;
    if (get(value_index) == value) {
      NoWriteBarrierSet(this, entry_index, the_hole_value);
      NoWriteBarrierSet(this, value_index, the_hole_value);
      ElementRemoved();
    }
  }
  return;
}
14981 // StringsKey used for HashTable where key is array of internalized strings.
14982 class StringsKey : public HashTableKey {
14984   explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }
  // Two keys match when the arrays have equal length and identical elements
  // (pointer identity suffices: the strings are internalized).
14986   bool IsMatch(Object* strings) OVERRIDE {
14987     FixedArray* o = FixedArray::cast(strings);
14988     int len = strings_->length();
14989     if (o->length() != len) return false;
14990     for (int i = 0; i < len; i++) {
14991       if (o->get(i) != strings_->get(i)) return false;
14996   uint32_t Hash() OVERRIDE { return HashForObject(*strings_); }
  // Combines the element hashes by XOR, so the hash is order-insensitive
  // for equal multisets of strings.
14998   uint32_t HashForObject(Object* obj) OVERRIDE {
14999     FixedArray* strings = FixedArray::cast(obj);
15000     int len = strings->length();
15002     for (int i = 0; i < len; i++) {
15003       hash ^= String::cast(strings->get(i))->Hash();
15008   Handle<Object> AsHandle(Isolate* isolate) OVERRIDE { return strings_; }
15011   Handle<FixedArray> strings_;
// Returns the cached Map for |array| (an array of internalized strings),
// or undefined when there is no entry.
15015 Object* MapCache::Lookup(FixedArray* array) {
15016   DisallowHeapAllocation no_alloc;
15017   StringsKey key(handle(array));
15018   int entry = FindEntry(&key);
15019   if (entry == kNotFound) return GetHeap()->undefined_value();
15020   return get(EntryToIndex(entry) + 1);
// Inserts |array| -> |value| into the map cache, growing it if needed, and
// returns the (possibly reallocated) cache.
15024 Handle<MapCache> MapCache::Put(
15025     Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
15026   StringsKey key(array);
15028   Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
15029   int entry = new_cache->FindInsertionEntry(key.Hash());
15030   new_cache->set(EntryToIndex(entry), *array);
15031   new_cache->set(EntryToIndex(entry) + 1, *value);
15032   new_cache->ElementAdded();
// Allocates a new dictionary with room for |at_least_space_for| elements and
// seeds the property enumeration counter with its initial value.
15037 template<typename Derived, typename Shape, typename Key>
15038 Handle<Derived> Dictionary<Derived, Shape, Key>::New(
15040     int at_least_space_for,
15041     PretenureFlag pretenure) {
15042   DCHECK(0 <= at_least_space_for);
15043   Handle<Derived> dict = DerivedHashTable::New(isolate,
15044                                                at_least_space_for,
15045                                                USE_DEFAULT_MINIMUM_CAPACITY,
15048   // Initialize the next enumeration index.
15049   dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
// Renumbers all enumeration indices to a dense 0-based sequence (offset by
// kInitialIndex) that preserves the existing relative enumeration order.
// Called when the enumeration counter is about to overflow its field width.
15054 template<typename Derived, typename Shape, typename Key>
15055 void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
15056     Handle<Derived> dictionary) {
15057   Factory* factory = dictionary->GetIsolate()->factory();
15058   int length = dictionary->NumberOfElements();
15060   // Allocate and initialize iteration order array.
15061   Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
15062   for (int i = 0; i < length; i++) {
15063     iteration_order->set(i, Smi::FromInt(i));
15066   // Allocate array with enumeration order.
15067   Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);
15069   // Fill the enumeration order array with property details.
15070   int capacity = dictionary->Capacity();
15072   for (int i = 0; i < capacity; i++) {
15073     if (dictionary->IsKey(dictionary->KeyAt(i))) {
15074       int index = dictionary->DetailsAt(i).dictionary_index();
15075       enumeration_order->set(pos++, Smi::FromInt(index));
15079   // Sort the arrays wrt. enumeration order.
15080   iteration_order->SortPairs(*enumeration_order, enumeration_order->length());
15082   // Overwrite the enumeration_order with the enumeration indices.
15083   for (int i = 0; i < length; i++) {
15084     int index = Smi::cast(iteration_order->get(i))->value();
15085     int enum_index = PropertyDetails::kInitialIndex + i;
15086     enumeration_order->set(index, Smi::FromInt(enum_index));
15089   // Update the dictionary with new indices.
15090   capacity = dictionary->Capacity();
15092   for (int i = 0; i < capacity; i++) {
15093     if (dictionary->IsKey(dictionary->KeyAt(i))) {
15094       int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
15095       PropertyDetails details = dictionary->DetailsAt(i);
15096       PropertyDetails new_details = PropertyDetails(
15097           details.attributes(), details.type(), enum_index);
15098       dictionary->DetailsAtPut(i, new_details);
15102   // Set the next enumeration index.
15103   dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
// Grows the backing hash table for |n| new elements; for enumerable shapes,
// first regenerates enumeration indices if adding |n| would overflow them.
15107 template<typename Derived, typename Shape, typename Key>
15108 Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
15109     Handle<Derived> dictionary, int n, Key key) {
15110   // Check whether there are enough enumeration indices to add n elements.
15111   if (Shape::kIsEnumerable &&
15112       !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
15113     // If not, we generate new indices for the properties.
15114     GenerateNewEnumerationIndices(dictionary);
15116   return DerivedHashTable::EnsureCapacity(dictionary, n, key);
// Deletes the property at |entry|. Returns false_value when the property is
// non-configurable and the deletion is not forced; otherwise clears the
// entry to the-hole and returns true_value.
15120 template<typename Derived, typename Shape, typename Key>
15121 Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
15122     Handle<Derived> dictionary,
15124     JSObject::DeleteMode mode) {
15125   Factory* factory = dictionary->GetIsolate()->factory();
15126   PropertyDetails details = dictionary->DetailsAt(entry);
15127   // Ignore attributes if forcing a deletion.
15128   if (!details.IsConfigurable() && mode != JSReceiver::FORCE_DELETION) {
15129     return factory->false_value();
15132   dictionary->SetEntry(
15133       entry, factory->the_hole_value(), factory->the_hole_value());
15134   dictionary->ElementRemoved();
15135   return factory->true_value();
// Sets |key| -> |value|: overwrites in place when the key exists, otherwise
// grows the table if needed and adds a fresh entry with default details.
15139 template<typename Derived, typename Shape, typename Key>
15140 Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
15141     Handle<Derived> dictionary, Key key, Handle<Object> value) {
15142   int entry = dictionary->FindEntry(key);
15144   // If the entry is present set the value;
15145   if (entry != Dictionary::kNotFound) {
15146     dictionary->ValueAtPut(entry, *value);
15150   // Check whether the dictionary should be extended.
15151   dictionary = EnsureCapacity(dictionary, 1, key);
  // Evaluated for the side effect only (e.g. string internalization);
  // the handle result is deliberately discarded.
15153   USE(Shape::AsHandle(dictionary->GetIsolate(), key));
15155   PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
15157   AddEntry(dictionary, key, value, details, dictionary->Hash(key));
// Adds a new |key| -> |value| pair with the given details. The key must not
// already be present (checked in slow-DCHECK builds only).
15162 template<typename Derived, typename Shape, typename Key>
15163 Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
15164     Handle<Derived> dictionary,
15166     Handle<Object> value,
15167     PropertyDetails details) {
15168   // Validate key is absent.
15169   SLOW_DCHECK((dictionary->FindEntry(key) == Dictionary::kNotFound));
15170   // Check whether the dictionary should be extended.
15171   dictionary = EnsureCapacity(dictionary, 1, key);
15173   AddEntry(dictionary, key, value, details, dictionary->Hash(key));
15178 // Add a key, value pair to the dictionary.
15179 template<typename Derived, typename Shape, typename Key>
15180 void Dictionary<Derived, Shape, Key>::AddEntry(
15181     Handle<Derived> dictionary,
15183     Handle<Object> value,
15184     PropertyDetails details,
15186   // Compute the key object.
15187   Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);
15189   uint32_t entry = dictionary->FindInsertionEntry(hash);
15190   // Insert element at empty or deleted entry
  // Live enumerable entries without an index yet get the next enumeration
  // index assigned here; deleted entries and index-carrying details are
  // left untouched.
15191   if (!details.IsDeleted() &&
15192       details.dictionary_index() == 0 &&
15193       Shape::kIsEnumerable) {
15194     // Assign an enumeration index to the property and update
15195     // SetNextEnumerationIndex.
15196     int index = dictionary->NextEnumerationIndex();
15197     details = PropertyDetails(details.attributes(), details.type(), index);
15198     dictionary->SetNextEnumerationIndex(index + 1);
15200   dictionary->SetEntry(entry, k, value, details);
15201   DCHECK((dictionary->KeyAt(entry)->IsNumber() ||
15202           dictionary->KeyAt(entry)->IsName()));
15203   dictionary->ElementAdded();
// Tracks the largest numeric key seen; switches the owning object to
// "requires slow elements" once a key exceeds the fast-elements limit.
15207 void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
15208   DisallowHeapAllocation no_allocation;
15209   // If the dictionary requires slow elements an element has already
15210   // been added at a high index.
15211   if (requires_slow_elements()) return;
15212   // Check if this index is high enough that we should require slow
15214   if (key > kRequiresSlowElementsLimit) {
15215     set_requires_slow_elements();
15218   // Update max key value.
15219   Object* max_index_object = get(kMaxNumberKeyIndex);
15220   if (!max_index_object->IsSmi() || max_number_key() < key) {
    // The key is stored shifted left so the low bits can carry the
    // requires-slow-elements tag.
15221     FixedArray::set(kMaxNumberKeyIndex,
15222                     Smi::FromInt(key << kRequiresSlowElementsTagSize));
// Adds a fresh numeric entry, updating the max-key bookkeeping first.
15227 Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
15228     Handle<SeededNumberDictionary> dictionary,
15230     Handle<Object> value,
15231     PropertyDetails details) {
15232   dictionary->UpdateMaxNumberKey(key);
15233   SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
15234   return Add(dictionary, key, value, details);
// Adds a fresh numeric entry with default (NONE/NORMAL) property details.
15238 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
15239     Handle<UnseededNumberDictionary> dictionary,
15241     Handle<Object> value) {
15242   SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
15243   return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
// Insert-or-overwrite for a numeric key, with max-key bookkeeping.
15247 Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
15248     Handle<SeededNumberDictionary> dictionary,
15250     Handle<Object> value) {
15251   dictionary->UpdateMaxNumberKey(key);
15252   return AtPut(dictionary, key, value);
// Insert-or-overwrite for a numeric key (no slow-elements tracking here).
15256 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
15257     Handle<UnseededNumberDictionary> dictionary,
15259     Handle<Object> value) {
15260   return AtPut(dictionary, key, value);
// Sets |key| -> |value| with |details|; when the key already exists its
// enumeration index is preserved so iteration order is unchanged.
15264 Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
15265     Handle<SeededNumberDictionary> dictionary,
15267     Handle<Object> value,
15268     PropertyDetails details) {
15269   int entry = dictionary->FindEntry(key);
15270   if (entry == kNotFound) {
15271     return AddNumberEntry(dictionary, key, value, details);
15273   // Preserve enumeration index.
15274   details = PropertyDetails(details.attributes(),
15276                             dictionary->DetailsAt(entry).dictionary_index());
15277   Handle<Object> object_key =
15278       SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
15279   dictionary->SetEntry(entry, object_key, value, details);
// Sets |key| -> |value|, overwriting in place when present.
15284 Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
15285     Handle<UnseededNumberDictionary> dictionary,
15287     Handle<Object> value) {
15288   int entry = dictionary->FindEntry(key);
15289   if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
15290   Handle<Object> object_key =
15291       UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
15292   dictionary->SetEntry(entry, object_key, value);
// Counts live, non-filtered entries whose attributes pass |filter|
// (an entry counts when none of the filter bits are set).
15298 template<typename Derived, typename Shape, typename Key>
15299 int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
15300     PropertyAttributes filter) {
15301   int capacity = DerivedHashTable::Capacity();
15303   for (int i = 0; i < capacity; i++) {
15304     Object* k = DerivedHashTable::KeyAt(i);
15305     if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15306       PropertyDetails details = DetailsAt(i);
15307       if (details.IsDeleted()) continue;
15308       PropertyAttributes attr = details.attributes();
15309       if ((attr & filter) == 0) result++;
// Number of enumerable elements: excludes DONT_ENUM properties and symbols.
15316 template<typename Derived, typename Shape, typename Key>
15317 int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
15318   return NumberOfElementsFilterAttributes(
15319       static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
// Copies all keys passing |filter| into |storage|, optionally sorting them.
// |storage| must be pre-sized to at least the filtered element count.
15323 template<typename Derived, typename Shape, typename Key>
15324 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
15325     FixedArray* storage,
15326     PropertyAttributes filter,
15327     typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
15328   DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
15329   int capacity = DerivedHashTable::Capacity();
15331   for (int i = 0; i < capacity; i++) {
15332     Object* k = DerivedHashTable::KeyAt(i);
15333     if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15334       PropertyDetails details = DetailsAt(i);
15335       if (details.IsDeleted()) continue;
15336       PropertyAttributes attr = details.attributes();
15337       if ((attr & filter) == 0) storage->set(index++, k);
15340   if (sort_mode == Dictionary::SORTED) {
15341     storage->SortPairs(storage, index);
15343   DCHECK(storage->length() >= index);
// std::sort comparator ordering dictionary entry indices (boxed as Smis)
// by their properties' enumeration indices. Strict-weak: uses operator<.
15347 struct EnumIndexComparator {
15348   explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
15349   bool operator() (Smi* a, Smi* b) {
15350     PropertyDetails da(dict->DetailsAt(a->value()));
15351     PropertyDetails db(dict->DetailsAt(b->value()));
15352     return da.dictionary_index() < db.dictionary_index();
15354   NameDictionary* dict;
// Fills |storage| with the enumerable (non-symbol, non-DONT_ENUM) keys in
// enumeration order. Works in two passes: first collect entry indices,
// sort them by enum index in place, then replace each index with its key.
15358 void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
15359   int length = storage->length();
15360   int capacity = Capacity();
15361   int properties = 0;
15362   for (int i = 0; i < capacity; i++) {
15363     Object* k = KeyAt(i);
15364     if (IsKey(k) && !k->IsSymbol()) {
15365       PropertyDetails details = DetailsAt(i);
15366       if (details.IsDeleted() || details.IsDontEnum()) continue;
15367       storage->set(properties, Smi::FromInt(i));
15369       if (properties == length) break;
15372   CHECK_EQ(length, properties);
15373   EnumIndexComparator cmp(this);
  // Sort the raw Smi slots directly; safe because no allocation happens
  // while the raw pointer into |storage| is live.
15374   Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
15375   std::sort(start, start + length, cmp);
15376   for (int i = 0; i < length; i++) {
15377     int index = Smi::cast(storage->get(i))->value();
15378     storage->set(i, KeyAt(index));
// Overload of CopyKeysTo that appends starting at a caller-chosen index
// (the index parameter line is absent from this capture — see gap after
// the |storage| parameter). Otherwise identical to the overload above.
15383 template<typename Derived, typename Shape, typename Key>
15384 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
15385     FixedArray* storage,
15387     PropertyAttributes filter,
15388     typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
15389   DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
15390   int capacity = DerivedHashTable::Capacity();
15391   for (int i = 0; i < capacity; i++) {
15392     Object* k = DerivedHashTable::KeyAt(i);
15393     if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15394       PropertyDetails details = DetailsAt(i);
15395       if (details.IsDeleted()) continue;
15396       PropertyAttributes attr = details.attributes();
15397       if ((attr & filter) == 0) storage->set(index++, k);
15400   if (sort_mode == Dictionary::SORTED) {
15401     storage->SortPairs(storage, index);
15403   DCHECK(storage->length() >= index);
15407 // Backwards lookup (slow).
// Linear scan for the key whose value is |value|; PropertyCell values are
// unwrapped before comparison. Returns undefined when not found.
15408 template<typename Derived, typename Shape, typename Key>
15409 Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
15410   int capacity = DerivedHashTable::Capacity();
15411   for (int i = 0; i < capacity; i++) {
15412     Object* k = DerivedHashTable::KeyAt(i);
15413     if (Dictionary::IsKey(k)) {
15414       Object* e = ValueAt(i);
15415       if (e->IsPropertyCell()) {
15416         e = PropertyCell::cast(e)->value();
15418       if (e == value) return k;
15421   Heap* heap = Dictionary::GetHeap();
15422   return heap->undefined_value();
// Returns the value stored for |key|, or the-hole when absent. A key with
// no identity hash can never have been inserted, so we bail out early.
15426 Object* ObjectHashTable::Lookup(Handle<Object> key) {
15427   DisallowHeapAllocation no_gc;
15428   DCHECK(IsKey(*key));
15430   // If the object does not have an identity hash, it was never used as a key.
15431   Object* hash = key->GetHash();
15432   if (hash->IsUndefined()) {
15433     return GetHeap()->the_hole_value();
15435   int entry = FindEntry(key);
15436   if (entry == kNotFound) return GetHeap()->the_hole_value();
15437   return get(EntryToIndex(entry) + 1);
// Inserts or overwrites |key| -> |value|. Forces creation of the key's
// identity hash, since lookups rely on it. |value| must not be the-hole
// (the-hole marks empty/deleted slots).
15441 Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
15442                                              Handle<Object> key,
15443                                              Handle<Object> value) {
15444   DCHECK(table->IsKey(*key));
15445   DCHECK(!value->IsTheHole());
15447   Isolate* isolate = table->GetIsolate();
15449   // Make sure the key object has an identity hash code.
15450   Handle<Smi> hash = Object::GetOrCreateHash(isolate, key);
15452   int entry = table->FindEntry(key);
15454   // Key is already in table, just overwrite value.
15455   if (entry != kNotFound) {
15456     table->set(EntryToIndex(entry) + 1, *value);
15460   // Check whether the hash table should be extended.
15461   table = EnsureCapacity(table, 1, key);
15462   table->AddEntry(table->FindInsertionEntry(hash->value()),
// Removes |key| if present, reporting via |was_present|, and shrinks the
// table when it becomes sparse.
15469 Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table,
15470                                                 Handle<Object> key,
15471                                                 bool* was_present) {
15472   DCHECK(table->IsKey(*key));
  // No identity hash => the key was never inserted.
15474   Object* hash = key->GetHash();
15475   if (hash->IsUndefined()) {
15476     *was_present = false;
15480   int entry = table->FindEntry(key);
15481   if (entry == kNotFound) {
15482     *was_present = false;
15486   *was_present = true;
15487   table->RemoveEntry(entry);
15488   return Shrink(table, key);
// Writes key and value into the two consecutive slots of |entry|.
15492 void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
15493   set(EntryToIndex(entry), key);
15494   set(EntryToIndex(entry) + 1, value);
// Marks the entry's key and value slots deleted by writing the-hole.
15499 void ObjectHashTable::RemoveEntry(int entry) {
15500   set_the_hole(EntryToIndex(entry));
15501   set_the_hole(EntryToIndex(entry) + 1);
// Returns the value stored for |key|, or the-hole when absent.
15506 Object* WeakHashTable::Lookup(Handle<Object> key) {
15507   DisallowHeapAllocation no_gc;
15508   DCHECK(IsKey(*key));
15509   int entry = FindEntry(key);
15510   if (entry == kNotFound) return GetHeap()->the_hole_value();
15511   return get(EntryToValueIndex(entry));
// Inserts or overwrites |key| -> |value|. New capacity is TENURED since
// weak tables are long-lived.
15515 Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
15516                                          Handle<Object> key,
15517                                          Handle<Object> value) {
15518   DCHECK(table->IsKey(*key));
15519   int entry = table->FindEntry(key);
15520   // Key is already in table, just overwrite value.
15521   if (entry != kNotFound) {
15522     // TODO(ulan): Skipping write barrier is a temporary solution to avoid
15523     // memory leaks. Remove this once we have special visitor for weak fixed
15525     table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
15529   // Check whether the hash table should be extended.
15530   table = EnsureCapacity(table, 1, key, TENURED);
15532   table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
// Writes key and value slots for |entry|, deliberately without write
// barriers (see the TODO: weak entries must not keep values alive via
// remembered sets).
15537 void WeakHashTable::AddEntry(int entry,
15538                              Handle<Object> key,
15539                              Handle<Object> value) {
15540   DisallowHeapAllocation no_allocation;
15541   // TODO(ulan): Skipping write barrier is a temporary solution to avoid
15542   // memory leaks. Remove this once we have special visitor for weak fixed
15544   set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
15545   set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
// Allocates an empty ordered hash table with at least |capacity| entries,
// rounded up to a power of two. Aborts the process on overflow of the
// maximum supported capacity.
15550 template<class Derived, class Iterator, int entrysize>
15551 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
15552     Isolate* isolate, int capacity, PretenureFlag pretenure) {
15553   // Capacity must be a power of two, since we depend on being able
15554   // to divide and multiple by 2 (kLoadFactor) to derive capacity
15555   // from number of buckets. If we decide to change kLoadFactor
15556   // to something other than 2, capacity should be stored as another
15557   // field of this object.
15558   capacity = base::bits::RoundUpToPowerOfTwo32(Max(kMinCapacity, capacity));
15559   if (capacity > kMaxCapacity) {
15560     v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
15562   int num_buckets = capacity / kLoadFactor;
15563   Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
15564       kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
15565   backing_store->set_map_no_write_barrier(
15566       isolate->heap()->ordered_hash_table_map());
15567   Handle<Derived> table = Handle<Derived>::cast(backing_store);
  // Every bucket starts out as an empty chain.
15568   for (int i = 0; i < num_buckets; ++i) {
15569     table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
15571   table->SetNumberOfBuckets(num_buckets);
15572   table->SetNumberOfElements(0);
15573   table->SetNumberOfDeletedElements(0);
// Ensures at least one free entry exists. Because entries append-only until
// rehash, a table is "full" when live + deleted reaches capacity; rehashing
// either doubles capacity or just compacts out the deleted entries.
15578 template<class Derived, class Iterator, int entrysize>
15579 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
15580     Handle<Derived> table) {
15581   DCHECK(!table->IsObsolete());
15583   int nof = table->NumberOfElements();
15584   int nod = table->NumberOfDeletedElements();
15585   int capacity = table->Capacity();
15586   if ((nof + nod) < capacity) return table;
15587   // Don't need to grow if we can simply clear out deleted entries instead.
15588   // Note that we can't compact in place, though, so we always allocate
15590   return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
// Halves capacity when the table is less than a quarter full.
15594 template<class Derived, class Iterator, int entrysize>
15595 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
15596     Handle<Derived> table) {
15597   DCHECK(!table->IsObsolete());
15599   int nof = table->NumberOfElements();
15600   int capacity = table->Capacity();
15601   if (nof >= (capacity >> 2)) return table;
15602   return Rehash(table, capacity / 2);
// Clears by allocating a fresh empty table and marking the old one obsolete
// (pointing at its replacement), so live iterators can transition.
// Deleted-count -1 on the old table signals "cleared" to Transition().
15606 template<class Derived, class Iterator, int entrysize>
15607 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
15608     Handle<Derived> table) {
15609   DCHECK(!table->IsObsolete());
15611   Handle<Derived> new_table =
15612       Allocate(table->GetIsolate(),
15614                table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
15616   table->SetNextTable(*new_table);
15617   table->SetNumberOfDeletedElements(-1);
// Removes |key| if present, reporting via |was_present|, then shrinks the
// table if it has become sparse.
15623 template<class Derived, class Iterator, int entrysize>
15624 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove(
15625     Handle<Derived> table, Handle<Object> key, bool* was_present) {
15626   int entry = table->FindEntry(key);
15627   if (entry == kNotFound) {
15628     *was_present = false;
15631   *was_present = true;
15632   table->RemoveEntry(entry);
15633   return Shrink(table);
// Copies live entries (preserving insertion order) into a new table of
// |new_capacity|, recording the positions of removed entries in the old
// table so iterators can adjust their indices, then marks the old table
// obsolete by linking it to the new one.
15637 template<class Derived, class Iterator, int entrysize>
15638 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
15639     Handle<Derived> table, int new_capacity) {
15640   DCHECK(!table->IsObsolete());
15642   Handle<Derived> new_table =
15643       Allocate(table->GetIsolate(),
15645                table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
15646   int nof = table->NumberOfElements();
15647   int nod = table->NumberOfDeletedElements();
15648   int new_buckets = new_table->NumberOfBuckets();
15650   int removed_holes_index = 0;
15652   for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
15653     Object* key = table->KeyAt(old_entry);
15654     if (key->IsTheHole()) {
      // Remember where the holes were so iterators can rebase.
15655       table->SetRemovedIndexAt(removed_holes_index++, old_entry);
15659     Object* hash = key->GetHash();
  // new_buckets is a power of two, so masking is equivalent to modulo.
15660     int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
15661     Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
15662     new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
15663     int new_index = new_table->EntryToIndex(new_entry);
15664     int old_index = table->EntryToIndex(old_entry);
15665     for (int i = 0; i < entrysize; ++i) {
15666       Object* value = table->get(old_index + i);
15667       new_table->set(new_index + i, value);
15669     new_table->set(new_index + kChainOffset, chain_entry);
15673   DCHECK_EQ(nod, removed_holes_index);
15675   new_table->SetNumberOfElements(nof);
15676   table->SetNextTable(*new_table);
// Walks the bucket chain for |hash| looking for a SameValueZero match.
15682 template <class Derived, class Iterator, int entrysize>
15683 int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
15684     Handle<Object> key, int hash) {
15685   DCHECK(!IsObsolete());
15687   DisallowHeapAllocation no_gc;
15688   DCHECK(!key->IsTheHole());
15689   for (int entry = HashToEntry(hash); entry != kNotFound;
15690        entry = ChainAt(entry)) {
15691     Object* candidate = KeyAt(entry);
15692     if (candidate->SameValueZero(*key))
// Convenience overload: a key without a Smi identity hash was never
// inserted, so report not-found without touching the buckets.
15699 template <class Derived, class Iterator, int entrysize>
15700 int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
15701     Handle<Object> key) {
15702   DisallowHeapAllocation no_gc;
15703   Object* hash = key->GetHash();
15704   if (!hash->IsSmi()) return kNotFound;
15705   return FindEntry(key, Smi::cast(hash)->value());
// Claims the next append slot, links it to the front of its bucket chain,
// and bumps the element count. The caller writes the key/value payload.
15709 template <class Derived, class Iterator, int entrysize>
15710 int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) {
15711   DCHECK(!IsObsolete());
15713   int entry = UsedCapacity();
15714   int bucket = HashToBucket(hash);
15715   int index = EntryToIndex(entry);
15716   Object* chain_entry = get(kHashTableStartIndex + bucket);
15717   set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
15718   set(index + kChainOffset, chain_entry);
15719   SetNumberOfElements(NumberOfElements() + 1);
// Tombstones the entry's payload slots with the-hole; actual reclamation
// happens at the next Rehash.
15724 template<class Derived, class Iterator, int entrysize>
15725 void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
15726   DCHECK(!IsObsolete());
15728   int index = EntryToIndex(entry);
15729   for (int i = 0; i < entrysize; ++i) {
15730     set_the_hole(index + i);
15732   SetNumberOfElements(NumberOfElements() - 1);
15733   SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
// Explicit template instantiations of OrderedHashTable for the two concrete
// users: OrderedHashSet (entrysize 1) and OrderedHashMap (entrysize 2).
15737 template Handle<OrderedHashSet>
15738 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
15739     Isolate* isolate, int capacity, PretenureFlag pretenure);
15741 template Handle<OrderedHashSet>
15742 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
15743     Handle<OrderedHashSet> table);
15745 template Handle<OrderedHashSet>
15746 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
15747     Handle<OrderedHashSet> table);
15749 template Handle<OrderedHashSet>
15750 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
15751     Handle<OrderedHashSet> table);
15753 template Handle<OrderedHashSet>
15754 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Remove(
15755     Handle<OrderedHashSet> table, Handle<Object> key, bool* was_present);
15757 template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
15758     Handle<Object> key, int hash);
15759 template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
15760     Handle<Object> key);
15763 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);
15766 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);
15769 template Handle<OrderedHashMap>
15770 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
15771     Isolate* isolate, int capacity, PretenureFlag pretenure);
15773 template Handle<OrderedHashMap>
15774 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
15775     Handle<OrderedHashMap> table);
15777 template Handle<OrderedHashMap>
15778 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
15779     Handle<OrderedHashMap> table);
15781 template Handle<OrderedHashMap>
15782 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
15783     Handle<OrderedHashMap> table);
15785 template Handle<OrderedHashMap>
15786 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Remove(
15787     Handle<OrderedHashMap> table, Handle<Object> key, bool* was_present);
15789 template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
15790     Handle<Object> key, int hash);
15791 template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
15792     Handle<Object> key);
15795 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);
15798 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);
// True when |key| is present in the set.
15801 bool OrderedHashSet::Contains(Handle<Object> key) {
15802   return FindEntry(key) != kNotFound;
// Adds |key| to the set if absent; no-op (returns the same table) when the
// key is already present.
15806 Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
15807                                            Handle<Object> key) {
15808   int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
15809   if (table->FindEntry(key, hash) != kNotFound) return table;
15811   table = EnsureGrowable(table);
15813   int index = table->AddEntry(hash);
15814   table->set(index, *key);
// Returns the value stored for |key|, or the-hole when absent.
15819 Object* OrderedHashMap::Lookup(Handle<Object> key) {
15820   DisallowHeapAllocation no_gc;
15821   int entry = FindEntry(key);
15822   if (entry == kNotFound) return GetHeap()->the_hole_value();
15823   return ValueAt(entry);
// Inserts or overwrites |key| -> |value| in the map.
15827 Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table,
15828                                            Handle<Object> key,
15829                                            Handle<Object> value) {
15830   DCHECK(!key->IsTheHole());
15832   int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
15833   int entry = table->FindEntry(key, hash);
15835   if (entry != kNotFound) {
15836     table->set(table->EntryToIndex(entry) + kValueOffset, *value);
15840   table = EnsureGrowable(table);
15842   int index = table->AddEntry(hash);
15843   table->set(index, *key);
15844   table->set(index + kValueOffset, *value);
// Migrates the iterator off an obsolete table onto its live successor,
// chasing the NextTable chain and subtracting one from the index for every
// entry that was removed before the iterator's current position.
15849 template<class Derived, class TableType>
15850 void OrderedHashTableIterator<Derived, TableType>::Transition() {
15851   DisallowHeapAllocation no_allocation;
15852   TableType* table = TableType::cast(this->table());
15853   if (!table->IsObsolete()) return;
15855   int index = Smi::cast(this->index())->value();
15856   while (table->IsObsolete()) {
15857     TableType* next_table = table->NextTable();
15860     int nod = table->NumberOfDeletedElements();
15862     // When we clear the table we set the number of deleted elements to -1.
15866       int old_index = index;
15867       for (int i = 0; i < nod; ++i) {
15868         int removed_index = table->RemovedIndexAt(i);
        // Removed indices are sorted; stop at the first one past us.
15869         if (removed_index >= old_index) break;
15875     table = next_table;
15879   set_index(Smi::FromInt(index));
// Advances past holes to the next live entry. Returns false (and detaches
// from the table by storing undefined) when the iterator is exhausted.
15883 template<class Derived, class TableType>
15884 bool OrderedHashTableIterator<Derived, TableType>::HasMore() {
15885   DisallowHeapAllocation no_allocation;
15886   if (this->table()->IsUndefined()) return false;
15890   TableType* table = TableType::cast(this->table());
15891   int index = Smi::cast(this->index())->value();
15892   int used_capacity = table->UsedCapacity();
15894   while (index < used_capacity && table->KeyAt(index)->IsTheHole()) {
15898   set_index(Smi::FromInt(index));
15900   if (index < used_capacity) return true;
15902   set_table(GetHeap()->undefined_value());
// Produces the next iteration result into |value_array|'s elements and
// returns the iterator kind as a Smi; returns Smi 0 when exhausted.
15907 template<class Derived, class TableType>
15908 Smi* OrderedHashTableIterator<Derived, TableType>::Next(JSArray* value_array) {
15909   DisallowHeapAllocation no_allocation;
15911     FixedArray* array = FixedArray::cast(value_array->elements());
    // The derived iterator (set/map) decides what goes into the array.
15912     static_cast<Derived*>(this)->PopulateValueArray(array);
15914     return Smi::cast(kind());
15916   return Smi::FromInt(0);
// Explicit instantiations of the iterator template for set and map.
15921 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next(
15922     JSArray* value_array);
15925 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::HasMore();
15928 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::MoveNext();
15931 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::CurrentKey();
15934 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Transition();
15938 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
15939     JSArray* value_array);
15942 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::HasMore();
15945 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::MoveNext();
15948 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::CurrentKey();
15951 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Transition();
// Iterator over the serialized byte-array payload of a descriptor.
// (offset_ initializer absent from this capture — see gap after length_.)
15954 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
15955     DeclaredAccessorDescriptor* descriptor)
15956     : array_(descriptor->serialized_data()->GetDataStartAddress()),
15957       length_(descriptor->serialized_data()->length()),
// Returns a pointer to the next fixed-size record in the serialized data
// and advances the offset. Records are uintptr_t-aligned (checked below).
15962 const DeclaredAccessorDescriptorData*
15963 DeclaredAccessorDescriptorIterator::Next() {
15964   DCHECK(offset_ < length_);
15965   uint8_t* ptr = &array_[offset_];
15966   DCHECK(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
15967   const DeclaredAccessorDescriptorData* data =
15968       reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
15969   offset_ += sizeof(*data);
15970   DCHECK(offset_ <= length_);
// Builds a new descriptor whose serialized data is the previous chain's
// bytes (if any) followed by |descriptor| appended at the end.
15975 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
15977     const DeclaredAccessorDescriptorData& descriptor,
15978     Handle<DeclaredAccessorDescriptor> previous) {
15979   int previous_length =
15980       previous.is_null() ? 0 : previous->serialized_data()->length();
15981   int length = sizeof(descriptor) + previous_length;
15982   Handle<ByteArray> serialized_descriptor =
15983       isolate->factory()->NewByteArray(length);
15984   Handle<DeclaredAccessorDescriptor> value =
15985       isolate->factory()->NewDeclaredAccessorDescriptor();
15986   value->set_serialized_data(*serialized_descriptor);
15987   // Copy in the data.
  // Raw pointers below must not survive a GC, hence the scope guard.
15989     DisallowHeapAllocation no_allocation;
15990     uint8_t* array = serialized_descriptor->GetDataStartAddress();
15991     if (previous_length != 0) {
15992       uint8_t* previous_array =
15993           previous->serialized_data()->GetDataStartAddress();
15994       MemCopy(array, previous_array, previous_length);
15995       array += previous_length;
15997     DCHECK(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
15998     DeclaredAccessorDescriptorData* data =
15999         reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
16000     *data = descriptor;
16006 // Check if there is a break point at this code position.
// Returns true only if a BreakPointInfo exists for |code_position| AND it
// currently holds at least one break point object.
16007 bool DebugInfo::HasBreakPoint(int code_position) {
16008 // Get the break point info object for this code position.
16009 Object* break_point_info = GetBreakPointInfo(code_position);
16011 // If there is no break point info object or no break points in the break
16012 // point info object there is no break point at this code position.
16013 if (break_point_info->IsUndefined()) return false;
16014 return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
16018 // Get the break point info object for this code position.
// Returns undefined_value() when no entry exists for |code_position|.
16019 Object* DebugInfo::GetBreakPointInfo(int code_position) {
16020 // Find the index of the break point info object for this code position.
16021 int index = GetBreakPointInfoIndex(code_position);
16023 // Return the break point info object if any.
16024 if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
16025 return BreakPointInfo::cast(break_points()->get(index));
16029 // Clear a break point at the specified code position.
// Delegates to BreakPointInfo::ClearBreakPoint; a no-op when no break point
// info exists at the position.
// NOTE(review): the `int code_position` parameter line appears truncated in
// this excerpt.
16030 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
16032 Handle<Object> break_point_object) {
16033 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16034 debug_info->GetIsolate());
16035 if (break_point_info->IsUndefined()) return;
16036 BreakPointInfo::ClearBreakPoint(
16037 Handle<BreakPointInfo>::cast(break_point_info),
16038 break_point_object);
// Sets a break point at a code position. If a BreakPointInfo already exists
// for the position the break point object is added to it; otherwise a new
// BreakPointInfo is allocated, stored in a free slot of the break_points()
// array (growing the array when full), and the break point is added to it.
// NOTE(review): several lines (the `int code_position` parameter, the early
// return after reusing an existing entry, and parts of the free-slot loop)
// appear truncated in this excerpt.
16042 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
16044 int source_position,
16045 int statement_position,
16046 Handle<Object> break_point_object) {
16047 Isolate* isolate = debug_info->GetIsolate();
16048 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16050 if (!break_point_info->IsUndefined()) {
16051 BreakPointInfo::SetBreakPoint(
16052 Handle<BreakPointInfo>::cast(break_point_info),
16053 break_point_object);
16057 // Adding a new break point for a code position which did not have any
16058 // break points before. Try to find a free slot.
16059 int index = kNoBreakPointInfo;
16060 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16061 if (debug_info->break_points()->get(i)->IsUndefined()) {
16066 if (index == kNoBreakPointInfo) {
16067 // No free slot - extend break point info array.
16068 Handle<FixedArray> old_break_points =
16069 Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
16070 Handle<FixedArray> new_break_points =
16071 isolate->factory()->NewFixedArray(
16072 old_break_points->length() +
16073 DebugInfo::kEstimatedNofBreakPointsInFunction);
16075 debug_info->set_break_points(*new_break_points);
16076 for (int i = 0; i < old_break_points->length(); i++) {
16077 new_break_points->set(i, old_break_points->get(i));
// The first fresh slot of the extended array is the first index past the
// old contents.
16079 index = old_break_points->length();
16081 DCHECK(index != kNoBreakPointInfo);
16083 // Allocate new BreakPointInfo object and set the break point.
16084 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
16085 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
16086 new_break_point_info->set_code_position(Smi::FromInt(code_position));
16087 new_break_point_info->set_source_position(Smi::FromInt(source_position));
16088 new_break_point_info->
16089 set_statement_position(Smi::FromInt(statement_position));
16090 new_break_point_info->set_break_point_objects(
16091 isolate->heap()->undefined_value());
16092 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
16093 debug_info->break_points()->set(index, *new_break_point_info);
16097 // Get the break point objects for a code position.
// Returns undefined_value() when no break point info exists at the position;
// otherwise the break_point_objects() slot (a single object or a FixedArray).
16098 Object* DebugInfo::GetBreakPointObjects(int code_position) {
16099 Object* break_point_info = GetBreakPointInfo(code_position);
16100 if (break_point_info->IsUndefined()) {
16101 return GetHeap()->undefined_value();
16103 return BreakPointInfo::cast(break_point_info)->break_point_objects();
16107 // Get the total number of break points.
// Sums GetBreakPointCount() over every non-undefined BreakPointInfo slot.
// NOTE(review): the `int count = 0;` declaration and the trailing
// `return count;` appear truncated in this excerpt.
16108 int DebugInfo::GetBreakPointCount() {
16109 if (break_points()->IsUndefined()) return 0;
16111 for (int i = 0; i < break_points()->length(); i++) {
16112 if (!break_points()->get(i)->IsUndefined()) {
16113 BreakPointInfo* break_point_info =
16114 BreakPointInfo::cast(break_points()->get(i));
16115 count += break_point_info->GetBreakPointCount();
// Finds the BreakPointInfo that holds |break_point_object|, scanning every
// non-undefined slot of the break_points() array. Returns undefined_value()
// when the break point object is not registered anywhere.
16122 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
16123 Handle<Object> break_point_object) {
16124 Heap* heap = debug_info->GetHeap();
16125 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
16126 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16127 if (!debug_info->break_points()->get(i)->IsUndefined()) {
16128 Handle<BreakPointInfo> break_point_info =
16129 Handle<BreakPointInfo>(BreakPointInfo::cast(
16130 debug_info->break_points()->get(i)));
16131 if (BreakPointInfo::HasBreakPointObject(break_point_info,
16132 break_point_object)) {
16133 return *break_point_info;
16137 return heap->undefined_value();
16141 // Find the index of the break point info object for the specified code
// position; returns kNoBreakPointInfo when no slot matches.
// NOTE(review): the `return i;` inside the matching branch appears truncated
// in this excerpt.
16143 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
16144 if (break_points()->IsUndefined()) return kNoBreakPointInfo;
16145 for (int i = 0; i < break_points()->length(); i++) {
16146 if (!break_points()->get(i)->IsUndefined()) {
16147 BreakPointInfo* break_point_info =
16148 BreakPointInfo::cast(break_points()->get(i));
16149 if (break_point_info->code_position()->value() == code_position) {
16154 return kNoBreakPointInfo;
16158 // Remove the specified break point object.
// break_point_objects() is a small sum type: undefined (none), a single
// object, or a FixedArray of objects. Each representation is handled in turn.
16159 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
16160 Handle<Object> break_point_object) {
16161 Isolate* isolate = break_point_info->GetIsolate();
16162 // If there are no break points just ignore.
16163 if (break_point_info->break_point_objects()->IsUndefined()) return;
16164 // If there is a single break point clear it if it is the same.
16165 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16166 if (break_point_info->break_point_objects() == *break_point_object) {
16167 break_point_info->set_break_point_objects(
16168 isolate->heap()->undefined_value());
16172 // If there are multiple break points shrink the array
16173 DCHECK(break_point_info->break_point_objects()->IsFixedArray());
16174 Handle<FixedArray> old_array =
16175 Handle<FixedArray>(
16176 FixedArray::cast(break_point_info->break_point_objects()));
16177 Handle<FixedArray> new_array =
16178 isolate->factory()->NewFixedArray(old_array->length() - 1);
16179 int found_count = 0;
16180 for (int i = 0; i < old_array->length(); i++) {
16181 if (old_array->get(i) == *break_point_object) {
// At most one occurrence is expected; everything else is compacted left.
16182 DCHECK(found_count == 0);
16185 new_array->set(i - found_count, old_array->get(i));
16188 // If the break point was found in the list change it.
16189 if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
16193 // Add the specified break point object.
// Upgrades break_point_objects() through its representations as needed:
// undefined -> single object -> FixedArray of objects (grown by one).
// Adding an object that is already present is a no-op.
16194 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
16195 Handle<Object> break_point_object) {
16196 Isolate* isolate = break_point_info->GetIsolate();
16198 // If there was no break point objects before just set it.
16199 if (break_point_info->break_point_objects()->IsUndefined()) {
16200 break_point_info->set_break_point_objects(*break_point_object);
16203 // If the break point object is the same as before just ignore.
16204 if (break_point_info->break_point_objects() == *break_point_object) return;
16205 // If there was one break point object before replace with array.
16206 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16207 Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
16208 array->set(0, break_point_info->break_point_objects());
16209 array->set(1, *break_point_object);
16210 break_point_info->set_break_point_objects(*array);
16213 // If there was more than one break point before extend array.
16214 Handle<FixedArray> old_array =
16215 Handle<FixedArray>(
16216 FixedArray::cast(break_point_info->break_point_objects()));
16217 Handle<FixedArray> new_array =
16218 isolate->factory()->NewFixedArray(old_array->length() + 1);
16219 for (int i = 0; i < old_array->length(); i++) {
16220 // If the break point was there before just ignore.
16221 if (old_array->get(i) == *break_point_object) return;
16222 new_array->set(i, old_array->get(i));
16224 // Add the new break point.
16225 new_array->set(old_array->length(), *break_point_object);
16226 break_point_info->set_break_point_objects(*new_array);
// Returns whether |break_point_object| is registered on this BreakPointInfo,
// checking each representation of break_point_objects() in turn.
// NOTE(review): the `return true;`/`return false;` at the end of the array
// scan appear truncated in this excerpt.
16230 bool BreakPointInfo::HasBreakPointObject(
16231 Handle<BreakPointInfo> break_point_info,
16232 Handle<Object> break_point_object) {
16234 if (break_point_info->break_point_objects()->IsUndefined()) return false;
16235 // Single break point.
16236 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16237 return break_point_info->break_point_objects() == *break_point_object;
16239 // Multiple break points.
16240 FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
16241 for (int i = 0; i < array->length(); i++) {
16242 if (array->get(i) == *break_point_object) {
16250 // Get the number of break points.
// Mirrors the three representations of break_point_objects():
// undefined -> 0, single object -> 1, FixedArray -> its length.
16251 int BreakPointInfo::GetBreakPointCount() {
16253 if (break_point_objects()->IsUndefined()) return 0;
16254 // Single break point.
16255 if (!break_point_objects()->IsFixedArray()) return 1;
16256 // Multiple break points.
16257 return FixedArray::cast(break_point_objects())->length();
// Static dispatch helper: unwraps the tagged arguments and forwards to
// DoGetField. |index| is a Smi-encoded JSDate::FieldIndex.
16261 Object* JSDate::GetField(Object* object, Smi* index) {
16262 return JSDate::cast(object)->DoGetField(
16263 static_cast<FieldIndex>(index->value()));
// Returns one date component for this JSDate. Cached local-time fields
// (year..second) are refreshed from the DateCache when the cache stamp is
// stale; UTC fields are delegated to GetUTCField; the remaining local
// fields (days, time-in-day, millisecond) are computed directly.
// NOTE(review): the `switch (index)` header for the cached-field dispatch
// appears truncated in this excerpt.
16267 Object* JSDate::DoGetField(FieldIndex index) {
16268 DCHECK(index != kDateValue);
16270 DateCache* date_cache = GetIsolate()->date_cache();
16272 if (index < kFirstUncachedField) {
16273 Object* stamp = cache_stamp();
16274 if (stamp != date_cache->stamp() && stamp->IsSmi()) {
16275 // Since the stamp is not NaN, the value is also not NaN.
16276 int64_t local_time_ms =
16277 date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
16278 SetCachedFields(local_time_ms, date_cache);
16281 case kYear: return year();
16282 case kMonth: return month();
16283 case kDay: return day();
16284 case kWeekday: return weekday();
16285 case kHour: return hour();
16286 case kMinute: return min();
16287 case kSecond: return sec();
16288 default: UNREACHABLE();
16292 if (index >= kFirstUTCField) {
16293 return GetUTCField(index, value()->Number(), date_cache);
16296 double time = value()->Number();
16297 if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
16299 int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
16300 int days = DateCache::DaysFromTime(local_time_ms);
16302 if (index == kDays) return Smi::FromInt(days);
16304 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16305 if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
16306 DCHECK(index == kTimeInDay);
16307 return Smi::FromInt(time_in_day_ms);
// Computes a UTC date component from the raw time value (ms since epoch).
// Returns nan_value() for an invalid date; otherwise Smi-encoded results.
// NOTE(review): the `double value` parameter line and the `switch (index)`
// header before the hour/minute/second cases appear truncated in this
// excerpt.
16311 Object* JSDate::GetUTCField(FieldIndex index,
16313 DateCache* date_cache) {
16314 DCHECK(index >= kFirstUTCField);
16316 if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
16318 int64_t time_ms = static_cast<int64_t>(value);
16320 if (index == kTimezoneOffset) {
16321 return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
16324 int days = DateCache::DaysFromTime(time_ms);
16326 if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
16328 if (index <= kDayUTC) {
16329 int year, month, day;
16330 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16331 if (index == kYearUTC) return Smi::FromInt(year);
16332 if (index == kMonthUTC) return Smi::FromInt(month);
16333 DCHECK(index == kDayUTC);
16334 return Smi::FromInt(day);
16337 int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
16339 case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
16340 case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
16341 case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
16342 case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
16343 case kDaysUTC: return Smi::FromInt(days);
16344 case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
16345 default: UNREACHABLE();
// Stores a new time value on this JSDate and resets the field cache.
// For NaN, every cached field (including the stamp) is set to the NaN heap
// number so lookups short-circuit; otherwise the stamp is invalidated so the
// cached fields are lazily recomputed on next access. SKIP_WRITE_BARRIER is
// safe here because the stored values are immortal heap values or Smis.
16353 void JSDate::SetValue(Object* value, bool is_value_nan) {
16355 if (is_value_nan) {
16356 HeapNumber* nan = GetIsolate()->heap()->nan_value();
16357 set_cache_stamp(nan, SKIP_WRITE_BARRIER);
16358 set_year(nan, SKIP_WRITE_BARRIER);
16359 set_month(nan, SKIP_WRITE_BARRIER);
16360 set_day(nan, SKIP_WRITE_BARRIER);
16361 set_hour(nan, SKIP_WRITE_BARRIER);
16362 set_min(nan, SKIP_WRITE_BARRIER);
16363 set_sec(nan, SKIP_WRITE_BARRIER);
16364 set_weekday(nan, SKIP_WRITE_BARRIER);
16366 set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
// Recomputes and stores all cached local-time fields (year..weekday,
// hour/min/sec) from |local_time_ms|, then stamps the cache with the
// DateCache's current stamp so DoGetField can detect staleness.
16371 void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) {
16372 int days = DateCache::DaysFromTime(local_time_ms);
16373 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16374 int year, month, day;
16375 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16376 int weekday = date_cache->Weekday(days);
16377 int hour = time_in_day_ms / (60 * 60 * 1000);
16378 int min = (time_in_day_ms / (60 * 1000)) % 60;
16379 int sec = (time_in_day_ms / 1000) % 60;
16380 set_cache_stamp(date_cache->stamp());
16381 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
16382 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
16383 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
16384 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
16385 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
16386 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
16387 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
// Detaches this array buffer: drops the backing store pointer and zeroes the
// byte length. Only externally-managed buffers may be neutered, hence the
// DCHECK.
16391 void JSArrayBuffer::Neuter() {
16392 DCHECK(is_external());
16393 set_backing_store(NULL);
16394 set_byte_length(Smi::FromInt(0));
// Shared view-neutering step: zeroes the view's byte offset and byte length.
16398 void JSArrayBufferView::NeuterView() {
16399 set_byte_offset(Smi::FromInt(0));
16400 set_byte_length(Smi::FromInt(0));
// Neuters this data view.
// NOTE(review): the body is truncated in this excerpt — presumably it calls
// NeuterView(); verify against the full source.
16404 void JSDataView::Neuter() {
// Neuters this typed array: zeroes its length and swaps in the canonical
// empty external array matching its map.
// NOTE(review): a leading statement (likely `NeuterView();`) appears
// truncated in this excerpt.
16409 void JSTypedArray::Neuter() {
16411 set_length(Smi::FromInt(0));
16412 set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
// Maps a fixed typed-array elements kind (TYPE_ELEMENTS) to its external
// counterpart (EXTERNAL_TYPE_ELEMENTS), expanding one case per typed-array
// type via the TYPED_ARRAYS macro. The trailing return is the unreachable
// fallback for non-typed-array kinds.
16416 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
16417 switch (elements_kind) {
16418 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
16419 case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
16421 TYPED_ARRAYS(TYPED_ARRAY_CASE)
16422 #undef TYPED_ARRAY_CASE
16426 return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
// Converts a typed array with on-heap (fixed) storage into one backed by a
// real JSArrayBuffer: allocates the buffer, copies the element data into its
// backing store, builds matching external elements, and transitions the
// array's map from the fixed to the external elements kind.
// NOTE(review): the trailing `return buffer;` appears truncated in this
// excerpt.
16431 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
16432 Handle<JSTypedArray> typed_array) {
16434 Handle<Map> map(typed_array->map());
16435 Isolate* isolate = typed_array->GetIsolate();
16437 DCHECK(IsFixedTypedArrayElementsKind(map->elements_kind()));
16439 Handle<Map> new_map = Map::TransitionElementsTo(
16441 FixedToExternalElementsKind(map->elements_kind()));
16443 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
16444 Handle<FixedTypedArrayBase> fixed_typed_array(
16445 FixedTypedArrayBase::cast(typed_array->elements()));
16446 Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
16447 fixed_typed_array->DataSize(), false);
// Bulk-copy the element bytes from the on-heap array into the new
// off-heap backing store.
16448 memcpy(buffer->backing_store(),
16449 fixed_typed_array->DataPtr(),
16450 fixed_typed_array->DataSize());
16451 Handle<ExternalArray> new_elements =
16452 isolate->factory()->NewExternalArray(
16453 fixed_typed_array->length(), typed_array->type(),
16454 static_cast<uint8_t*>(buffer->backing_store()));
// Link the typed array and buffer both ways before the map/elements swap.
16456 buffer->set_weak_first_view(*typed_array);
16457 DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
16458 typed_array->set_buffer(*buffer);
16459 JSObject::SetMapAndElements(typed_array, new_map, new_elements);
// Returns the JSArrayBuffer backing this typed array. A buffer() slot of
// Smi 0 marks an on-heap typed array with no buffer yet; in that case one is
// materialized on demand via MaterializeArrayBuffer.
16465 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
16466 Handle<Object> result(buffer(), GetIsolate());
16467 if (*result != Smi::FromInt(0)) {
16468 DCHECK(IsExternalArrayElementsKind(map()->elements_kind()));
16469 return Handle<JSArrayBuffer>::cast(result);
16471 Handle<JSTypedArray> self(this);
16472 return MaterializeArrayBuffer(self);
// Typed accessor over the raw type slot of this property cell.
16476 HeapType* PropertyCell::type() {
16477 return static_cast<HeapType*>(type_raw());
// Typed setter over the raw type slot; the write-barrier mode is forwarded
// unchanged to the raw setter.
16481 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
16482 DCHECK(IsPropertyCell());
16483 set_type_raw(type, ignored);
// Computes the new tracked type for |cell| after storing |value|. If the
// constant type of the value is already subsumed by the old type, the old
// type is kept; otherwise dependent code is deoptimized and the type is
// widened (to the new constant type when the old type was trivial, else to
// Any).
// NOTE(review): the branch returning the new constant type for a
// None/Undefined old type appears truncated in this excerpt.
16487 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
16488 Handle<Object> value) {
16489 Isolate* isolate = cell->GetIsolate();
16490 Handle<HeapType> old_type(cell->type(), isolate);
16491 Handle<HeapType> new_type = HeapType::Constant(value, isolate);
16493 if (new_type->Is(old_type)) return old_type;
// The stored value no longer matches the tracked type: any optimized code
// specialized on this cell's type must be thrown away.
16495 cell->dependent_code()->DeoptimizeDependentCodeGroup(
16496 isolate, DependentCode::kPropertyCellChangedGroup);
16498 if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
16502 return HeapType::Any(isolate);
// Stores |value| into the cell and, unless the tracked type is already Any
// (the top type, which can never be refined), updates the type via
// UpdatedType.
16506 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
16507 Handle<Object> value) {
16508 cell->set_value(*value);
16509 if (!HeapType::Any()->Is(cell->type())) {
16510 Handle<HeapType> new_type = UpdatedType(cell, value);
16511 cell->set_type(*new_type);
// Registers |info|'s compiled code as dependent on this cell's value/type:
// the code wrapper is inserted into the cell's dependent-code list under the
// property-cell-changed group, and the dependency is mirrored on the
// CompilationInfo so it can be cleaned up if compilation is abandoned.
16517 void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
16518 CompilationInfo* info) {
16519 Handle<DependentCode> codes =
16520 DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
16521 DependentCode::kPropertyCellChangedGroup,
16522 info->object_wrapper());
// Insert may return the same list (no growth); only write back on change.
16523 if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
16524 info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
16525 cell, info->zone());
16528 } } // namespace v8::internal