1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/accessors.h"
8 #include "src/allocation-site-scopes.h"
10 #include "src/arguments.h"
11 #include "src/base/bits.h"
12 #include "src/bootstrapper.h"
13 #include "src/code-stubs.h"
14 #include "src/codegen.h"
15 #include "src/cpu-profiler.h"
17 #include "src/debug.h"
18 #include "src/deoptimizer.h"
19 #include "src/elements.h"
20 #include "src/execution.h"
21 #include "src/field-index-inl.h"
22 #include "src/field-index.h"
23 #include "src/full-codegen.h"
24 #include "src/heap/mark-compact.h"
25 #include "src/heap/objects-visiting-inl.h"
26 #include "src/hydrogen.h"
27 #include "src/ic/ic.h"
28 #include "src/isolate-inl.h"
30 #include "src/lookup.h"
31 #include "src/macro-assembler.h"
32 #include "src/objects-inl.h"
33 #include "src/prototype.h"
34 #include "src/safepoint-table.h"
35 #include "src/string-search.h"
36 #include "src/string-stream.h"
37 #include "src/utils.h"
39 #ifdef ENABLE_DISASSEMBLER
40 #include "src/disasm.h"
41 #include "src/disassembler.h"
47 Handle<HeapType> Object::OptimalType(Isolate* isolate,
48 Representation representation) {
49 if (representation.IsNone()) return HeapType::None(isolate);
50 if (FLAG_track_field_types) {
51 if (representation.IsHeapObject() && IsHeapObject()) {
52 // We can track only JavaScript objects with stable maps.
53 Handle<Map> map(HeapObject::cast(this)->map(), isolate);
54 if (map->is_stable() &&
55 map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
56 map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
57 return HeapType::Class(map, isolate);
61 return HeapType::Any(isolate);
65 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
66 Handle<Object> object,
67 Handle<Context> native_context) {
68 if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
69 Handle<JSFunction> constructor;
70 if (object->IsNumber()) {
71 constructor = handle(native_context->number_function(), isolate);
72 } else if (object->IsBoolean()) {
73 constructor = handle(native_context->boolean_function(), isolate);
74 } else if (object->IsString()) {
75 constructor = handle(native_context->string_function(), isolate);
76 } else if (object->IsSymbol()) {
77 constructor = handle(native_context->symbol_function(), isolate);
79 return MaybeHandle<JSReceiver>();
81 Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
82 Handle<JSValue>::cast(result)->set_value(*object);
87 bool Object::BooleanValue() {
88 if (IsBoolean()) return IsTrue();
89 if (IsSmi()) return Smi::cast(this)->value() != 0;
90 if (IsUndefined() || IsNull()) return false;
91 if (IsUndetectableObject()) return false; // Undetectable object is false.
92 if (IsString()) return String::cast(this)->length() != 0;
93 if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
98 bool Object::IsCallable() const {
99 const Object* fun = this;
100 while (fun->IsJSFunctionProxy()) {
101 fun = JSFunctionProxy::cast(fun)->call_trap();
103 return fun->IsJSFunction() ||
104 (fun->IsHeapObject() &&
105 HeapObject::cast(fun)->map()->has_instance_call_handler());
109 MaybeHandle<Object> Object::GetProperty(LookupIterator* it) {
110 for (; it->IsFound(); it->Next()) {
111 switch (it->state()) {
112 case LookupIterator::NOT_FOUND:
113 case LookupIterator::TRANSITION:
115 case LookupIterator::JSPROXY:
116 return JSProxy::GetPropertyWithHandler(it->GetHolder<JSProxy>(),
117 it->GetReceiver(), it->name());
118 case LookupIterator::INTERCEPTOR: {
119 MaybeHandle<Object> maybe_result = JSObject::GetPropertyWithInterceptor(
120 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
121 if (!maybe_result.is_null()) return maybe_result;
122 if (it->isolate()->has_pending_exception()) return maybe_result;
125 case LookupIterator::ACCESS_CHECK:
126 if (it->HasAccess(v8::ACCESS_GET)) break;
127 return JSObject::GetPropertyWithFailedAccessCheck(it);
128 case LookupIterator::ACCESSOR:
129 return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
130 it->GetHolder<JSObject>(),
132 case LookupIterator::DATA:
133 return it->GetDataValue();
136 return it->factory()->undefined_value();
140 Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
142 LookupIterator it(object, key,
143 LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
144 return GetDataProperty(&it);
148 Handle<Object> JSObject::GetDataProperty(LookupIterator* it) {
149 for (; it->IsFound(); it->Next()) {
150 switch (it->state()) {
151 case LookupIterator::INTERCEPTOR:
152 case LookupIterator::NOT_FOUND:
153 case LookupIterator::TRANSITION:
155 case LookupIterator::ACCESS_CHECK:
156 if (it->HasAccess(v8::ACCESS_GET)) continue;
158 case LookupIterator::JSPROXY:
160 return it->isolate()->factory()->undefined_value();
161 case LookupIterator::ACCESSOR:
162 // TODO(verwaest): For now this doesn't call into
163 // ExecutableAccessorInfo, since clients don't need it. Update once
166 return it->isolate()->factory()->undefined_value();
167 case LookupIterator::DATA:
168 return it->GetDataValue();
171 return it->isolate()->factory()->undefined_value();
175 bool Object::ToInt32(int32_t* value) {
177 *value = Smi::cast(this)->value();
180 if (IsHeapNumber()) {
181 double num = HeapNumber::cast(this)->value();
182 if (FastI2D(FastD2I(num)) == num) {
183 *value = FastD2I(num);
191 bool Object::ToUint32(uint32_t* value) {
193 int num = Smi::cast(this)->value();
195 *value = static_cast<uint32_t>(num);
199 if (IsHeapNumber()) {
200 double num = HeapNumber::cast(this)->value();
201 if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
202 *value = FastD2UI(num);
210 bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
211 if (!object->IsHeapObject()) return false;
212 return IsTemplateFor(HeapObject::cast(object)->map());
216 bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
217 // There is a constraint on the object; check.
218 if (!map->IsJSObjectMap()) return false;
219 // Fetch the constructor function of the object.
220 Object* cons_obj = map->constructor();
221 if (!cons_obj->IsJSFunction()) return false;
222 JSFunction* fun = JSFunction::cast(cons_obj);
223 // Iterate through the chain of inheriting function templates to
224 // see if the required one occurs.
225 for (Object* type = fun->shared()->function_data();
226 type->IsFunctionTemplateInfo();
227 type = FunctionTemplateInfo::cast(type)->parent_template()) {
228 if (type == this) return true;
230 // Didn't find the required type in the inheritance chain.
235 template<typename To>
236 static inline To* CheckedCast(void *from) {
237 uintptr_t temp = reinterpret_cast<uintptr_t>(from);
238 DCHECK(temp % sizeof(To) == 0);
239 return reinterpret_cast<To*>(temp);
243 static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
246 uint32_t bitmask = descriptor.bitmask;
247 uint32_t compare_value = descriptor.compare_value;
249 switch (descriptor.size) {
251 value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
252 compare_value &= 0xff;
256 value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
257 compare_value &= 0xffff;
261 value = *CheckedCast<uint32_t>(ptr);
265 return isolate->factory()->undefined_value();
267 return isolate->factory()->ToBoolean(
268 (bitmask & value) == (bitmask & compare_value));
272 static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
275 uintptr_t compare_value =
276 reinterpret_cast<uintptr_t>(descriptor.compare_value);
277 uintptr_t value = *CheckedCast<uintptr_t>(ptr);
278 return isolate->factory()->ToBoolean(compare_value == value);
282 static Handle<Object> GetPrimitiveValue(
283 const PrimitiveValueDescriptor& descriptor,
286 int32_t int32_value = 0;
287 switch (descriptor.data_type) {
288 case kDescriptorInt8Type:
289 int32_value = *CheckedCast<int8_t>(ptr);
291 case kDescriptorUint8Type:
292 int32_value = *CheckedCast<uint8_t>(ptr);
294 case kDescriptorInt16Type:
295 int32_value = *CheckedCast<int16_t>(ptr);
297 case kDescriptorUint16Type:
298 int32_value = *CheckedCast<uint16_t>(ptr);
300 case kDescriptorInt32Type:
301 int32_value = *CheckedCast<int32_t>(ptr);
303 case kDescriptorUint32Type: {
304 uint32_t value = *CheckedCast<uint32_t>(ptr);
305 AllowHeapAllocation allow_gc;
306 return isolate->factory()->NewNumberFromUint(value);
308 case kDescriptorBoolType: {
309 uint8_t byte = *CheckedCast<uint8_t>(ptr);
310 return isolate->factory()->ToBoolean(
311 byte & (0x1 << descriptor.bool_offset));
313 case kDescriptorFloatType: {
314 float value = *CheckedCast<float>(ptr);
315 AllowHeapAllocation allow_gc;
316 return isolate->factory()->NewNumber(value);
318 case kDescriptorDoubleType: {
319 double value = *CheckedCast<double>(ptr);
320 AllowHeapAllocation allow_gc;
321 return isolate->factory()->NewNumber(value);
324 AllowHeapAllocation allow_gc;
325 return isolate->factory()->NewNumberFromInt(int32_value);
329 static Handle<Object> GetDeclaredAccessorProperty(
330 Handle<Object> receiver,
331 Handle<DeclaredAccessorInfo> info,
333 DisallowHeapAllocation no_gc;
334 char* current = reinterpret_cast<char*>(*receiver);
335 DeclaredAccessorDescriptorIterator iterator(info->descriptor());
337 const DeclaredAccessorDescriptorData* data = iterator.Next();
338 switch (data->type) {
339 case kDescriptorReturnObject: {
340 DCHECK(iterator.Complete());
341 current = *CheckedCast<char*>(current);
342 return handle(*CheckedCast<Object*>(current), isolate);
344 case kDescriptorPointerDereference:
345 DCHECK(!iterator.Complete());
346 current = *reinterpret_cast<char**>(current);
348 case kDescriptorPointerShift:
349 DCHECK(!iterator.Complete());
350 current += data->pointer_shift_descriptor.byte_offset;
352 case kDescriptorObjectDereference: {
353 DCHECK(!iterator.Complete());
354 Object* object = CheckedCast<Object>(current);
355 int field = data->object_dereference_descriptor.internal_field;
356 Object* smi = JSObject::cast(object)->GetInternalField(field);
357 DCHECK(smi->IsSmi());
358 current = reinterpret_cast<char*>(smi);
361 case kDescriptorBitmaskCompare:
362 DCHECK(iterator.Complete());
363 return PerformCompare(data->bitmask_compare_descriptor,
366 case kDescriptorPointerCompare:
367 DCHECK(iterator.Complete());
368 return PerformCompare(data->pointer_compare_descriptor,
371 case kDescriptorPrimitiveValue:
372 DCHECK(iterator.Complete());
373 return GetPrimitiveValue(data->primitive_value_descriptor,
379 return isolate->factory()->undefined_value();
383 Handle<FixedArray> JSObject::EnsureWritableFastElements(
384 Handle<JSObject> object) {
385 DCHECK(object->HasFastSmiOrObjectElements());
386 Isolate* isolate = object->GetIsolate();
387 Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
388 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
389 Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
390 elems, isolate->factory()->fixed_array_map());
391 object->set_elements(*writable_elems);
392 isolate->counters()->cow_arrays_converted()->Increment();
393 return writable_elems;
397 MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
398 Handle<Object> receiver,
400 Isolate* isolate = proxy->GetIsolate();
402 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
403 if (name->IsSymbol()) return isolate->factory()->undefined_value();
405 Handle<Object> args[] = { receiver, name };
407 proxy, "get", isolate->derived_get_trap(), arraysize(args), args);
411 MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver,
413 Handle<JSObject> holder,
414 Handle<Object> structure) {
415 Isolate* isolate = name->GetIsolate();
416 DCHECK(!structure->IsForeign());
417 // api style callbacks.
418 if (structure->IsAccessorInfo()) {
419 Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(structure);
420 if (!info->IsCompatibleReceiver(*receiver)) {
421 Handle<Object> args[2] = { name, receiver };
422 THROW_NEW_ERROR(isolate,
423 NewTypeError("incompatible_method_receiver",
424 HandleVector(args, arraysize(args))),
427 if (structure->IsDeclaredAccessorInfo()) {
428 return GetDeclaredAccessorProperty(
430 Handle<DeclaredAccessorInfo>::cast(structure),
434 Handle<ExecutableAccessorInfo> data =
435 Handle<ExecutableAccessorInfo>::cast(structure);
436 v8::AccessorNameGetterCallback call_fun =
437 v8::ToCData<v8::AccessorNameGetterCallback>(data->getter());
438 if (call_fun == NULL) return isolate->factory()->undefined_value();
440 LOG(isolate, ApiNamedPropertyAccess("load", *holder, *name));
441 PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
442 v8::Handle<v8::Value> result =
443 args.Call(call_fun, v8::Utils::ToLocal(name));
444 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
445 if (result.IsEmpty()) {
446 return isolate->factory()->undefined_value();
448 Handle<Object> return_value = v8::Utils::OpenHandle(*result);
449 return_value->VerifyApiCallResultType();
450 // Rebox handle before return.
451 return handle(*return_value, isolate);
454 // __defineGetter__ callback
455 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
457 if (getter->IsSpecFunction()) {
458 // TODO(rossberg): nicer would be to cast to some JSCallable here...
459 return Object::GetPropertyWithDefinedGetter(
460 receiver, Handle<JSReceiver>::cast(getter));
462 // Getter is not a function.
463 return isolate->factory()->undefined_value();
467 bool AccessorInfo::IsCompatibleReceiverType(Isolate* isolate,
468 Handle<AccessorInfo> info,
469 Handle<HeapType> type) {
470 if (!info->HasExpectedReceiverType()) return true;
471 Handle<Map> map = IC::TypeToMap(*type, isolate);
472 if (!map->IsJSObjectMap()) return false;
473 return FunctionTemplateInfo::cast(info->expected_receiver_type())
474 ->IsTemplateFor(*map);
478 MaybeHandle<Object> Object::SetPropertyWithAccessor(
479 Handle<Object> receiver, Handle<Name> name, Handle<Object> value,
480 Handle<JSObject> holder, Handle<Object> structure, StrictMode strict_mode) {
481 Isolate* isolate = name->GetIsolate();
483 // We should never get here to initialize a const with the hole
484 // value since a const declaration would conflict with the setter.
485 DCHECK(!structure->IsForeign());
486 if (structure->IsExecutableAccessorInfo()) {
487 // Don't call executable accessor setters with non-JSObject receivers.
488 if (!receiver->IsJSObject()) return value;
489 // api style callbacks
490 ExecutableAccessorInfo* info = ExecutableAccessorInfo::cast(*structure);
491 if (!info->IsCompatibleReceiver(*receiver)) {
492 Handle<Object> args[2] = { name, receiver };
493 THROW_NEW_ERROR(isolate,
494 NewTypeError("incompatible_method_receiver",
495 HandleVector(args, arraysize(args))),
498 Object* call_obj = info->setter();
499 v8::AccessorNameSetterCallback call_fun =
500 v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
501 if (call_fun == NULL) return value;
502 LOG(isolate, ApiNamedPropertyAccess("store", *holder, *name));
503 PropertyCallbackArguments args(isolate, info->data(), *receiver, *holder);
505 v8::Utils::ToLocal(name),
506 v8::Utils::ToLocal(value));
507 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
511 if (structure->IsAccessorPair()) {
512 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
513 if (setter->IsSpecFunction()) {
514 // TODO(rossberg): nicer would be to cast to some JSCallable here...
515 return SetPropertyWithDefinedSetter(
516 receiver, Handle<JSReceiver>::cast(setter), value);
518 if (strict_mode == SLOPPY) return value;
519 Handle<Object> args[2] = { name, holder };
521 isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
526 // TODO(dcarney): Handle correctly.
527 if (structure->IsDeclaredAccessorInfo()) {
532 return MaybeHandle<Object>();
536 MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
537 Handle<Object> receiver,
538 Handle<JSReceiver> getter) {
539 Isolate* isolate = getter->GetIsolate();
540 Debug* debug = isolate->debug();
541 // Handle stepping into a getter if step into is active.
542 // TODO(rossberg): should this apply to getters that are function proxies?
543 if (debug->StepInActive() && getter->IsJSFunction()) {
545 Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
548 return Execution::Call(isolate, getter, receiver, 0, NULL, true);
552 MaybeHandle<Object> Object::SetPropertyWithDefinedSetter(
553 Handle<Object> receiver,
554 Handle<JSReceiver> setter,
555 Handle<Object> value) {
556 Isolate* isolate = setter->GetIsolate();
558 Debug* debug = isolate->debug();
559 // Handle stepping into a setter if step into is active.
560 // TODO(rossberg): should this apply to getters that are function proxies?
561 if (debug->StepInActive() && setter->IsJSFunction()) {
563 Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
566 Handle<Object> argv[] = { value };
567 RETURN_ON_EXCEPTION(isolate, Execution::Call(isolate, setter, receiver,
568 arraysize(argv), argv, true),
574 static bool FindAllCanReadHolder(LookupIterator* it) {
575 for (; it->IsFound(); it->Next()) {
576 if (it->state() == LookupIterator::ACCESSOR) {
577 Handle<Object> accessors = it->GetAccessors();
578 if (accessors->IsAccessorInfo()) {
579 if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
587 MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
588 LookupIterator* it) {
589 Handle<JSObject> checked = it->GetHolder<JSObject>();
590 if (FindAllCanReadHolder(it)) {
591 return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
592 it->GetHolder<JSObject>(),
595 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_GET);
596 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
597 return it->factory()->undefined_value();
601 Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithFailedAccessCheck(
602 LookupIterator* it) {
603 Handle<JSObject> checked = it->GetHolder<JSObject>();
604 if (FindAllCanReadHolder(it))
605 return maybe(it->property_details().attributes());
606 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_HAS);
607 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(it->isolate(),
608 Maybe<PropertyAttributes>());
609 return maybe(ABSENT);
613 static bool FindAllCanWriteHolder(LookupIterator* it) {
614 for (; it->IsFound(); it->Next()) {
615 if (it->state() == LookupIterator::ACCESSOR) {
616 Handle<Object> accessors = it->GetAccessors();
617 if (accessors->IsAccessorInfo()) {
618 if (AccessorInfo::cast(*accessors)->all_can_write()) return true;
626 MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
627 LookupIterator* it, Handle<Object> value, StrictMode strict_mode) {
628 Handle<JSObject> checked = it->GetHolder<JSObject>();
629 if (FindAllCanWriteHolder(it)) {
630 return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
631 it->GetHolder<JSObject>(),
632 it->GetAccessors(), strict_mode);
635 it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_SET);
636 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
641 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
643 Handle<Object> value,
644 PropertyDetails details) {
645 DCHECK(!object->HasFastProperties());
646 Handle<NameDictionary> property_dictionary(object->property_dictionary());
648 if (!name->IsUniqueName()) {
649 name = object->GetIsolate()->factory()->InternalizeString(
650 Handle<String>::cast(name));
653 int entry = property_dictionary->FindEntry(name);
654 if (entry == NameDictionary::kNotFound) {
655 Handle<Object> store_value = value;
656 if (object->IsGlobalObject()) {
657 store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
660 property_dictionary = NameDictionary::Add(
661 property_dictionary, name, store_value, details);
662 object->set_properties(*property_dictionary);
666 PropertyDetails original_details = property_dictionary->DetailsAt(entry);
667 int enumeration_index;
668 // Preserve the enumeration index unless the property was deleted.
669 if (original_details.IsDeleted()) {
670 enumeration_index = property_dictionary->NextEnumerationIndex();
671 property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
673 enumeration_index = original_details.dictionary_index();
674 DCHECK(enumeration_index > 0);
677 details = PropertyDetails(
678 details.attributes(), details.type(), enumeration_index);
680 if (object->IsGlobalObject()) {
681 Handle<PropertyCell> cell(
682 PropertyCell::cast(property_dictionary->ValueAt(entry)));
683 PropertyCell::SetValueInferType(cell, value);
684 // Please note we have to update the property details.
685 property_dictionary->DetailsAtPut(entry, details);
687 property_dictionary->SetEntry(entry, name, value, details);
692 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
695 DCHECK(!object->HasFastProperties());
696 Isolate* isolate = object->GetIsolate();
697 Handle<NameDictionary> dictionary(object->property_dictionary());
698 int entry = dictionary->FindEntry(name);
699 if (entry != NameDictionary::kNotFound) {
700 // If we have a global object set the cell to the hole.
701 if (object->IsGlobalObject()) {
702 PropertyDetails details = dictionary->DetailsAt(entry);
703 if (!details.IsConfigurable()) {
704 if (mode != FORCE_DELETION) return isolate->factory()->false_value();
705 // When forced to delete global properties, we have to make a
706 // map change to invalidate any ICs that think they can load
707 // from the non-configurable cell without checking if it contains
709 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
710 DCHECK(new_map->is_dictionary_map());
711 JSObject::MigrateToMap(object, new_map);
713 Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
714 Handle<Object> value = isolate->factory()->the_hole_value();
715 PropertyCell::SetValueInferType(cell, value);
716 dictionary->DetailsAtPut(entry, details.AsDeleted());
718 Handle<Object> deleted(
719 NameDictionary::DeleteProperty(dictionary, entry, mode));
720 if (*deleted == isolate->heap()->true_value()) {
721 Handle<NameDictionary> new_properties =
722 NameDictionary::Shrink(dictionary, name);
723 object->set_properties(*new_properties);
728 return isolate->factory()->true_value();
732 bool JSObject::IsDirty() {
733 Object* cons_obj = map()->constructor();
734 if (!cons_obj->IsJSFunction())
736 JSFunction* fun = JSFunction::cast(cons_obj);
737 if (!fun->shared()->IsApiFunction())
739 // If the object is fully fast case and has the same map it was
740 // created with then no changes can have been made to it.
741 return map() != fun->initial_map()
742 || !HasFastObjectElements()
743 || !HasFastProperties();
747 MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
748 Handle<Object> object,
749 Handle<Object> receiver,
751 if (object->IsUndefined()) {
752 // TODO(verwaest): Why is this check here?
754 return isolate->factory()->undefined_value();
757 // Iterate up the prototype chain until an element is found or the null
758 // prototype is encountered.
759 for (PrototypeIterator iter(isolate, object,
760 object->IsJSProxy() || object->IsJSObject()
761 ? PrototypeIterator::START_AT_RECEIVER
762 : PrototypeIterator::START_AT_PROTOTYPE);
763 !iter.IsAtEnd(); iter.Advance()) {
764 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
765 return JSProxy::GetElementWithHandler(
766 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
770 // Inline the case for JSObjects. Doing so significantly improves the
771 // performance of fetching elements where checking the prototype chain is
773 Handle<JSObject> js_object =
774 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
776 // Check access rights if needed.
777 if (js_object->IsAccessCheckNeeded()) {
778 if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
779 isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
780 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
781 return isolate->factory()->undefined_value();
785 if (js_object->HasIndexedInterceptor()) {
786 return JSObject::GetElementWithInterceptor(js_object, receiver, index);
789 if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
790 Handle<Object> result;
791 ASSIGN_RETURN_ON_EXCEPTION(
793 js_object->GetElementsAccessor()->Get(receiver, js_object, index),
795 if (!result->IsTheHole()) return result;
799 return isolate->factory()->undefined_value();
803 Map* Object::GetRootMap(Isolate* isolate) {
804 DisallowHeapAllocation no_alloc;
806 Context* context = isolate->context()->native_context();
807 return context->number_function()->initial_map();
810 HeapObject* heap_object = HeapObject::cast(this);
812 // The object is either a number, a string, a boolean,
813 // a real JS object, or a Harmony proxy.
814 if (heap_object->IsJSReceiver()) {
815 return heap_object->map();
817 Context* context = isolate->context()->native_context();
819 if (heap_object->IsHeapNumber()) {
820 return context->number_function()->initial_map();
822 if (heap_object->IsString()) {
823 return context->string_function()->initial_map();
825 if (heap_object->IsSymbol()) {
826 return context->symbol_function()->initial_map();
828 if (heap_object->IsBoolean()) {
829 return context->boolean_function()->initial_map();
831 return isolate->heap()->null_value()->map();
835 Object* Object::GetHash() {
836 // The object is either a number, a name, an odd-ball,
837 // a real JS object, or a Harmony proxy.
839 uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
840 return Smi::FromInt(hash & Smi::kMaxValue);
843 uint32_t hash = Name::cast(this)->Hash();
844 return Smi::FromInt(hash);
847 uint32_t hash = Oddball::cast(this)->to_string()->Hash();
848 return Smi::FromInt(hash);
851 DCHECK(IsJSReceiver());
852 return JSReceiver::cast(this)->GetIdentityHash();
856 Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) {
857 Handle<Object> hash(object->GetHash(), isolate);
858 if (hash->IsSmi()) return Handle<Smi>::cast(hash);
860 DCHECK(object->IsJSReceiver());
861 return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
865 bool Object::SameValue(Object* other) {
866 if (other == this) return true;
868 // The object is either a number, a name, an odd-ball,
869 // a real JS object, or a Harmony proxy.
870 if (IsNumber() && other->IsNumber()) {
871 double this_value = Number();
872 double other_value = other->Number();
873 bool equal = this_value == other_value;
874 // SameValue(NaN, NaN) is true.
875 if (!equal) return std::isnan(this_value) && std::isnan(other_value);
876 // SameValue(0.0, -0.0) is false.
877 return (this_value != 0) || ((1 / this_value) == (1 / other_value));
879 if (IsString() && other->IsString()) {
880 return String::cast(this)->Equals(String::cast(other));
886 bool Object::SameValueZero(Object* other) {
887 if (other == this) return true;
889 // The object is either a number, a name, an odd-ball,
890 // a real JS object, or a Harmony proxy.
891 if (IsNumber() && other->IsNumber()) {
892 double this_value = Number();
893 double other_value = other->Number();
895 return this_value == other_value
896 || (std::isnan(this_value) && std::isnan(other_value));
898 if (IsString() && other->IsString()) {
899 return String::cast(this)->Equals(String::cast(other));
905 void Object::ShortPrint(FILE* out) {
911 void Object::ShortPrint(StringStream* accumulator) {
914 accumulator->Add(os.c_str());
918 OStream& operator<<(OStream& os, const Brief& v) {
919 if (v.value->IsSmi()) {
920 Smi::cast(v.value)->SmiPrint(os);
922 // TODO(svenpanne) Const-correct HeapObjectShortPrint!
923 HeapObject* obj = const_cast<HeapObject*>(HeapObject::cast(v.value));
924 obj->HeapObjectShortPrint(os);
930 void Smi::SmiPrint(OStream& os) const { // NOLINT
935 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
936 // English? Returns false for non-ASCII or words that don't start with
937 // a capital letter. The a/an rule follows pronunciation in English.
938 // We don't use the BBC's overcorrect "an historic occasion" though if
939 // you speak a dialect you may well say "an 'istoric occasion".
940 static bool AnWord(String* str) {
941 if (str->length() == 0) return false; // A nothing.
942 int c0 = str->Get(0);
943 int c1 = str->length() > 1 ? str->Get(1) : 0;
946 return true; // An Umpire, but a UTF8String, a U.
948 } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
949 return true; // An Ape, an ABCBook.
950 } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
951 (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
952 c0 == 'S' || c0 == 'X')) {
953 return true; // An MP3File, an M.
959 Handle<String> String::SlowFlatten(Handle<ConsString> cons,
960 PretenureFlag pretenure) {
961 DCHECK(AllowHeapAllocation::IsAllowed());
962 DCHECK(cons->second()->length() != 0);
963 Isolate* isolate = cons->GetIsolate();
964 int length = cons->length();
965 PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure
967 Handle<SeqString> result;
968 if (cons->IsOneByteRepresentation()) {
969 Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
970 length, tenure).ToHandleChecked();
971 DisallowHeapAllocation no_gc;
972 WriteToFlat(*cons, flat->GetChars(), 0, length);
975 Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString(
976 length, tenure).ToHandleChecked();
977 DisallowHeapAllocation no_gc;
978 WriteToFlat(*cons, flat->GetChars(), 0, length);
981 cons->set_first(*result);
982 cons->set_second(isolate->heap()->empty_string());
983 DCHECK(result->IsFlat());
989 bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
990 // Externalizing twice leaks the external resource, so it's
991 // prohibited by the API.
992 DCHECK(!this->IsExternalString());
993 #ifdef ENABLE_SLOW_DCHECKS
994 if (FLAG_enable_slow_asserts) {
995 // Assert that the resource and the string are equivalent.
996 DCHECK(static_cast<size_t>(this->length()) == resource->length());
997 ScopedVector<uc16> smart_chars(this->length());
998 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
999 DCHECK(memcmp(smart_chars.start(),
1001 resource->length() * sizeof(smart_chars[0])) == 0);
1004 int size = this->Size(); // Byte size of the original string.
1005 // Abort if size does not allow in-place conversion.
1006 if (size < ExternalString::kShortSize) return false;
1007 Heap* heap = GetHeap();
1008 bool is_one_byte = this->IsOneByteRepresentation();
1009 bool is_internalized = this->IsInternalizedString();
1011 // Morph the string to an external string by replacing the map and
1012 // reinitializing the fields. This won't work if the space the existing
1013 // string occupies is too small for a regular external string.
1014 // Instead, we resort to a short external string instead, omitting
1015 // the field caching the address of the backing store. When we encounter
1016 // short external strings in generated code, we need to bailout to runtime.
1018 if (size < ExternalString::kSize) {
1019 new_map = is_internalized
1021 ? heap->short_external_internalized_string_with_one_byte_data_map()
1022 : heap->short_external_internalized_string_map())
1023 : (is_one_byte ? heap->short_external_string_with_one_byte_data_map()
1024 : heap->short_external_string_map());
1026 new_map = is_internalized
1028 ? heap->external_internalized_string_with_one_byte_data_map()
1029 : heap->external_internalized_string_map())
1030 : (is_one_byte ? heap->external_string_with_one_byte_data_map()
1031 : heap->external_string_map());
1034 // Byte size of the external String object.
1035 int new_size = this->SizeFromMap(new_map);
1036 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1038 // We are storing the new map using release store after creating a filler for
1039 // the left-over space to avoid races with the sweeper thread.
1040 this->synchronized_set_map(new_map);
1042 ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
1043 self->set_resource(resource);
1044 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1046 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
1051 bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
1052 // Externalizing twice leaks the external resource, so it's
1053 // prohibited by the API.
1054 DCHECK(!this->IsExternalString());
1055 #ifdef ENABLE_SLOW_DCHECKS
1056 if (FLAG_enable_slow_asserts) {
1057 // Assert that the resource and the string are equivalent.
1058 DCHECK(static_cast<size_t>(this->length()) == resource->length());
1059 if (this->IsTwoByteRepresentation()) {
1060 ScopedVector<uint16_t> smart_chars(this->length());
1061 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1062 DCHECK(String::IsOneByte(smart_chars.start(), this->length()));
1064 ScopedVector<char> smart_chars(this->length());
1065 String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1066 DCHECK(memcmp(smart_chars.start(),
1068 resource->length() * sizeof(smart_chars[0])) == 0);
1071 int size = this->Size(); // Byte size of the original string.
1072 // Abort if size does not allow in-place conversion.
1073 if (size < ExternalString::kShortSize) return false;
1074 Heap* heap = GetHeap();
1075 bool is_internalized = this->IsInternalizedString();
1077 // Morph the string to an external string by replacing the map and
1078 // reinitializing the fields. This won't work if the space the existing
1079 // string occupies is too small for a regular external string.
1080 // Instead, we resort to a short external string instead, omitting
1081 // the field caching the address of the backing store. When we encounter
1082 // short external strings in generated code, we need to bailout to runtime.
1084 if (size < ExternalString::kSize) {
1085 new_map = is_internalized
1086 ? heap->short_external_one_byte_internalized_string_map()
1087 : heap->short_external_one_byte_string_map();
1089 new_map = is_internalized
1090 ? heap->external_one_byte_internalized_string_map()
1091 : heap->external_one_byte_string_map();
1094 // Byte size of the external String object.
1095 int new_size = this->SizeFromMap(new_map);
1096 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1098 // We are storing the new map using release store after creating a filler for
1099 // the left-over space to avoid races with the sweeper thread.
1100 this->synchronized_set_map(new_map);
1102 ExternalOneByteString* self = ExternalOneByteString::cast(this);
1103 self->set_resource(resource);
1104 if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1106 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
1111 void String::StringShortPrint(StringStream* accumulator) {
1113 if (len > kMaxShortPrintLength) {
1114 accumulator->Add("<Very long string[%u]>", len);
1118 if (!LooksValid()) {
1119 accumulator->Add("<Invalid String>");
1123 ConsStringIteratorOp op;
1124 StringCharacterStream stream(this, &op);
1126 bool truncated = false;
1127 if (len > kMaxShortPrintLength) {
1128 len = kMaxShortPrintLength;
1131 bool one_byte = true;
1132 for (int i = 0; i < len; i++) {
1133 uint16_t c = stream.GetNext();
1135 if (c < 32 || c >= 127) {
1141 accumulator->Add("<String[%u]: ", length());
1142 for (int i = 0; i < len; i++) {
1143 accumulator->Put(static_cast<char>(stream.GetNext()));
1145 accumulator->Put('>');
1147 // Backslash indicates that the string contains control
1148 // characters and that backslashes are therefore escaped.
1149 accumulator->Add("<String[%u]\\: ", length());
1150 for (int i = 0; i < len; i++) {
1151 uint16_t c = stream.GetNext();
1153 accumulator->Add("\\n");
1154 } else if (c == '\r') {
1155 accumulator->Add("\\r");
1156 } else if (c == '\\') {
1157 accumulator->Add("\\\\");
1158 } else if (c < 32 || c > 126) {
1159 accumulator->Add("\\x%02x", c);
1161 accumulator->Put(static_cast<char>(c));
1165 accumulator->Put('.');
1166 accumulator->Put('.');
1167 accumulator->Put('.');
1169 accumulator->Put('>');
1175 void String::PrintUC16(OStream& os, int start, int end) { // NOLINT
1176 if (end < 0) end = length();
1177 ConsStringIteratorOp op;
1178 StringCharacterStream stream(this, &op, start);
1179 for (int i = start; i < end && stream.HasMore(); i++) {
1180 os << AsUC16(stream.GetNext());
// Emits a short, human-readable description of this JSObject (e.g.
// "<JS Array[3]>", "<JS Function foo>", "<JS WeakMap>") into |accumulator|,
// dispatching on the instance type from the object's map.
// NOTE(review): this excerpt appears to have lines elided (the embedded
// source line numbers jump); several case-closing braces and `break`s are
// missing from what is visible — reconcile against the full file.
1185 void JSObject::JSObjectShortPrint(StringStream* accumulator) {
1186 switch (map()->instance_type()) {
1187 case JS_ARRAY_TYPE: {
1188 double length = JSArray::cast(this)->length()->IsUndefined()
1190 : JSArray::cast(this)->length()->Number();
1191 accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
1194 case JS_WEAK_MAP_TYPE: {
1195 accumulator->Add("<JS WeakMap>");
1198 case JS_WEAK_SET_TYPE: {
1199 accumulator->Add("<JS WeakSet>");
1202 case JS_REGEXP_TYPE: {
1203 accumulator->Add("<JS RegExp>");
1206 case JS_FUNCTION_TYPE: {
1207 JSFunction* function = JSFunction::cast(this);
1208 Object* fun_name = function->shared()->DebugName();
1209 bool printed = false;
1210 if (fun_name->IsString()) {
1211 String* str = String::cast(fun_name);
1212 if (str->length() > 0) {
1213 accumulator->Add("<JS Function ");
1214 accumulator->Put(str);
// Fallback when the function has no printable debug name.
1219 accumulator->Add("<JS Function");
1221 accumulator->Add(" (SharedFunctionInfo %p)",
1222 reinterpret_cast<void*>(function->shared()));
1223 accumulator->Put('>');
1226 case JS_GENERATOR_OBJECT_TYPE: {
1227 accumulator->Add("<JS Generator>");
1230 case JS_MODULE_TYPE: {
1231 accumulator->Add("<JS Module>");
1234 // All other JSObjects are rather similar to each other (JSObject,
1235 // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
1237 Map* map_of_this = map();
1238 Heap* heap = GetHeap();
1239 Object* constructor = map_of_this->constructor();
1240 bool printed = false;
// Defensive checks: the constructor and its SharedFunctionInfo may be
// invalid pointers while the heap is in a broken state; print loudly.
1241 if (constructor->IsHeapObject() &&
1242 !heap->Contains(HeapObject::cast(constructor))) {
1243 accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
1245 bool global_object = IsJSGlobalProxy();
1246 if (constructor->IsJSFunction()) {
1247 if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
1248 accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
1250 Object* constructor_name =
1251 JSFunction::cast(constructor)->shared()->name();
1252 if (constructor_name->IsString()) {
1253 String* str = String::cast(constructor_name);
1254 if (str->length() > 0) {
// AnWord decides between "a"/"an" for the printed article.
1255 bool vowel = AnWord(str);
1256 accumulator->Add("<%sa%s ",
1257 global_object ? "Global Object: " : "",
1259 accumulator->Put(str);
1260 accumulator->Add(" with %smap %p",
1261 map_of_this->is_deprecated() ? "deprecated " : "",
// Generic fallback when no usable constructor name was printed.
1269 accumulator->Add("<JS %sObject", global_object ? "Global " : "");
// JSValue wrappers additionally print their wrapped primitive value.
1273 accumulator->Add(" value = ");
1274 JSValue::cast(this)->value()->ShortPrint(accumulator);
1276 accumulator->Put('>');
1283 void JSObject::PrintElementsTransition(
1284 FILE* file, Handle<JSObject> object,
1285 ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
1286 ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
1287 if (from_kind != to_kind) {
1289 os << "elements transition [" << ElementsKindToString(from_kind) << " -> "
1290 << ElementsKindToString(to_kind) << "] in ";
1291 JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
1292 PrintF(file, " for ");
1293 object->ShortPrint(file);
1294 PrintF(file, " from ");
1295 from_elements->ShortPrint(file);
1296 PrintF(file, " to ");
1297 to_elements->ShortPrint(file);
1303 void Map::PrintGeneralization(FILE* file,
1308 bool constant_to_field,
1309 Representation old_representation,
1310 Representation new_representation,
1311 HeapType* old_field_type,
1312 HeapType* new_field_type) {
1314 os << "[generalizing ";
1315 constructor_name()->PrintOn(file);
1317 Name* name = instance_descriptors()->GetKey(modify_index);
1318 if (name->IsString()) {
1319 String::cast(name)->PrintOn(file);
1321 os << "{symbol " << static_cast<void*>(name) << "}";
1324 if (constant_to_field) {
1327 os << old_representation.Mnemonic() << "{";
1328 old_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
1331 os << "->" << new_representation.Mnemonic() << "{";
1332 new_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
1334 if (strlen(reason) > 0) {
1337 os << "+" << (descriptors - split) << " maps";
1340 JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
1345 void JSObject::PrintInstanceMigration(FILE* file,
1348 PrintF(file, "[migrating ");
1349 map()->constructor_name()->PrintOn(file);
1351 DescriptorArray* o = original_map->instance_descriptors();
1352 DescriptorArray* n = new_map->instance_descriptors();
1353 for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
1354 Representation o_r = o->GetDetails(i).representation();
1355 Representation n_r = n->GetDetails(i).representation();
1356 if (!o_r.Equals(n_r)) {
1357 String::cast(o->GetKey(i))->PrintOn(file);
1358 PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
1359 } else if (o->GetDetails(i).type() == CONSTANT &&
1360 n->GetDetails(i).type() == FIELD) {
1361 Name* name = o->GetKey(i);
1362 if (name->IsString()) {
1363 String::cast(name)->PrintOn(file);
1365 PrintF(file, "{symbol %p}", static_cast<void*>(name));
// Streams a one-line description of any heap object to |os|: strings and
// JSObjects delegate to their own short-print helpers via a StringStream;
// everything else is dispatched on instance type.
// NOTE(review): this excerpt appears to have lines elided (the embedded
// source line numbers jump); several case labels, `break`s and closing
// braces are missing from what is visible — reconcile against the full file.
1374 void HeapObject::HeapObjectShortPrint(OStream& os) { // NOLINT
1375 Heap* heap = GetHeap();
// Sanity checks: loudly flag pointers outside the heap instead of crashing.
1376 if (!heap->Contains(this)) {
1377 os << "!!!INVALID POINTER!!!";
1380 if (!heap->Contains(map())) {
1381 os << "!!!INVALID MAP!!!";
// Strings render through StringShortPrint into a temporary StringStream.
1388 HeapStringAllocator allocator;
1389 StringStream accumulator(&allocator);
1390 String::cast(this)->StringShortPrint(&accumulator);
1391 os << accumulator.ToCString().get();
// JSObjects render through JSObjectShortPrint the same way.
1395 HeapStringAllocator allocator;
1396 StringStream accumulator(&allocator);
1397 JSObject::cast(this)->JSObjectShortPrint(&accumulator);
1398 os << accumulator.ToCString().get();
1401 switch (map()->instance_type()) {
1403 os << "<Map(elements=" << Map::cast(this)->elements_kind() << ")>";
1405 case FIXED_ARRAY_TYPE:
1406 os << "<FixedArray[" << FixedArray::cast(this)->length() << "]>";
1408 case FIXED_DOUBLE_ARRAY_TYPE:
1409 os << "<FixedDoubleArray[" << FixedDoubleArray::cast(this)->length()
1412 case BYTE_ARRAY_TYPE:
1413 os << "<ByteArray[" << ByteArray::cast(this)->length() << "]>";
1415 case FREE_SPACE_TYPE:
1416 os << "<FreeSpace[" << FreeSpace::cast(this)->Size() << "]>";
// Macro expands a print case for every external/fixed typed-array kind.
1418 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \
1419 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1420 os << "<External" #Type "Array[" \
1421 << External##Type##Array::cast(this)->length() << "]>"; \
1423 case FIXED_##TYPE##_ARRAY_TYPE: \
1424 os << "<Fixed" #Type "Array[" << Fixed##Type##Array::cast(this)->length() \
1428 TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
1429 #undef TYPED_ARRAY_SHORT_PRINT
1431 case SHARED_FUNCTION_INFO_TYPE: {
1432 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1433 SmartArrayPointer<char> debug_name =
1434 shared->DebugName()->ToCString();
1435 if (debug_name[0] != 0) {
1436 os << "<SharedFunctionInfo " << debug_name.get() << ">";
1438 os << "<SharedFunctionInfo>";
1442 case JS_MESSAGE_OBJECT_TYPE:
1443 os << "<JSMessageObject>";
// Macro expands a "<Name>" print case for every struct in STRUCT_LIST.
1445 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1447 os << "<" #Name ">"; \
1449 STRUCT_LIST(MAKE_STRUCT_CASE)
1450 #undef MAKE_STRUCT_CASE
1452 Code* code = Code::cast(this);
1453 os << "<Code: " << Code::Kind2String(code->kind()) << ">";
1456 case ODDBALL_TYPE: {
1457 if (IsUndefined()) {
1458 os << "<undefined>";
1459 } else if (IsTheHole()) {
1461 } else if (IsNull()) {
1463 } else if (IsTrue()) {
1465 } else if (IsFalse()) {
1468 os << "<Odd Oddball>";
1473 Symbol* symbol = Symbol::cast(this);
1474 os << "<Symbol: " << symbol->Hash();
1475 if (!symbol->name()->IsUndefined()) {
1477 HeapStringAllocator allocator;
1478 StringStream accumulator(&allocator);
1479 String::cast(symbol->name())->StringShortPrint(&accumulator);
1480 os << accumulator.ToCString().get();
1485 case HEAP_NUMBER_TYPE: {
1487 HeapNumber::cast(this)->HeapNumberPrint(os);
1491 case MUTABLE_HEAP_NUMBER_TYPE: {
1492 os << "<MutableNumber: ";
1493 HeapNumber::cast(this)->HeapNumberPrint(os);
1500 case JS_FUNCTION_PROXY_TYPE:
1501 os << "<JSFunctionProxy>";
// Cells and PropertyCells print their current value via ShortPrint.
1508 HeapStringAllocator allocator;
1509 StringStream accumulator(&allocator);
1510 Cell::cast(this)->value()->ShortPrint(&accumulator);
1511 os << accumulator.ToCString().get();
1514 case PROPERTY_CELL_TYPE: {
1515 os << "PropertyCell for ";
1516 HeapStringAllocator allocator;
1517 StringStream accumulator(&allocator);
1518 PropertyCell::cast(this)->value()->ShortPrint(&accumulator);
1519 os << accumulator.ToCString().get();
// Default: unknown instance types print their raw type number.
1523 os << "<Other heap object (" << map()->instance_type() << ")>";
1529 void HeapObject::Iterate(ObjectVisitor* v) {
1531 IteratePointer(v, kMapOffset);
1532 // Handle object body
1534 IterateBody(m->instance_type(), SizeFromMap(m), v);
// Visits the pointer fields in the body of an object of the given
// |type| and |object_size|, dispatching to the appropriate BodyDescriptor.
// NOTE(review): this excerpt appears to have lines elided (the embedded
// source line numbers jump); the visitor parameter line, several case
// labels, `break`s and closing braces are missing from what is visible —
// reconcile against the full file.
1538 void HeapObject::IterateBody(InstanceType type, int object_size,
1540 // Avoiding <Type>::cast(this) because it accesses the map pointer field.
1541 // During GC, the map pointer field is encoded.
1542 if (type < FIRST_NONSTRING_TYPE) {
// Strings dispatch on their representation (cons/sliced/external);
// sequential strings contain no pointers and need no visiting.
1543 switch (type & kStringRepresentationMask) {
1546 case kConsStringTag:
1547 ConsString::BodyDescriptor::IterateBody(this, v);
1549 case kSlicedStringTag:
1550 SlicedString::BodyDescriptor::IterateBody(this, v);
1552 case kExternalStringTag:
1553 if ((type & kStringEncodingMask) == kOneByteStringTag) {
1554 reinterpret_cast<ExternalOneByteString*>(this)
1555 ->ExternalOneByteStringIterateBody(v);
1557 reinterpret_cast<ExternalTwoByteString*>(this)->
1558 ExternalTwoByteStringIterateBody(v);
1566 case FIXED_ARRAY_TYPE:
1567 FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
1569 case CONSTANT_POOL_ARRAY_TYPE:
1570 reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
1572 case FIXED_DOUBLE_ARRAY_TYPE:
// All plain-JSObject-shaped types share the JSObject body descriptor.
1574 case JS_OBJECT_TYPE:
1575 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1576 case JS_GENERATOR_OBJECT_TYPE:
1577 case JS_MODULE_TYPE:
1581 case JS_ARRAY_BUFFER_TYPE:
1582 case JS_TYPED_ARRAY_TYPE:
1583 case JS_DATA_VIEW_TYPE:
1586 case JS_SET_ITERATOR_TYPE:
1587 case JS_MAP_ITERATOR_TYPE:
1588 case JS_WEAK_MAP_TYPE:
1589 case JS_WEAK_SET_TYPE:
1590 case JS_REGEXP_TYPE:
1591 case JS_GLOBAL_PROXY_TYPE:
1592 case JS_GLOBAL_OBJECT_TYPE:
1593 case JS_BUILTINS_OBJECT_TYPE:
1594 case JS_MESSAGE_OBJECT_TYPE:
1595 JSObject::BodyDescriptor::IterateBody(this, object_size, v);
1597 case JS_FUNCTION_TYPE:
1598 reinterpret_cast<JSFunction*>(this)
1599 ->JSFunctionIterateBody(object_size, v);
1602 Oddball::BodyDescriptor::IterateBody(this, v);
1605 JSProxy::BodyDescriptor::IterateBody(this, v);
1607 case JS_FUNCTION_PROXY_TYPE:
1608 JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
1611 reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
1614 Map::BodyDescriptor::IterateBody(this, v);
1617 reinterpret_cast<Code*>(this)->CodeIterateBody(v);
1620 Cell::BodyDescriptor::IterateBody(this, v);
1622 case PROPERTY_CELL_TYPE:
1623 PropertyCell::BodyDescriptor::IterateBody(this, v);
1626 Symbol::BodyDescriptor::IterateBody(this, v);
// Pointer-free payloads: numbers, raw byte arrays, free space, typed
// arrays — nothing to visit.
1629 case HEAP_NUMBER_TYPE:
1630 case MUTABLE_HEAP_NUMBER_TYPE:
1632 case BYTE_ARRAY_TYPE:
1633 case FREE_SPACE_TYPE:
1636 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
1637 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1638 case FIXED_##TYPE##_ARRAY_TYPE: \
1641 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1642 #undef TYPED_ARRAY_CASE
1644 case SHARED_FUNCTION_INFO_TYPE: {
1645 SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
1649 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1651 STRUCT_LIST(MAKE_STRUCT_CASE)
1652 #undef MAKE_STRUCT_CASE
// AllocationSite is a struct with its own descriptor; other structs use
// the generic StructBodyDescriptor.
1653 if (type == ALLOCATION_SITE_TYPE) {
1654 AllocationSite::BodyDescriptor::IterateBody(this, v);
1656 StructBodyDescriptor::IterateBody(this, object_size, v);
1660 PrintF("Unknown type: %d\n", type);
1666 bool HeapNumber::HeapNumberBooleanValue() {
1667 return DoubleToBoolean(value());
1671 void HeapNumber::HeapNumberPrint(OStream& os) { // NOLINT
1676 String* JSReceiver::class_name() {
1677 if (IsJSFunction() || IsJSFunctionProxy()) {
1678 return GetHeap()->Function_string();
1680 if (map()->constructor()->IsJSFunction()) {
1681 JSFunction* constructor = JSFunction::cast(map()->constructor());
1682 return String::cast(constructor->shared()->instance_class_name());
1684 // If the constructor is not present, return "Object".
1685 return GetHeap()->Object_string();
1689 String* Map::constructor_name() {
1690 if (constructor()->IsJSFunction()) {
1691 JSFunction* constructor = JSFunction::cast(this->constructor());
1692 String* name = String::cast(constructor->shared()->name());
1693 if (name->length() > 0) return name;
1694 String* inferred_name = constructor->shared()->inferred_name();
1695 if (inferred_name->length() > 0) return inferred_name;
1696 Object* proto = prototype();
1697 if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
1699 // TODO(rossberg): what about proxies?
1700 // If the constructor is not present, return "Object".
1701 return GetHeap()->Object_string();
1705 String* JSReceiver::constructor_name() {
1706 return map()->constructor_name();
1710 MaybeHandle<Map> Map::CopyWithField(Handle<Map> map,
1712 Handle<HeapType> type,
1713 PropertyAttributes attributes,
1714 Representation representation,
1715 TransitionFlag flag) {
1716 DCHECK(DescriptorArray::kNotFound ==
1717 map->instance_descriptors()->Search(
1718 *name, map->NumberOfOwnDescriptors()));
1720 // Ensure the descriptor array does not get too big.
1721 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1722 return MaybeHandle<Map>();
1725 Isolate* isolate = map->GetIsolate();
1727 // Compute the new index for new field.
1728 int index = map->NextFreePropertyIndex();
1730 if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
1731 representation = Representation::Tagged();
1732 type = HeapType::Any(isolate);
1735 FieldDescriptor new_field_desc(name, index, type, attributes, representation);
1736 Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag);
1737 int unused_property_fields = new_map->unused_property_fields() - 1;
1738 if (unused_property_fields < 0) {
1739 unused_property_fields += JSObject::kFieldsAdded;
1741 new_map->set_unused_property_fields(unused_property_fields);
1746 MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map,
1748 Handle<Object> constant,
1749 PropertyAttributes attributes,
1750 TransitionFlag flag) {
1751 // Ensure the descriptor array does not get too big.
1752 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
1753 return MaybeHandle<Map>();
1756 // Allocate new instance descriptors with (name, constant) added.
1757 ConstantDescriptor new_constant_desc(name, constant, attributes);
1758 return Map::CopyAddDescriptor(map, &new_constant_desc, flag);
1762 void JSObject::AddSlowProperty(Handle<JSObject> object,
1764 Handle<Object> value,
1765 PropertyAttributes attributes) {
1766 DCHECK(!object->HasFastProperties());
1767 Isolate* isolate = object->GetIsolate();
1768 Handle<NameDictionary> dict(object->property_dictionary());
1769 if (object->IsGlobalObject()) {
1770 // In case name is an orphaned property reuse the cell.
1771 int entry = dict->FindEntry(name);
1772 if (entry != NameDictionary::kNotFound) {
1773 Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
1774 PropertyCell::SetValueInferType(cell, value);
1775 // Assign an enumeration index to the property and update
1776 // SetNextEnumerationIndex.
1777 int index = dict->NextEnumerationIndex();
1778 PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
1779 dict->SetNextEnumerationIndex(index + 1);
1780 dict->SetEntry(entry, name, cell, details);
1783 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
1784 PropertyCell::SetValueInferType(cell, value);
1787 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
1788 Handle<NameDictionary> result =
1789 NameDictionary::Add(dict, name, value, details);
1790 if (*dict != *result) object->set_properties(*result);
1794 Context* JSObject::GetCreationContext() {
1795 Object* constructor = this->map()->constructor();
1796 JSFunction* function;
1797 if (!constructor->IsJSFunction()) {
1798 // Functions have null as a constructor,
1799 // but any JSFunction knows its context immediately.
1800 function = JSFunction::cast(this);
1802 function = JSFunction::cast(constructor);
1805 return function->context()->native_context();
1809 void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
1810 const char* type_str,
1812 Handle<Object> old_value) {
1813 DCHECK(!object->IsJSGlobalProxy());
1814 DCHECK(!object->IsJSGlobalObject());
1815 Isolate* isolate = object->GetIsolate();
1816 HandleScope scope(isolate);
1817 Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
1818 Handle<Object> args[] = { type, object, name, old_value };
1819 int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
1821 Execution::Call(isolate,
1822 Handle<JSFunction>(isolate->observers_notify_change()),
1823 isolate->factory()->undefined_value(),
1824 argc, args).Assert();
1828 const char* Representation::Mnemonic() const {
1830 case kNone: return "v";
1831 case kTagged: return "t";
1832 case kSmi: return "s";
1833 case kDouble: return "d";
1834 case kInteger32: return "i";
1835 case kHeapObject: return "h";
1836 case kExternal: return "x";
1844 bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields,
1845 int target_inobject, int target_unused,
1846 int* old_number_of_fields) {
1847 // If fields were added (or removed), rewrite the instance.
1848 *old_number_of_fields = NumberOfFields();
1849 DCHECK(target_number_of_fields >= *old_number_of_fields);
1850 if (target_number_of_fields != *old_number_of_fields) return true;
1852 // If smi descriptors were replaced by double descriptors, rewrite.
1853 DescriptorArray* old_desc = instance_descriptors();
1854 DescriptorArray* new_desc = target->instance_descriptors();
1855 int limit = NumberOfOwnDescriptors();
1856 for (int i = 0; i < limit; i++) {
1857 if (new_desc->GetDetails(i).representation().IsDouble() !=
1858 old_desc->GetDetails(i).representation().IsDouble()) {
1863 // If no fields were added, and no inobject properties were removed, setting
1864 // the map is sufficient.
1865 if (target_inobject == inobject_properties()) return false;
1866 // In-object slack tracking may have reduced the object size of the new map.
1867 // In that case, succeed if all existing fields were inobject, and they still
1868 // fit within the new inobject size.
1869 DCHECK(target_inobject < inobject_properties());
1870 if (target_number_of_fields <= target_inobject) {
1871 DCHECK(target_number_of_fields + target_unused == target_inobject);
1874 // Otherwise, properties will need to be moved to the backing store.
1879 void Map::ConnectElementsTransition(Handle<Map> parent, Handle<Map> child) {
1880 Isolate* isolate = parent->GetIsolate();
1881 Handle<Name> name = isolate->factory()->elements_transition_symbol();
1882 ConnectTransition(parent, child, name, FULL_TRANSITION);
1886 void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
1887 if (object->map() == *new_map) return;
1888 if (object->HasFastProperties()) {
1889 if (!new_map->is_dictionary_map()) {
1890 Handle<Map> old_map(object->map());
1891 MigrateFastToFast(object, new_map);
1892 if (old_map->is_prototype_map()) {
1893 // Clear out the old descriptor array to avoid problems to sharing
1894 // the descriptor array without using an explicit.
1895 old_map->InitializeDescriptors(
1896 old_map->GetHeap()->empty_descriptor_array());
1897 // Ensure that no transition was inserted for prototype migrations.
1898 DCHECK(!old_map->HasTransitionArray());
1899 DCHECK(new_map->GetBackPointer()->IsUndefined());
1902 MigrateFastToSlow(object, new_map, 0);
1905 // For slow-to-fast migrations JSObject::TransformToFastProperties()
1906 // must be used instead.
1907 CHECK(new_map->is_dictionary_map());
1909 // Slow-to-slow migration is trivial.
1910 object->set_map(*new_map);
// To migrate a fast instance to a fast map:
// - First check whether the instance needs to be rewritten. If not, simply
//   relink the map.
// - Otherwise, allocate a fixed array large enough to hold all fields, in
//   addition to unused space.
// - Copy all existing properties in, in the following order: backing store
//   properties, unused fields, inobject properties.
// - If all allocation succeeded, commit the state atomically:
//   * Copy inobject properties from the backing store back into the object.
//   * Trim the difference in instance size of the object. This also cleanly
//     frees inobject properties that moved to the backing store.
//   * If there are properties left in the backing store, trim off the space
//     used to temporarily store the inobject properties.
//   * If there are properties left in the backing store, install the backing
//     store.
// Migrates a fast-properties |object| to fast-properties |new_map|,
// rewriting the field storage when representations or field counts changed
// (see the algorithm comment above this function).
// NOTE(review): this excerpt appears to have lines elided (the embedded
// source line numbers jump); several `return`s, `else` arms and closing
// braces are missing from what is visible — reconcile against the full file.
1930 void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
1931 Isolate* isolate = object->GetIsolate();
1932 Handle<Map> old_map(object->map());
1933 int old_number_of_fields;
1934 int number_of_fields = new_map->NumberOfFields();
1935 int inobject = new_map->inobject_properties();
1936 int unused = new_map->unused_property_fields();
1938 // Nothing to do if no functions were converted to fields and no smis were
1939 // converted to doubles.
1940 if (!old_map->InstancesNeedRewriting(*new_map, number_of_fields, inobject,
1941 unused, &old_number_of_fields)) {
1942 object->synchronized_set_map(*new_map);
1946 int total_size = number_of_fields + unused;
1947 int external = total_size - inobject;
// Fast path: single-field transition from the direct back-pointer map can
// be done by (at most) extending the existing backing store.
1949 if (number_of_fields != old_number_of_fields &&
1950 new_map->GetBackPointer() == *old_map) {
1951 PropertyDetails details = new_map->GetLastDescriptorDetails();
1953 if (old_map->unused_property_fields() > 0) {
1954 if (details.representation().IsDouble()) {
// Doubles are boxed in MUTABLE HeapNumbers so the slot stays tagged.
1955 Handle<Object> value = isolate->factory()->NewHeapNumber(0, MUTABLE);
1957 FieldIndex::ForDescriptor(*new_map, new_map->LastAdded());
1958 object->FastPropertyAtPut(index, *value);
1960 object->synchronized_set_map(*new_map);
1964 DCHECK(number_of_fields == old_number_of_fields + 1);
1965 // This migration is a transition from a map that has run out out property
1966 // space. Therefore it could be done by extending the backing store.
1967 Handle<FixedArray> old_storage = handle(object->properties(), isolate);
1968 Handle<FixedArray> new_storage =
1969 FixedArray::CopySize(old_storage, external);
1971 // Properly initialize newly added property.
1972 Handle<Object> value;
1973 if (details.representation().IsDouble()) {
1974 value = isolate->factory()->NewHeapNumber(0, MUTABLE);
1976 value = isolate->factory()->uninitialized_value();
1978 DCHECK(details.type() == FIELD);
1979 int target_index = details.field_index() - inobject;
1980 DCHECK(target_index >= 0); // Must be a backing store index.
1981 new_storage->set(target_index, *value);
1983 // From here on we cannot fail and we shouldn't GC anymore.
1984 DisallowHeapAllocation no_allocation;
1986 // Set the new property value and do the map transition.
1987 object->set_properties(*new_storage);
1988 object->synchronized_set_map(*new_map);
// General path: stage all fields in a temporary array sized total_size,
// laid out as [backing-store fields | inobject fields at the end].
1991 Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
1993 Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
1994 Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
1995 int old_nof = old_map->NumberOfOwnDescriptors();
1996 int new_nof = new_map->NumberOfOwnDescriptors();
1998 // This method only supports generalizing instances to at least the same
1999 // number of properties.
2000 DCHECK(old_nof <= new_nof);
// Copy existing property values, re-boxing where the representation
// changed (tagged->double wraps, double->tagged unwraps).
2002 for (int i = 0; i < old_nof; i++) {
2003 PropertyDetails details = new_descriptors->GetDetails(i);
2004 if (details.type() != FIELD) continue;
2005 PropertyDetails old_details = old_descriptors->GetDetails(i);
2006 if (old_details.type() == CALLBACKS) {
2007 DCHECK(details.representation().IsTagged());
2010 DCHECK(old_details.type() == CONSTANT ||
2011 old_details.type() == FIELD);
2012 Object* raw_value = old_details.type() == CONSTANT
2013 ? old_descriptors->GetValue(i)
2014 : object->RawFastPropertyAt(FieldIndex::ForDescriptor(*old_map, i));
2015 Handle<Object> value(raw_value, isolate);
2016 if (!old_details.representation().IsDouble() &&
2017 details.representation().IsDouble()) {
2018 if (old_details.representation().IsNone()) {
2019 value = handle(Smi::FromInt(0), isolate);
2021 value = Object::NewStorageFor(isolate, value, details.representation());
2022 } else if (old_details.representation().IsDouble() &&
2023 !details.representation().IsDouble()) {
2024 value = Object::WrapForRead(isolate, value, old_details.representation());
2026 DCHECK(!(details.representation().IsDouble() && value->IsSmi()));
2027 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2028 if (target_index < 0) target_index += total_size;
2029 array->set(target_index, *value);
// Initialize any fields that are new in |new_map|.
2032 for (int i = old_nof; i < new_nof; i++) {
2033 PropertyDetails details = new_descriptors->GetDetails(i);
2034 if (details.type() != FIELD) continue;
2035 Handle<Object> value;
2036 if (details.representation().IsDouble()) {
2037 value = isolate->factory()->NewHeapNumber(0, MUTABLE);
2039 value = isolate->factory()->uninitialized_value();
2041 int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2042 if (target_index < 0) target_index += total_size;
2043 array->set(target_index, *value);
2046 // From here on we cannot fail and we shouldn't GC anymore.
2047 DisallowHeapAllocation no_allocation;
2049 // Copy (real) inobject properties. If necessary, stop at number_of_fields to
2050 // avoid overwriting |one_pointer_filler_map|.
2051 int limit = Min(inobject, number_of_fields);
2052 for (int i = 0; i < limit; i++) {
2053 FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
2054 object->FastPropertyAtPut(index, array->get(external + i));
2057 Heap* heap = isolate->heap();
2059 // If there are properties in the new backing store, trim it to the correct
2060 // size and install the backing store into the object.
2062 heap->RightTrimFixedArray<Heap::FROM_MUTATOR>(*array, inobject);
2063 object->set_properties(*array);
2066 // Create filler object past the new instance size.
2067 int new_instance_size = new_map->instance_size();
2068 int instance_size_delta = old_map->instance_size() - new_instance_size;
2069 DCHECK(instance_size_delta >= 0);
2071 if (instance_size_delta > 0) {
2072 Address address = object->address();
2073 heap->CreateFillerObjectAt(
2074 address + new_instance_size, instance_size_delta);
2075 heap->AdjustLiveBytes(address, -instance_size_delta, Heap::FROM_MUTATOR);
2078 // We are storing the new map using release store after creating a filler for
2079 // the left-over space to avoid races with the sweeper thread.
2080 object->synchronized_set_map(*new_map);
2084 void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
2086 Representation new_representation,
2087 Handle<HeapType> new_field_type) {
2088 Handle<Map> new_map = Map::GeneralizeRepresentation(
2089 handle(object->map()), modify_index, new_representation, new_field_type,
2091 MigrateToMap(object, new_map);
2095 int Map::NumberOfFields() {
2096 DescriptorArray* descriptors = instance_descriptors();
2098 for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
2099 if (descriptors->GetDetails(i).type() == FIELD) result++;
2105 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2107 StoreMode store_mode,
2108 PropertyAttributes attributes,
2109 const char* reason) {
2110 Isolate* isolate = map->GetIsolate();
2111 Handle<Map> new_map = Copy(map);
2113 DescriptorArray* descriptors = new_map->instance_descriptors();
2114 int length = descriptors->number_of_descriptors();
2115 for (int i = 0; i < length; i++) {
2116 descriptors->SetRepresentation(i, Representation::Tagged());
2117 if (descriptors->GetDetails(i).type() == FIELD) {
2118 descriptors->SetValue(i, HeapType::Any());
2122 // Unless the instance is being migrated, ensure that modify_index is a field.
2123 PropertyDetails details = descriptors->GetDetails(modify_index);
2124 if (store_mode == FORCE_FIELD &&
2125 (details.type() != FIELD || details.attributes() != attributes)) {
2126 int field_index = details.type() == FIELD ? details.field_index()
2127 : new_map->NumberOfFields();
2128 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
2129 field_index, attributes, Representation::Tagged());
2130 descriptors->Replace(modify_index, &d);
2131 if (details.type() != FIELD) {
2132 int unused_property_fields = new_map->unused_property_fields() - 1;
2133 if (unused_property_fields < 0) {
2134 unused_property_fields += JSObject::kFieldsAdded;
2136 new_map->set_unused_property_fields(unused_property_fields);
2139 DCHECK(details.attributes() == attributes);
2142 if (FLAG_trace_generalization) {
2143 HeapType* field_type = (details.type() == FIELD)
2144 ? map->instance_descriptors()->GetFieldType(modify_index)
2146 map->PrintGeneralization(stdout, reason, modify_index,
2147 new_map->NumberOfOwnDescriptors(),
2148 new_map->NumberOfOwnDescriptors(),
2149 details.type() == CONSTANT && store_mode == FORCE_FIELD,
2150 details.representation(), Representation::Tagged(),
2151 field_type, HeapType::Any());
2158 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2160 StoreMode store_mode,
2161 const char* reason) {
2162 PropertyDetails details =
2163 map->instance_descriptors()->GetDetails(modify_index);
2164 return CopyGeneralizeAllRepresentations(map, modify_index, store_mode,
2165 details.attributes(), reason);
2169 void Map::DeprecateTransitionTree() {
2170 if (is_deprecated()) return;
2171 if (HasTransitionArray()) {
2172 TransitionArray* transitions = this->transitions();
2173 for (int i = 0; i < transitions->number_of_transitions(); i++) {
2174 transitions->GetTarget(i)->DeprecateTransitionTree();
2178 dependent_code()->DeoptimizeDependentCodeGroup(
2179 GetIsolate(), DependentCode::kTransitionGroup);
2180 NotifyLeafMapLayoutChange();
2184 // Invalidates a transition target at |key|, and installs |new_descriptors| over
2185 // the current instance_descriptors to ensure proper sharing of descriptor
2187 void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
2188 if (HasTransitionArray()) {
2189 TransitionArray* transitions = this->transitions();
2190 int transition = transitions->Search(key);
2191 if (transition != TransitionArray::kNotFound) {
2192 transitions->GetTarget(transition)->DeprecateTransitionTree();
2196 // Don't overwrite the empty descriptor array.
2197 if (NumberOfOwnDescriptors() == 0) return;
2199 DescriptorArray* to_replace = instance_descriptors();
2200 Map* current = this;
2201 GetHeap()->incremental_marking()->RecordWrites(to_replace);
2202 while (current->instance_descriptors() == to_replace) {
2203 current->SetEnumLength(kInvalidEnumCacheSentinel);
2204 current->set_instance_descriptors(new_descriptors);
2205 Object* next = current->GetBackPointer();
2206 if (next->IsUndefined()) break;
2207 current = Map::cast(next);
2210 set_owns_descriptors(false);
2214 Map* Map::FindRootMap() {
2217 Object* back = result->GetBackPointer();
2218 if (back->IsUndefined()) return result;
2219 result = Map::cast(back);
2224 Map* Map::FindLastMatchMap(int verbatim,
2226 DescriptorArray* descriptors) {
2227 DisallowHeapAllocation no_allocation;
2229 // This can only be called on roots of transition trees.
2230 DCHECK(GetBackPointer()->IsUndefined());
2232 Map* current = this;
2234 for (int i = verbatim; i < length; i++) {
2235 if (!current->HasTransitionArray()) break;
2236 Name* name = descriptors->GetKey(i);
2237 TransitionArray* transitions = current->transitions();
2238 int transition = transitions->Search(name);
2239 if (transition == TransitionArray::kNotFound) break;
2241 Map* next = transitions->GetTarget(transition);
2242 DescriptorArray* next_descriptors = next->instance_descriptors();
2244 PropertyDetails details = descriptors->GetDetails(i);
2245 PropertyDetails next_details = next_descriptors->GetDetails(i);
2246 if (details.type() != next_details.type()) break;
2247 if (details.attributes() != next_details.attributes()) break;
2248 if (!details.representation().Equals(next_details.representation())) break;
2249 if (next_details.type() == FIELD) {
2250 if (!descriptors->GetFieldType(i)->NowIs(
2251 next_descriptors->GetFieldType(i))) break;
2253 if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break;
2262 Map* Map::FindFieldOwner(int descriptor) {
2263 DisallowHeapAllocation no_allocation;
2264 DCHECK_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type());
2267 Object* back = result->GetBackPointer();
2268 if (back->IsUndefined()) break;
2269 Map* parent = Map::cast(back);
2270 if (parent->NumberOfOwnDescriptors() <= descriptor) break;
2277 void Map::UpdateFieldType(int descriptor, Handle<Name> name,
2278 Handle<HeapType> new_type) {
2279 DisallowHeapAllocation no_allocation;
2280 PropertyDetails details = instance_descriptors()->GetDetails(descriptor);
2281 if (details.type() != FIELD) return;
2282 if (HasTransitionArray()) {
2283 TransitionArray* transitions = this->transitions();
2284 for (int i = 0; i < transitions->number_of_transitions(); ++i) {
2285 transitions->GetTarget(i)->UpdateFieldType(descriptor, name, new_type);
2288 // Skip if already updated the shared descriptor.
2289 if (instance_descriptors()->GetFieldType(descriptor) == *new_type) return;
2290 FieldDescriptor d(name, instance_descriptors()->GetFieldIndex(descriptor),
2291 new_type, details.attributes(), details.representation());
2292 instance_descriptors()->Replace(descriptor, &d);
2297 Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1,
2298 Handle<HeapType> type2,
2300 static const int kMaxClassesPerFieldType = 5;
2301 if (type1->NowIs(type2)) return type2;
2302 if (type2->NowIs(type1)) return type1;
2303 if (type1->NowStable() && type2->NowStable()) {
2304 Handle<HeapType> type = HeapType::Union(type1, type2, isolate);
2305 if (type->NumClasses() <= kMaxClassesPerFieldType) {
2306 DCHECK(type->NowStable());
2307 DCHECK(type1->NowIs(type));
2308 DCHECK(type2->NowIs(type));
2312 return HeapType::Any(isolate);
2317 void Map::GeneralizeFieldType(Handle<Map> map,
2319 Handle<HeapType> new_field_type) {
2320 Isolate* isolate = map->GetIsolate();
2322 // Check if we actually need to generalize the field type at all.
2323 Handle<HeapType> old_field_type(
2324 map->instance_descriptors()->GetFieldType(modify_index), isolate);
2325 if (new_field_type->NowIs(old_field_type)) {
2326 DCHECK(Map::GeneralizeFieldType(old_field_type,
2328 isolate)->NowIs(old_field_type));
2332 // Determine the field owner.
2333 Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
2334 Handle<DescriptorArray> descriptors(
2335 field_owner->instance_descriptors(), isolate);
2336 DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
2338 // Determine the generalized new field type.
2339 new_field_type = Map::GeneralizeFieldType(
2340 old_field_type, new_field_type, isolate);
2342 PropertyDetails details = descriptors->GetDetails(modify_index);
2343 Handle<Name> name(descriptors->GetKey(modify_index));
2344 field_owner->UpdateFieldType(modify_index, name, new_field_type);
2345 field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
2346 isolate, DependentCode::kFieldTypeGroup);
2348 if (FLAG_trace_generalization) {
2349 map->PrintGeneralization(
2350 stdout, "field type generalization",
2351 modify_index, map->NumberOfOwnDescriptors(),
2352 map->NumberOfOwnDescriptors(), false,
2353 details.representation(), details.representation(),
2354 *old_field_type, *new_field_type);
2359 // Generalize the representation of the descriptor at |modify_index|.
2360 // This method rewrites the transition tree to reflect the new change. To avoid
2361 // high degrees over polymorphism, and to stabilize quickly, on every rewrite
2362 // the new type is deduced by merging the current type with any potential new
2363 // (partial) version of the type in the transition tree.
2364 // To do this, on each rewrite:
2365 // - Search the root of the transition tree using FindRootMap.
2366 // - Find |target_map|, the newest matching version of this map using the keys
2367 // in the |old_map|'s descriptor array to walk the transition tree.
2368 // - Merge/generalize the descriptor array of the |old_map| and |target_map|.
2369 // - Generalize the |modify_index| descriptor using |new_representation| and
2370 // |new_field_type|.
2371 // - Walk the tree again starting from the root towards |target_map|. Stop at
2372 // |split_map|, the first map who's descriptor array does not match the merged
2373 // descriptor array.
2374 // - If |target_map| == |split_map|, |target_map| is in the expected state.
2376 // - Otherwise, invalidate the outdated transition target from |target_map|, and
2377 // replace its transition tree with a new branch for the updated descriptors.
2378 Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
2380 Representation new_representation,
2381 Handle<HeapType> new_field_type,
2382 StoreMode store_mode) {
2383 Isolate* isolate = old_map->GetIsolate();
2385 Handle<DescriptorArray> old_descriptors(
2386 old_map->instance_descriptors(), isolate);
2387 int old_nof = old_map->NumberOfOwnDescriptors();
2388 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2389 Representation old_representation = old_details.representation();
2391 // It's fine to transition from None to anything but double without any
2392 // modification to the object, because the default uninitialized value for
2393 // representation None can be overwritten by both smi and tagged values.
2394 // Doubles, however, would require a box allocation.
2395 if (old_representation.IsNone() &&
2396 !new_representation.IsNone() &&
2397 !new_representation.IsDouble()) {
2398 DCHECK(old_details.type() == FIELD);
2399 DCHECK(old_descriptors->GetFieldType(modify_index)->NowIs(
2401 if (FLAG_trace_generalization) {
2402 old_map->PrintGeneralization(
2403 stdout, "uninitialized field",
2404 modify_index, old_map->NumberOfOwnDescriptors(),
2405 old_map->NumberOfOwnDescriptors(), false,
2406 old_representation, new_representation,
2407 old_descriptors->GetFieldType(modify_index), *new_field_type);
2409 old_descriptors->SetRepresentation(modify_index, new_representation);
2410 old_descriptors->SetValue(modify_index, *new_field_type);
2414 // Check the state of the root map.
2415 Handle<Map> root_map(old_map->FindRootMap(), isolate);
2416 if (!old_map->EquivalentToForTransition(*root_map)) {
2417 return CopyGeneralizeAllRepresentations(
2418 old_map, modify_index, store_mode, "not equivalent");
2420 int root_nof = root_map->NumberOfOwnDescriptors();
2421 if (modify_index < root_nof) {
2422 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2423 if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) ||
2424 (old_details.type() == FIELD &&
2425 (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) ||
2426 !new_representation.fits_into(old_details.representation())))) {
2427 return CopyGeneralizeAllRepresentations(
2428 old_map, modify_index, store_mode, "root modification");
2432 Handle<Map> target_map = root_map;
2433 for (int i = root_nof; i < old_nof; ++i) {
2434 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2435 if (j == TransitionArray::kNotFound) break;
2436 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2437 Handle<DescriptorArray> tmp_descriptors = handle(
2438 tmp_map->instance_descriptors(), isolate);
2440 // Check if target map is incompatible.
2441 PropertyDetails old_details = old_descriptors->GetDetails(i);
2442 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2443 PropertyType old_type = old_details.type();
2444 PropertyType tmp_type = tmp_details.type();
2445 if (tmp_details.attributes() != old_details.attributes() ||
2446 ((tmp_type == CALLBACKS || old_type == CALLBACKS) &&
2447 (tmp_type != old_type ||
2448 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2449 return CopyGeneralizeAllRepresentations(
2450 old_map, modify_index, store_mode, "incompatible");
2452 Representation old_representation = old_details.representation();
2453 Representation tmp_representation = tmp_details.representation();
2454 if (!old_representation.fits_into(tmp_representation) ||
2455 (!new_representation.fits_into(tmp_representation) &&
2456 modify_index == i)) {
2459 if (tmp_type == FIELD) {
2460 // Generalize the field type as necessary.
2461 Handle<HeapType> old_field_type = (old_type == FIELD)
2462 ? handle(old_descriptors->GetFieldType(i), isolate)
2463 : old_descriptors->GetValue(i)->OptimalType(
2464 isolate, tmp_representation);
2465 if (modify_index == i) {
2466 old_field_type = GeneralizeFieldType(
2467 new_field_type, old_field_type, isolate);
2469 GeneralizeFieldType(tmp_map, i, old_field_type);
2470 } else if (tmp_type == CONSTANT) {
2471 if (old_type != CONSTANT ||
2472 old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) {
2476 DCHECK_EQ(tmp_type, old_type);
2477 DCHECK_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i));
2479 target_map = tmp_map;
2482 // Directly change the map if the target map is more general.
2483 Handle<DescriptorArray> target_descriptors(
2484 target_map->instance_descriptors(), isolate);
2485 int target_nof = target_map->NumberOfOwnDescriptors();
2486 if (target_nof == old_nof &&
2487 (store_mode != FORCE_FIELD ||
2488 target_descriptors->GetDetails(modify_index).type() == FIELD)) {
2489 DCHECK(modify_index < target_nof);
2490 DCHECK(new_representation.fits_into(
2491 target_descriptors->GetDetails(modify_index).representation()));
2492 DCHECK(target_descriptors->GetDetails(modify_index).type() != FIELD ||
2493 new_field_type->NowIs(
2494 target_descriptors->GetFieldType(modify_index)));
2498 // Find the last compatible target map in the transition tree.
2499 for (int i = target_nof; i < old_nof; ++i) {
2500 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2501 if (j == TransitionArray::kNotFound) break;
2502 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2503 Handle<DescriptorArray> tmp_descriptors(
2504 tmp_map->instance_descriptors(), isolate);
2506 // Check if target map is compatible.
2507 PropertyDetails old_details = old_descriptors->GetDetails(i);
2508 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2509 if (tmp_details.attributes() != old_details.attributes() ||
2510 ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) &&
2511 (tmp_details.type() != old_details.type() ||
2512 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2513 return CopyGeneralizeAllRepresentations(
2514 old_map, modify_index, store_mode, "incompatible");
2516 target_map = tmp_map;
2518 target_nof = target_map->NumberOfOwnDescriptors();
2519 target_descriptors = handle(target_map->instance_descriptors(), isolate);
2521 // Allocate a new descriptor array large enough to hold the required
2522 // descriptors, with minimally the exact same size as the old descriptor
2524 int new_slack = Max(
2525 old_nof, old_descriptors->number_of_descriptors()) - old_nof;
2526 Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
2527 isolate, old_nof, new_slack);
2528 DCHECK(new_descriptors->length() > target_descriptors->length() ||
2529 new_descriptors->NumberOfSlackDescriptors() > 0 ||
2530 new_descriptors->number_of_descriptors() ==
2531 old_descriptors->number_of_descriptors());
2532 DCHECK(new_descriptors->number_of_descriptors() == old_nof);
2535 int current_offset = 0;
2536 for (int i = 0; i < root_nof; ++i) {
2537 PropertyDetails old_details = old_descriptors->GetDetails(i);
2538 if (old_details.type() == FIELD) current_offset++;
2539 Descriptor d(handle(old_descriptors->GetKey(i), isolate),
2540 handle(old_descriptors->GetValue(i), isolate),
2542 new_descriptors->Set(i, &d);
2545 // |root_nof| -> |target_nof|
2546 for (int i = root_nof; i < target_nof; ++i) {
2547 Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
2548 PropertyDetails old_details = old_descriptors->GetDetails(i);
2549 PropertyDetails target_details = target_descriptors->GetDetails(i);
2550 target_details = target_details.CopyWithRepresentation(
2551 old_details.representation().generalize(
2552 target_details.representation()));
2553 if (modify_index == i) {
2554 target_details = target_details.CopyWithRepresentation(
2555 new_representation.generalize(target_details.representation()));
2557 DCHECK_EQ(old_details.attributes(), target_details.attributes());
2558 if (old_details.type() == FIELD ||
2559 target_details.type() == FIELD ||
2560 (modify_index == i && store_mode == FORCE_FIELD) ||
2561 (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
2562 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2563 ? handle(old_descriptors->GetFieldType(i), isolate)
2564 : old_descriptors->GetValue(i)->OptimalType(
2565 isolate, target_details.representation());
2566 Handle<HeapType> target_field_type = (target_details.type() == FIELD)
2567 ? handle(target_descriptors->GetFieldType(i), isolate)
2568 : target_descriptors->GetValue(i)->OptimalType(
2569 isolate, target_details.representation());
2570 target_field_type = GeneralizeFieldType(
2571 target_field_type, old_field_type, isolate);
2572 if (modify_index == i) {
2573 target_field_type = GeneralizeFieldType(
2574 target_field_type, new_field_type, isolate);
2576 FieldDescriptor d(target_key,
2579 target_details.attributes(),
2580 target_details.representation());
2581 new_descriptors->Set(i, &d);
2583 DCHECK_NE(FIELD, target_details.type());
2584 Descriptor d(target_key,
2585 handle(target_descriptors->GetValue(i), isolate),
2587 new_descriptors->Set(i, &d);
2591 // |target_nof| -> |old_nof|
2592 for (int i = target_nof; i < old_nof; ++i) {
2593 PropertyDetails old_details = old_descriptors->GetDetails(i);
2594 Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
2595 if (modify_index == i) {
2596 old_details = old_details.CopyWithRepresentation(
2597 new_representation.generalize(old_details.representation()));
2599 if (old_details.type() == FIELD) {
2600 Handle<HeapType> old_field_type(
2601 old_descriptors->GetFieldType(i), isolate);
2602 if (modify_index == i) {
2603 old_field_type = GeneralizeFieldType(
2604 old_field_type, new_field_type, isolate);
2606 FieldDescriptor d(old_key,
2609 old_details.attributes(),
2610 old_details.representation());
2611 new_descriptors->Set(i, &d);
2613 DCHECK(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
2614 if (modify_index == i && store_mode == FORCE_FIELD) {
2615 FieldDescriptor d(old_key,
2617 GeneralizeFieldType(
2618 old_descriptors->GetValue(i)->OptimalType(
2619 isolate, old_details.representation()),
2620 new_field_type, isolate),
2621 old_details.attributes(),
2622 old_details.representation());
2623 new_descriptors->Set(i, &d);
2625 DCHECK_NE(FIELD, old_details.type());
2626 Descriptor d(old_key,
2627 handle(old_descriptors->GetValue(i), isolate),
2629 new_descriptors->Set(i, &d);
2634 new_descriptors->Sort();
2636 DCHECK(store_mode != FORCE_FIELD ||
2637 new_descriptors->GetDetails(modify_index).type() == FIELD);
2639 Handle<Map> split_map(root_map->FindLastMatchMap(
2640 root_nof, old_nof, *new_descriptors), isolate);
2641 int split_nof = split_map->NumberOfOwnDescriptors();
2642 DCHECK_NE(old_nof, split_nof);
2644 split_map->DeprecateTarget(
2645 old_descriptors->GetKey(split_nof), *new_descriptors);
2647 if (FLAG_trace_generalization) {
2648 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2649 PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
2650 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2651 ? handle(old_descriptors->GetFieldType(modify_index), isolate)
2652 : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
2654 Handle<HeapType> new_field_type = (new_details.type() == FIELD)
2655 ? handle(new_descriptors->GetFieldType(modify_index), isolate)
2656 : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
2658 old_map->PrintGeneralization(
2659 stdout, "", modify_index, split_nof, old_nof,
2660 old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
2661 old_details.representation(), new_details.representation(),
2662 *old_field_type, *new_field_type);
2665 // Add missing transitions.
2666 Handle<Map> new_map = split_map;
2667 for (int i = split_nof; i < old_nof; ++i) {
2668 new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
2670 new_map->set_owns_descriptors(true);
2675 // Generalize the representation of all FIELD descriptors.
2676 Handle<Map> Map::GeneralizeAllFieldRepresentations(
2678 Handle<DescriptorArray> descriptors(map->instance_descriptors());
2679 for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
2680 if (descriptors->GetDetails(i).type() == FIELD) {
2681 map = GeneralizeRepresentation(map, i, Representation::Tagged(),
2682 HeapType::Any(map->GetIsolate()),
2691 MaybeHandle<Map> Map::TryUpdate(Handle<Map> map) {
2692 Handle<Map> proto_map(map);
2693 while (proto_map->prototype()->IsJSObject()) {
2694 Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
2695 proto_map = Handle<Map>(holder->map());
2696 if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
2697 proto_map = Handle<Map>(holder->map());
2700 return TryUpdateInternal(map);
2705 Handle<Map> Map::Update(Handle<Map> map) {
2706 if (!map->is_deprecated()) return map;
2707 return GeneralizeRepresentation(map, 0, Representation::None(),
2708 HeapType::None(map->GetIsolate()),
2714 MaybeHandle<Map> Map::TryUpdateInternal(Handle<Map> old_map) {
2715 DisallowHeapAllocation no_allocation;
2716 DisallowDeoptimization no_deoptimization(old_map->GetIsolate());
2718 if (!old_map->is_deprecated()) return old_map;
2720 // Check the state of the root map.
2721 Map* root_map = old_map->FindRootMap();
2722 if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
2723 int root_nof = root_map->NumberOfOwnDescriptors();
2725 int old_nof = old_map->NumberOfOwnDescriptors();
2726 DescriptorArray* old_descriptors = old_map->instance_descriptors();
2728 Map* new_map = root_map;
2729 for (int i = root_nof; i < old_nof; ++i) {
2730 int j = new_map->SearchTransition(old_descriptors->GetKey(i));
2731 if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
2732 new_map = new_map->GetTransition(j);
2733 DescriptorArray* new_descriptors = new_map->instance_descriptors();
2735 PropertyDetails new_details = new_descriptors->GetDetails(i);
2736 PropertyDetails old_details = old_descriptors->GetDetails(i);
2737 if (old_details.attributes() != new_details.attributes() ||
2738 !old_details.representation().fits_into(new_details.representation())) {
2739 return MaybeHandle<Map>();
2741 PropertyType new_type = new_details.type();
2742 PropertyType old_type = old_details.type();
2743 Object* new_value = new_descriptors->GetValue(i);
2744 Object* old_value = old_descriptors->GetValue(i);
2747 if ((old_type == FIELD &&
2748 !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
2749 (old_type == CONSTANT &&
2750 !HeapType::cast(new_value)->NowContains(old_value)) ||
2751 (old_type == CALLBACKS &&
2752 !HeapType::Any()->Is(HeapType::cast(new_value)))) {
2753 return MaybeHandle<Map>();
2759 if (old_type != new_type || old_value != new_value) {
2760 return MaybeHandle<Map>();
2768 if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
2769 return handle(new_map);
2773 MaybeHandle<Object> JSObject::SetPropertyWithInterceptor(LookupIterator* it,
2774 Handle<Object> value) {
2775 // TODO(rossberg): Support symbols in the API.
2776 if (it->name()->IsSymbol()) return value;
2778 Handle<String> name_string = Handle<String>::cast(it->name());
2779 Handle<JSObject> holder = it->GetHolder<JSObject>();
2780 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
2781 if (interceptor->setter()->IsUndefined()) return MaybeHandle<Object>();
2784 ApiNamedPropertyAccess("interceptor-named-set", *holder, *name_string));
2785 PropertyCallbackArguments args(it->isolate(), interceptor->data(), *holder,
2787 v8::NamedPropertySetterCallback setter =
2788 v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
2789 v8::Handle<v8::Value> result = args.Call(
2790 setter, v8::Utils::ToLocal(name_string), v8::Utils::ToLocal(value));
2791 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
2792 if (!result.IsEmpty()) return value;
2794 return MaybeHandle<Object>();
2798 MaybeHandle<Object> Object::SetProperty(Handle<Object> object,
2799 Handle<Name> name, Handle<Object> value,
2800 StrictMode strict_mode,
2801 StoreFromKeyed store_mode) {
2802 LookupIterator it(object, name);
2803 return SetProperty(&it, value, strict_mode, store_mode);
2807 MaybeHandle<Object> Object::SetProperty(LookupIterator* it,
2808 Handle<Object> value,
2809 StrictMode strict_mode,
2810 StoreFromKeyed store_mode) {
2811 // Make sure that the top context does not change when doing callbacks or
2812 // interceptor calls.
2813 AssertNoContextChange ncc(it->isolate());
2816 for (; it->IsFound(); it->Next()) {
2817 switch (it->state()) {
2818 case LookupIterator::NOT_FOUND:
2821 case LookupIterator::ACCESS_CHECK:
2822 // TODO(verwaest): Remove the distinction. This is mostly bogus since we
2823 // don't know whether we'll want to fetch attributes or call a setter
2824 // until we find the property.
2825 if (it->HasAccess(v8::ACCESS_SET)) break;
2826 return JSObject::SetPropertyWithFailedAccessCheck(it, value,
2829 case LookupIterator::JSPROXY:
2830 if (it->HolderIsReceiverOrHiddenPrototype()) {
2831 return JSProxy::SetPropertyWithHandler(it->GetHolder<JSProxy>(),
2832 it->GetReceiver(), it->name(),
2833 value, strict_mode);
2835 // TODO(verwaest): Use the MaybeHandle to indicate result.
2836 bool has_result = false;
2837 MaybeHandle<Object> maybe_result =
2838 JSProxy::SetPropertyViaPrototypesWithHandler(
2839 it->GetHolder<JSProxy>(), it->GetReceiver(), it->name(),
2840 value, strict_mode, &has_result);
2841 if (has_result) return maybe_result;
2846 case LookupIterator::INTERCEPTOR:
2847 if (it->HolderIsReceiverOrHiddenPrototype()) {
2848 MaybeHandle<Object> maybe_result =
2849 JSObject::SetPropertyWithInterceptor(it, value);
2850 if (!maybe_result.is_null()) return maybe_result;
2851 if (it->isolate()->has_pending_exception()) return maybe_result;
2853 Maybe<PropertyAttributes> maybe_attributes =
2854 JSObject::GetPropertyAttributesWithInterceptor(
2855 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
2856 if (!maybe_attributes.has_value) return MaybeHandle<Object>();
2857 done = maybe_attributes.value != ABSENT;
2858 if (done && (maybe_attributes.value & READ_ONLY) != 0) {
2859 return WriteToReadOnlyProperty(it, value, strict_mode);
2864 case LookupIterator::ACCESSOR:
2865 if (it->property_details().IsReadOnly()) {
2866 return WriteToReadOnlyProperty(it, value, strict_mode);
2868 if (it->HolderIsReceiverOrHiddenPrototype() ||
2869 !it->GetAccessors()->IsDeclaredAccessorInfo()) {
2870 return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
2871 it->GetHolder<JSObject>(),
2872 it->GetAccessors(), strict_mode);
2877 case LookupIterator::DATA:
2878 if (it->property_details().IsReadOnly()) {
2879 return WriteToReadOnlyProperty(it, value, strict_mode);
2881 if (it->HolderIsReceiverOrHiddenPrototype()) {
2882 return SetDataProperty(it, value);
2887 case LookupIterator::TRANSITION:
2895 // If the receiver is the JSGlobalObject, the store was contextual. In case
2896 // the property did not exist yet on the global object itself, we have to
2897 // throw a reference error in strict mode.
2898 if (it->GetReceiver()->IsJSGlobalObject() && strict_mode == STRICT) {
2899 Handle<Object> args[1] = {it->name()};
2900 THROW_NEW_ERROR(it->isolate(),
2901 NewReferenceError("not_defined", HandleVector(args, 1)),
2905 return AddDataProperty(it, value, NONE, strict_mode, store_mode);
2909 MaybeHandle<Object> Object::WriteToReadOnlyProperty(LookupIterator* it,
2910 Handle<Object> value,
2911 StrictMode strict_mode) {
2912 if (strict_mode != STRICT) return value;
2914 Handle<Object> args[] = {it->name(), it->GetReceiver()};
2915 THROW_NEW_ERROR(it->isolate(),
2916 NewTypeError("strict_read_only_property",
2917 HandleVector(args, arraysize(args))),
2922 Handle<Object> Object::SetDataProperty(LookupIterator* it,
2923 Handle<Object> value) {
2924 // Proxies are handled on the WithHandler path. Other non-JSObjects cannot
2925 // have own properties.
2926 Handle<JSObject> receiver = Handle<JSObject>::cast(it->GetReceiver());
2928 // Store on the holder which may be hidden behind the receiver.
2929 DCHECK(it->HolderIsReceiverOrHiddenPrototype());
2931 // Old value for the observation change record.
2932 // Fetch before transforming the object since the encoding may become
2933 // incompatible with what's cached in |it|.
2935 receiver->map()->is_observed() &&
2936 !it->name().is_identical_to(it->factory()->hidden_string());
2937 MaybeHandle<Object> maybe_old;
2938 if (is_observed) maybe_old = it->GetDataValue();
2940 // Possibly migrate to the most up-to-date map that will be able to store
2941 // |value| under it->name().
2942 it->PrepareForDataProperty(value);
2944 // Write the property value.
2945 it->WriteDataValue(value);
2947 // Send the change record if there are observers.
2948 if (is_observed && !value->SameValue(*maybe_old.ToHandleChecked())) {
2949 JSObject::EnqueueChangeRecord(receiver, "update", it->name(),
2950 maybe_old.ToHandleChecked());
2957 MaybeHandle<Object> Object::AddDataProperty(LookupIterator* it,
2958 Handle<Object> value,
2959 PropertyAttributes attributes,
2960 StrictMode strict_mode,
2961 StoreFromKeyed store_mode) {
2962 DCHECK(!it->GetReceiver()->IsJSProxy());
2963 if (!it->GetReceiver()->IsJSObject()) {
2964 // TODO(verwaest): Throw a TypeError with a more specific message.
2965 return WriteToReadOnlyProperty(it, value, strict_mode);
2968 Handle<JSObject> receiver = it->GetStoreTarget();
2970 // If the receiver is a JSGlobalProxy, store on the prototype (JSGlobalObject)
2971 // instead. If the prototype is Null, the proxy is detached.
2972 if (receiver->IsJSGlobalProxy()) return value;
2974 // Possibly migrate to the most up-to-date map that will be able to store
2975 // |value| under it->name() with |attributes|.
2976 it->PrepareTransitionToDataProperty(value, attributes, store_mode);
2977 if (it->state() != LookupIterator::TRANSITION) {
2978 if (strict_mode == SLOPPY) return value;
2980 Handle<Object> args[1] = {it->name()};
2981 THROW_NEW_ERROR(it->isolate(),
2982 NewTypeError("object_not_extensible",
2983 HandleVector(args, arraysize(args))),
2986 it->ApplyTransitionToDataProperty();
2988 // TODO(verwaest): Encapsulate dictionary handling better.
2989 if (receiver->map()->is_dictionary_map()) {
2990 // TODO(verwaest): Probably should ensure this is done beforehand.
2991 it->InternalizeName();
2992 JSObject::AddSlowProperty(receiver, it->name(), value, attributes);
2994 // Write the property value.
2995 it->WriteDataValue(value);
2998 // Send the change record if there are observers.
2999 if (receiver->map()->is_observed() &&
3000 !it->name().is_identical_to(it->factory()->hidden_string())) {
3001 JSObject::EnqueueChangeRecord(receiver, "add", it->name(),
3002 it->factory()->the_hole_value());
// Walks the prototype chain of |object| looking for element |index| defined
// either on a JSProxy (delegated to the proxy's handler) or as a CALLBACKS
// entry in a dictionary-elements prototype, and performs the store there.
// Returns the hole value if no intercepting setter was found on the chain.
3009 MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
3010     Handle<JSObject> object,
3012     Handle<Object> value,
3014     StrictMode strict_mode) {
3015   Isolate *isolate = object->GetIsolate();
3016   for (PrototypeIterator iter(isolate, object); !iter.IsAtEnd();
// A proxy on the chain takes over the store via its handler trap.
3018     if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
3019       return JSProxy::SetPropertyViaPrototypesWithHandler(
3020           Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), object,
3021           isolate->factory()->Uint32ToString(index),  // name
3022           value, strict_mode, found);
3024     Handle<JSObject> js_proto =
3025         Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
// Only dictionary-mode elements can hold accessor (CALLBACKS) entries.
3026     if (!js_proto->HasDictionaryElements()) {
3029     Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
3030     int entry = dictionary->FindEntry(index);
3031     if (entry != SeededNumberDictionary::kNotFound) {
3032       PropertyDetails details = dictionary->DetailsAt(entry);
3033       if (details.type() == CALLBACKS) {
3035         Handle<Object> structure(dictionary->ValueAt(entry), isolate);
3036         return SetElementWithCallback(object, structure, index, value, js_proto,
3042   return isolate->factory()->the_hole_value();
// Ensures the descriptor array owned by |map| has at least |slack| unused
// slots, copying it into a larger array if necessary and installing the copy
// in |map| and in every map on the back-pointer chain that shares the array.
3046 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3047   // Only supports adding slack to owned descriptors.
3048   DCHECK(map->owns_descriptors());
3050   Handle<DescriptorArray> descriptors(map->instance_descriptors());
3051   int old_size = map->NumberOfOwnDescriptors();
// Fast path: the current array already has enough slack.
3052   if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3054   Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
3055       descriptors, old_size, slack);
// With no own descriptors no other map can share the array; install directly.
3057   if (old_size == 0) {
3058     map->set_instance_descriptors(*new_descriptors);
3062   // If the source descriptors had an enum cache we copy it. This ensures
3063   // that the maps to which we push the new descriptor array back can rely
3064   // on a cache always being available once it is set. If the map has more
3065   // enumerated descriptors than available in the original cache, the cache
3066   // will be lazily replaced by the extended cache when needed.
3067   if (descriptors->HasEnumCache()) {
3068     new_descriptors->CopyEnumCacheFrom(*descriptors);
3071   // Replace descriptors by new_descriptors in all maps that share it.
// Let the incremental marker know the old array's slots are being rewritten.
3072   map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);
3075   for (Object* current = map->GetBackPointer();
3076        !current->IsUndefined();
3077        current = walk_map->GetBackPointer()) {
3078     walk_map = Map::cast(current);
// Stop as soon as an ancestor no longer shares the old descriptor array.
3079     if (walk_map->instance_descriptors() != *descriptors) break;
3080     walk_map->set_instance_descriptors(*new_descriptors);
3083   map->set_instance_descriptors(*new_descriptors);
// Appends the AccessorInfo callbacks from |callbacks| into |array| (whose
// concrete handling is provided by the appender policy T), skipping entries
// whose name is already present.  Returns the updated number of valid
// descriptors.  All keys are internalized up front because that may GC.
3088 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3089                                  Handle<typename T::Array> array,
3090                                  int valid_descriptors) {
3091   int nof_callbacks = callbacks->length();
3093   Isolate* isolate = array->GetIsolate();
3094   // Ensure the keys are unique names before writing them into the
3095   // instance descriptor. Since it may cause a GC, it has to be done before we
3096   // temporarily put the heap in an invalid state while appending descriptors.
3097   for (int i = 0; i < nof_callbacks; ++i) {
3098     Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3099     if (entry->name()->IsUniqueName()) continue;
3100     Handle<String> key =
3101         isolate->factory()->InternalizeString(
3102             Handle<String>(String::cast(entry->name())));
3103     entry->set_name(*key);
3106   // Fill in new callback descriptors.  Process the callbacks from
3107   // back to front so that the last callback with a given name takes
3108   // precedence over previously added callbacks with that name.
3109   for (int i = nof_callbacks - 1; i >= 0; i--) {
3110     Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3111     Handle<Name> key(Name::cast(entry->name()));
3112     // Check if a descriptor with this name already exists before writing.
3113     if (!T::Contains(key, entry, valid_descriptors, array)) {
3114       T::Insert(key, entry, valid_descriptors, array);
3115       valid_descriptors++;
3119   return valid_descriptors;
// Appender policy for AppendUniqueCallbacks that targets a DescriptorArray:
// membership is a descriptor search, insertion appends a CallbacksDescriptor.
3122 struct DescriptorArrayAppender {
3123   typedef DescriptorArray Array;
3124   static bool Contains(Handle<Name> key,
3125                        Handle<AccessorInfo> entry,
3126                        int valid_descriptors,
3127                        Handle<DescriptorArray> array) {
3128     DisallowHeapAllocation no_gc;
3129     return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
3131   static void Insert(Handle<Name> key,
3132                      Handle<AccessorInfo> entry,
3133                      int valid_descriptors,
3134                      Handle<DescriptorArray> array) {
3135     DisallowHeapAllocation no_gc;
3136     CallbacksDescriptor desc(key, entry, entry->property_attributes());
3137     array->Append(&desc);
// Appender policy for AppendUniqueCallbacks that targets a plain FixedArray:
// membership is a linear name scan, insertion writes at the next free slot.
3142 struct FixedArrayAppender {
3143   typedef FixedArray Array;
3144   static bool Contains(Handle<Name> key,
3145                        Handle<AccessorInfo> entry,
3146                        int valid_descriptors,
3147                        Handle<FixedArray> array) {
3148     for (int i = 0; i < valid_descriptors; i++) {
3149       if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
3153   static void Insert(Handle<Name> key,
3154                      Handle<AccessorInfo> entry,
3155                      int valid_descriptors,
3156                      Handle<FixedArray> array) {
3157     DisallowHeapAllocation no_gc;
3158     array->set(valid_descriptors, *entry);
// Appends the callbacks in |descriptors| (a NeanderArray) to |map|'s own
// instance descriptors; the array must already have sufficient slack.
3163 void Map::AppendCallbackDescriptors(Handle<Map> map,
3164                                     Handle<Object> descriptors) {
3165   int nof = map->NumberOfOwnDescriptors();
3166   Handle<DescriptorArray> array(map->instance_descriptors());
3167   NeanderArray callbacks(descriptors);
3168   DCHECK(array->NumberOfSlackDescriptors() >= callbacks.length());
3169   nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3170   map->SetNumberOfOwnDescriptors(nof);
// Appends the callbacks in |descriptors| to |array| (a FixedArray), skipping
// duplicates by name; returns the new count of valid entries.
3174 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3175                                Handle<FixedArray> array,
3176                                int valid_descriptors) {
3177   NeanderArray callbacks(descriptors);
3178   DCHECK(array->length() >= callbacks.length() + valid_descriptors);
3179   return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
// Returns true if |map| is present (by handle identity) in |maps|.
3185 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3186   DCHECK(!map.is_null());
3187   for (int i = 0; i < maps->length(); ++i) {
3188     if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
// Wraps a raw pointer in a Handle, mapping NULL to the null handle.
3195 static Handle<T> MaybeNull(T* p) {
3196   if (p == NULL) return Handle<T>::null();
3197   return Handle<T>(p);
// Starting from this map's elements kind, walks the chain of more general
// fast-elements transitions and returns the most general transitioned map
// that is also present in |candidates|.  Packed kinds are only accepted
// while no holey kind has been selected.  Returns the null handle if no
// candidate matches.
3201 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3202   ElementsKind kind = elements_kind();
3203   Handle<Map> transitioned_map = Handle<Map>::null();
3204   Handle<Map> current_map(this);
3205   bool packed = IsFastPackedElementsKind(kind);
3206   if (IsTransitionableFastElementsKind(kind)) {
3207     while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3208       kind = GetNextMoreGeneralFastElementsKind(kind, false);
3209       Handle<Map> maybe_transitioned_map =
3210           MaybeNull(current_map->LookupElementsTransitionMap(kind));
// The transition chain ends here; nothing more general is cached.
3211       if (maybe_transitioned_map.is_null()) break;
3212       if (ContainsMap(candidates, maybe_transitioned_map) &&
3213           (packed || !IsFastPackedElementsKind(kind))) {
3214         transitioned_map = maybe_transitioned_map;
// Once a holey kind is chosen, packed candidates are no longer acceptable.
3215         if (!IsFastPackedElementsKind(kind)) packed = false;
3217       current_map = maybe_transitioned_map;
3220   return transitioned_map;
// Follows the elements-kind transition chain from |map| as far as possible
// toward |to_kind| and returns the closest map reached.  Handles the legacy
// external-array API shortcut and the final jump to DICTIONARY_ELEMENTS.
3224 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3225   Map* current_map = map;
3227       IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3229           : TERMINAL_FAST_ELEMENTS_KIND;
3231   // Support for legacy API: SetIndexedPropertiesTo{External,Pixel}Data
3232   // allows to change elements from arbitrary kind to any ExternalArray
3233   // elements kind. Satisfy its requirements, checking whether we already
3234   // have the cached transition.
3235   if (IsExternalArrayElementsKind(to_kind) &&
3236       !IsFixedTypedArrayElementsKind(map->elements_kind())) {
3237     if (map->HasElementsTransition()) {
3238       Map* next_map = map->elements_transition_map();
3239       if (next_map->elements_kind() == to_kind) return next_map;
// Walk kind-by-kind toward the target, stopping where the chain ends.
3244   ElementsKind kind = map->elements_kind();
3245   while (kind != target_kind) {
3246     kind = GetNextTransitionElementsKind(kind);
3247     if (!current_map->HasElementsTransition()) return current_map;
3248     current_map = current_map->elements_transition_map();
// A remaining gap can only be the final transition to dictionary elements.
3251   if (to_kind != kind && current_map->HasElementsTransition()) {
3252     DCHECK(to_kind == DICTIONARY_ELEMENTS);
3253     Map* next_map = current_map->elements_transition_map();
3254     if (next_map->elements_kind() == to_kind) return next_map;
3257   DCHECK(current_map->elements_kind() == target_kind);
// Returns the cached transition map for exactly |to_kind|, if one exists on
// this map's transition chain.
3262 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3263   Map* to_map = FindClosestElementsTransition(this, to_kind);
3264   if (to_map->elements_kind() == to_kind) return to_map;
// Returns whether this map is the map of the initial Array.prototype or
// Object.prototype of its native context.
3269 bool Map::IsMapInArrayPrototypeChain() {
3270   Isolate* isolate = GetIsolate();
3271   if (isolate->initial_array_prototype()->map() == this) {
3275   if (isolate->initial_object_prototype()->map() == this) {
// Creates the chain of elements-kind transition maps from |map| up to
// |to_kind|, inserting each intermediate transition, and returns the final
// map with elements kind |to_kind|.
3283 static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
3284                                                  ElementsKind to_kind) {
3285   DCHECK(IsTransitionElementsKind(map->elements_kind()));
3287   Handle<Map> current_map = map;
3289   ElementsKind kind = map->elements_kind();
// Prototype maps do not get intermediate transitions inserted.
3290   if (!map->is_prototype_map()) {
3291     while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3292       kind = GetNextTransitionElementsKind(kind);
3294           Map::CopyAsElementsKind(current_map, kind, INSERT_TRANSITION);
3298   // In case we are exiting the fast elements kind system, just add the map in
3300   if (kind != to_kind) {
3301     current_map = Map::CopyAsElementsKind(
3302         current_map, to_kind, INSERT_TRANSITION);
3305   DCHECK(current_map->elements_kind() == to_kind);
// Returns a map equal to |map| but with elements kind |to_kind|.  Fast path:
// if |map| is one of the native context's cached JSArray maps, look up the
// target kind directly in that cache; otherwise fall back to the slow path.
3310 Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
3311                                       ElementsKind to_kind) {
3312   ElementsKind from_kind = map->elements_kind();
3313   if (from_kind == to_kind) return map;
3315   Isolate* isolate = map->GetIsolate();
3316   Context* native_context = isolate->context()->native_context();
3317   Object* maybe_array_maps = native_context->js_array_maps();
3318   if (maybe_array_maps->IsFixedArray()) {
3319     DisallowHeapAllocation no_gc;
3320     FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
// Only use the cache when |map| really is the canonical map for from_kind.
3321     if (array_maps->get(from_kind) == *map) {
3322       Object* maybe_transitioned_map = array_maps->get(to_kind);
3323       if (maybe_transitioned_map->IsMap()) {
3324         return handle(Map::cast(maybe_transitioned_map));
3329   return TransitionElementsToSlow(map, to_kind);
// Slow path of TransitionElementsTo: decides whether the new map may be
// remembered as a transition (only fast-to-more-general element kinds on
// non-dictionary maps) and either records it or produces a detached copy.
3333 Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
3334                                           ElementsKind to_kind) {
3335   ElementsKind from_kind = map->elements_kind();
3337   if (from_kind == to_kind) {
3341   bool allow_store_transition =
3342       // Only remember the map transition if there is not an already existing
3343       // non-matching element transition.
3344       !map->IsUndefined() && !map->is_dictionary_map() &&
3345       IsTransitionElementsKind(from_kind);
3347   // Only store fast element maps in ascending generality.
3348   if (IsFastElementsKind(to_kind)) {
3349     allow_store_transition &=
3350         IsTransitionableFastElementsKind(from_kind) &&
3351         IsMoreGeneralElementsKindTransition(from_kind, to_kind);
// Not eligible for the transition tree: hand back an untracked copy.
3354   if (!allow_store_transition) {
3355     return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
3358   return Map::AsElementsKind(map, to_kind);
// Returns the transition map for |kind| reachable from |map|, creating any
// missing intermediate transitions on demand.
3363 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3364   Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));
3366   if (closest_map->elements_kind() == kind) {
3370   return AddMissingElementsTransitions(closest_map, kind);
// Convenience wrapper: transitions |object|'s current map to |to_kind|.
3374 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3375                                                ElementsKind to_kind) {
3376   Handle<Map> map(object->map());
3377   return Map::TransitionElementsTo(map, to_kind);
// Implements [[HasProperty]] for proxies by calling the "has" trap (or the
// derived default trap).  Symbols are reported as absent.  Returns an empty
// Maybe if the trap threw.
3381 Maybe<bool> JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy,
3382                                             Handle<Name> name) {
3383   Isolate* isolate = proxy->GetIsolate();
3385   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3386   if (name->IsSymbol()) return maybe(false);
3388   Handle<Object> args[] = { name };
3389   Handle<Object> result;
3390   ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3391       isolate, result, CallTrap(proxy, "has", isolate->derived_has_trap(),
3392                                 arraysize(args), args),
3395   return maybe(result->BooleanValue());
// Implements [[Set]] for proxies by calling the "set" trap (or the derived
// default trap) with (receiver, name, value).  Symbols are silently accepted
// without invoking the trap.
3399 MaybeHandle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
3400                                                     Handle<Object> receiver,
3402                                                     Handle<Object> value,
3403                                                     StrictMode strict_mode) {
3404   Isolate* isolate = proxy->GetIsolate();
3406   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3407   if (name->IsSymbol()) return value;
3409   Handle<Object> args[] = { receiver, name, value };
3410   RETURN_ON_EXCEPTION(
3414                isolate->derived_set_trap(),
// Handles a store that found |proxy| on the receiver's prototype chain.
// Queries the proxy's "getPropertyDescriptor" trap, normalizes the result via
// ToCompletePropertyDescriptor, and emulates the ES5.1 [[Set]] semantics for
// data and accessor descriptors.  |*done| is set to true when this function
// fully handled the store (success or throw) and to false when the caller
// should continue with an ordinary store on the receiver.
3423 MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3424     Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name,
3425     Handle<Object> value, StrictMode strict_mode, bool* done) {
3426   Isolate* isolate = proxy->GetIsolate();
3427   Handle<Object> handler(proxy->handler(), isolate);  // Trap might morph proxy.
3429   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3430   if (name->IsSymbol()) {
3432     return isolate->factory()->the_hole_value();
3435   *done = true;  // except where redefined...
3436   Handle<Object> args[] = { name };
3437   Handle<Object> result;
3438   ASSIGN_RETURN_ON_EXCEPTION(
3441                "getPropertyDescriptor",
// An undefined descriptor means the property does not exist on the proxy;
// the caller should perform the store on the receiver itself.
3447   if (result->IsUndefined()) {
3449     return isolate->factory()->the_hole_value();
3452   // Emulate [[GetProperty]] semantics for proxies.
3453   Handle<Object> argv[] = { result };
3454   Handle<Object> desc;
3455   ASSIGN_RETURN_ON_EXCEPTION(
3457       Execution::Call(isolate,
3458                       isolate->to_complete_property_descriptor(),
3464   // [[GetProperty]] requires to check that all properties are configurable.
3465   Handle<String> configurable_name =
3466       isolate->factory()->InternalizeOneByteString(
3467           STATIC_CHAR_VECTOR("configurable_"));
3468   Handle<Object> configurable =
3469       Object::GetProperty(desc, configurable_name).ToHandleChecked();
3470   DCHECK(configurable->IsBoolean());
3471   if (configurable->IsFalse()) {
3472     Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3473         STATIC_CHAR_VECTOR("getPropertyDescriptor"));
3474     Handle<Object> args[] = { handler, trap, name };
3475     THROW_NEW_ERROR(isolate, NewTypeError("proxy_prop_not_configurable",
3476                                           HandleVector(args, arraysize(args))),
3479   DCHECK(configurable->IsTrue());
3481   // Check for DataDescriptor.
3482   Handle<String> hasWritable_name =
3483       isolate->factory()->InternalizeOneByteString(
3484           STATIC_CHAR_VECTOR("hasWritable_"));
3485   Handle<Object> hasWritable =
3486       Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
3487   DCHECK(hasWritable->IsBoolean());
3488   if (hasWritable->IsTrue()) {
3489     Handle<String> writable_name = isolate->factory()->InternalizeOneByteString(
3490         STATIC_CHAR_VECTOR("writable_"));
3491     Handle<Object> writable =
3492         Object::GetProperty(desc, writable_name).ToHandleChecked();
3493     DCHECK(writable->IsBoolean());
// A writable data property lets the ordinary store proceed on the receiver.
3494     *done = writable->IsFalse();
3495     if (!*done) return isolate->factory()->the_hole_value();
3496     if (strict_mode == SLOPPY) return value;
3497     Handle<Object> args[] = { name, receiver };
3498     THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
3499                                           HandleVector(args, arraysize(args))),
3503   // We have an AccessorDescriptor.
3504   Handle<String> set_name =
3505       isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("set_"));
3506   Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
3507   if (!setter->IsUndefined()) {
3508     // TODO(rossberg): nicer would be to cast to some JSCallable here...
3509     return SetPropertyWithDefinedSetter(
3510         receiver, Handle<JSReceiver>::cast(setter), value);
// Accessor without a setter: silently succeed in sloppy mode, throw in strict.
3513   if (strict_mode == SLOPPY) return value;
3514   Handle<Object> args2[] = { name, proxy };
3515   THROW_NEW_ERROR(isolate, NewTypeError("no_setter_in_callback",
3516                                         HandleVector(args2, arraysize(args2))),
// Implements [[Delete]] for proxies via the "delete" trap.  Symbols are
// reported as not deleted.  In strict deletion mode a falsy trap result is
// converted into a "handler_failed" TypeError.
3521 MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
3522     Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3523   Isolate* isolate = proxy->GetIsolate();
3525   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3526   if (name->IsSymbol()) return isolate->factory()->false_value();
3528   Handle<Object> args[] = { name };
3529   Handle<Object> result;
3530   ASSIGN_RETURN_ON_EXCEPTION(
3539   bool result_bool = result->BooleanValue();
3540   if (mode == STRICT_DELETION && !result_bool) {
3541     Handle<Object> handler(proxy->handler(), isolate);
3542     Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3543         STATIC_CHAR_VECTOR("delete"));
3544     Handle<Object> args[] = { handler, trap_name };
3545     THROW_NEW_ERROR(isolate, NewTypeError("handler_failed",
3546                                           HandleVector(args, arraysize(args))),
3549   return isolate->factory()->ToBoolean(result_bool);
// Element variant of DeletePropertyWithHandler: converts |index| to a string
// name and delegates.
3553 MaybeHandle<Object> JSProxy::DeleteElementWithHandler(
3554     Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3555   Isolate* isolate = proxy->GetIsolate();
3556   Handle<String> name = isolate->factory()->Uint32ToString(index);
3557   return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
// Computes PropertyAttributes for a proxy property by calling the
// "getPropertyDescriptor" trap, normalizing the descriptor, and translating
// its enumerable/configurable/writable fields into attribute bits.  Symbols
// are reported as ABSENT; a non-configurable result throws a TypeError.
// Returns an empty Maybe when an exception is pending.
3561 Maybe<PropertyAttributes> JSProxy::GetPropertyAttributesWithHandler(
3562     Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name) {
3563   Isolate* isolate = proxy->GetIsolate();
3564   HandleScope scope(isolate);
3566   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3567   if (name->IsSymbol()) return maybe(ABSENT);
3569   Handle<Object> args[] = { name };
3570   Handle<Object> result;
3571   ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3573       proxy->CallTrap(proxy, "getPropertyDescriptor", Handle<Object>(),
3574                       arraysize(args), args),
3575       Maybe<PropertyAttributes>());
3577   if (result->IsUndefined()) return maybe(ABSENT);
// Normalize the raw trap result into a complete property descriptor.
3579   Handle<Object> argv[] = { result };
3580   Handle<Object> desc;
3581   ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3583       Execution::Call(isolate, isolate->to_complete_property_descriptor(),
3584                       result, arraysize(argv), argv),
3585       Maybe<PropertyAttributes>());
3587   // Convert result to PropertyAttributes.
3588   Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
3589       STATIC_CHAR_VECTOR("enumerable_"));
3590   Handle<Object> enumerable;
3591   ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, enumerable,
3592                                    Object::GetProperty(desc, enum_n),
3593                                    Maybe<PropertyAttributes>());
3594   Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
3595       STATIC_CHAR_VECTOR("configurable_"));
3596   Handle<Object> configurable;
3597   ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, configurable,
3598                                    Object::GetProperty(desc, conf_n),
3599                                    Maybe<PropertyAttributes>());
3600   Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
3601       STATIC_CHAR_VECTOR("writable_"));
3602   Handle<Object> writable;
3603   ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, writable,
3604                                    Object::GetProperty(desc, writ_n),
3605                                    Maybe<PropertyAttributes>());
// Accessor descriptors count as "writable" when a setter is present.
3606   if (!writable->BooleanValue()) {
3607     Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
3608         STATIC_CHAR_VECTOR("set_"));
3609     Handle<Object> setter;
3610     ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, setter,
3611                                      Object::GetProperty(desc, set_n),
3612                                      Maybe<PropertyAttributes>());
3613     writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
3616   if (configurable->IsFalse()) {
3617     Handle<Object> handler(proxy->handler(), isolate);
3618     Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3619         STATIC_CHAR_VECTOR("getPropertyDescriptor"));
3620     Handle<Object> args[] = { handler, trap, name };
3621     Handle<Object> error;
3622     MaybeHandle<Object> maybe_error = isolate->factory()->NewTypeError(
3623         "proxy_prop_not_configurable", HandleVector(args, arraysize(args)));
3624     if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
3628   int attributes = NONE;
3629   if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
3630   if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
3631   if (!writable->BooleanValue()) attributes |= READ_ONLY;
3632   return maybe(static_cast<PropertyAttributes>(attributes));
// Element variant: converts |index| to a string name and delegates to
// GetPropertyAttributesWithHandler.
3636 Maybe<PropertyAttributes> JSProxy::GetElementAttributeWithHandler(
3637     Handle<JSProxy> proxy, Handle<JSReceiver> receiver, uint32_t index) {
3638   Isolate* isolate = proxy->GetIsolate();
3639   Handle<String> name = isolate->factory()->Uint32ToString(index);
3640   return GetPropertyAttributesWithHandler(proxy, receiver, name);
// "Fixes" the proxy: converts it in place into an ordinary JSObject (or
// JSFunction for function proxies), preserving its identity hash.
3644 void JSProxy::Fix(Handle<JSProxy> proxy) {
3645   Isolate* isolate = proxy->GetIsolate();
3647   // Save identity hash.
3648   Handle<Object> hash(proxy->GetIdentityHash(), isolate);
3650   if (proxy->IsJSFunctionProxy()) {
3651     isolate->factory()->BecomeJSFunction(proxy);
3652     // Code will be set on the JavaScript side.
3654     isolate->factory()->BecomeJSObject(proxy);
3656   DCHECK(proxy->IsJSObject());
3658   // Inherit identity, if it was present.
3659   if (hash->IsSmi()) {
3660     JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
3661                               Handle<Smi>::cast(hash));
// Looks up trap |name| on the proxy's handler and calls it with |argv|.
// If the trap is absent, uses |derived| as a fallback, or throws a
// "handler_trap_missing" TypeError when no fallback was supplied.
3666 MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
3668                                       Handle<Object> derived,
3670                                       Handle<Object> argv[]) {
3671   Isolate* isolate = proxy->GetIsolate();
3672   Handle<Object> handler(proxy->handler(), isolate);
3674   Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
3675   Handle<Object> trap;
3676   ASSIGN_RETURN_ON_EXCEPTION(
3678       Object::GetPropertyOrElement(handler, trap_name),
3681   if (trap->IsUndefined()) {
3682     if (derived.is_null()) {
3683       Handle<Object> args[] = { handler, trap_name };
3684       THROW_NEW_ERROR(isolate,
3685                       NewTypeError("handler_trap_missing",
3686                                    HandleVector(args, arraysize(args))),
// Fall back to the engine-provided derived trap implementation.
3689     trap = Handle<Object>(derived);
3692   return Execution::Call(isolate, trap, handler, argc, argv);
// Migrates |object| to |map|, first reconciling their elements kinds: either
// the object's elements are transitioned/normalized toward the map's kind,
// or the map is re-derived for the object's (more general) kind.
3696 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
3697   DCHECK(object->map()->inobject_properties() == map->inobject_properties());
3698   ElementsKind obj_kind = object->map()->elements_kind();
3699   ElementsKind map_kind = map->elements_kind();
3700   if (map_kind != obj_kind) {
3701     ElementsKind to_kind = map_kind;
3702     if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
3703         IsDictionaryElementsKind(obj_kind)) {
3706     if (IsDictionaryElementsKind(to_kind)) {
3707       NormalizeElements(object);
3709       TransitionElementsKind(object, to_kind);
3711     map = Map::AsElementsKind(map, to_kind);
3713   JSObject::MigrateToMap(object, map);
// Migrates |object| to the most up-to-date version of its map and marks the
// new map as a migration target; optionally traces the migration.
3717 void JSObject::MigrateInstance(Handle<JSObject> object) {
3718   Handle<Map> original_map(object->map());
3719   Handle<Map> map = Map::Update(original_map);
3720   map->set_migration_target(true);
3721   MigrateToMap(object, map);
3722   if (FLAG_trace_migration) {
3723     object->PrintInstanceMigration(stdout, *original_map, *map);
// Best-effort variant of MigrateInstance: migrates only if an updated map
// can be found without side effects.  Deoptimization is disallowed because
// callers rely on the map staying stable during the attempt.
3729 bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
3730   Isolate* isolate = object->GetIsolate();
3731   DisallowDeoptimization no_deoptimization(isolate);
3732   Handle<Map> original_map(object->map(), isolate);
3733   Handle<Map> new_map;
3734   if (!Map::TryUpdate(original_map).ToHandle(&new_map)) {
3737   JSObject::MigrateToMap(object, new_map);
3738   if (FLAG_trace_migration) {
3739     object->PrintInstanceMigration(stdout, *original_map, object->map());
// Migrates |object| to |map| and, if the newly added descriptor is a FIELD,
// writes |value| into that field.
3745 void JSObject::MigrateToNewProperty(Handle<JSObject> object,
3747                                     Handle<Object> value) {
3748   JSObject::MigrateToMap(object, map);
3749   if (map->GetLastDescriptorDetails().type() != FIELD) return;
3750   object->WriteToField(map->LastAdded(), *value);
// Writes |value| into the field belonging to |descriptor|.  Double fields
// are stored by mutating the existing MutableHeapNumber box in place so the
// field slot itself never changes.
3754 void JSObject::WriteToField(int descriptor, Object* value) {
3755   DisallowHeapAllocation no_gc;
3757   DescriptorArray* desc = map()->instance_descriptors();
3758   PropertyDetails details = desc->GetDetails(descriptor);
3760   DCHECK(details.type() == FIELD);
3762   FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
3763   if (details.representation().IsDouble()) {
3764     // Nothing more to be done.
3765     if (value->IsUninitialized()) return;
3766     HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
3767     DCHECK(box->IsMutableHeapNumber());
3768     box->set_value(value->Number());
3770     FastPropertyAtPut(index, value);
// Unconditionally adds a fresh data property to |object|.  Debug-only checks
// assert the property does not already exist and that the object is
// extensible (or the name is the hidden string); failures in AddDataProperty
// are fatal via Check().
3775 void JSObject::AddProperty(Handle<JSObject> object, Handle<Name> name,
3776                            Handle<Object> value,
3777                            PropertyAttributes attributes) {
3778   LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
3779   CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
3782   DCHECK(!object->IsJSProxy());
3783   DCHECK(!name->AsArrayIndex(&index));
3784   Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
3785   DCHECK(maybe.has_value);
3786   DCHECK(!it.IsFound());
3787   DCHECK(object->map()->is_extensible() ||
3788          name.is_identical_to(it.isolate()->factory()->hidden_string()));
3790   AddDataProperty(&it, value, attributes, STRICT,
3791                   CERTAINLY_NOT_STORE_FROM_KEYED).Check();
3795 // Reconfigures a property to a data property with attributes, even if it is not
// Walks the own-property lookup states: handles access checks, existing
// accessors (with special ExecutableAccessorInfo handling controlled by
// |handling|), and existing data properties, reconfiguring attributes when
// they differ and emitting Object.observe change records as needed.  Falls
// through to AddDataProperty when the property does not exist yet.
3797 MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes(
3798     Handle<JSObject> object,
3800     Handle<Object> value,
3801     PropertyAttributes attributes,
3802     ExecutableAccessorInfoHandling handling) {
3803   DCHECK(!value->IsTheHole());
3804   LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
// The hidden string is internal and never triggers observation records.
3805   bool is_observed = object->map()->is_observed() &&
3806                      *name != it.isolate()->heap()->hidden_string();
3807   for (; it.IsFound(); it.Next()) {
3808     switch (it.state()) {
3809       case LookupIterator::INTERCEPTOR:
3810       case LookupIterator::JSPROXY:
3811       case LookupIterator::NOT_FOUND:
3812       case LookupIterator::TRANSITION:
3815       case LookupIterator::ACCESS_CHECK:
3816         if (!it.isolate()->MayNamedAccess(object, name, v8::ACCESS_SET)) {
3817           return SetPropertyWithFailedAccessCheck(&it, value, SLOPPY);
3821       case LookupIterator::ACCESSOR: {
3822         PropertyDetails details = it.property_details();
3823         Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
3824         // Ensure the context isn't changed after calling into accessors.
3825         AssertNoContextChange ncc(it.isolate());
3827         Handle<Object> accessors = it.GetAccessors();
// Observers need the old value, which requires calling the getter.
3829         if (is_observed && accessors->IsAccessorInfo()) {
3830           ASSIGN_RETURN_ON_EXCEPTION(
3831               it.isolate(), old_value,
3832               GetPropertyWithAccessor(it.GetReceiver(), it.name(),
3833                                       it.GetHolder<JSObject>(), accessors),
3837         // Special handling for ExecutableAccessorInfo, which behaves like a
3839         if (handling == DONT_FORCE_FIELD &&
3840             accessors->IsExecutableAccessorInfo()) {
3841           Handle<Object> result;
3842           ASSIGN_RETURN_ON_EXCEPTION(
3843               it.isolate(), result,
3844               JSObject::SetPropertyWithAccessor(it.GetReceiver(), it.name(),
3845                                                 value, it.GetHolder<JSObject>(),
3848           DCHECK(result->SameValue(*value));
3850           if (details.attributes() == attributes) {
3851             // Regular property update if the attributes match.
3852             if (is_observed && !old_value->SameValue(*value)) {
3853               // If we are setting the prototype of a function and are
3854               // observed, don't send change records because the prototype
3855               // handles that itself.
3856               if (!object->IsJSFunction() ||
3857                   !Name::Equals(it.isolate()->factory()->prototype_string(),
3859                   !Handle<JSFunction>::cast(object)->should_have_prototype()) {
3860                 EnqueueChangeRecord(object, "update", name, old_value);
3866           // Reconfigure the accessor if attributes mismatch.
3867           Handle<ExecutableAccessorInfo> new_data = Accessors::CloneAccessor(
3868               it.isolate(), Handle<ExecutableAccessorInfo>::cast(accessors));
3869           new_data->set_property_attributes(attributes);
3870           // By clearing the setter we don't have to introduce a lookup to
3871           // the setter, simply make it unavailable to reflect the
3873           if (attributes & READ_ONLY) new_data->clear_setter();
3874           SetPropertyCallback(object, name, new_data, attributes);
3876             if (old_value->SameValue(*value)) {
3877               old_value = it.isolate()->factory()->the_hole_value();
3879             EnqueueChangeRecord(object, "reconfigure", name, old_value);
// Plain accessor: replace it with a data property carrying |attributes|.
3884         it.ReconfigureDataProperty(value, attributes);
3885         it.PrepareForDataProperty(value);
3886         it.WriteDataValue(value);
3889           if (old_value->SameValue(*value)) {
3890             old_value = it.isolate()->factory()->the_hole_value();
3892           EnqueueChangeRecord(object, "reconfigure", name, old_value);
3898       case LookupIterator::DATA: {
3899         PropertyDetails details = it.property_details();
3900         Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
3901         // Regular property update if the attributes match.
3902         if (details.attributes() == attributes) {
3903           return SetDataProperty(&it, value);
3905         // Reconfigure the data property if the attributes mismatch.
3906         if (is_observed) old_value = it.GetDataValue();
3908         it.ReconfigureDataProperty(value, attributes);
3909         it.PrepareForDataProperty(value);
3910         it.WriteDataValue(value);
3913           if (old_value->SameValue(*value)) {
3914             old_value = it.isolate()->factory()->the_hole_value();
3916           EnqueueChangeRecord(object, "reconfigure", name, old_value);
// Property not found anywhere on the object: add it as new.
3924   return AddDataProperty(&it, value, attributes, STRICT,
3925                          CERTAINLY_NOT_STORE_FROM_KEYED);
// Queries a named interceptor for the attributes of |name|: prefers the
// interceptor's query callback (returns attributes directly); otherwise
// falls back to the getter callback, reporting DONT_ENUM when the getter
// produced any value.  Symbols are reported as ABSENT.
3929 Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor(
3930     Handle<JSObject> holder,
3931     Handle<Object> receiver,
3932     Handle<Name> name) {
3933   // TODO(rossberg): Support symbols in the API.
3934   if (name->IsSymbol()) return maybe(ABSENT);
3936   Isolate* isolate = holder->GetIsolate();
3937   HandleScope scope(isolate);
3939   // Make sure that the top context does not change when doing
3940   // callbacks or interceptor calls.
3941   AssertNoContextChange ncc(isolate);
3943   Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
3944   PropertyCallbackArguments args(
3945       isolate, interceptor->data(), *receiver, *holder);
3946   if (!interceptor->query()->IsUndefined()) {
3947     v8::NamedPropertyQueryCallback query =
3948         v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
3950         ApiNamedPropertyAccess("interceptor-named-has", *holder, *name));
3951     v8::Handle<v8::Integer> result =
3952         args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
3953     if (!result.IsEmpty()) {
3954       DCHECK(result->IsInt32());
3955       return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
3957   } else if (!interceptor->getter()->IsUndefined()) {
3958     v8::NamedPropertyGetterCallback getter =
3959         v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
3961         ApiNamedPropertyAccess("interceptor-named-get-has", *holder, *name));
3962     v8::Handle<v8::Value> result =
3963         args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
// A non-empty getter result only proves existence, not real attributes.
3964     if (!result.IsEmpty()) return maybe(DONT_ENUM);
3967   RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
3968   return maybe(ABSENT);
// Returns the attributes of the own property |name| on |object|, routing
// array-index names through the element path.
3972 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
3973     Handle<JSReceiver> object, Handle<Name> name) {
3974   // Check whether the name is an array index.
3976   if (object->IsJSObject() && name->AsArrayIndex(&index)) {
3977     return GetOwnElementAttribute(object, index);
3979   LookupIterator it(object, name, LookupIterator::HIDDEN);
3980   return GetPropertyAttributes(&it);
// Drives the lookup iterator to compute property attributes, dispatching to
// the proxy handler, named interceptors, access-check failure handling, or
// the found property's own details.  Returns ABSENT if nothing is found.
3984 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
3985     LookupIterator* it) {
3986   for (; it->IsFound(); it->Next()) {
3987     switch (it->state()) {
3988       case LookupIterator::NOT_FOUND:
3989       case LookupIterator::TRANSITION:
3991       case LookupIterator::JSPROXY:
3992         return JSProxy::GetPropertyAttributesWithHandler(
3993             it->GetHolder<JSProxy>(), it->GetReceiver(), it->name());
3994       case LookupIterator::INTERCEPTOR: {
3995         Maybe<PropertyAttributes> result =
3996             JSObject::GetPropertyAttributesWithInterceptor(
3997                 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
// An empty Maybe means an exception is pending; propagate it.
3998         if (!result.has_value) return result;
// ABSENT from the interceptor means: keep looking past it.
3999         if (result.value != ABSENT) return result;
4002       case LookupIterator::ACCESS_CHECK:
4003         if (it->HasAccess(v8::ACCESS_HAS)) break;
4004         return JSObject::GetPropertyAttributesWithFailedAccessCheck(it);
4005       case LookupIterator::ACCESSOR:
4006       case LookupIterator::DATA:
4007         return maybe(it->property_details().attributes());
4010   return maybe(ABSENT);
// Computes attributes for element |index| on |object| as seen from
// |receiver|: performs the indexed access check, unwraps global proxies by
// recursing on their prototype, and routes through an indexed interceptor
// when one is installed (except during bootstrapping).
4014 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithReceiver(
4015     Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4016     bool check_prototype) {
4017   Isolate* isolate = object->GetIsolate();
4019   // Check access rights if needed.
4020   if (object->IsAccessCheckNeeded()) {
4021     if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
4022       isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
4023       RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
4024       return maybe(ABSENT);
// A global proxy delegates to its JSGlobalObject prototype; a detached
// proxy (empty chain) has no properties.
4028   if (object->IsJSGlobalProxy()) {
4029     PrototypeIterator iter(isolate, object);
4030     if (iter.IsAtEnd()) return maybe(ABSENT);
4031     DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4032     return JSObject::GetElementAttributeWithReceiver(
4033         Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4034         index, check_prototype);
4037   // Check for lookup interceptor except when bootstrapping.
4038   if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4039     return JSObject::GetElementAttributeWithInterceptor(
4040         object, receiver, index, check_prototype);
4043   return GetElementAttributeWithoutInterceptor(
4044       object, receiver, index, check_prototype);
// Queries element attributes through the object's indexed interceptor.
// Prefers the "query" callback (which returns attributes directly); falls
// back to the "getter" callback, whose mere success implies NONE.  If neither
// callback produces a result, continues without the interceptor.
4048 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithInterceptor(
4049 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4050 bool check_prototype) {
4051 Isolate* isolate = object->GetIsolate();
4052 HandleScope scope(isolate);
4054 // Make sure that the top context does not change when doing
4055 // callbacks or interceptor calls.
4056 AssertNoContextChange ncc(isolate);
4058 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4059 PropertyCallbackArguments args(
4060 isolate, interceptor->data(), *receiver, *object);
4061 if (!interceptor->query()->IsUndefined()) {
4062 v8::IndexedPropertyQueryCallback query =
4063 v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
// NOTE(review): the LOG(...) line that wraps this call is elided in this
// excerpt (original line 4064).
4065 ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4066 v8::Handle<v8::Integer> result = args.Call(query, index);
// The query callback encodes PropertyAttributes in its integer result.
4067 if (!result.IsEmpty())
4068 return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
4069 } else if (!interceptor->getter()->IsUndefined()) {
4070 v8::IndexedPropertyGetterCallback getter =
4071 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4073 ApiIndexedPropertyAccess(
4074 "interceptor-indexed-get-has", *object, index));
4075 v8::Handle<v8::Value> result = args.Call(getter, index);
// A getter that produced any value implies the element exists with NONE.
4076 if (!result.IsEmpty()) return maybe(NONE);
4079 return GetElementAttributeWithoutInterceptor(
4080 object, receiver, index, check_prototype);
// Returns the attributes of element |index| ignoring interceptors: first the
// object's own elements accessor, then the String-wrapper special case, then
// (optionally) the prototype chain.
4084 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithoutInterceptor(
4085 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4086 bool check_prototype) {
4087 PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4088 receiver, object, index);
4089 if (attr != ABSENT) return maybe(attr);
4091 // Handle [] on String objects.
// String characters behave as non-writable, non-deletable elements.
4092 if (object->IsStringObjectWithCharacterAt(index)) {
4093 return maybe(static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE));
4096 if (!check_prototype) return maybe(ABSENT);
4098 PrototypeIterator iter(object->GetIsolate(), object);
4099 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
4100 // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4101 return JSProxy::GetElementAttributeWithHandler(
// NOTE(review): the trailing argument line of this call is elided here.
4102 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4105 if (iter.IsAtEnd()) return maybe(ABSENT);
4106 return GetElementAttributeWithReceiver(
4107 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4112 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
4113 Handle<FixedArray> array(
4114 isolate->factory()->NewFixedArray(kEntries, TENURED));
4115 return Handle<NormalizedMapCache>::cast(array);
4119 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
4120 PropertyNormalizationMode mode) {
4121 DisallowHeapAllocation no_gc;
4122 Object* value = FixedArray::get(GetIndex(fast_map));
4123 if (!value->IsMap() ||
4124 !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
4125 return MaybeHandle<Map>();
4127 return handle(Map::cast(value));
4131 void NormalizedMapCache::Set(Handle<Map> fast_map,
4132 Handle<Map> normalized_map) {
4133 DisallowHeapAllocation no_gc;
4134 DCHECK(normalized_map->is_dictionary_map());
4135 FixedArray::set(GetIndex(fast_map), *normalized_map);
// Empties every slot of the cache.
// NOTE(review): the loop body (presumably clearing each entry) and the
// closing braces are elided in this excerpt — confirm against the original.
4139 void NormalizedMapCache::Clear() {
4140 int entries = length();
4141 for (int i = 0; i != entries; i++) {
4147 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4149 Handle<Code> code) {
4150 Handle<Map> map(object->map());
4151 Map::UpdateCodeCache(map, name, code);
4155 void JSObject::NormalizeProperties(Handle<JSObject> object,
4156 PropertyNormalizationMode mode,
4157 int expected_additional_properties) {
4158 if (!object->HasFastProperties()) return;
4160 Handle<Map> map(object->map());
4161 Handle<Map> new_map = Map::Normalize(map, mode);
4163 MigrateFastToSlow(object, new_map, expected_additional_properties);
// Rewrites |object| from fast properties (descriptor array + fields) into a
// NameDictionary, then installs |new_map| and shrinks the instance in place.
// Statement order is GC-sensitive: all allocation happens before the
// no-allocation section that resizes and re-maps the object.
// NOTE(review): the case labels of the switch (FIELD/CONSTANT/CALLBACKS) and
// several closing braces are elided in this excerpt.
4167 void JSObject::MigrateFastToSlow(Handle<JSObject> object,
4168 Handle<Map> new_map,
4169 int expected_additional_properties) {
4170 // The global object is always normalized.
4171 DCHECK(!object->IsGlobalObject());
4172 // JSGlobalProxy must never be normalized
4173 DCHECK(!object->IsJSGlobalProxy());
4175 Isolate* isolate = object->GetIsolate();
4176 HandleScope scope(isolate);
4177 Handle<Map> map(object->map());
4179 // Allocate new content.
4180 int real_size = map->NumberOfOwnDescriptors();
4181 int property_count = real_size;
4182 if (expected_additional_properties > 0) {
4183 property_count += expected_additional_properties;
4185 property_count += 2; // Make space for two more properties.
4187 Handle<NameDictionary> dictionary =
4188 NameDictionary::New(isolate, property_count);
// Copy each fast property into the dictionary, keyed on its descriptor
// kind.  Enumeration order is preserved via the i + 1 dictionary index.
4190 Handle<DescriptorArray> descs(map->instance_descriptors());
4191 for (int i = 0; i < real_size; i++) {
4192 PropertyDetails details = descs->GetDetails(i);
4193 switch (details.type()) {
4195 Handle<Name> key(descs->GetKey(i));
4196 Handle<Object> value(descs->GetConstant(i), isolate);
4197 PropertyDetails d = PropertyDetails(
4198 details.attributes(), NORMAL, i + 1);
4199 dictionary = NameDictionary::Add(dictionary, key, value, d);
4203 Handle<Name> key(descs->GetKey(i));
4204 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
4205 Handle<Object> value(
4206 object->RawFastPropertyAt(index), isolate);
// Mutable heap numbers are owned by the fast object; clone them so the
// dictionary holds an independent boxed value.
4207 if (details.representation().IsDouble()) {
4208 DCHECK(value->IsMutableHeapNumber());
4209 Handle<HeapNumber> old = Handle<HeapNumber>::cast(value);
4210 value = isolate->factory()->NewHeapNumber(old->value());
4213 PropertyDetails(details.attributes(), NORMAL, i + 1);
4214 dictionary = NameDictionary::Add(dictionary, key, value, d);
4218 Handle<Name> key(descs->GetKey(i));
4219 Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4220 PropertyDetails d = PropertyDetails(
4221 details.attributes(), CALLBACKS, i + 1);
4222 dictionary = NameDictionary::Add(dictionary, key, value, d);
4231 // Copy the next enumeration index from instance descriptor.
4232 dictionary->SetNextEnumerationIndex(real_size + 1);
4234 // From here on we cannot fail and we shouldn't GC anymore.
4235 DisallowHeapAllocation no_allocation;
4237 // Resize the object in the heap if necessary.
4238 int new_instance_size = new_map->instance_size();
4239 int instance_size_delta = map->instance_size() - new_instance_size;
4240 DCHECK(instance_size_delta >= 0);
// A filler object keeps the heap iterable over the freed tail of the
// shrunken instance.
4242 if (instance_size_delta > 0) {
4243 Heap* heap = isolate->heap();
4244 heap->CreateFillerObjectAt(object->address() + new_instance_size,
4245 instance_size_delta);
4246 heap->AdjustLiveBytes(object->address(), -instance_size_delta,
4247 Heap::FROM_MUTATOR);
4250 // We are storing the new map using release store after creating a filler for
4251 // the left-over space to avoid races with the sweeper thread.
4252 object->synchronized_set_map(*new_map);
4254 object->set_properties(*dictionary);
4256 isolate->counters()->props_to_dictionary()->Increment();
4259 if (FLAG_trace_normalization) {
4260 OFStream os(stdout);
4261 os << "Object properties have been normalized:\n";
// Rewrites |object| from dictionary properties back into fast properties:
// counts what the dictionary holds, builds a new map + descriptor array, and
// copies values into in-object slots first, then an out-of-object fields
// array.  Bails out if the object is already fast or has too many properties.
// NOTE(review): several lines (an early return, `Handle<Name> key;`
// declaration, closing braces) are elided in this excerpt.
4268 void JSObject::MigrateSlowToFast(Handle<JSObject> object,
4269 int unused_property_fields) {
4270 if (object->HasFastProperties()) return;
4271 DCHECK(!object->IsGlobalObject());
4272 Isolate* isolate = object->GetIsolate();
4273 Factory* factory = isolate->factory();
4274 Handle<NameDictionary> dictionary(object->property_dictionary());
4276 // Make sure we preserve dictionary representation if there are too many
4278 int number_of_elements = dictionary->NumberOfElements();
4279 if (number_of_elements > kMaxNumberOfDescriptors) return;
// Compact enumeration indices so dictionary_index() maps 1..N densely,
// which the descriptor Set(enumeration_index - 1, ...) calls below rely on.
4281 if (number_of_elements != dictionary->NextEnumerationIndex()) {
4282 NameDictionary::DoGenerateNewEnumerationIndices(dictionary);
4285 int instance_descriptor_length = 0;
4286 int number_of_fields = 0;
4288 // Compute the length of the instance descriptor.
4289 int capacity = dictionary->Capacity();
4290 for (int i = 0; i < capacity; i++) {
4291 Object* k = dictionary->KeyAt(i);
4292 if (dictionary->IsKey(k)) {
4293 Object* value = dictionary->ValueAt(i);
4294 PropertyType type = dictionary->DetailsAt(i).type();
4295 DCHECK(type != FIELD);
4296 instance_descriptor_length++;
// JSFunction values become CONSTANT descriptors, so only non-function
// NORMAL entries consume a field slot.
4297 if (type == NORMAL && !value->IsJSFunction()) {
4298 number_of_fields += 1;
4303 int inobject_props = object->map()->inobject_properties();
4305 // Allocate new map.
4306 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
4307 new_map->set_dictionary_map(false);
// Fast path: no properties at all — install the new map and an empty
// properties array and we are done.
4309 if (instance_descriptor_length == 0) {
4310 DisallowHeapAllocation no_gc;
4311 DCHECK_LE(unused_property_fields, inobject_props);
4312 // Transform the object.
4313 new_map->set_unused_property_fields(inobject_props);
4314 object->synchronized_set_map(*new_map);
4315 object->set_properties(isolate->heap()->empty_fixed_array());
4316 // Check that it really works.
4317 DCHECK(object->HasFastProperties());
4321 // Allocate the instance descriptor.
4322 Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
4323 isolate, instance_descriptor_length);
4325 int number_of_allocated_fields =
4326 number_of_fields + unused_property_fields - inobject_props;
4327 if (number_of_allocated_fields < 0) {
4328 // There is enough inobject space for all fields (including unused).
4329 number_of_allocated_fields = 0;
4330 unused_property_fields = inobject_props - number_of_fields;
4333 // Allocate the fixed array for the fields.
4334 Handle<FixedArray> fields = factory->NewFixedArray(
4335 number_of_allocated_fields);
4337 // Fill in the instance descriptor and the fields.
4338 int current_offset = 0;
4339 for (int i = 0; i < capacity; i++) {
4340 Object* k = dictionary->KeyAt(i);
4341 if (dictionary->IsKey(k)) {
4342 Object* value = dictionary->ValueAt(i);
4344 if (k->IsSymbol()) {
4345 key = handle(Symbol::cast(k));
4347 // Ensure the key is a unique name before writing into the
4348 // instance descriptor.
4349 key = factory->InternalizeString(handle(String::cast(k)));
4352 PropertyDetails details = dictionary->DetailsAt(i);
4353 int enumeration_index = details.dictionary_index();
4354 PropertyType type = details.type();
4356 if (value->IsJSFunction()) {
4357 ConstantDescriptor d(key,
4358 handle(value, isolate),
4359 details.attributes());
4360 descriptors->Set(enumeration_index - 1, &d);
4361 } else if (type == NORMAL) {
// Values fill in-object slots first, then spill into the fields array.
4362 if (current_offset < inobject_props) {
4363 object->InObjectPropertyAtPut(current_offset,
4365 UPDATE_WRITE_BARRIER);
4367 int offset = current_offset - inobject_props;
4368 fields->set(offset, value);
4370 FieldDescriptor d(key,
4372 details.attributes(),
4373 // TODO(verwaest): value->OptimalRepresentation();
4374 Representation::Tagged());
4375 descriptors->Set(enumeration_index - 1, &d);
4376 } else if (type == CALLBACKS) {
4377 CallbacksDescriptor d(key,
4378 handle(value, isolate),
4379 details.attributes());
4380 descriptors->Set(enumeration_index - 1, &d);
4386 DCHECK(current_offset == number_of_fields);
4388 descriptors->Sort();
4390 DisallowHeapAllocation no_gc;
4391 new_map->InitializeDescriptors(*descriptors);
4392 new_map->set_unused_property_fields(unused_property_fields);
4394 // Transform the object.
4395 object->synchronized_set_map(*new_map);
4397 object->set_properties(*fields);
4398 DCHECK(object->IsJSObject());
4400 // Check that it really works.
4401 DCHECK(object->HasFastProperties());
4405 void JSObject::ResetElements(Handle<JSObject> object) {
4406 Isolate* isolate = object->GetIsolate();
4407 CHECK(object->map() != isolate->heap()->sloppy_arguments_elements_map());
4408 if (object->map()->has_dictionary_elements()) {
4409 Handle<SeededNumberDictionary> new_elements =
4410 SeededNumberDictionary::New(isolate, 0);
4411 object->set_elements(*new_elements);
4413 object->set_elements(object->map()->GetInitialElements());
// Copies the first |length| non-hole elements of a fast backing store
// (FixedArray or FixedDoubleArray) into |dictionary|, boxing doubles into
// fresh HeapNumbers.  Returns the (possibly reallocated) dictionary.
// NOTE(review): the `int length` parameter line and several else/closing
// braces are elided in this excerpt.
4418 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
4419 Handle<FixedArrayBase> array,
4421 Handle<SeededNumberDictionary> dictionary) {
4422 Isolate* isolate = array->GetIsolate();
4423 Factory* factory = isolate->factory();
4424 bool has_double_elements = array->IsFixedDoubleArray();
4425 for (int i = 0; i < length; i++) {
4426 Handle<Object> value;
4427 if (has_double_elements) {
4428 Handle<FixedDoubleArray> double_array =
4429 Handle<FixedDoubleArray>::cast(array);
4430 if (double_array->is_the_hole(i)) {
4431 value = factory->the_hole_value();
// Box the raw double into a HeapNumber for dictionary storage.
4433 value = factory->NewHeapNumber(double_array->get_scalar(i));
4436 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
// Holes are simply skipped; only real values get dictionary entries.
4438 if (!value->IsTheHole()) {
4439 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4441 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
// Converts the object's fast elements into dictionary (slow) elements and
// returns the resulting SeededNumberDictionary.  Already-normalized objects
// return their existing dictionary.  Sloppy-arguments stores keep their
// two-part structure with the dictionary installed in slot 1.
// NOTE(review): the `is_arguments` declaration and some braces are elided.
4448 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4449 Handle<JSObject> object) {
// External/typed arrays cannot be normalized into a dictionary.
4450 DCHECK(!object->HasExternalArrayElements() &&
4451 !object->HasFixedTypedArrayElements());
4452 Isolate* isolate = object->GetIsolate();
4454 // Find the backing store.
4455 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
4457 (array->map() == isolate->heap()->sloppy_arguments_elements_map());
// For sloppy arguments, the real backing store lives in slot 1.
4459 array = handle(FixedArrayBase::cast(
4460 Handle<FixedArray>::cast(array)->get(1)));
4462 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
4464 DCHECK(object->HasFastSmiOrObjectElements() ||
4465 object->HasFastDoubleElements() ||
4466 object->HasFastArgumentsElements());
4467 // Compute the effective length and allocate a new backing store.
4468 int length = object->IsJSArray()
4469 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
4471 int old_capacity = 0;
4472 int used_elements = 0;
4473 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
4474 Handle<SeededNumberDictionary> dictionary =
4475 SeededNumberDictionary::New(isolate, used_elements);
4477 dictionary = CopyFastElementsToDictionary(array, length, dictionary);
4479 // Switch to using the dictionary as the backing storage for elements.
4481 FixedArray::cast(object->elements())->set(1, *dictionary);
4483 // Set the new map first to satify the elements type assert in
4485 Handle<Map> new_map =
4486 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
4488 JSObject::MigrateToMap(object, new_map);
4489 object->set_elements(*dictionary);
4492 isolate->counters()->elements_to_dictionary()->Increment();
4495 if (FLAG_trace_normalization) {
4496 OFStream os(stdout);
4497 os << "Object elements have been normalized:\n";
4502 DCHECK(object->HasDictionaryElements() ||
4503 object->HasDictionaryArgumentsElements());
4508 static Smi* GenerateIdentityHash(Isolate* isolate) {
4512 // Generate a random 32-bit hash value but limit range to fit
4514 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
4516 } while (hash_value == 0 && attempts < 30);
4517 hash_value = hash_value != 0 ? hash_value : 1; // never return 0
4519 return Smi::FromInt(hash_value);
4523 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
4524 DCHECK(!object->IsJSGlobalProxy());
4525 Isolate* isolate = object->GetIsolate();
4526 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
4530 template<typename ProxyType>
4531 static Handle<Smi> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) {
4532 Isolate* isolate = proxy->GetIsolate();
4534 Handle<Object> maybe_hash(proxy->hash(), isolate);
4535 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4537 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4538 proxy->set_hash(*hash);
4543 Object* JSObject::GetIdentityHash() {
4544 DisallowHeapAllocation no_gc;
4545 Isolate* isolate = GetIsolate();
4546 if (IsJSGlobalProxy()) {
4547 return JSGlobalProxy::cast(this)->hash();
4549 Object* stored_value =
4550 GetHiddenProperty(isolate->factory()->identity_hash_string());
4551 return stored_value->IsSmi()
4553 : isolate->heap()->undefined_value();
4557 Handle<Smi> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
4558 if (object->IsJSGlobalProxy()) {
4559 return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object));
4562 Isolate* isolate = object->GetIsolate();
4564 Handle<Object> maybe_hash(object->GetIdentityHash(), isolate);
4565 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4567 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4568 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
4573 Object* JSProxy::GetIdentityHash() {
4574 return this->hash();
4578 Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
4579 return GetOrCreateIdentityHashHelper(proxy);
// Looks up |key| in the object's hidden-properties store.  Returns the_hole
// when absent.  The store can be: nothing (undefined), an inline identity
// hash (Smi), or a full ObjectHashTable.
// NOTE(review): the final return of the hashtable entry and closing braces
// are elided in this excerpt.
4583 Object* JSObject::GetHiddenProperty(Handle<Name> key) {
4584 DisallowHeapAllocation no_gc;
4585 DCHECK(key->IsUniqueName());
4586 if (IsJSGlobalProxy()) {
4587 // JSGlobalProxies store their hash internally.
4588 DCHECK(*key != GetHeap()->identity_hash_string());
4589 // For a proxy, use the prototype as target object.
4590 PrototypeIterator iter(GetIsolate(), this);
4591 // If the proxy is detached, return undefined.
4592 if (iter.IsAtEnd()) return GetHeap()->the_hole_value();
4593 DCHECK(iter.GetCurrent()->IsJSGlobalObject());
4594 return JSObject::cast(iter.GetCurrent())->GetHiddenProperty(key);
4596 DCHECK(!IsJSGlobalProxy());
4597 Object* inline_value = GetHiddenPropertiesHashTable();
4599 if (inline_value->IsSmi()) {
4600 // Handle inline-stored identity hash.
4601 if (*key == GetHeap()->identity_hash_string()) {
4602 return inline_value;
4604 return GetHeap()->the_hole_value();
4608 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4610 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4611 Object* entry = hashtable->Lookup(key);
// Stores |value| under |key| in the object's hidden-properties store,
// creating the backing hashtable on demand.  The identity hash is special-
// cased: while no hashtable exists it is stored inline as a bare Smi.
// NOTE(review): the `Handle<Name> key` parameter line, trailing call
// arguments, and the final return are elided in this excerpt.
4616 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
4618 Handle<Object> value) {
4619 Isolate* isolate = object->GetIsolate();
4621 DCHECK(key->IsUniqueName());
4622 if (object->IsJSGlobalProxy()) {
4623 // JSGlobalProxies store their hash internally.
4624 DCHECK(*key != *isolate->factory()->identity_hash_string());
4625 // For a proxy, use the prototype as target object.
4626 PrototypeIterator iter(isolate, object);
4627 // If the proxy is detached, return undefined.
4628 if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
4629 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4630 return SetHiddenProperty(
4631 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key,
4634 DCHECK(!object->IsJSGlobalProxy());
4636 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4638 // If there is no backing store yet, store the identity hash inline.
4639 if (value->IsSmi() &&
4640 *key == *isolate->factory()->identity_hash_string() &&
4641 (inline_value->IsUndefined() || inline_value->IsSmi())) {
4642 return JSObject::SetHiddenPropertiesHashTable(object, value);
4645 Handle<ObjectHashTable> hashtable =
4646 GetOrCreateHiddenPropertiesHashtable(object);
4648 // If it was found, check if the key is already in the dictionary.
4649 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
// Put may reallocate; only then does the store need to be re-installed.
4651 if (*new_table != *hashtable) {
4652 // If adding the key expanded the dictionary (i.e., Add returned a new
4653 // dictionary), store it back to the object.
4654 SetHiddenPropertiesHashTable(object, new_table);
4657 // Return this to mark success.
// Removes |key| from the object's hidden-properties hashtable, if present.
// Global proxies forward to the global object; a store that is absent or an
// inline identity hash has nothing deletable.
4662 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
4663 Isolate* isolate = object->GetIsolate();
4664 DCHECK(key->IsUniqueName());
4666 if (object->IsJSGlobalProxy()) {
4667 PrototypeIterator iter(isolate, object);
// A detached global proxy has no hidden properties to delete.
4668 if (iter.IsAtEnd()) return;
4669 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4670 return DeleteHiddenProperty(
4671 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key);
4674 Object* inline_value = object->GetHiddenPropertiesHashTable();
4676 // We never delete (inline-stored) identity hashes.
4677 DCHECK(*key != *isolate->factory()->identity_hash_string());
4678 if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
4680 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
4681 bool was_present = false;
4682 ObjectHashTable::Remove(hashtable, key, &was_present);
4686 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
4687 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
4688 LookupIterator it(object, hidden, LookupIterator::OWN_SKIP_INTERCEPTOR);
4689 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
4690 // Cannot get an exception since the hidden_string isn't accessible to JS.
4691 DCHECK(maybe.has_value);
4692 return maybe.value != ABSENT;
// Returns the raw hidden-properties store: undefined (none), a Smi (inline
// identity hash), or an ObjectHashTable.  Fast-properties objects exploit
// the hidden string's zero hash code to probe only descriptor slot 0;
// dictionary-mode objects fall back to a lookup-iterator data-property read.
// NOTE(review): the second argument of FieldIndex::ForDescriptor and some
// closing braces are elided in this excerpt.
4696 Object* JSObject::GetHiddenPropertiesHashTable() {
4697 DCHECK(!IsJSGlobalProxy());
4698 if (HasFastProperties()) {
4699 // If the object has fast properties, check whether the first slot
4700 // in the descriptor array matches the hidden string. Since the
4701 // hidden strings hash code is zero (and no other name has hash
4702 // code zero) it will always occupy the first entry if present.
4703 DescriptorArray* descriptors = this->map()->instance_descriptors();
4704 if (descriptors->number_of_descriptors() > 0) {
4705 int sorted_index = descriptors->GetSortedKeyIndex(0);
4706 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
4707 sorted_index < map()->NumberOfOwnDescriptors()) {
4708 DCHECK(descriptors->GetType(sorted_index) == FIELD);
4709 DCHECK(descriptors->GetDetails(sorted_index).representation().
4710 IsCompatibleForLoad(Representation::Tagged()));
4711 FieldIndex index = FieldIndex::ForDescriptor(this->map(),
4713 return this->RawFastPropertyAt(index);
4715 return GetHeap()->undefined_value();
4718 return GetHeap()->undefined_value();
4721 Isolate* isolate = GetIsolate();
4722 LookupIterator it(handle(this), isolate->factory()->hidden_string(),
4723 LookupIterator::OWN_SKIP_INTERCEPTOR);
4724 // Access check is always skipped for the hidden string anyways.
4725 return *GetDataProperty(&it);
// Returns the object's hidden-properties hashtable, creating it if needed.
// An inline identity hash (Smi) is migrated into the new table.
// NOTE(review): the trailing argument of the Put call, the final return, and
// closing braces are elided in this excerpt.
4729 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
4730 Handle<JSObject> object) {
4731 Isolate* isolate = object->GetIsolate();
4733 static const int kInitialCapacity = 4;
4734 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4735 if (inline_value->IsHashTable()) {
4736 return Handle<ObjectHashTable>::cast(inline_value);
4739 Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
4740 isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);
4742 if (inline_value->IsSmi()) {
4743 // We were storing the identity hash inline and now allocated an actual
4744 // dictionary. Put the identity hash into the new dictionary.
4745 hashtable = ObjectHashTable::Put(hashtable,
4746 isolate->factory()->identity_hash_string(),
4750 SetHiddenPropertiesHashTable(object, hashtable);
4755 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
4756 Handle<Object> value) {
4757 DCHECK(!object->IsJSGlobalProxy());
4758 Isolate* isolate = object->GetIsolate();
4759 Handle<Name> name = isolate->factory()->hidden_string();
4760 SetOwnPropertyIgnoreAttributes(object, name, value, DONT_ENUM).Assert();
// Invokes |holder|'s named-property deleter interceptor for |name|.  An
// empty MaybeHandle means the interceptor did not handle the deletion (or
// the key is a symbol / no deleter is installed); otherwise the boolean
// result of the callback is returned, reboxed in the current isolate.
// NOTE(review): a LOG(...) wrapper line and a trailing call argument are
// elided in this excerpt.
4765 MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
4766 Handle<JSObject> holder, Handle<JSObject> receiver, Handle<Name> name) {
4767 Isolate* isolate = holder->GetIsolate();
4769 // TODO(rossberg): Support symbols in the API.
4770 if (name->IsSymbol()) return MaybeHandle<Object>();
4772 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
4773 if (interceptor->deleter()->IsUndefined()) return MaybeHandle<Object>();
4775 v8::NamedPropertyDeleterCallback deleter =
4776 v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
4778 ApiNamedPropertyAccess("interceptor-named-delete", *holder, *name));
4779 PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
4781 v8::Handle<v8::Boolean> result =
4782 args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
4783 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4784 if (result.IsEmpty()) return MaybeHandle<Object>();
4786 DCHECK(result->IsBoolean());
4787 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
4788 result_internal->VerifyApiCallResultType();
4789 // Rebox CustomArguments::kReturnValueOffset before returning.
4790 return handle(*result_internal, isolate);
// Invokes the object's indexed-property deleter interceptor for |index|.
// If the interceptor declines (empty result), the deletion falls through to
// the elements accessor with NORMAL_DELETION semantics.
// NOTE(review): the `uint32_t index` parameter line and a LOG(...) wrapper
// line are elided in this excerpt.
4794 MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
4795 Handle<JSObject> object,
4797 Isolate* isolate = object->GetIsolate();
4798 Factory* factory = isolate->factory();
4800 // Make sure that the top context does not change when doing
4801 // callbacks or interceptor calls.
4802 AssertNoContextChange ncc(isolate);
4804 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4805 if (interceptor->deleter()->IsUndefined()) return factory->false_value();
4806 v8::IndexedPropertyDeleterCallback deleter =
4807 v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
4809 ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
4810 PropertyCallbackArguments args(
4811 isolate, interceptor->data(), *object, *object);
4812 v8::Handle<v8::Boolean> result = args.Call(deleter, index);
4813 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4814 if (!result.IsEmpty()) {
4815 DCHECK(result->IsBoolean());
4816 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
4817 result_internal->VerifyApiCallResultType();
4818 // Rebox CustomArguments::kReturnValueOffset before returning.
4819 return handle(*result_internal, isolate);
// Interceptor declined: delete through the elements accessor instead.
4821 MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
4822 object, index, NORMAL_DELETION);
4823 return delete_result;
// Deletes element |index| according to |mode| (normal/strict/force): runs
// access checks, rejects String-wrapper character "elements" (TypeError in
// strict mode), unwraps global proxies, notifies Object.observe when the
// object is observed, and dispatches to the interceptor or the elements
// accessor.
// NOTE(review): parameter lines, some else-branches, trailing arguments, the
// final return, and closing braces are elided in this excerpt.
4827 MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
4830 Isolate* isolate = object->GetIsolate();
4831 Factory* factory = isolate->factory();
4833 // Check access rights if needed.
4834 if (object->IsAccessCheckNeeded() &&
4835 !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
4836 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
4837 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4838 return factory->false_value();
// String-wrapper characters are non-configurable: delete fails (throws in
// strict mode).
4841 if (object->IsStringObjectWithCharacterAt(index)) {
4842 if (mode == STRICT_DELETION) {
4843 // Deleting a non-configurable property in strict mode.
4844 Handle<Object> name = factory->NewNumberFromUint(index);
4845 Handle<Object> args[2] = { name, object };
4846 THROW_NEW_ERROR(isolate, NewTypeError("strict_delete_property",
4847 HandleVector(args, 2)),
4850 return factory->false_value();
4853 if (object->IsJSGlobalProxy()) {
4854 PrototypeIterator iter(isolate, object);
4855 if (iter.IsAtEnd()) return factory->false_value();
4856 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4857 return DeleteElement(
4858 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
// For observed objects, capture the old value (or the_hole for accessor
// elements) so a "delete" change record can be enqueued afterwards.
4862 Handle<Object> old_value;
4863 bool should_enqueue_change_record = false;
4864 if (object->map()->is_observed()) {
4865 Maybe<bool> maybe = HasOwnElement(object, index);
4866 if (!maybe.has_value) return MaybeHandle<Object>();
4867 should_enqueue_change_record = maybe.value;
4868 if (should_enqueue_change_record) {
4869 if (!GetOwnElementAccessorPair(object, index).is_null()) {
4870 old_value = Handle<Object>::cast(factory->the_hole_value());
4872 old_value = Object::GetElement(
4873 isolate, object, index).ToHandleChecked();
4878 // Skip interceptor if forcing deletion.
4879 MaybeHandle<Object> maybe_result;
4880 if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
4881 maybe_result = DeleteElementWithInterceptor(object, index);
4883 maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
4885 Handle<Object> result;
4886 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
// Only report a change if the element was actually removed.
4888 if (should_enqueue_change_record) {
4889 Maybe<bool> maybe = HasOwnElement(object, index);
4890 if (!maybe.has_value) return MaybeHandle<Object>();
4892 Handle<String> name = factory->Uint32ToString(index);
4893 EnqueueChangeRecord(object, "delete", name, old_value);
// Deletes named property |name| per ECMA-262 [[Delete]]: array-index names
// go through DeleteElement; otherwise a lookup-iterator walk handles access
// checks, interceptors, and configurability.  Actual removal normalizes the
// holder and deletes from its dictionary.  Observed objects get a "delete"
// change record.
// NOTE(review): parameter lines, `uint32_t index;`, break statements, and
// closing braces are elided in this excerpt.
4901 MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
4903 DeleteMode delete_mode) {
4904 // ECMA-262, 3rd, 8.6.2.5
4905 DCHECK(name->IsName());
4908 if (name->AsArrayIndex(&index)) {
4909 return DeleteElement(object, index, delete_mode);
4912 // Skip interceptors on FORCE_DELETION.
4913 LookupIterator::Configuration config =
4914 delete_mode == FORCE_DELETION ? LookupIterator::HIDDEN_SKIP_INTERCEPTOR
4915 : LookupIterator::HIDDEN;
4917 LookupIterator it(object, name, config);
// The hidden string is internal bookkeeping and never observable.
4919 bool is_observed = object->map()->is_observed() &&
4920 *name != it.isolate()->heap()->hidden_string();
4921 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
4923 for (; it.IsFound(); it.Next()) {
4924 switch (it.state()) {
4925 case LookupIterator::JSPROXY:
4926 case LookupIterator::NOT_FOUND:
4927 case LookupIterator::TRANSITION:
4929 case LookupIterator::ACCESS_CHECK:
4930 if (it.HasAccess(v8::ACCESS_DELETE)) break;
4931 it.isolate()->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
4933 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it.isolate(), Object);
4934 return it.isolate()->factory()->false_value();
4935 case LookupIterator::INTERCEPTOR: {
4936 MaybeHandle<Object> maybe_result =
4937 JSObject::DeletePropertyWithInterceptor(it.GetHolder<JSObject>(),
4939 // Delete with interceptor succeeded. Return result.
4940 if (!maybe_result.is_null()) return maybe_result;
4941 // An exception was thrown in the interceptor. Propagate.
4942 if (it.isolate()->has_pending_exception()) return maybe_result;
4945 case LookupIterator::DATA:
4947 old_value = it.GetDataValue();
4950 case LookupIterator::ACCESSOR: {
4951 if (delete_mode != FORCE_DELETION && !it.IsConfigurable()) {
4952 // Fail if the property is not configurable.
4953 if (delete_mode == STRICT_DELETION) {
4954 Handle<Object> args[2] = {name, object};
4955 THROW_NEW_ERROR(it.isolate(),
4956 NewTypeError("strict_delete_property",
4957 HandleVector(args, arraysize(args))),
4960 return it.isolate()->factory()->false_value();
// Deletion works by normalizing the holder to dictionary mode and
// removing the entry from the dictionary.
4963 PropertyNormalizationMode mode = object->map()->is_prototype_map()
4964 ? KEEP_INOBJECT_PROPERTIES
4965 : CLEAR_INOBJECT_PROPERTIES;
4966 Handle<JSObject> holder = it.GetHolder<JSObject>();
4967 // TODO(verwaest): Remove this temporary compatibility hack when blink
4968 // tests are updated.
4969 if (!holder.is_identical_to(object) &&
4970 !(object->IsJSGlobalProxy() && holder->IsJSGlobalObject())) {
4971 return it.isolate()->factory()->true_value();
4973 NormalizeProperties(holder, mode, 0);
4974 Handle<Object> result =
4975 DeleteNormalizedProperty(holder, name, delete_mode);
4976 ReoptimizeIfPrototype(holder);
4979 EnqueueChangeRecord(object, "delete", name, old_value);
// Property not found (or nothing to do): deletion trivially succeeds.
4987 return it.isolate()->factory()->true_value();
4991 MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
4994 if (object->IsJSProxy()) {
4995 return JSProxy::DeleteElementWithHandler(
4996 Handle<JSProxy>::cast(object), index, mode);
4998 return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
5002 MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5005 if (object->IsJSProxy()) {
5006 return JSProxy::DeletePropertyWithHandler(
5007 Handle<JSProxy>::cast(object), name, mode);
5009 return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
5013 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5016 DCHECK(IsFastObjectElementsKind(kind) ||
5017 kind == DICTIONARY_ELEMENTS);
5018 if (IsFastObjectElementsKind(kind)) {
5019 int length = IsJSArray()
5020 ? Smi::cast(JSArray::cast(this)->length())->value()
5021 : elements->length();
5022 for (int i = 0; i < length; ++i) {
5023 Object* element = elements->get(i);
5024 if (!element->IsTheHole() && element == object) return true;
5028 SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5029 if (!key->IsUndefined()) return true;
// Check whether this object references another object.
// Scans the constructor, prototype, named properties, indexed properties,
// and (for functions) the function context for a reference to |obj|.
// NOTE(review): several lines (returns, braces, the switch header) appear to
// have been dropped from this body; the remaining code is documented as-is.
bool JSObject::ReferencesObject(Object* obj) {
  Map* map_of_this = map();
  Heap* heap = GetHeap();
  // The raw-pointer walk below must not be interrupted by GC.
  DisallowHeapAllocation no_allocation;
  // Is the object the constructor for this object?
  if (map_of_this->constructor() == obj) {
  // Is the object the prototype for this object?
  if (map_of_this->prototype() == obj) {
  // Check if the object is among the named properties.
  Object* key = SlowReverseLookup(obj);
  if (!key->IsUndefined()) {
  // Check if the object is among the indexed properties.
  ElementsKind kind = GetElementsKind();
  // Raw pixels and external arrays do not reference other
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case EXTERNAL_##TYPE##_ELEMENTS: \
    case TYPE##_ELEMENTS: \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
    case FAST_SMI_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case DICTIONARY_ELEMENTS: {
      // Fast-object and dictionary elements live in a FixedArray; scan it.
      FixedArray* elements = FixedArray::cast(this->elements());
      if (ReferencesObjectFromElements(elements, kind, obj)) return true;
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      FixedArray* parameter_map = FixedArray::cast(elements());
      // Check the mapped parameters.
      int length = parameter_map->length();
      // Slots 0 and 1 hold the context and the arguments backing store.
      for (int i = 2; i < length; ++i) {
        Object* value = parameter_map->get(i);
        if (!value->IsTheHole() && value == obj) return true;
      // Check the arguments.
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
          FAST_HOLEY_ELEMENTS;
      if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
  // For functions check the context.
  if (IsJSFunction()) {
    // Get the constructor function for arguments array.
    Map* arguments_map =
        heap->isolate()->context()->native_context()->sloppy_arguments_map();
    JSFunction* arguments_function =
        JSFunction::cast(arguments_map->constructor());
    // Get the context and don't check if it is the native context.
    JSFunction* f = JSFunction::cast(this);
    Context* context = f->context();
    if (context->IsNativeContext()) {
    // Check the non-special context slots.
    for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
      // Only check JS objects.
      if (context->get(i)->IsJSObject()) {
        JSObject* ctxobj = JSObject::cast(context->get(i));
        // If it is an arguments array check the content.
        if (ctxobj->map()->constructor() == arguments_function) {
          if (ctxobj->ReferencesObject(obj)) {
        } else if (ctxobj == obj) {
    // Check the context extension (if any) if it can have references.
    if (context->has_extension() && !context->IsCatchContext()) {
      // With harmony scoping, a JSFunction may have a global context.
      // TODO(mvstanton): walk into the ScopeInfo.
      if (FLAG_harmony_scoping && context->IsGlobalContext()) {
      // Recurse into the context extension object.
      return JSObject::cast(context->extension())->ReferencesObject(obj);
  // No references to object.
// Makes |object| non-extensible: normalizes elements to dictionary mode,
// marks the backing store as permanently slow, and installs a copy of the
// map with the extensible bit cleared.
// NOTE(review): some closing braces / trailing statements appear to have
// been dropped from this body; documented as-is.
MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();
  // Already non-extensible: nothing to do.
  if (!object->map()->is_extensible()) return object;
  // Respect embedder access checks; report and return false on denial.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(
          object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  // For the global proxy, forward the operation to the real global object.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return object;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return PreventExtensions(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
  // It's not possible to seal objects with external array elements
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    THROW_NEW_ERROR(isolate,
                    NewTypeError("cant_prevent_ext_external_array_elements",
                                 HandleVector(&object, 1)),
  // If there are fast elements we normalize.
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  // Make sure that we never go back to fast case.
  dictionary->set_requires_slow_elements();
  // Do a map transition, other objects with this map may still
  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
  Handle<Map> new_map = Map::Copy(handle(object->map()));
  new_map->set_is_extensible(false);
  JSObject::MigrateToMap(object, new_map);
  DCHECK(!object->map()->is_extensible());
  // Notify Object.observe observers, if any.
  if (object->map()->is_observed()) {
    EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
                        isolate->factory()->the_hole_value());
5203 template<typename Dictionary>
5204 static void FreezeDictionary(Dictionary* dictionary) {
5205 int capacity = dictionary->Capacity();
5206 for (int i = 0; i < capacity; i++) {
5207 Object* k = dictionary->KeyAt(i);
5208 if (dictionary->IsKey(k) &&
5209 !(k->IsSymbol() && Symbol::cast(k)->is_private())) {
5210 PropertyDetails details = dictionary->DetailsAt(i);
5211 int attrs = DONT_DELETE;
5212 // READ_ONLY is an invalid attribute for JS setters/getters.
5213 if (details.type() == CALLBACKS) {
5214 Object* v = dictionary->ValueAt(i);
5215 if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
5216 if (!v->IsAccessorPair()) attrs |= READ_ONLY;
5220 details = details.CopyAddAttributes(
5221 static_cast<PropertyAttributes>(attrs));
5222 dictionary->DetailsAtPut(i, details);
// Freezes |object| per ES5 Object.freeze: makes it non-extensible, converts
// elements to (permanently slow) dictionary mode, and marks every own
// property DONT_DELETE and — where valid — READ_ONLY.
// NOTE(review): several closing braces / trailing statements appear to have
// been dropped from this body; documented as-is.
MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
  // Freezing sloppy arguments should be handled elsewhere.
  DCHECK(!object->HasSloppyArgumentsElements());
  DCHECK(!object->map()->is_observed());
  // Already frozen: nothing to do.
  if (object->map()->is_frozen()) return object;
  Isolate* isolate = object->GetIsolate();
  // Respect embedder access checks, mirroring PreventExtensions.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(
          object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  // Forward from the global proxy to the actual global object.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return object;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return Freeze(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
  // It's not possible to freeze objects with external array elements
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    THROW_NEW_ERROR(isolate,
                    NewTypeError("cant_prevent_ext_external_array_elements",
                                 HandleVector(&object, 1)),
  Handle<SeededNumberDictionary> new_element_dictionary;
  if (!object->elements()->IsDictionary()) {
    int length = object->IsJSArray()
        ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
        : object->elements()->length();
    // NOTE(review): the declarations of |capacity| and |used| appear to have
    // been dropped just above this call.
    object->GetElementsCapacityAndUsage(&capacity, &used);
    new_element_dictionary = SeededNumberDictionary::New(isolate, used);
    // Move elements to a dictionary; avoid calling NormalizeElements to avoid
    // unnecessary transitions.
    new_element_dictionary = CopyFastElementsToDictionary(
        handle(object->elements()), length, new_element_dictionary);
    // No existing elements, use a pre-allocated empty backing store
    new_element_dictionary =
        isolate->factory()->empty_slow_element_dictionary();
  Handle<Map> old_map(object->map(), isolate);
  // Reuse an existing "frozen" map transition when one is available.
  int transition_index = old_map->SearchTransition(
      isolate->heap()->frozen_symbol());
  if (transition_index != TransitionArray::kNotFound) {
    Handle<Map> transition_map(old_map->GetTransition(transition_index));
    DCHECK(transition_map->has_dictionary_elements());
    DCHECK(transition_map->is_frozen());
    DCHECK(!transition_map->is_extensible());
    JSObject::MigrateToMap(object, transition_map);
  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
    // Create a new descriptor array with fully-frozen properties
    Handle<Map> new_map = Map::CopyForFreeze(old_map);
    JSObject::MigrateToMap(object, new_map);
    DCHECK(old_map->is_dictionary_map() || !old_map->is_prototype_map());
    // Slow path: need to normalize properties for safety
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
    // Create a new map, since other objects with this map may be extensible.
    // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
    Handle<Map> new_map = Map::Copy(handle(object->map()));
    new_map->set_is_extensible(false);
    new_map->set_elements_kind(DICTIONARY_ELEMENTS);
    JSObject::MigrateToMap(object, new_map);
    // Freeze dictionary-mode properties
    FreezeDictionary(object->property_dictionary());
  DCHECK(object->map()->has_dictionary_elements());
  if (!new_element_dictionary.is_null()) {
    object->set_elements(*new_element_dictionary);
  if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
    SeededNumberDictionary* dictionary = object->element_dictionary();
    // Make sure we never go back to the fast case
    dictionary->set_requires_slow_elements();
    // Freeze all elements in the dictionary
    FreezeDictionary(dictionary);
5329 void JSObject::SetObserved(Handle<JSObject> object) {
5330 DCHECK(!object->IsJSGlobalProxy());
5331 DCHECK(!object->IsJSGlobalObject());
5332 Isolate* isolate = object->GetIsolate();
5333 Handle<Map> new_map;
5334 Handle<Map> old_map(object->map(), isolate);
5335 DCHECK(!old_map->is_observed());
5336 int transition_index = old_map->SearchTransition(
5337 isolate->heap()->observed_symbol());
5338 if (transition_index != TransitionArray::kNotFound) {
5339 new_map = handle(old_map->GetTransition(transition_index), isolate);
5340 DCHECK(new_map->is_observed());
5341 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5342 new_map = Map::CopyForObserved(old_map);
5344 new_map = Map::Copy(old_map);
5345 new_map->set_is_observed();
5347 JSObject::MigrateToMap(object, new_map);
5351 Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
5352 Representation representation,
5354 Isolate* isolate = object->GetIsolate();
5355 Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
5356 return Object::WrapForRead(isolate, raw_value, representation);
5360 template<class ContextObject>
5361 class JSObjectWalkVisitor {
5363 JSObjectWalkVisitor(ContextObject* site_context, bool copying,
5364 JSObject::DeepCopyHints hints)
5365 : site_context_(site_context),
5369 MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);
5372 MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
5373 Handle<JSObject> object,
5374 Handle<JSObject> value) {
5375 Handle<AllocationSite> current_site = site_context()->EnterNewScope();
5376 MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
5377 site_context()->ExitScope(current_site, value);
5378 return copy_of_value;
5381 inline ContextObject* site_context() { return site_context_; }
5382 inline Isolate* isolate() { return site_context()->isolate(); }
5384 inline bool copying() const { return copying_; }
5387 ContextObject* site_context_;
5388 const bool copying_;
5389 const JSObject::DeepCopyHints hints_;
// Walks (and, when copying() is set, deep-copies) |object| plus every
// JSObject reachable through its own properties and elements. Used to
// deep-copy object/array literals and to install allocation mementos.
// NOTE(review): this body appears to be missing a number of lines (the
// elements switch header, else-branches, braces, some macro arguments);
// documented as-is.
template <class ContextObject>
MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
    Handle<JSObject> object) {
  Isolate* isolate = this->isolate();
  bool copying = this->copying();
  bool shallow = hints_ == JSObject::kObjectIsShallow;
  // Guard against unbounded recursion on deeply nested literals.
  StackLimitCheck check(isolate);
  if (check.HasOverflowed()) {
    isolate->StackOverflow();
    return MaybeHandle<JSObject>();
  // Bring deprecated-map objects up to date before walking them.
  if (object->map()->is_deprecated()) {
    JSObject::MigrateInstance(object);
  Handle<JSObject> copy;
  Handle<AllocationSite> site_to_pass;
  if (site_context()->ShouldCreateMemento(object)) {
    site_to_pass = site_context()->current();
  copy = isolate->factory()->CopyJSObjectWithAllocationSite(
      object, site_to_pass);
  DCHECK(copying || copy.is_identical_to(object));
  ElementsKind kind = copy->GetElementsKind();
  // Copy-on-write element stores are shared, not copied; count them.
  if (copying && IsFastSmiOrObjectElementsKind(kind) &&
      FixedArray::cast(copy->elements())->map() ==
      isolate->heap()->fixed_cow_array_map()) {
    isolate->counters()->cow_arrays_created_runtime()->Increment();
  HandleScope scope(isolate);
  // Deep copy own properties.
  if (copy->HasFastProperties()) {
    Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
    int limit = copy->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      // Only FIELD descriptors can hold sub-objects that need walking.
      if (details.type() != FIELD) continue;
      FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
      Handle<Object> value(object->RawFastPropertyAt(index), isolate);
      if (value->IsJSObject()) {
        ASSIGN_RETURN_ON_EXCEPTION(
            VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
        Representation representation = details.representation();
        value = Object::NewStorageFor(isolate, value, representation);
      copy->FastPropertyAtPut(index, *value);
    // Slow path: enumerate own property names and copy one by one.
    Handle<FixedArray> names =
        isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties());
    copy->GetOwnPropertyNames(*names, 0);
    for (int i = 0; i < names->length(); i++) {
      DCHECK(names->get(i)->IsString());
      Handle<String> key_string(String::cast(names->get(i)));
      Maybe<PropertyAttributes> maybe =
          JSReceiver::GetOwnPropertyAttributes(copy, key_string);
      DCHECK(maybe.has_value);
      PropertyAttributes attributes = maybe.value;
      // Only deep copy fields from the object literal expression.
      // In particular, don't try to copy the length attribute of
      if (attributes != NONE) continue;
      Handle<Object> value =
          Object::GetProperty(copy, key_string).ToHandleChecked();
      if (value->IsJSObject()) {
        Handle<JSObject> result;
        ASSIGN_RETURN_ON_EXCEPTION(
            VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
        // Creating object copy for literals. No strict mode needed.
        JSObject::SetProperty(copy, key_string, result, SLOPPY).Assert();
  // Deep copy own elements.
  // Pixel elements cannot be created using an object literal.
  DCHECK(!copy->HasExternalArrayElements());
    case FAST_SMI_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS: {
      Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
      if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
        // COW arrays never contain sub-objects; nothing to walk.
        for (int i = 0; i < elements->length(); i++) {
          DCHECK(!elements->get(i)->IsJSObject());
        for (int i = 0; i < elements->length(); i++) {
          Handle<Object> value(elements->get(i), isolate);
          DCHECK(value->IsSmi() ||
                 value->IsTheHole() ||
                 (IsFastObjectElementsKind(copy->GetElementsKind())));
          if (value->IsJSObject()) {
            Handle<JSObject> result;
            ASSIGN_RETURN_ON_EXCEPTION(
                VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
            elements->set(i, *result);
    case DICTIONARY_ELEMENTS: {
      Handle<SeededNumberDictionary> element_dictionary(
          copy->element_dictionary());
      int capacity = element_dictionary->Capacity();
      for (int i = 0; i < capacity; i++) {
        Object* k = element_dictionary->KeyAt(i);
        if (element_dictionary->IsKey(k)) {
          Handle<Object> value(element_dictionary->ValueAt(i), isolate);
          if (value->IsJSObject()) {
            Handle<JSObject> result;
            ASSIGN_RETURN_ON_EXCEPTION(
                VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
            element_dictionary->ValueAtPut(i, *result);
    case SLOPPY_ARGUMENTS_ELEMENTS:
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case EXTERNAL_##TYPE##_ELEMENTS: \
    case TYPE##_ELEMENTS: \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      // No contained objects, nothing to do.
5570 MaybeHandle<JSObject> JSObject::DeepWalk(
5571 Handle<JSObject> object,
5572 AllocationSiteCreationContext* site_context) {
5573 JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
5575 MaybeHandle<JSObject> result = v.StructureWalk(object);
5576 Handle<JSObject> for_assert;
5577 DCHECK(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
5582 MaybeHandle<JSObject> JSObject::DeepCopy(
5583 Handle<JSObject> object,
5584 AllocationSiteUsageContext* site_context,
5585 DeepCopyHints hints) {
5586 JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
5587 MaybeHandle<JSObject> copy = v.StructureWalk(object);
5588 Handle<JSObject> for_assert;
5589 DCHECK(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
5594 // Tests for the fast common case for property enumeration:
5595 // - This object and all prototypes has an enum cache (which means that
5596 // it is no proxy, has no interceptors and needs no access checks).
5597 // - This object has no elements.
5598 // - No prototype has enumerable properties/elements.
5599 bool JSReceiver::IsSimpleEnum() {
5600 for (PrototypeIterator iter(GetIsolate(), this,
5601 PrototypeIterator::START_AT_RECEIVER);
5602 !iter.IsAtEnd(); iter.Advance()) {
5603 if (!iter.GetCurrent()->IsJSObject()) return false;
5604 JSObject* curr = JSObject::cast(iter.GetCurrent());
5605 int enum_length = curr->map()->EnumLength();
5606 if (enum_length == kInvalidEnumCacheSentinel) return false;
5607 if (curr->IsAccessCheckNeeded()) return false;
5608 DCHECK(!curr->HasNamedInterceptor());
5609 DCHECK(!curr->HasIndexedInterceptor());
5610 if (curr->NumberOfEnumElements() > 0) return false;
5611 if (curr != this && enum_length != 0) return false;
5617 static bool FilterKey(Object* key, PropertyAttributes filter) {
5618 if ((filter & SYMBOLIC) && key->IsSymbol()) {
5622 if ((filter & PRIVATE_SYMBOL) &&
5623 key->IsSymbol() && Symbol::cast(key)->is_private()) {
5627 if ((filter & STRING) && !key->IsSymbol()) {
5635 int Map::NumberOfDescribedProperties(DescriptorFlag which,
5636 PropertyAttributes filter) {
5638 DescriptorArray* descs = instance_descriptors();
5639 int limit = which == ALL_DESCRIPTORS
5640 ? descs->number_of_descriptors()
5641 : NumberOfOwnDescriptors();
5642 for (int i = 0; i < limit; i++) {
5643 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
5644 !FilterKey(descs->GetKey(i), filter)) {
5652 int Map::NextFreePropertyIndex() {
5654 int number_of_own_descriptors = NumberOfOwnDescriptors();
5655 DescriptorArray* descs = instance_descriptors();
5656 for (int i = 0; i < number_of_own_descriptors; i++) {
5657 if (descs->GetType(i) == FIELD) {
5658 int current_index = descs->GetFieldIndex(i);
5659 if (current_index > max_index) max_index = current_index;
5662 return max_index + 1;
5666 static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
5667 int len = array->length();
5668 for (int i = 0; i < len; i++) {
5669 Object* e = array->get(i);
5670 if (!(e->IsString() || e->IsNumber())) return false;
5676 static Handle<FixedArray> ReduceFixedArrayTo(
5677 Handle<FixedArray> array, int length) {
5678 DCHECK(array->length() >= length);
5679 if (array->length() == length) return array;
5681 Handle<FixedArray> new_array =
5682 array->GetIsolate()->factory()->NewFixedArray(length);
5683 for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
// Returns the enumerable own property keys of |object|, reusing (and, when
// |cache_result| is set, populating) the map's enum cache for fast-mode
// objects; falls back to the property dictionary otherwise.
// NOTE(review): some lines (declarations, braces, else-branches) appear to
// have been dropped from this body; documented as-is.
static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
                                              bool cache_result) {
  Isolate* isolate = object->GetIsolate();
  if (object->HasFastProperties()) {
    int own_property_count = object->map()->EnumLength();
    // If the enum length of the given map is set to kInvalidEnumCache, this
    // means that the map itself has never used the present enum cache. The
    // first step to using the cache is to set the enum length of the map by
    // counting the number of own descriptors that are not DONT_ENUM or
    if (own_property_count == kInvalidEnumCacheSentinel) {
      own_property_count = object->map()->NumberOfDescribedProperties(
          OWN_DESCRIPTORS, DONT_SHOW);
    DCHECK(own_property_count == object->map()->NumberOfDescribedProperties(
        OWN_DESCRIPTORS, DONT_SHOW));
    if (object->map()->instance_descriptors()->HasEnumCache()) {
      DescriptorArray* desc = object->map()->instance_descriptors();
      Handle<FixedArray> keys(desc->GetEnumCache(), isolate);
      // In case the number of properties required in the enum are actually
      // present, we can reuse the enum cache. Otherwise, this means that the
      // enum cache was generated for a previous (smaller) version of the
      // Descriptor Array. In that case we regenerate the enum cache.
      if (own_property_count <= keys->length()) {
        if (cache_result) object->map()->SetEnumLength(own_property_count);
        isolate->counters()->enum_cache_hits()->Increment();
        return ReduceFixedArrayTo(keys, own_property_count);
    Handle<Map> map(object->map());
    // No descriptors at all: trivially empty key set.
    if (map->instance_descriptors()->IsEmpty()) {
      isolate->counters()->enum_cache_hits()->Increment();
      if (cache_result) map->SetEnumLength(0);
      return isolate->factory()->empty_fixed_array();
    isolate->counters()->enum_cache_misses()->Increment();
    // Build the key array (and, for FIELD-only maps, a parallel array of
    // load-by-field indices used by fast for-in).
    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
        own_property_count);
    Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
        own_property_count);
    Handle<DescriptorArray> descs =
        Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
    int size = map->NumberOfOwnDescriptors();
    for (int i = 0; i < size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      Object* key = descs->GetKey(i);
      if (!(details.IsDontEnum() || key->IsSymbol())) {
        storage->set(index, key);
        if (!indices.is_null()) {
          // A non-FIELD descriptor disables the indices fast path entirely.
          if (details.type() != FIELD) {
            indices = Handle<FixedArray>();
            FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
            int load_by_field_index = field_index.GetLoadByFieldIndex();
            indices->set(index, Smi::FromInt(load_by_field_index));
    DCHECK(index == storage->length());
    // Install the freshly built cache on the descriptor array.
    Handle<FixedArray> bridge_storage =
        isolate->factory()->NewFixedArray(
            DescriptorArray::kEnumCacheBridgeLength);
    DescriptorArray* desc = object->map()->instance_descriptors();
    desc->SetEnumCache(*bridge_storage,
                       indices.is_null() ? Object::cast(Smi::FromInt(0))
                                         : Object::cast(*indices));
    object->map()->SetEnumLength(own_property_count);
    // Dictionary-mode properties: copy enumerable keys out directly.
    Handle<NameDictionary> dictionary(object->property_dictionary());
    int length = dictionary->NumberOfEnumElements();
    return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
    dictionary->CopyEnumKeysTo(*storage);
// Collects the enumerable keys of |object| and — unless |type| == OWN_ONLY —
// of its prototype chain, honoring proxies (via the proxy-enumerate trap),
// embedder access checks, and indexed/named interceptors. Element keys are
// added before property keys on each object in the chain.
// NOTE(review): several lines (call arguments, braces, the final return)
// appear to have been dropped from this body; documented as-is.
MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
                                            KeyCollectionType type) {
  USE(ContainsOnlyValidKeys);
  Isolate* isolate = object->GetIsolate();
  Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
  // Used below to detect arguments objects, whose keys must not be cached.
  Handle<JSFunction> arguments_function(
      JSFunction::cast(isolate->sloppy_arguments_map()->constructor()));
  // Only collect keys if access is permitted.
  for (PrototypeIterator iter(isolate, object,
                              PrototypeIterator::START_AT_RECEIVER);
       !iter.IsAtEnd(); iter.Advance()) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      // Proxies delegate key collection to the JS proxy-enumerate helper.
      Handle<JSProxy> proxy(JSProxy::cast(*PrototypeIterator::GetCurrent(iter)),
      Handle<Object> args[] = { proxy };
      Handle<Object> names;
      ASSIGN_RETURN_ON_EXCEPTION(
          Execution::Call(isolate,
                          isolate->proxy_enumerate(),
      ASSIGN_RETURN_ON_EXCEPTION(
          FixedArray::AddKeysFromArrayLike(
              content, Handle<JSObject>::cast(names)),
    Handle<JSObject> current =
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
    // Check access rights if required.
    if (current->IsAccessCheckNeeded() &&
        !isolate->MayNamedAccess(
            current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
      isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
      RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
    // Compute the element keys.
    Handle<FixedArray> element_keys =
        isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
    current->GetEnumElementKeys(*element_keys);
    ASSIGN_RETURN_ON_EXCEPTION(
        FixedArray::UnionOfKeys(content, element_keys),
    DCHECK(ContainsOnlyValidKeys(content));
    // Add the element keys from the interceptor.
    if (current->HasIndexedInterceptor()) {
      Handle<JSObject> result;
      if (JSObject::GetKeysForIndexedInterceptor(
              current, object).ToHandle(&result)) {
        ASSIGN_RETURN_ON_EXCEPTION(
            FixedArray::AddKeysFromArrayLike(content, result),
      DCHECK(ContainsOnlyValidKeys(content));
    // We can cache the computed property keys if access checks are
    // not needed and no interceptors are involved.
    // We do not use the cache if the object has elements and
    // therefore it does not make sense to cache the property names
    // for arguments objects. Arguments objects will always have
    // Wrapped strings have elements, but don't have an elements
    // array or dictionary. So the fast inline test for whether to
    // use the cache says yes, so we should not create a cache.
    bool cache_enum_keys =
        ((current->map()->constructor() != *arguments_function) &&
         !current->IsJSValue() &&
         !current->IsAccessCheckNeeded() &&
         !current->HasNamedInterceptor() &&
         !current->HasIndexedInterceptor());
    // Compute the property keys and cache them if possible.
    ASSIGN_RETURN_ON_EXCEPTION(
        FixedArray::UnionOfKeys(
            content, GetEnumPropertyKeys(current, cache_enum_keys)),
    DCHECK(ContainsOnlyValidKeys(content));
    // Add the property keys from the interceptor.
    if (current->HasNamedInterceptor()) {
      Handle<JSObject> result;
      if (JSObject::GetKeysForNamedInterceptor(
              current, object).ToHandle(&result)) {
        ASSIGN_RETURN_ON_EXCEPTION(
            FixedArray::AddKeysFromArrayLike(content, result),
      DCHECK(ContainsOnlyValidKeys(content));
    // If we only want own properties we bail out after the first
    if (type == OWN_ONLY) break;
5899 // Try to update an accessor in an elements dictionary. Return true if the
5900 // update succeeded, and false otherwise.
5901 static bool UpdateGetterSetterInDictionary(
5902 SeededNumberDictionary* dictionary,
5906 PropertyAttributes attributes) {
5907 int entry = dictionary->FindEntry(index);
5908 if (entry != SeededNumberDictionary::kNotFound) {
5909 Object* result = dictionary->ValueAt(entry);
5910 PropertyDetails details = dictionary->DetailsAt(entry);
5911 if (details.type() == CALLBACKS && result->IsAccessorPair()) {
5912 DCHECK(details.IsConfigurable());
5913 if (details.attributes() != attributes) {
5914 dictionary->DetailsAtPut(
5916 PropertyDetails(attributes, CALLBACKS, index));
5918 AccessorPair::cast(result)->SetComponents(getter, setter);
// Installs a getter/setter pair for an element on |object|. Attempts an
// in-place update of an existing AccessorPair in a dictionary backing store;
// otherwise allocates a fresh AccessorPair and installs it via
// SetElementCallback.
// NOTE(review): several lines (the index parameter, breaks, call arguments,
// braces) appear to have been dropped from this body; documented as-is.
void JSObject::DefineElementAccessor(Handle<JSObject> object,
                                     Handle<Object> getter,
                                     Handle<Object> setter,
                                     PropertyAttributes attributes) {
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case EXTERNAL_##TYPE##_ELEMENTS: \
    case TYPE##_ELEMENTS: \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
      // Ignore getters and setters on pixel and external array elements.
    case DICTIONARY_ELEMENTS:
      if (UpdateGetterSetterInDictionary(object->element_dictionary(),
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      // Ascertain whether we have read-only properties or an existing
      // getter/setter pair in an arguments elements dictionary backing
      FixedArray* parameter_map = FixedArray::cast(object->elements());
      uint32_t length = parameter_map->length();
      // Slots [2, length) alias mapped parameters; out-of-range -> NULL.
      index < (length - 2) ? parameter_map->get(index + 2) : NULL;
      if (probe == NULL || probe->IsTheHole()) {
        FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
        if (arguments->IsDictionary()) {
          SeededNumberDictionary* dictionary =
              SeededNumberDictionary::cast(arguments);
          if (UpdateGetterSetterInDictionary(dictionary,
  // Fallthrough path: install a freshly allocated accessor pair.
  Isolate* isolate = object->GetIsolate();
  Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
  accessors->SetComponents(*getter, *setter);
  SetElementCallback(object, index, accessors, attributes);
5992 bool Map::DictionaryElementsInPrototypeChainOnly() {
5993 if (IsDictionaryElementsKind(elements_kind())) {
5997 for (PrototypeIterator iter(this); !iter.IsAtEnd(); iter.Advance()) {
5998 if (iter.GetCurrent()->IsJSProxy()) {
5999 // Be conservative, don't walk into proxies.
6003 if (IsDictionaryElementsKind(
6004 JSObject::cast(iter.GetCurrent())->map()->elements_kind())) {
// Installs |structure| (e.g. an AccessorPair) as a CALLBACKS element at the
// (elided) index parameter, normalizing elements to dictionary mode first
// and marking the store permanently slow.
// NOTE(review): the index parameter line and some braces/arguments appear
// to have been dropped from this body; documented as-is.
void JSObject::SetElementCallback(Handle<JSObject> object,
                                  Handle<Object> structure,
                                  PropertyAttributes attributes) {
  Heap* heap = object->GetHeap();
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
  // Normalize elements to make this operation simple.
  bool had_dictionary_elements = object->HasDictionaryElements();
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  // Update the dictionary with the new CALLBACKS property.
  dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
  // Accessors force elements to stay in slow (dictionary) mode for good.
  dictionary->set_requires_slow_elements();
  // Update the dictionary backing store on the object.
  if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
    // Also delete any parameter alias.
    // TODO(kmillikin): when deleting the last parameter alias we could
    // switch to a direct backing store without the parameter map. This
    // would allow GC of the context.
    FixedArray* parameter_map = FixedArray::cast(object->elements());
    if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
      parameter_map->set(index + 2, heap->the_hole_value());
    parameter_map->set(1, *dictionary);
    object->set_elements(*dictionary);
  if (!had_dictionary_elements) {
    // KeyedStoreICs (at least the non-generic ones) need a reset.
    heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Installs an accessor (CALLBACKS) entry for named property |name| on
// |object|: normalizes the object to dictionary properties, gives global
// objects a fresh map so stale global ICs are invalidated, then writes the
// normalized property.
6053 void JSObject::SetPropertyCallback(Handle<JSObject> object,
6055 Handle<Object> structure,
6056 PropertyAttributes attributes) {
// Prototype maps keep their in-object fields so existing fast-path users of
// the prototype stay valid; ordinary objects drop them on normalization.
6057 PropertyNormalizationMode mode = object->map()->is_prototype_map()
6058 ? KEEP_INOBJECT_PROPERTIES
6059 : CLEAR_INOBJECT_PROPERTIES;
6060 // Normalize object to make this operation simple.
6061 NormalizeProperties(object, mode, 0);
6063 // For the global object allocate a new map to invalidate the global inline
6064 // caches which have a global property cell reference directly in the code.
6065 if (object->IsGlobalObject()) {
6066 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
6067 DCHECK(new_map->is_dictionary_map());
6068 JSObject::MigrateToMap(object, new_map);
6070 // When running crankshaft, changing the map is not enough. We
6071 // need to deoptimize all functions that rely on this global
6073 Deoptimizer::DeoptimizeGlobalObject(*object);
6076 // Update the dictionary with the new CALLBACKS property.
6077 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6078 SetNormalizedProperty(object, name, structure, details);
// If this object is used as a prototype, re-enable fast-path optimizations
// that normalization disabled.
6080 ReoptimizeIfPrototype(object);
// Defines a getter/setter pair for |name| on |object| (the runtime part of
// Object.defineProperty for accessors and of __defineGetter__/Setter__).
// Handles access checks, global-proxy forwarding, element vs. named
// properties, and Object.observe change records. Returns undefined on
// success or when access is denied without a scheduled exception.
// NOTE(review): interior lines are elided in this view; the exact
// is_element branch structure is partially inferred.
6084 MaybeHandle<Object> JSObject::DefineAccessor(Handle<JSObject> object,
6086 Handle<Object> getter,
6087 Handle<Object> setter,
6088 PropertyAttributes attributes) {
6089 Isolate* isolate = object->GetIsolate();
6090 // Check access rights if needed.
6091 if (object->IsAccessCheckNeeded() &&
6092 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6093 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6094 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6095 return isolate->factory()->undefined_value();
// The global proxy delegates to the actual global object behind it.
6098 if (object->IsJSGlobalProxy()) {
6099 PrototypeIterator iter(isolate, object);
6100 if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
6101 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
6102 DefineAccessor(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)),
6103 name, getter, setter, attributes);
6104 return isolate->factory()->undefined_value();
6107 // Make sure that the top context does not change when doing callbacks or
6108 // interceptor calls.
6109 AssertNoContextChange ncc(isolate);
6111 // Try to flatten before operating on the string.
6112 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
6115 bool is_element = name->AsArrayIndex(&index);
// Object.observe bookkeeping: capture the old value so a change record can
// be emitted after the accessor is installed.
6117 Handle<Object> old_value = isolate->factory()->the_hole_value();
6118 bool is_observed = object->map()->is_observed() &&
6119 *name != isolate->heap()->hidden_string();
6120 bool preexists = false;
6123 Maybe<bool> maybe = HasOwnElement(object, index);
6124 // Workaround for a GCC 4.4.3 bug which leads to "‘preexists’ may be used
6125 // uninitialized in this function".
6126 if (!maybe.has_value) {
6128 return isolate->factory()->undefined_value();
6130 preexists = maybe.value;
// Only record the old value if the element was a plain data element (no
// existing accessor pair for it).
6131 if (preexists && GetOwnElementAccessorPair(object, index).is_null()) {
6133 Object::GetElement(isolate, object, index).ToHandleChecked();
6136 LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
6137 CHECK(GetPropertyAttributes(&it).has_value);
6138 preexists = it.IsFound();
6139 if (preexists && (it.state() == LookupIterator::DATA ||
6140 it.GetAccessors()->IsAccessorInfo())) {
6141 old_value = GetProperty(&it).ToHandleChecked();
6147 DefineElementAccessor(object, index, getter, setter, attributes);
6149 DCHECK(getter->IsSpecFunction() || getter->IsUndefined() ||
6151 DCHECK(setter->IsSpecFunction() || setter->IsUndefined() ||
6153 // At least one of the accessors needs to be a new value.
6154 DCHECK(!getter->IsNull() || !setter->IsNull());
6155 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
6156 if (it.state() == LookupIterator::ACCESS_CHECK) {
6157 // We already did an access check before. We do have access.
// Null means "leave that component unchanged" (__defineGetter__ semantics).
6160 if (!getter->IsNull()) {
6161 it.TransitionToAccessorProperty(ACCESSOR_GETTER, getter, attributes);
6163 if (!setter->IsNull()) {
6164 it.TransitionToAccessorProperty(ACCESSOR_SETTER, setter, attributes);
6169 const char* type = preexists ? "reconfigure" : "add";
6170 EnqueueChangeRecord(object, type, name, old_value);
6173 return isolate->factory()->undefined_value();
// Installs a native AccessorInfo callback on |object| under the name stored
// in |info|. Mirrors DefineAccessor's access-check and global-proxy
// handling, but rejects installation on JSArray indices, typed-array
// elements, and non-configurable/read-only named properties (per ES5 8.6.1).
// NOTE(review): interior lines are elided; some switch arms and returns are
// not visible in this view.
6177 MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
6178 Handle<AccessorInfo> info) {
6179 Isolate* isolate = object->GetIsolate();
6180 Factory* factory = isolate->factory();
6181 Handle<Name> name(Name::cast(info->name()));
6183 // Check access rights if needed.
6184 if (object->IsAccessCheckNeeded() &&
6185 !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
6186 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
6187 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6188 return factory->undefined_value();
// The global proxy delegates to the actual global object behind it.
6191 if (object->IsJSGlobalProxy()) {
6192 PrototypeIterator iter(isolate, object);
6193 if (iter.IsAtEnd()) return object;
6194 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
6196 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), info);
6199 // Make sure that the top context does not change when doing callbacks or
6200 // interceptor calls.
6201 AssertNoContextChange ncc(isolate);
6203 // Try to flatten before operating on the string.
6204 if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));
6207 bool is_element = name->AsArrayIndex(&index);
// Array index accessors would conflict with JSArray length semantics.
6210 if (object->IsJSArray()) return factory->undefined_value();
6212 // Accessors overwrite previous callbacks (cf. with getters/setters).
6213 switch (object->GetElementsKind()) {
6214 case FAST_SMI_ELEMENTS:
6216 case FAST_DOUBLE_ELEMENTS:
6217 case FAST_HOLEY_SMI_ELEMENTS:
6218 case FAST_HOLEY_ELEMENTS:
6219 case FAST_HOLEY_DOUBLE_ELEMENTS:
6222 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6223 case EXTERNAL_##TYPE##_ELEMENTS: \
6224 case TYPE##_ELEMENTS: \
6226 TYPED_ARRAYS(TYPED_ARRAY_CASE)
6227 #undef TYPED_ARRAY_CASE
6228 // Ignore getters and setters on pixel and external array
6230 return factory->undefined_value();
6232 case DICTIONARY_ELEMENTS:
6234 case SLOPPY_ARGUMENTS_ELEMENTS:
6239 SetElementCallback(object, index, info, info->property_attributes());
6242 LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
6243 CHECK(GetPropertyAttributes(&it).has_value);
6244 // ES5 forbids turning a property into an accessor if it's not
6245 // configurable. See 8.6.1 (Table 5).
6246 if (it.IsFound() && (it.IsReadOnly() || !it.IsConfigurable())) {
6247 return factory->undefined_value();
6250 SetPropertyCallback(object, name, info, info->property_attributes());
// Looks up the getter or setter (|component|) for |name| on |object|,
// searching the whole prototype chain. Element indices are resolved by
// scanning each prototype's element dictionary; named properties go through
// a LookupIterator. Returns undefined when nothing is found, access is
// denied, or a proxy is hit.
6257 MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
6259 AccessorComponent component) {
6260 Isolate* isolate = object->GetIsolate();
6262 // Make sure that the top context does not change when doing callbacks or
6263 // interceptor calls.
6264 AssertNoContextChange ncc(isolate);
6266 // Make the lookup and include prototypes.
// Element path: walk the chain manually, checking each holder's slow
// elements for a CALLBACKS entry holding an AccessorPair.
6268 if (name->AsArrayIndex(&index)) {
6269 for (PrototypeIterator iter(isolate, object,
6270 PrototypeIterator::START_AT_RECEIVER);
6271 !iter.IsAtEnd(); iter.Advance()) {
6272 Handle<Object> current = PrototypeIterator::GetCurrent(iter);
6273 // Check access rights if needed.
6274 if (current->IsAccessCheckNeeded() &&
6275 !isolate->MayNamedAccess(Handle<JSObject>::cast(current), name,
6277 isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(current),
6279 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6280 return isolate->factory()->undefined_value();
6283 if (current->IsJSObject() &&
6284 Handle<JSObject>::cast(current)->HasDictionaryElements()) {
6285 JSObject* js_object = JSObject::cast(*current);
6286 SeededNumberDictionary* dictionary = js_object->element_dictionary();
6287 int entry = dictionary->FindEntry(index);
6288 if (entry != SeededNumberDictionary::kNotFound) {
6289 Object* element = dictionary->ValueAt(entry);
6290 if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
6291 element->IsAccessorPair()) {
6292 return handle(AccessorPair::cast(element)->GetComponent(component),
// Named path: LookupIterator walks the chain and classifies each holder.
6299 LookupIterator it(object, name,
6300 LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
6301 for (; it.IsFound(); it.Next()) {
6302 switch (it.state()) {
6303 case LookupIterator::INTERCEPTOR:
6304 case LookupIterator::NOT_FOUND:
6305 case LookupIterator::TRANSITION:
6308 case LookupIterator::ACCESS_CHECK:
6309 if (it.HasAccess(v8::ACCESS_HAS)) continue;
6310 isolate->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
6312 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
6313 return isolate->factory()->undefined_value();
6315 case LookupIterator::JSPROXY:
6316 return isolate->factory()->undefined_value();
6318 case LookupIterator::DATA:
6320 case LookupIterator::ACCESSOR: {
6321 Handle<Object> maybe_pair = it.GetAccessors();
6322 if (maybe_pair->IsAccessorPair()) {
6324 AccessorPair::cast(*maybe_pair)->GetComponent(component),
6331 return isolate->factory()->undefined_value();
// Reverse lookup: finds the name of an own property whose current value is
// |value| (used e.g. for debugging/function-name inference). O(n) scan over
// descriptors (fast objects) or dictionary entries (slow objects); returns
// undefined if no property holds the value.
6335 Object* JSObject::SlowReverseLookup(Object* value) {
6336 if (HasFastProperties()) {
6337 int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
6338 DescriptorArray* descs = map()->instance_descriptors();
6339 for (int i = 0; i < number_of_own_descriptors; i++) {
6340 if (descs->GetType(i) == FIELD) {
6342 RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i));
// Double fields are boxed in mutable heap numbers, so compare numeric
// values instead of object identity.
6343 if (descs->GetDetails(i).representation().IsDouble()) {
6344 DCHECK(property->IsMutableHeapNumber());
6345 if (value->IsNumber() && property->Number() == value->Number()) {
6346 return descs->GetKey(i);
6348 } else if (property == value) {
6349 return descs->GetKey(i);
6351 } else if (descs->GetType(i) == CONSTANT) {
6352 if (descs->GetConstant(i) == value) {
6353 return descs->GetKey(i);
6357 return GetHeap()->undefined_value();
6359 return property_dictionary()->SlowReverseLookup(value);
// Allocates a new map of the same instance type as |map| with the given
// |instance_size|, copying prototype, constructor and bit fields, then
// resetting the per-map bookkeeping bits (descriptor ownership/count, enum
// cache, deprecation, stability, slack tracking) so the copy starts clean.
// Building block for all higher-level map copy operations below.
6364 Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
6365 Handle<Map> result = map->GetIsolate()->factory()->NewMap(
6366 map->instance_type(), instance_size);
6367 result->set_prototype(map->prototype());
6368 result->set_constructor(map->constructor());
6369 result->set_bit_field(map->bit_field());
6370 result->set_bit_field2(map->bit_field2());
// bit_field3 is copied field-by-field: the copy owns (zero) descriptors and
// must not inherit the source's enum cache or deprecation state.
6371 int new_bit_field3 = map->bit_field3();
6372 new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
6373 new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
6374 new_bit_field3 = EnumLengthBits::update(new_bit_field3,
6375 kInvalidEnumCacheSentinel);
6376 new_bit_field3 = Deprecated::update(new_bit_field3, false);
6377 if (!map->is_dictionary_map()) {
6378 new_bit_field3 = IsUnstable::update(new_bit_field3, false);
6380 new_bit_field3 = ConstructionCount::update(new_bit_field3,
6381 JSFunction::kNoSlackTracking);
6382 result->set_bit_field3(new_bit_field3);
// Returns a dictionary-mode (normalized) map equivalent to |fast_map|,
// consulting the per-native-context NormalizedMapCache when available so
// repeated normalizations share one map. On a cache miss the fresh map is
// stored back into the cache. Also notifies dependent code that the fast
// map's leaf layout changed.
6387 Handle<Map> Map::Normalize(Handle<Map> fast_map,
6388 PropertyNormalizationMode mode) {
6389 DCHECK(!fast_map->is_dictionary_map());
6391 Isolate* isolate = fast_map->GetIsolate();
// The cache slot may be undefined during bootstrapping.
6392 Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
6394 bool use_cache = !maybe_cache->IsUndefined();
6395 Handle<NormalizedMapCache> cache;
6396 if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);
6398 Handle<Map> new_map;
6399 if (use_cache && cache->Get(fast_map, mode).ToHandle(&new_map)) {
6401 if (FLAG_verify_heap) new_map->DictionaryMapVerify();
6403 #ifdef ENABLE_SLOW_DCHECKS
6404 if (FLAG_enable_slow_asserts) {
6405 // The cached map should match newly created normalized map bit-by-bit,
6406 // except for the code cache, which can contain some ics which can be
6407 // applied to the shared map.
6408 Handle<Map> fresh = Map::CopyNormalized(fast_map, mode);
// Compare the raw map bytes before and after the code-cache/dependent-code
// words, which are legitimately allowed to differ.
6410 DCHECK(memcmp(fresh->address(),
6412 Map::kCodeCacheOffset) == 0);
6413 STATIC_ASSERT(Map::kDependentCodeOffset ==
6414 Map::kCodeCacheOffset + kPointerSize);
6415 int offset = Map::kDependentCodeOffset + kPointerSize;
6416 DCHECK(memcmp(fresh->address() + offset,
6417 new_map->address() + offset,
6418 Map::kSize - offset) == 0);
6422 new_map = Map::CopyNormalized(fast_map, mode);
6424 cache->Set(fast_map, new_map);
6425 isolate->counters()->normalized_maps()->Increment();
6428 fast_map->NotifyLeafMapLayoutChange();
// Creates a fresh dictionary-mode copy of |map|. With
// CLEAR_INOBJECT_PROPERTIES the in-object property slots are dropped and
// the instance shrinks accordingly; otherwise the in-object count is kept.
6433 Handle<Map> Map::CopyNormalized(Handle<Map> map,
6434 PropertyNormalizationMode mode) {
6435 int new_instance_size = map->instance_size();
6436 if (mode == CLEAR_INOBJECT_PROPERTIES) {
6437 new_instance_size -= map->inobject_properties() * kPointerSize;
6440 Handle<Map> result = RawCopy(map, new_instance_size);
6442 if (mode != CLEAR_INOBJECT_PROPERTIES) {
6443 result->set_inobject_properties(map->inobject_properties());
6446 result->set_dictionary_map(true);
// Normalized maps are never migration targets for deprecated fast maps.
6447 result->set_migration_target(false);
6450 if (FLAG_verify_heap) result->DictionaryMapVerify();
// Copies |map| without its descriptor array: same instance size and property
// field accounting, but an empty code cache and zero own descriptors (per
// RawCopy). Callers install descriptors afterwards.
6457 Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
6458 Handle<Map> result = RawCopy(map, map->instance_size());
6460 // Please note instance_type and instance_size are set when allocated.
6461 result->set_inobject_properties(map->inobject_properties());
6462 result->set_unused_property_fields(map->unused_property_fields());
6464 result->set_pre_allocated_property_fields(
6465 map->pre_allocated_property_fields());
6466 result->ClearCodeCache(map->GetHeap());
// Code depending on |map| being a leaf must be invalidated: it now has a
// sibling layout.
6467 map->NotifyLeafMapLayoutChange();
// Fast-path for adding one descriptor when |map| owns its descriptor array:
// appends |descriptor| into the (possibly grown) shared array, points the
// new child map at it, and links the child into the transition tree under
// the descriptor's key.
6472 Handle<Map> Map::ShareDescriptor(Handle<Map> map,
6473 Handle<DescriptorArray> descriptors,
6474 Descriptor* descriptor) {
6475 // Sanity check. This path is only to be taken if the map owns its descriptor
6476 // array, implying that its NumberOfOwnDescriptors equals the number of
6477 // descriptors in the descriptor array.
6478 DCHECK(map->NumberOfOwnDescriptors() ==
6479 map->instance_descriptors()->number_of_descriptors());
6481 Handle<Map> result = CopyDropDescriptors(map);
6482 Handle<Name> name = descriptor->GetKey();
6484 // Ensure there's space for the new descriptor in the shared descriptor array.
6485 if (descriptors->NumberOfSlackDescriptors() == 0) {
6486 int old_size = descriptors->number_of_descriptors();
6487 if (old_size == 0) {
6488 descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
// Grow geometrically (half again, minimum 1) to amortize reallocation; the
// grown array is re-read from the map after EnsureDescriptorSlack.
6490 EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
6491 descriptors = handle(map->instance_descriptors());
6496 DisallowHeapAllocation no_gc;
6497 descriptors->Append(descriptor);
6498 result->InitializeDescriptors(*descriptors);
6501 DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
6502 ConnectTransition(map, result, name, SIMPLE_TRANSITION);
// Records a transition from |parent| to |child| under |name|: the parent
// gives up descriptor ownership (the array is now shared with the child),
// the transition array gains the new entry, and the child's back pointer is
// set so the transition tree can be walked upwards.
6508 void Map::ConnectTransition(Handle<Map> parent, Handle<Map> child,
6509 Handle<Name> name, SimpleTransitionFlag flag) {
6510 parent->set_owns_descriptors(false);
6511 if (parent->is_prototype_map()) {
6512 DCHECK(child->is_prototype_map());
6514 Handle<TransitionArray> transitions =
6515 TransitionArray::CopyInsert(parent, name, child, flag);
6516 parent->set_transitions(*transitions);
6517 child->SetBackPointer(*parent);
// Copies |map| and installs |descriptors| wholesale on the copy. If an
// INSERT_TRANSITION is requested and possible, links the copy into the
// transition tree under |maybe_name|; otherwise (non-prototype maps) the
// now-unreachable descriptor array is generalized to Tagged/Any so no
// stale field types keep dependent code alive.
// NOTE(review): elided lines hide the else-branch boundaries here.
6522 Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
6523 Handle<DescriptorArray> descriptors,
6524 TransitionFlag flag,
6525 MaybeHandle<Name> maybe_name,
6526 SimpleTransitionFlag simple_flag) {
6527 DCHECK(descriptors->IsSortedNoDuplicates());
6529 Handle<Map> result = CopyDropDescriptors(map);
6530 result->InitializeDescriptors(*descriptors);
6532 if (!map->is_prototype_map()) {
6533 if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
6535 CHECK(maybe_name.ToHandle(&name));
6536 ConnectTransition(map, result, name, simple_flag);
6538 int length = descriptors->number_of_descriptors();
6539 for (int i = 0; i < length; i++) {
6540 descriptors->SetRepresentation(i, Representation::Tagged());
6541 if (descriptors->GetDetails(i).type() == FIELD) {
6542 descriptors->SetValue(i, HeapType::Any());
6552 // Since this method is used to rewrite an existing transition tree, it can
6553 // always insert transitions without checking.
// Copies |map|, installs |descriptors| and marks descriptor index
// |new_descriptor| as the last added one; adjusts unused-property-field
// accounting (wrapping by kFieldsAdded when the backing store must grow)
// and wires the copy into the transition tree under that descriptor's key.
6554 Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
6556 Handle<DescriptorArray> descriptors) {
6557 DCHECK(descriptors->IsSortedNoDuplicates());
6559 Handle<Map> result = CopyDropDescriptors(map);
6561 result->InitializeDescriptors(*descriptors);
6562 result->SetNumberOfOwnDescriptors(new_descriptor + 1);
6564 int unused_property_fields = map->unused_property_fields();
// Only FIELD descriptors consume a property slot; CONSTANT/CALLBACKS don't.
6565 if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
6566 unused_property_fields = map->unused_property_fields() - 1;
6567 if (unused_property_fields < 0) {
6568 unused_property_fields += JSObject::kFieldsAdded;
6572 result->set_unused_property_fields(unused_property_fields);
6574 Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
6575 ConnectTransition(map, result, name, SIMPLE_TRANSITION);
// Produces a map identical to |map| but with elements kind |kind|. With
// INSERT_TRANSITION (and no existing elements transition) the new map is
// linked as the elements transition of |map|, sharing descriptors when the
// source owns them; otherwise a free-floating full copy is made.
6581 Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind,
6582 TransitionFlag flag) {
6583 if (flag == INSERT_TRANSITION) {
// An existing elements transition may only be superseded when both the old
// target and the requested kind are dictionary/external kinds.
6584 DCHECK(!map->HasElementsTransition() ||
6585 ((map->elements_transition_map()->elements_kind() ==
6586 DICTIONARY_ELEMENTS ||
6587 IsExternalArrayElementsKind(
6588 map->elements_transition_map()->elements_kind())) &&
6589 (kind == DICTIONARY_ELEMENTS ||
6590 IsExternalArrayElementsKind(kind))));
6591 DCHECK(!IsFastElementsKind(kind) ||
6592 IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
6593 DCHECK(kind != map->elements_kind());
6596 bool insert_transition =
6597 flag == INSERT_TRANSITION && !map->HasElementsTransition();
6599 if (insert_transition && map->owns_descriptors()) {
6600 // In case the map owned its own descriptors, share the descriptors and
6601 // transfer ownership to the new map.
6602 Handle<Map> new_map = CopyDropDescriptors(map);
6604 ConnectElementsTransition(map, new_map);
6606 new_map->set_elements_kind(kind);
6607 new_map->InitializeDescriptors(map->instance_descriptors());
6611 // In case the map did not own its own descriptors, a split is forced by
6612 // copying the map; creating a new descriptor array cell.
6613 // Create a new free-floating map only if we are not allowed to store it.
6614 Handle<Map> new_map = Copy(map);
6616 new_map->set_elements_kind(kind);
6618 if (insert_transition) {
6619 ConnectElementsTransition(map, new_map);
// Creates the observed variant of |map| (Object.observe support): a copy
// with the is_observed bit set, sharing descriptors when |map| owns them,
// linked into the transition tree under the private observed symbol so the
// transition is found again on subsequent observations.
6626 Handle<Map> Map::CopyForObserved(Handle<Map> map) {
6627 DCHECK(!map->is_observed());
6629 Isolate* isolate = map->GetIsolate();
6631 // In case the map owned its own descriptors, share the descriptors and
6632 // transfer ownership to the new map.
6633 Handle<Map> new_map;
6634 if (map->owns_descriptors()) {
6635 new_map = CopyDropDescriptors(map);
6637 DCHECK(!map->is_prototype_map());
6638 new_map = Copy(map);
6641 new_map->set_is_observed();
// Descriptors are installed only after set_is_observed; the owns_descriptors
// branch is re-checked because Copy() handles the non-owning case itself.
6642 if (map->owns_descriptors()) {
6643 new_map->InitializeDescriptors(map->instance_descriptors());
6646 Handle<Name> name = isolate->factory()->observed_symbol();
6647 ConnectTransition(map, new_map, name, FULL_TRANSITION);
// Full free-floating copy of |map|: duplicates the own descriptors into a
// fresh array and installs them on the copy without recording a transition
// (OMIT_TRANSITION), so the result is independent of the source's tree.
6653 Handle<Map> Map::Copy(Handle<Map> map) {
6654 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6655 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
6656 Handle<DescriptorArray> new_descriptors =
6657 DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors);
6658 return CopyReplaceDescriptors(
6659 map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>());
// Creates a plain-object map with room for |inobject_properties| in-object
// slots, based on a copy of Object's initial map. The requested count is
// clamped so the instance size never exceeds JSObject::kMaxInstanceSize.
6663 Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
6664 Handle<Map> copy = Copy(handle(isolate->object_function()->initial_map()));
6666 // Check that we do not overflow the instance size when adding the extra
6667 // inobject properties. If the instance size overflows, we allocate as many
6668 // properties as we can as inobject properties.
6669 int max_extra_properties =
6670 (JSObject::kMaxInstanceSize - JSObject::kHeaderSize) >> kPointerSizeLog2;
6672 if (inobject_properties > max_extra_properties) {
6673 inobject_properties = max_extra_properties;
6676 int new_instance_size =
6677 JSObject::kHeaderSize + kPointerSize * inobject_properties;
6679 // Adjust the map with the extra inobject properties.
6680 copy->set_inobject_properties(inobject_properties);
6681 copy->set_unused_property_fields(inobject_properties);
6682 copy->set_instance_size(new_instance_size);
// The GC visitor id depends on instance size, so recompute it last.
6683 copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
// Builds the map used after Object.freeze: all own descriptors get the
// FROZEN attribute set, the map becomes non-extensible, elements go to
// dictionary mode, and the result is cached as a transition under the
// private frozen symbol.
6688 Handle<Map> Map::CopyForFreeze(Handle<Map> map) {
6689 int num_descriptors = map->NumberOfOwnDescriptors();
6690 Isolate* isolate = map->GetIsolate();
6691 Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
6692 handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN);
6693 Handle<Map> new_map = CopyReplaceDescriptors(
6694 map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol());
6696 new_map->set_is_extensible(false);
6697 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
// Returns whether |value| can be stored into descriptor slot |descriptor|
// without generalizing the map: FIELD slots require matching representation
// and field type, CONSTANT slots require object identity with the stored
// constant. (Other property types are handled in elided case arms.)
6702 bool DescriptorArray::CanHoldValue(int descriptor, Object* value) {
6703 PropertyDetails details = GetDetails(descriptor);
6704 switch (details.type()) {
6706 return value->FitsRepresentation(details.representation()) &&
6707 GetFieldType(descriptor)->NowContains(value);
// A differing constant must at least fit the recorded representation,
// otherwise the descriptor data is inconsistent.
6710 DCHECK(GetConstant(descriptor) != value ||
6711 value->FitsRepresentation(details.representation()));
6712 return GetConstant(descriptor) == value;
// Returns a map that can store |value| in descriptor slot |descriptor|:
// the map itself when it already fits (or is a dictionary map), otherwise a
// generalized map whose representation/field type admit the value.
6727 Handle<Map> Map::PrepareForDataProperty(Handle<Map> map, int descriptor,
6728 Handle<Object> value) {
6729 // Dictionaries can store any property value.
6730 if (map->is_dictionary_map()) return map;
6732 // Migrate to the newest map before storing the property.
6735 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6737 if (descriptors->CanHoldValue(descriptor, *value)) return map;
6739 Isolate* isolate = map->GetIsolate();
6740 Representation representation = value->OptimalRepresentation();
6741 Handle<HeapType> type = value->OptimalType(isolate, representation);
6743 return GeneralizeRepresentation(map, descriptor, representation, type,
// Computes the map an object should have after adding data property |name|
// with |value| and |attributes|. Reuses an existing transition when one
// matches (generalizing it for the value if needed); otherwise creates a
// new constant or field transition, falling back to a normalized
// (dictionary) map when fast properties are exhausted or disallowed.
6748 Handle<Map> Map::TransitionToDataProperty(Handle<Map> map, Handle<Name> name,
6749 Handle<Object> value,
6750 PropertyAttributes attributes,
6751 StoreFromKeyed store_mode) {
6752 // Dictionary maps can always have additional data properties.
6753 if (map->is_dictionary_map()) return map;
6755 // Migrate to the newest map before storing the property.
6758 int index = map->SearchTransition(*name);
6759 if (index != TransitionArray::kNotFound) {
6760 Handle<Map> transition(map->GetTransition(index));
6761 int descriptor = transition->LastAdded();
6763 // TODO(verwaest): Handle attributes better.
6764 DescriptorArray* descriptors = transition->instance_descriptors();
// Attribute mismatch cannot be expressed on the cached transition, so give
// up on fast mode entirely.
6765 if (descriptors->GetDetails(descriptor).attributes() != attributes) {
6766 return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
6769 return Map::PrepareForDataProperty(transition, descriptor, value);
6772 TransitionFlag flag = INSERT_TRANSITION;
6773 MaybeHandle<Map> maybe_map;
// Functions are stored as CONSTANT descriptors to enable inlining.
6774 if (value->IsJSFunction()) {
6775 maybe_map = Map::CopyWithConstant(map, name, value, attributes, flag);
6776 } else if (!map->TooManyFastProperties(store_mode)) {
6777 Isolate* isolate = name->GetIsolate();
6778 Representation representation = value->OptimalRepresentation();
6779 Handle<HeapType> type = value->OptimalType(isolate, representation);
6781 Map::CopyWithField(map, name, type, attributes, representation, flag);
6785 if (!maybe_map.ToHandle(&result)) {
6786 return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
// Returns a map with descriptor |descriptor| reconfigured to carry
// |attributes|. Currently always produces a unique generalized map (forcing
// the property into a field) instead of a cached transition.
6793 Handle<Map> Map::ReconfigureDataProperty(Handle<Map> map, int descriptor,
6794 PropertyAttributes attributes) {
6795 // Dictionaries have to be reconfigured in-place.
6796 DCHECK(!map->is_dictionary_map());
6798 // For now, give up on transitioning and just create a unique map.
6799 // TODO(verwaest/ishell): Cache transitions with different attributes.
6800 return CopyGeneralizeAllRepresentations(map, descriptor, FORCE_FIELD,
6801 attributes, "attributes mismatch");
// Computes the map an object should have after installing |accessor| as the
// getter or setter (|component|) of |name|. Reuses a matching existing
// transition or AccessorPair descriptor when possible; any mismatch in
// kind, attributes, or an already-set conflicting component falls back to
// a normalized map. Otherwise a (copied) AccessorPair is updated and a new
// CALLBACKS descriptor transition is inserted.
6805 Handle<Map> Map::TransitionToAccessorProperty(Handle<Map> map,
6807 AccessorComponent component,
6808 Handle<Object> accessor,
6809 PropertyAttributes attributes) {
6810 Isolate* isolate = name->GetIsolate();
6812 // Dictionary maps can always have additional data properties.
6813 if (map->is_dictionary_map()) {
6814 // For global objects, property cells are inlined. We need to change the
6816 if (map->IsGlobalObjectMap()) return Copy(map);
6820 // Migrate to the newest map before transitioning to the new property.
6823 PropertyNormalizationMode mode = map->is_prototype_map()
6824 ? KEEP_INOBJECT_PROPERTIES
6825 : CLEAR_INOBJECT_PROPERTIES;
// First try the cached transition for |name|.
6827 int index = map->SearchTransition(*name);
6828 if (index != TransitionArray::kNotFound) {
6829 Handle<Map> transition(map->GetTransition(index));
6830 DescriptorArray* descriptors = transition->instance_descriptors();
6831 // Fast path, assume that we're modifying the last added descriptor.
6832 int descriptor = transition->LastAdded();
6833 if (descriptors->GetKey(descriptor) != *name) {
6834 // If not, search for the descriptor.
6835 descriptor = descriptors->SearchWithCache(*name, *transition);
6838 if (descriptors->GetDetails(descriptor).type() != CALLBACKS) {
6839 return Map::Normalize(map, mode);
6842 // TODO(verwaest): Handle attributes better.
6843 if (descriptors->GetDetails(descriptor).attributes() != attributes) {
6844 return Map::Normalize(map, mode);
6847 Handle<Object> maybe_pair(descriptors->GetValue(descriptor), isolate);
6848 if (!maybe_pair->IsAccessorPair()) {
6849 return Map::Normalize(map, mode);
6852 Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
6853 if (pair->get(component) != *accessor) {
6854 return Map::Normalize(map, mode);
// No cached transition: look for an existing own CALLBACKS descriptor.
6860 Handle<AccessorPair> pair;
6861 DescriptorArray* old_descriptors = map->instance_descriptors();
6862 int descriptor = old_descriptors->SearchWithCache(*name, *map);
6863 if (descriptor != DescriptorArray::kNotFound) {
6864 PropertyDetails old_details = old_descriptors->GetDetails(descriptor);
6865 if (old_details.type() != CALLBACKS) {
6866 return Map::Normalize(map, mode);
6869 if (old_details.attributes() != attributes) {
6870 return Map::Normalize(map, mode);
6873 Handle<Object> maybe_pair(old_descriptors->GetValue(descriptor), isolate);
6874 if (!maybe_pair->IsAccessorPair()) {
6875 return Map::Normalize(map, mode);
6878 Object* current = Handle<AccessorPair>::cast(maybe_pair)->get(component);
// Installing the identical accessor again is a no-op.
6879 if (current == *accessor) return map;
6881 if (!current->IsTheHole()) {
6882 return Map::Normalize(map, mode);
// Copy-on-write: the existing pair may be shared with other maps.
6885 pair = AccessorPair::Copy(Handle<AccessorPair>::cast(maybe_pair));
6886 } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
6887 map->TooManyFastProperties(CERTAINLY_NOT_STORE_FROM_KEYED)) {
6888 return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
6890 pair = isolate->factory()->NewAccessorPair();
6893 pair->set(component, *accessor);
6894 TransitionFlag flag = INSERT_TRANSITION;
6895 CallbacksDescriptor new_desc(name, pair, attributes);
6896 return Map::CopyInsertDescriptor(map, &new_desc, flag);
// Appends a brand-new descriptor to a copy of |map|. Uses the cheap
// ShareDescriptor path (shared array, simple transition) when the map owns
// its descriptors and may grow its transition array; otherwise duplicates
// the descriptor array with one slot of slack and appends there.
6900 Handle<Map> Map::CopyAddDescriptor(Handle<Map> map,
6901 Descriptor* descriptor,
6902 TransitionFlag flag) {
6903 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6905 // Ensure the key is unique.
6906 descriptor->KeyToUniqueName();
6908 if (flag == INSERT_TRANSITION &&
6909 map->owns_descriptors() &&
6910 map->CanHaveMoreTransitions()) {
6911 return ShareDescriptor(map, descriptors, descriptor);
6914 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
6915 descriptors, map->NumberOfOwnDescriptors(), 1);
6916 new_descriptors->Append(descriptor);
6918 return CopyReplaceDescriptors(
6919 map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION);
// Adds or replaces |descriptor| in a copy of |map|: if a descriptor with
// the same key already exists it is replaced in place, otherwise the new
// descriptor is appended via CopyAddDescriptor.
6923 Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map,
6924 Descriptor* descriptor,
6925 TransitionFlag flag) {
6926 Handle<DescriptorArray> old_descriptors(map->instance_descriptors());
6928 // Ensure the key is unique.
6929 descriptor->KeyToUniqueName();
6931 // We replace the key if it is already present.
6932 int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
6933 if (index != DescriptorArray::kNotFound) {
6934 return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
6936 return CopyAddDescriptor(map, descriptor, flag);
// Copies the first |enumeration_index| descriptors (plus |slack| empty
// slots) without changing any attributes — thin wrapper over
// CopyUpToAddAttributes with NONE.
6940 Handle<DescriptorArray> DescriptorArray::CopyUpTo(
6941 Handle<DescriptorArray> desc,
6942 int enumeration_index,
6944 return DescriptorArray::CopyUpToAddAttributes(
6945 desc, enumeration_index, NONE, slack);
// Copies the first |enumeration_index| descriptors into a new array with
// |slack| extra capacity, OR-ing |attributes| into each copied entry (used
// by Object.freeze/seal). Private symbols are never modified, and READ_ONLY
// is not applied to accessor pairs. Re-sorts when the source had more
// descriptors than were copied.
6949 Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
6950 Handle<DescriptorArray> desc,
6951 int enumeration_index,
6952 PropertyAttributes attributes,
6954 if (enumeration_index + slack == 0) {
6955 return desc->GetIsolate()->factory()->empty_descriptor_array();
6958 int size = enumeration_index;
6960 Handle<DescriptorArray> descriptors =
6961 DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
6962 DescriptorArray::WhitenessWitness witness(*descriptors);
// Slow path only when attributes actually change; otherwise a plain
// element-wise CopyFrom suffices.
6964 if (attributes != NONE) {
6965 for (int i = 0; i < size; ++i) {
6966 Object* value = desc->GetValue(i);
6967 Name* key = desc->GetKey(i);
6968 PropertyDetails details = desc->GetDetails(i);
6969 // Bulk attribute changes never affect private properties.
6970 if (!key->IsSymbol() || !Symbol::cast(key)->is_private()) {
6971 int mask = DONT_DELETE | DONT_ENUM;
6972 // READ_ONLY is an invalid attribute for JS setters/getters.
6973 if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
6976 details = details.CopyAddAttributes(
6977 static_cast<PropertyAttributes>(attributes & mask));
6979 Descriptor inner_desc(
6980 handle(key), handle(value, desc->GetIsolate()), details);
6981 descriptors->Set(i, &inner_desc, witness);
6984 for (int i = 0; i < size; ++i) {
6985 descriptors->CopyFrom(i, *desc, witness);
6989 if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();
// Copies |map| with the descriptor at |insertion_index| replaced by
// |descriptor| (same key required). Replacing the last descriptor qualifies
// as a simple transition; otherwise a full transition is recorded.
6995 Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
6996 Handle<DescriptorArray> descriptors,
6997 Descriptor* descriptor,
6998 int insertion_index,
6999 TransitionFlag flag) {
7000 // Ensure the key is unique.
7001 descriptor->KeyToUniqueName();
7003 Handle<Name> key = descriptor->GetKey();
7004 DCHECK(*key == descriptors->GetKey(insertion_index));
7006 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7007 descriptors, map->NumberOfOwnDescriptors());
7009 new_descriptors->Replace(insertion_index, descriptor);
7011 SimpleTransitionFlag simple_flag =
7012 (insertion_index == descriptors->number_of_descriptors() - 1)
7015 return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
// Adds |code| under |name| to this map's code cache, lazily allocating the
// CodeCache object on first use (an empty FixedArray sentinel means "no
// cache yet").
7019 void Map::UpdateCodeCache(Handle<Map> map,
7021 Handle<Code> code) {
7022 Isolate* isolate = map->GetIsolate();
7023 HandleScope scope(isolate);
7024 // Allocate the code cache if not present.
7025 if (map->code_cache()->IsFixedArray()) {
7026 Handle<Object> result = isolate->factory()->NewCodeCache();
7027 map->set_code_cache(*result);
7030 // Update the code cache.
7031 Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
7032 CodeCache::Update(code_cache, name, code);
// Looks up cached code for |name|/|flags|; returns undefined when no code
// cache has been allocated yet (the cache field then still holds the
// FixedArray sentinel) or nothing matches.
7036 Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
7037 // Do a lookup if a code cache exists.
7038 if (!code_cache()->IsFixedArray()) {
7039 return CodeCache::cast(code_cache())->Lookup(name, flags);
7041 return GetHeap()->undefined_value();
// Returns the internal code-cache index for (name, code); meant to be
// paired with RemoveFromCodeCache (see the no-GC contract there).
int Map::IndexInCodeCache(Object* name, Code* code) {
  // Get the internal index if a code cache exists.
  // (Non-FixedArray means a CodeCache object is present; see
  // FindInCodeCache above.)
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->GetIndex(name, code);
// Removes the entry previously located via IndexInCodeCache.
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
  // No GC is supposed to happen between a call to IndexInCodeCache and
  // RemoveFromCodeCache so the code cache must be there.
  DCHECK(!code_cache()->IsFixedArray());
  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
// An iterator over all map transitions in an descriptor array, reusing the
// constructor field of the map while it is running. Negative values in
// the constructor field indicate an active map transition iteration. The
// original constructor is restored after iterating over all entries.
class IntrusiveMapTransitionIterator {
  IntrusiveMapTransitionIterator(
      Map* map, TransitionArray* transition_array, Object* constructor)
        transition_array_(transition_array),
        constructor_(constructor) { }

  // Begins iteration by stashing -1 (a negative Smi cursor) into the
  // map's constructor slot, unless iteration is already in progress.
  void StartIfNotStarted() {
    DCHECK(!(*IteratorField())->IsSmi() || IsIterating());
    if (!(*IteratorField())->IsSmi()) {
      DCHECK(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(-1);

  // True while the constructor slot holds a negative Smi cursor.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() < 0;

  // Decodes the cursor (index == -value - 1), advances it, and returns
  // the next transition target; restores the original constructor once
  // all transitions have been visited.
    DCHECK(IsIterating());
    int value = Smi::cast(*IteratorField())->value();
    int index = -value - 1;
    int number_of_transitions = transition_array_->number_of_transitions();
    while (index < number_of_transitions) {
      *IteratorField() = Smi::FromInt(value - 1);
      return transition_array_->GetTarget(index);

    // Exhausted: put the real constructor back.
    *IteratorField() = constructor_;

  // The borrowed storage: the map's constructor slot.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);

  TransitionArray* transition_array_;
  Object* constructor_;
// An iterator over all prototype transitions, reusing the constructor field
// of the map while it is running. Positive values in the constructor field
// indicate an active prototype transition iteration. The original constructor
// is restored after iterating over all entries.
class IntrusivePrototypeTransitionIterator {
  IntrusivePrototypeTransitionIterator(
      Map* map, HeapObject* proto_trans, Object* constructor)
      : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }

  // Begins iteration by storing a non-negative Smi cursor (0) in the
  // constructor slot, unless already iterating.
  void StartIfNotStarted() {
    if (!(*IteratorField())->IsSmi()) {
      DCHECK(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(0);

  // True while the constructor slot holds a non-negative Smi cursor.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() >= 0;

  // Returns the next transition map and bumps the cursor; restores the
  // original constructor once all entries have been visited.
    DCHECK(IsIterating());
    int transitionNumber = Smi::cast(*IteratorField())->value();
    if (transitionNumber < NumberOfTransitions()) {
      *IteratorField() = Smi::FromInt(transitionNumber + 1);
      return GetTransition(transitionNumber);

    *IteratorField() = constructor_;

  // The borrowed storage: the map's constructor slot.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);

  // Entry count, read out of the prototype-transition array's header.
  int NumberOfTransitions() {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
    return Smi::cast(num)->value();

  Map* GetTransition(int transitionNumber) {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));

  // Array index of the map slot for the given transition entry.
  int IndexFor(int transitionNumber) {
    return Map::kProtoTransitionHeaderSize +
        Map::kProtoTransitionMapOffset +
        transitionNumber * Map::kProtoTransitionElementsPerEntry;

  HeapObject* proto_trans_;
  Object* constructor_;
// To traverse the transition tree iteratively, we have to store two kinds of
// information in a map: The parent map in the traversal and which children of a
// node have already been visited. To do this without additional memory, we
// temporarily reuse two fields with known values:
//
// (1) The map of the map temporarily holds the parent, and is restored to the
//     meta map afterwards.
//
// (2) The info which children have already been visited depends on which part
//     of the map we currently iterate. We use the constructor field of the
//     map to store the current index. We can do that because the constructor
//     is the same for all involved maps.
//
//   (a) If we currently follow normal map transitions, we temporarily store
//       the current index in the constructor field, and restore it to the
//       original constructor afterwards. Note that a single descriptor can
//       have 0, 1, or 2 transitions.
//
//   (b) If we currently follow prototype transitions, we temporarily store
//       the current index in the constructor field, and restore it to the
//       original constructor afterwards.
//
// Note that the child iterator is just a concatenation of two iterators: One
// iterating over map transitions and one iterating over prototype transisitons.
class TraversableMap : public Map {
  // Record the parent in the traversal within this map. Note that this destroys
  void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }

  // Reset the current map's map, returning the parent previously stored in it.
  TraversableMap* GetAndResetParent() {
    TraversableMap* old_parent = static_cast<TraversableMap*>(map());
    set_map_no_write_barrier(GetHeap()->meta_map());

  // If we have an unvisited child map, return that one and advance. If we have
  // none, return NULL and restore the overwritten constructor field.
  TraversableMap* ChildIteratorNext(Object* constructor) {
    if (!HasTransitionArray()) return NULL;

    TransitionArray* transition_array = transitions();
    // First exhaust the prototype transitions...
    if (transition_array->HasPrototypeTransitions()) {
      HeapObject* proto_transitions =
          transition_array->GetPrototypeTransitions();
      IntrusivePrototypeTransitionIterator proto_iterator(this,
      proto_iterator.StartIfNotStarted();
      if (proto_iterator.IsIterating()) {
        Map* next = proto_iterator.Next();
        if (next != NULL) return static_cast<TraversableMap*>(next);

    // ...then the normal map transitions.
    IntrusiveMapTransitionIterator transition_iterator(this,
    transition_iterator.StartIfNotStarted();
    if (transition_iterator.IsIterating()) {
      Map* next = transition_iterator.Next();
      if (next != NULL) return static_cast<TraversableMap*>(next);
// Traverse the transition tree in postorder without using the C++ stack by
// doing pointer reversal.
void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
  // Make sure that we do not allocate in the callback.
  DisallowHeapAllocation no_allocation;
  TraversableMap* current = static_cast<TraversableMap*>(this);
  // Get the root constructor here to restore it later when finished iterating
  Object* root_constructor = constructor();
    // Descend while there are unvisited children...
    TraversableMap* child = current->ChildIteratorNext(root_constructor);
    if (child != NULL) {
      child->SetParent(current);
    // ...otherwise visit the node (postorder) and climb back to its parent.
    TraversableMap* parent = current->GetAndResetParent();
    callback(current, data);
    // Stop once the root itself has been visited.
    if (current == this) break;
// Inserts (name, code) into |code_cache|: NORMAL stubs go into a hash
// table (allocated on demand), everything else into the linear default
// cache.
void CodeCache::Update(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // The number of monomorphic stubs for normal load/store/call IC's can grow to
  // a large number and therefore they need to go into a hash table. They are
  // used to load global properties from cells.
  if (code->type() == Code::NORMAL) {
    // Make sure that a hash table is allocated for the normal load code cache.
    if (code_cache->normal_type_cache()->IsUndefined()) {
      Handle<Object> result =
          CodeCacheHashTable::New(code_cache->GetIsolate(),
                                  CodeCacheHashTable::kInitialSize);
      code_cache->set_normal_type_cache(*result);

    UpdateNormalTypeCache(code_cache, name, code);

    DCHECK(code_cache->default_cache()->IsFixedArray());
    UpdateDefaultCache(code_cache, name, code);
// Inserts (name, code) into the linear default cache, reusing a deleted
// slot when possible and growing the backing array when full.
void CodeCache::UpdateDefaultCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // When updating the default code cache we disregard the type encoded in the
  // flags. This allows call constant stubs to overwrite call field
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // First check whether we can update existing code cache without
  Handle<FixedArray> cache = handle(code_cache->default_cache());
  int length = cache->length();
  DisallowHeapAllocation no_alloc;
  int deleted_index = -1;
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i);
    // Null marks a deleted entry; remember the first one for reuse.
    if (key->IsNull()) {
      if (deleted_index < 0) deleted_index = i;
    // Undefined marks the end of the used portion: insert here (or in an
    // earlier deleted slot).
    if (key->IsUndefined()) {
      if (deleted_index >= 0) i = deleted_index;
      cache->set(i + kCodeCacheEntryNameOffset, *name);
      cache->set(i + kCodeCacheEntryCodeOffset, *code);
    // Same name: overwrite in place when the (typeless) flags match.
    if (name->Equals(Name::cast(key))) {
        Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
      if (Code::RemoveTypeFromFlags(found) == flags) {
        cache->set(i + kCodeCacheEntryCodeOffset, *code);

  // Reached the end of the code cache. If there were deleted
  // elements, reuse the space for the first of them.
  if (deleted_index >= 0) {
    cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
    cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);

  // Extend the code cache with some new entries (at least one). Must be a
  // multiple of the entry size.
  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
  new_length = new_length - new_length % kCodeCacheEntrySize;
  DCHECK((new_length % kCodeCacheEntrySize) == 0);
  cache = FixedArray::CopySize(cache, new_length);

  // Add the (name, code) pair to the new cache.
  cache->set(length + kCodeCacheEntryNameOffset, *name);
  cache->set(length + kCodeCacheEntryCodeOffset, *code);
  code_cache->set_default_cache(*cache);
7349 void CodeCache::UpdateNormalTypeCache(
7350 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7351 // Adding a new entry can cause a new cache to be allocated.
7352 Handle<CodeCacheHashTable> cache(
7353 CodeCacheHashTable::cast(code_cache->normal_type_cache()));
7354 Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
7355 code_cache->set_normal_type_cache(*new_cache);
7359 Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
7360 Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
7361 if (result->IsCode()) {
7362 if (Code::cast(result)->flags() == flags) return result;
7363 return GetHeap()->undefined_value();
7365 return LookupNormalTypeCache(name, flags);
// Scans the linear default cache for |name| with matching (typeless)
// flags; returns the code or undefined.
Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
  FixedArray* cache = default_cache();
  int length = cache->length();
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i + kCodeCacheEntryNameOffset);
    // Skip deleted elements.
    if (key->IsNull()) continue;
    // Undefined marks the end of the used portion: not found.
    if (key->IsUndefined()) return key;
    if (name->Equals(Name::cast(key))) {
      Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
      if (Code::RemoveTypeFromFlags(code->flags()) == flags) {

  return GetHeap()->undefined_value();
7388 Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
7389 if (!normal_type_cache()->IsUndefined()) {
7390 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7391 return cache->Lookup(name, flags);
7393 return GetHeap()->undefined_value();
// Returns an index usable with RemoveByIndex: the hash-table index for
// NORMAL stubs, otherwise the code-slot index in the default cache
// (i + 1, i.e. the name slot is at index - 1).
int CodeCache::GetIndex(Object* name, Code* code) {
  if (code->type() == Code::NORMAL) {
    if (normal_type_cache()->IsUndefined()) return -1;
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->GetIndex(Name::cast(name), code->flags());

  // Linear scan of the default cache for the code object itself.
  FixedArray* array = default_cache();
  int len = array->length();
  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
    if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
// Removes the entry previously located via GetIndex. |index| is a
// hash-table index for NORMAL stubs, else the default-cache code-slot
// index (the name lives one slot before it).
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
  if (code->type() == Code::NORMAL) {
    DCHECK(!normal_type_cache()->IsUndefined());
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    DCHECK(cache->GetIndex(Name::cast(name), code->flags()) == index);
    cache->RemoveByIndex(index);

  FixedArray* array = default_cache();
  DCHECK(array->length() >= index && array->get(index)->IsCode());
  // Use null instead of undefined for deleted elements to distinguish
  // deleted elements from unused elements. This distinction is used
  // when looking up in the cache and when updating the cache.
  DCHECK_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
  array->set_null(index - 1);  // Name.
  array->set_null(index);      // Code.
// The key in the code cache hash table consists of the property name and the
// code object. The actual match is on the name and the code flags. If a key
// is created using the flags and not a code object it can only be used for
// lookup not to create a new entry.
class CodeCacheHashTableKey : public HashTableKey {
  // Lookup-only key: no code object available.
  CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
      : name_(name), flags_(flags), code_() { }

  // Insertion-capable key: flags are taken from the code object.
  CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
      : name_(name), flags_(code->flags()), code_(code) { }

  // Stored entries are (name, code) FixedArray pairs; a key matches when
  // both the name and the code's flags match.
  bool IsMatch(Object* other) OVERRIDE {
    if (!other->IsFixedArray()) return false;
    FixedArray* pair = FixedArray::cast(other);
    Name* name = Name::cast(pair->get(0));
    Code::Flags flags = Code::cast(pair->get(1))->flags();
    if (flags != flags_) {
    return name_->Equals(name);

  // Hash combines the name hash with the flags.
  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
    return name->Hash() ^ flags;

  uint32_t Hash() OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }

  uint32_t HashForObject(Object* obj) OVERRIDE {
    FixedArray* pair = FixedArray::cast(obj);
    Name* name = Name::cast(pair->get(0));
    Code* code = Code::cast(pair->get(1));
    return NameFlagsHashHelper(name, code->flags());

  // Materializes the key as a heap-allocated (name, code) pair; requires
  // the code object, so only valid for insertion-capable keys.
  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    Handle<Code> code = code_.ToHandleChecked();
    Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
    pair->set(0, *name_);
    pair->set(1, *code);

  // TODO(jkummerow): We should be able to get by without this.
  MaybeHandle<Code> code_;
7485 Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
7486 DisallowHeapAllocation no_alloc;
7487 CodeCacheHashTableKey key(handle(name), flags);
7488 int entry = FindEntry(&key);
7489 if (entry == kNotFound) return GetHeap()->undefined_value();
7490 return get(EntryToIndex(entry) + 1);
// Inserts (name, code), growing the table if necessary; callers must use
// the returned (possibly reallocated) table.
Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
    Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
  CodeCacheHashTableKey key(name, code);

  Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);

  int entry = new_cache->FindInsertionEntry(key.Hash());
  // Materialize the key as a heap object before storing it.
  Handle<Object> k = key.AsHandle(cache->GetIsolate());

  new_cache->set(EntryToIndex(entry), *k);
  new_cache->set(EntryToIndex(entry) + 1, *code);
  new_cache->ElementAdded();
7510 int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
7511 DisallowHeapAllocation no_alloc;
7512 CodeCacheHashTableKey key(handle(name), flags);
7513 int entry = FindEntry(&key);
7514 return (entry == kNotFound) ? -1 : entry;
// Marks the entry at |index| as deleted by overwriting both its key and
// value slots with the-hole.
void CodeCacheHashTable::RemoveByIndex(int index) {
  Heap* heap = GetHeap();
  set(EntryToIndex(index), heap->the_hole_value());
  set(EntryToIndex(index) + 1, heap->the_hole_value());
// Inserts |code| keyed on the map list (and flags), allocating the
// backing hash table lazily on first use.
// NOTE(review): a Code::Flags flags parameter appears to be used below —
// confirm against the declaration.
void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
                                  MapHandleList* maps,
                                  Handle<Code> code) {
  Isolate* isolate = code_cache->GetIsolate();
  if (code_cache->cache()->IsUndefined()) {
    Handle<PolymorphicCodeCacheHashTable> result =
        PolymorphicCodeCacheHashTable::New(
            PolymorphicCodeCacheHashTable::kInitialSize);
    code_cache->set_cache(*result);
    // This entry shouldn't be contained in the cache yet.
    DCHECK(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
               ->Lookup(maps, flags)->IsUndefined());

  // Put() may reallocate the table; re-install whatever comes back.
  Handle<PolymorphicCodeCacheHashTable> hash_table =
      handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
  Handle<PolymorphicCodeCacheHashTable> new_cache =
      PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
  code_cache->set_cache(*new_cache);
7551 Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
7552 Code::Flags flags) {
7553 if (!cache()->IsUndefined()) {
7554 PolymorphicCodeCacheHashTable* hash_table =
7555 PolymorphicCodeCacheHashTable::cast(cache());
7556 return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
7558 return GetIsolate()->factory()->undefined_value();
// Despite their name, object of this class are not stored in the actual
// hash table; instead they're temporarily used for lookups. It is therefore
// safe to have a weak (non-owning) pointer to a MapList as a member field.
class PolymorphicCodeCacheHashTableKey : public HashTableKey {
  // Callers must ensure that |maps| outlives the newly constructed object.
  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
        code_flags_(code_flags) {}

  // Matches when flags agree and the two map lists contain the same maps
  // (order-insensitive).
  bool IsMatch(Object* other) OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    FromObject(other, &other_flags, &other_maps);
    if (code_flags_ != other_flags) return false;
    if (maps_->length() != other_maps.length()) return false;
    // Compare just the hashes first because it's faster.
    int this_hash = MapsHashHelper(maps_, code_flags_);
    int other_hash = MapsHashHelper(&other_maps, other_flags);
    if (this_hash != other_hash) return false;

    // Full comparison: for each map in maps_, look for an equivalent map in
    // other_maps. This implementation is slow, but probably good enough for
    // now because the lists are short (<= 4 elements currently).
    for (int i = 0; i < maps_->length(); ++i) {
      bool match_found = false;
      for (int j = 0; j < other_maps.length(); ++j) {
        if (*(maps_->at(i)) == *(other_maps.at(j))) {
      if (!match_found) return false;

  // Order-insensitive hash: XOR of the map hashes, seeded with the flags.
  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
    uint32_t hash = code_flags;
    for (int i = 0; i < maps->length(); ++i) {
      hash ^= maps->at(i)->Hash();

  uint32_t Hash() OVERRIDE {
    return MapsHashHelper(maps_, code_flags_);

  uint32_t HashForObject(Object* obj) OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    FromObject(obj, &other_flags, &other_maps);
    return MapsHashHelper(&other_maps, other_flags);

  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // The maps in |maps_| must be copied to a newly allocated FixedArray,
    // both because the referenced MapList is short-lived, and because C++
    // objects can't be stored in the heap anyway.
    // Layout: slot 0 holds the flags Smi, slots 1..n hold the maps.
    Handle<FixedArray> list =
        isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
    list->set(0, Smi::FromInt(code_flags_));
    for (int i = 0; i < maps_->length(); ++i) {
      list->set(i + 1, *maps_->at(i));

  // Decodes a stored (flags, maps...) FixedArray back into flags + list.
  static MapHandleList* FromObject(Object* obj,
                                   MapHandleList* maps) {
    FixedArray* list = FixedArray::cast(obj);
    *code_flags = Smi::cast(list->get(0))->value();
    for (int i = 1; i < list->length(); ++i) {
      maps->Add(Handle<Map>(Map::cast(list->get(i))));

  MapHandleList* maps_;  // weak.
  // One more than the usual polymorphism limit, to accommodate the
  // megamorphic marker map.
  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
7651 Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
7653 DisallowHeapAllocation no_alloc;
7654 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
7655 int entry = FindEntry(&key);
7656 if (entry == kNotFound) return GetHeap()->undefined_value();
7657 return get(EntryToIndex(entry) + 1);
7661 Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
7662 Handle<PolymorphicCodeCacheHashTable> hash_table,
7663 MapHandleList* maps,
7665 Handle<Code> code) {
7666 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
7667 Handle<PolymorphicCodeCacheHashTable> cache =
7668 EnsureCapacity(hash_table, 1, &key);
7669 int entry = cache->FindInsertionEntry(key.Hash());
7671 Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());
7672 cache->set(EntryToIndex(entry), *obj);
7673 cache->set(EntryToIndex(entry) + 1, *code);
7674 cache->ElementAdded();
7679 void FixedArray::Shrink(int new_length) {
7680 DCHECK(0 <= new_length && new_length <= length());
7681 if (new_length < length()) {
7682 GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(
7683 this, length() - new_length);
// Merges the element keys of |array| into |content| via the array's
// elements accessor; may fail with a pending exception.
MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
    Handle<FixedArray> content,
    Handle<JSObject> array) {
  DCHECK(array->IsJSArray() || array->HasSloppyArgumentsElements());
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      array->GetIsolate(), result,
      accessor->AddElementsToFixedArray(array, array, content),

#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    // Every produced key must be a number or a name.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      DCHECK(current->IsNumber() || current->IsName());
// Returns the union of two key arrays (no receiver/holder involved);
// may fail with a pending exception.
MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
                                                Handle<FixedArray> second) {
  ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      first->GetIsolate(), result,
      accessor->AddElementsToFixedArray(
          Handle<Object>::null(),      // receiver
          Handle<JSObject>::null(),    // holder
          Handle<FixedArrayBase>::cast(second)),

#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    // Every produced key must be a number or a name.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      DCHECK(current->IsNumber() || current->IsName());
// Returns a copy of |array| with length |new_length|, truncating or
// leaving trailing slots as allocated; length 0 shares the canonical
// empty array.
Handle<FixedArray> FixedArray::CopySize(
    Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
  Isolate* isolate = array->GetIsolate();
  if (new_length == 0) return isolate->factory()->empty_fixed_array();
  Handle<FixedArray> result =
      isolate->factory()->NewFixedArray(new_length, pretenure);
  // Copy the content.
  DisallowHeapAllocation no_gc;
  int len = array->length();
  if (new_length < len) len = new_length;
  // We are taking the map from the old fixed array so the map is sure to
  // be an immortal immutable object.
  result->set_map_no_write_barrier(array->map());
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, array->get(i), mode);
7759 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
7760 DisallowHeapAllocation no_gc;
7761 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
7762 for (int index = 0; index < len; index++) {
7763 dest->set(dest_pos+index, get(pos+index), mode);
7769 bool FixedArray::IsEqualTo(FixedArray* other) {
7770 if (length() != other->length()) return false;
7771 for (int i = 0 ; i < length(); ++i) {
7772 if (get(i) != other->get(i)) return false;
7779 Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
7780 int number_of_descriptors,
7782 DCHECK(0 <= number_of_descriptors);
7783 Factory* factory = isolate->factory();
7784 // Do not use DescriptorArray::cast on incomplete object.
7785 int size = number_of_descriptors + slack;
7786 if (size == 0) return factory->empty_descriptor_array();
7787 // Allocate the array of keys.
7788 Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));
7790 result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
7791 result->set(kEnumCacheIndex, Smi::FromInt(0));
7792 return Handle<DescriptorArray>::cast(result);
// Drops any cached enumeration data: a Smi zero in the enum-cache slot
// marks "no cache".
void DescriptorArray::ClearEnumCache() {
  set(kEnumCacheIndex, Smi::FromInt(0));
// Overwrites the descriptor at |index| while preserving its position in
// the sorted-key order.
void DescriptorArray::Replace(int index, Descriptor* descriptor) {
  descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
  Set(index, descriptor);
// Installs an enum cache via |bridge_storage|, a small FixedArray that
// holds the cache array and (optionally) an indices cache.
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
                                   FixedArray* new_cache,
                                   Object* new_index_cache) {
  DCHECK(bridge_storage->length() >= kEnumCacheBridgeLength);
  DCHECK(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
  // A replacement cache may only grow an existing one.
  DCHECK(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeCacheIndex, new_cache);
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
  set(kEnumCacheIndex, bridge_storage);
// Copies the descriptor at |index| from |src| into the same slot of this
// (still-white) descriptor array.
void DescriptorArray::CopyFrom(int index,
                               DescriptorArray* src,
                               const WhitenessWitness& witness) {
  Object* value = src->GetValue(index);
  PropertyDetails details = src->GetDetails(index);
  Descriptor desc(handle(src->GetKey(index)),
                  handle(value, src->GetIsolate()),
  Set(index, &desc, witness);
// We need the whiteness witness since sort will reshuffle the entries in the
// descriptor array. If the descriptor array were to be black, the shuffling
// would move a slot that was already recorded as pointing into an evacuation
// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::Sort() {
  // In-place heap sort on the sorted-key index table, ordered by key hash.
  int len = number_of_descriptors();
  // Reset sorting since the descriptor array might contain invalid pointers.
  for (int i = 0; i < len; ++i) SetSortedKey(i, i);
  // Bottom-up max-heap construction.
  // Index of the last node with children
  const int max_parent_index = (len / 2) - 1;
  for (int i = max_parent_index; i >= 0; --i) {
    int parent_index = i;
    const uint32_t parent_hash = GetSortedKey(i)->Hash();
    while (parent_index <= max_parent_index) {
      int child_index = 2 * parent_index + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      // Pick the larger of the two children, if a right child exists.
      if (child_index + 1 < len) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_hash = right_child_hash;
      // Heap property restored for this subtree.
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      // Now element at child_index could be < its children.
      parent_index = child_index;  // parent_hash remains correct.

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    SwapSortedKeys(0, i);
    // Shift down the new top element.
    int parent_index = 0;
    const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
    const int max_parent_index = (i / 2) - 1;
    while (parent_index <= max_parent_index) {
      int child_index = parent_index * 2 + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < i) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_hash = right_child_hash;
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      parent_index = child_index;

  DCHECK(IsSortedNoDuplicates());
7893 Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
7894 Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
7895 copy->set_getter(pair->getter());
7896 copy->set_setter(pair->setter());
7901 Object* AccessorPair::GetComponent(AccessorComponent component) {
7902 Object* accessor = get(component);
7903 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
// Allocates input data sized for |deopt_entry_count| deoptimization
// entries (must be positive).
Handle<DeoptimizationInputData> DeoptimizationInputData::New(
    Isolate* isolate, int deopt_entry_count, PretenureFlag pretenure) {
  DCHECK(deopt_entry_count > 0);
  return Handle<DeoptimizationInputData>::cast(
      isolate->factory()->NewFixedArray(LengthFor(deopt_entry_count),
// Allocates output data for |number_of_deopt_points| entries; zero
// entries share the canonical empty fixed array.
Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
    int number_of_deopt_points,
    PretenureFlag pretenure) {
  Handle<FixedArray> result;
  if (number_of_deopt_points == 0) {
    result = isolate->factory()->empty_fixed_array();
    result = isolate->factory()->NewFixedArray(
        LengthOfFixedArray(number_of_deopt_points), pretenure);
  return Handle<DeoptimizationOutputData>::cast(result);
7932 bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
7933 if (IsEmpty()) return other->IsEmpty();
7934 if (other->IsEmpty()) return false;
7935 if (length() != other->length()) return false;
7936 for (int i = 0; i < length(); ++i) {
7937 if (get(i) != other->get(i)) return false;
7944 bool String::LooksValid() {
7945 if (!GetIsolate()->heap()->Contains(this)) return false;
// Describes the string's characters when they are contiguous in memory;
// returns an empty FlatContent for strings that are not flat (e.g. a
// cons string with a non-empty second part). Heap allocation must be
// disallowed while the result is in use.
String::FlatContent String::GetFlatContent() {
  DCHECK(!AllowHeapAllocation::IsAllowed());
  int length = this->length();
  StringShape shape(this);
  String* string = this;
  // A cons string whose second part is empty is just its first part.
  if (shape.representation_tag() == kConsStringTag) {
    ConsString* cons = ConsString::cast(string);
    if (cons->second()->length() != 0) {
      return FlatContent();
    string = cons->first();
    shape = StringShape(string);
  // A sliced string is a view into its parent at the slice offset.
  if (shape.representation_tag() == kSlicedStringTag) {
    SlicedString* slice = SlicedString::cast(string);
    offset = slice->offset();
    string = slice->parent();
    shape = StringShape(string);
    // Slice parents are never cons or sliced strings themselves.
    DCHECK(shape.representation_tag() != kConsStringTag &&
           shape.representation_tag() != kSlicedStringTag);
  if (shape.encoding_tag() == kOneByteStringTag) {
    const uint8_t* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqOneByteString::cast(string)->GetChars();
      start = ExternalOneByteString::cast(string)->GetChars();
    return FlatContent(start + offset, length);
  DCHECK(shape.encoding_tag() == kTwoByteStringTag);
  if (shape.representation_tag() == kSeqStringTag) {
    start = SeqTwoByteString::cast(string)->GetChars();
    start = ExternalTwoByteString::cast(string)->GetChars();
  return FlatContent(start + offset, length);
// Converts a range of this string to a heap-allocated, NUL-terminated
// UTF-8 buffer (two passes: size, then encode). With
// ROBUST_STRING_TRAVERSAL, a string failing LooksValid() yields NULL.
// |length_return|, if non-NULL, receives the UTF-8 byte count.
// NOTE(review): int offset / int length parameters appear to be used
// below — confirm against the declaration.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int* length_return) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<char>(NULL);
  Heap* heap = GetHeap();

  // Negative length means the to the end of the string.
  if (length < 0) length = kMaxInt - offset;

  // Pass 1: compute the size of the UTF-8 string. Start at the specified
  // offset.
  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value(), offset);
  int character_position = offset;
  int last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    // |last| matters because surrogate pairs encode as one 4-byte unit.
    utf8_bytes += unibrow::Utf8::Length(character, last);

  if (length_return) {
    *length_return = utf8_bytes;

  char* result = NewArray<char>(utf8_bytes + 1);

  // Pass 2: convert the UTF-16 string to a UTF-8 buffer. Start at the
  // specified offset.
  stream.Reset(this, offset);
  character_position = offset;
  int utf8_byte_position = 0;
  last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    // Embedded NULs are special-cased per |allow_nulls| so the result
    // remains a valid C string.
    if (allow_nulls == DISALLOW_NULLS && character == 0) {
    utf8_byte_position +=
        unibrow::Utf8::Encode(result + utf8_byte_position, character, last);

  result[utf8_byte_position] = 0;
  return SmartArrayPointer<char>(result);
8044 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8045 RobustnessFlag robust_flag,
8046 int* length_return) {
8047 return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
8051 const uc16* String::GetTwoByteData(unsigned start) {
8052 DCHECK(!IsOneByteRepresentationUnderneath());
8053 switch (StringShape(this).representation_tag()) {
8055 return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8056 case kExternalStringTag:
8057 return ExternalTwoByteString::cast(this)->
8058 ExternalTwoByteStringGetData(start);
8059 case kSlicedStringTag: {
8060 SlicedString* slice = SlicedString::cast(this);
8061 return slice->parent()->GetTwoByteData(start + slice->offset());
8063 case kConsStringTag:
8072 SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
8073 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8074 return SmartArrayPointer<uc16>();
8076 Heap* heap = GetHeap();
8078 Access<ConsStringIteratorOp> op(
8079 heap->isolate()->objects_string_iterator());
8080 StringCharacterStream stream(this, op.value());
8082 uc16* result = NewArray<uc16>(length() + 1);
8085 while (stream.HasMore()) {
8086 uint16_t character = stream.GetNext();
8087 result[i++] = character;
8090 return SmartArrayPointer<uc16>(result);
8094 const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
8095 return reinterpret_cast<uc16*>(
8096 reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
8100 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8101 Relocatable* current = isolate->relocatable_top();
8102 while (current != NULL) {
8103 current->PostGarbageCollection();
8104 current = current->prev_;
8109 // Reserve space for statics needing saving and restoring.
8110 int Relocatable::ArchiveSpacePerThread() {
8111 return sizeof(Relocatable*); // NOLINT
8115 // Archive statics that are thread-local.
8116 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8117 *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8118 isolate->set_relocatable_top(NULL);
8119 return to + ArchiveSpacePerThread();
8123 // Restore statics that are thread-local.
8124 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8125 isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8126 return from + ArchiveSpacePerThread();
8130 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8131 Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8133 return thread_storage + ArchiveSpacePerThread();
8137 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8138 Iterate(v, isolate->relocatable_top());
8142 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8143 Relocatable* current = top;
8144 while (current != NULL) {
8145 current->IterateInstance(v);
8146 current = current->prev_;
8151 FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
8152 : Relocatable(isolate),
8153 str_(str.location()),
8154 length_(str->length()) {
8155 PostGarbageCollection();
8159 FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
8160 : Relocatable(isolate),
8163 length_(input.length()),
8164 start_(input.start()) {}
8167 void FlatStringReader::PostGarbageCollection() {
8168 if (str_ == NULL) return;
8169 Handle<String> str(str_);
8170 DCHECK(str->IsFlat());
8171 DisallowHeapAllocation no_gc;
8172 // This does not actually prevent the vector from being relocated later.
8173 String::FlatContent content = str->GetFlatContent();
8174 DCHECK(content.IsFlat());
8175 is_one_byte_ = content.IsOneByte();
8177 start_ = content.ToOneByteVector().start();
8179 start_ = content.ToUC16Vector().start();
8184 void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) {
8185 DCHECK(cons_string != NULL);
8186 root_ = cons_string;
8188 // Force stack blown condition to trigger restart.
8190 maximum_depth_ = kStackSize + depth_;
8191 DCHECK(StackBlown());
8195 String* ConsStringIteratorOp::Continue(int* offset_out) {
8196 DCHECK(depth_ != 0);
8197 DCHECK_EQ(0, *offset_out);
8198 bool blew_stack = StackBlown();
8199 String* string = NULL;
8200 // Get the next leaf if there is one.
8201 if (!blew_stack) string = NextLeaf(&blew_stack);
8202 // Restart search from root.
8204 DCHECK(string == NULL);
8205 string = Search(offset_out);
8207 // Ensure future calls return null immediately.
8208 if (string == NULL) Reset(NULL);
8213 String* ConsStringIteratorOp::Search(int* offset_out) {
8214 ConsString* cons_string = root_;
8215 // Reset the stack, pushing the root string.
8218 frames_[0] = cons_string;
8219 const int consumed = consumed_;
8222 // Loop until the string is found which contains the target offset.
8223 String* string = cons_string->first();
8224 int length = string->length();
8226 if (consumed < offset + length) {
8227 // Target offset is in the left branch.
8228 // Keep going if we're still in a ConString.
8229 type = string->map()->instance_type();
8230 if ((type & kStringRepresentationMask) == kConsStringTag) {
8231 cons_string = ConsString::cast(string);
8232 PushLeft(cons_string);
8235 // Tell the stack we're done descending.
8236 AdjustMaximumDepth();
8239 // Update progress through the string.
8241 // Keep going if we're still in a ConString.
8242 string = cons_string->second();
8243 type = string->map()->instance_type();
8244 if ((type & kStringRepresentationMask) == kConsStringTag) {
8245 cons_string = ConsString::cast(string);
8246 PushRight(cons_string);
8249 // Need this to be updated for the current string.
8250 length = string->length();
8251 // Account for the possibility of an empty right leaf.
8252 // This happens only if we have asked for an offset outside the string.
8254 // Reset so future operations will return null immediately.
8258 // Tell the stack we're done descending.
8259 AdjustMaximumDepth();
8260 // Pop stack so next iteration is in correct place.
8263 DCHECK(length != 0);
8264 // Adjust return values and exit.
8265 consumed_ = offset + length;
8266 *offset_out = consumed - offset;
8274 String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) {
8276 // Tree traversal complete.
8278 *blew_stack = false;
8281 // We've lost track of higher nodes.
8287 ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
8288 String* string = cons_string->second();
8289 int32_t type = string->map()->instance_type();
8290 if ((type & kStringRepresentationMask) != kConsStringTag) {
8291 // Pop stack so next iteration is in correct place.
8293 int length = string->length();
8294 // Could be a flattened ConsString.
8295 if (length == 0) continue;
8296 consumed_ += length;
8299 cons_string = ConsString::cast(string);
8300 PushRight(cons_string);
8301 // Need to traverse all the way left.
8304 string = cons_string->first();
8305 type = string->map()->instance_type();
8306 if ((type & kStringRepresentationMask) != kConsStringTag) {
8307 AdjustMaximumDepth();
8308 int length = string->length();
8309 DCHECK(length != 0);
8310 consumed_ += length;
8313 cons_string = ConsString::cast(string);
8314 PushLeft(cons_string);
8322 uint16_t ConsString::ConsStringGet(int index) {
8323 DCHECK(index >= 0 && index < this->length());
8325 // Check for a flattened cons string
8326 if (second()->length() == 0) {
8327 String* left = first();
8328 return left->Get(index);
8331 String* string = String::cast(this);
8334 if (StringShape(string).IsCons()) {
8335 ConsString* cons_string = ConsString::cast(string);
8336 String* left = cons_string->first();
8337 if (left->length() > index) {
8340 index -= left->length();
8341 string = cons_string->second();
8344 return string->Get(index);
8353 uint16_t SlicedString::SlicedStringGet(int index) {
8354 return parent()->Get(offset() + index);
8358 template <typename sinkchar>
8359 void String::WriteToFlat(String* src,
8363 String* source = src;
8367 DCHECK(0 <= from && from <= to && to <= source->length());
8368 switch (StringShape(source).full_representation_tag()) {
8369 case kOneByteStringTag | kExternalStringTag: {
8370 CopyChars(sink, ExternalOneByteString::cast(source)->GetChars() + from,
8374 case kTwoByteStringTag | kExternalStringTag: {
8376 ExternalTwoByteString::cast(source)->GetChars();
8382 case kOneByteStringTag | kSeqStringTag: {
8384 SeqOneByteString::cast(source)->GetChars() + from,
8388 case kTwoByteStringTag | kSeqStringTag: {
8390 SeqTwoByteString::cast(source)->GetChars() + from,
8394 case kOneByteStringTag | kConsStringTag:
8395 case kTwoByteStringTag | kConsStringTag: {
8396 ConsString* cons_string = ConsString::cast(source);
8397 String* first = cons_string->first();
8398 int boundary = first->length();
8399 if (to - boundary >= boundary - from) {
8400 // Right hand side is longer. Recurse over left.
8401 if (from < boundary) {
8402 WriteToFlat(first, sink, from, boundary);
8403 sink += boundary - from;
8409 source = cons_string->second();
8411 // Left hand side is longer. Recurse over right.
8412 if (to > boundary) {
8413 String* second = cons_string->second();
8414 // When repeatedly appending to a string, we get a cons string that
8415 // is unbalanced to the left, a list, essentially. We inline the
8416 // common case of sequential one-byte right child.
8417 if (to - boundary == 1) {
8418 sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
8419 } else if (second->IsSeqOneByteString()) {
8420 CopyChars(sink + boundary - from,
8421 SeqOneByteString::cast(second)->GetChars(),
8425 sink + boundary - from,
8435 case kOneByteStringTag | kSlicedStringTag:
8436 case kTwoByteStringTag | kSlicedStringTag: {
8437 SlicedString* slice = SlicedString::cast(source);
8438 unsigned offset = slice->offset();
8439 WriteToFlat(slice->parent(), sink, from + offset, to + offset);
8448 template <typename SourceChar>
8449 static void CalculateLineEndsImpl(Isolate* isolate,
8450 List<int>* line_ends,
8451 Vector<const SourceChar> src,
8452 bool include_ending_line) {
8453 const int src_len = src.length();
8454 StringSearch<uint8_t, SourceChar> search(isolate, STATIC_CHAR_VECTOR("\n"));
8456 // Find and record line ends.
8458 while (position != -1 && position < src_len) {
8459 position = search.Search(src, position);
8460 if (position != -1) {
8461 line_ends->Add(position);
8463 } else if (include_ending_line) {
8464 // Even if the last line misses a line end, it is counted.
8465 line_ends->Add(src_len);
8472 Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
8473 bool include_ending_line) {
8475 // Rough estimate of line count based on a roughly estimated average
8476 // length of (unpacked) code.
8477 int line_count_estimate = src->length() >> 4;
8478 List<int> line_ends(line_count_estimate);
8479 Isolate* isolate = src->GetIsolate();
8480 { DisallowHeapAllocation no_allocation; // ensure vectors stay valid.
8481 // Dispatch on type of strings.
8482 String::FlatContent content = src->GetFlatContent();
8483 DCHECK(content.IsFlat());
8484 if (content.IsOneByte()) {
8485 CalculateLineEndsImpl(isolate,
8487 content.ToOneByteVector(),
8488 include_ending_line);
8490 CalculateLineEndsImpl(isolate,
8492 content.ToUC16Vector(),
8493 include_ending_line);
8496 int line_count = line_ends.length();
8497 Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
8498 for (int i = 0; i < line_count; i++) {
8499 array->set(i, Smi::FromInt(line_ends[i]));
// Compares the contents of two strings by reading and comparing
// int-sized blocks of characters.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
                                            const Char* const b,
                                            int length) {
  return CompareChars(a, b, length) == 0;
}
8515 template<typename Chars1, typename Chars2>
8516 class RawStringComparator : public AllStatic {
8518 static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8519 DCHECK(sizeof(Chars1) != sizeof(Chars2));
8520 for (int i = 0; i < len; i++) {
8531 class RawStringComparator<uint16_t, uint16_t> {
8533 static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
8534 return CompareRawStringContents(a, b, len);
8540 class RawStringComparator<uint8_t, uint8_t> {
8542 static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
8543 return CompareRawStringContents(a, b, len);
8548 class StringComparator {
8551 explicit inline State(ConsStringIteratorOp* op)
8552 : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}
8554 inline void Init(String* string) {
8555 ConsString* cons_string = String::VisitFlat(this, string);
8556 op_->Reset(cons_string);
8557 if (cons_string != NULL) {
8559 string = op_->Next(&offset);
8560 String::VisitFlat(this, string, offset);
8564 inline void VisitOneByteString(const uint8_t* chars, int length) {
8565 is_one_byte_ = true;
8570 inline void VisitTwoByteString(const uint16_t* chars, int length) {
8571 is_one_byte_ = false;
8576 void Advance(int consumed) {
8577 DCHECK(consumed <= length_);
8579 if (length_ != consumed) {
8581 buffer8_ += consumed;
8583 buffer16_ += consumed;
8585 length_ -= consumed;
8590 String* next = op_->Next(&offset);
8591 DCHECK_EQ(0, offset);
8592 DCHECK(next != NULL);
8593 String::VisitFlat(this, next);
8596 ConsStringIteratorOp* const op_;
8600 const uint8_t* buffer8_;
8601 const uint16_t* buffer16_;
8605 DISALLOW_IMPLICIT_CONSTRUCTORS(State);
8609 inline StringComparator(ConsStringIteratorOp* op_1,
8610 ConsStringIteratorOp* op_2)
8615 template<typename Chars1, typename Chars2>
8616 static inline bool Equals(State* state_1, State* state_2, int to_check) {
8617 const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
8618 const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
8619 return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
8622 bool Equals(String* string_1, String* string_2) {
8623 int length = string_1->length();
8624 state_1_.Init(string_1);
8625 state_2_.Init(string_2);
8627 int to_check = Min(state_1_.length_, state_2_.length_);
8628 DCHECK(to_check > 0 && to_check <= length);
8630 if (state_1_.is_one_byte_) {
8631 if (state_2_.is_one_byte_) {
8632 is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
8634 is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
8637 if (state_2_.is_one_byte_) {
8638 is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
8640 is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
8644 if (!is_equal) return false;
8646 // Exit condition. Strings are equal.
8647 if (length == 0) return true;
8648 state_1_.Advance(to_check);
8649 state_2_.Advance(to_check);
8656 DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
8660 bool String::SlowEquals(String* other) {
8661 DisallowHeapAllocation no_gc;
8662 // Fast check: negative check with lengths.
8664 if (len != other->length()) return false;
8665 if (len == 0) return true;
8667 // Fast check: if hash code is computed for both strings
8668 // a fast negative check can be performed.
8669 if (HasHashCode() && other->HasHashCode()) {
8670 #ifdef ENABLE_SLOW_DCHECKS
8671 if (FLAG_enable_slow_asserts) {
8672 if (Hash() != other->Hash()) {
8673 bool found_difference = false;
8674 for (int i = 0; i < len; i++) {
8675 if (Get(i) != other->Get(i)) {
8676 found_difference = true;
8680 DCHECK(found_difference);
8684 if (Hash() != other->Hash()) return false;
8687 // We know the strings are both non-empty. Compare the first chars
8688 // before we try to flatten the strings.
8689 if (this->Get(0) != other->Get(0)) return false;
8691 if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
8692 const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
8693 const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
8694 return CompareRawStringContents(str1, str2, len);
8697 Isolate* isolate = GetIsolate();
8698 StringComparator comparator(isolate->objects_string_compare_iterator_a(),
8699 isolate->objects_string_compare_iterator_b());
8701 return comparator.Equals(this, other);
8705 bool String::SlowEquals(Handle<String> one, Handle<String> two) {
8706 // Fast check: negative check with lengths.
8707 int one_length = one->length();
8708 if (one_length != two->length()) return false;
8709 if (one_length == 0) return true;
8711 // Fast check: if hash code is computed for both strings
8712 // a fast negative check can be performed.
8713 if (one->HasHashCode() && two->HasHashCode()) {
8714 #ifdef ENABLE_SLOW_DCHECKS
8715 if (FLAG_enable_slow_asserts) {
8716 if (one->Hash() != two->Hash()) {
8717 bool found_difference = false;
8718 for (int i = 0; i < one_length; i++) {
8719 if (one->Get(i) != two->Get(i)) {
8720 found_difference = true;
8724 DCHECK(found_difference);
8728 if (one->Hash() != two->Hash()) return false;
8731 // We know the strings are both non-empty. Compare the first chars
8732 // before we try to flatten the strings.
8733 if (one->Get(0) != two->Get(0)) return false;
8735 one = String::Flatten(one);
8736 two = String::Flatten(two);
8738 DisallowHeapAllocation no_gc;
8739 String::FlatContent flat1 = one->GetFlatContent();
8740 String::FlatContent flat2 = two->GetFlatContent();
8742 if (flat1.IsOneByte() && flat2.IsOneByte()) {
8743 return CompareRawStringContents(flat1.ToOneByteVector().start(),
8744 flat2.ToOneByteVector().start(),
8747 for (int i = 0; i < one_length; i++) {
8748 if (flat1.Get(i) != flat2.Get(i)) return false;
8755 bool String::MarkAsUndetectable() {
8756 if (StringShape(this).IsInternalized()) return false;
8758 Map* map = this->map();
8759 Heap* heap = GetHeap();
8760 if (map == heap->string_map()) {
8761 this->set_map(heap->undetectable_string_map());
8763 } else if (map == heap->one_byte_string_map()) {
8764 this->set_map(heap->undetectable_one_byte_string_map());
8767 // Rest cannot be marked as undetectable
8772 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
8773 int slen = length();
8774 // Can't check exact length equality, but we can check bounds.
8775 int str_len = str.length();
8776 if (!allow_prefix_match &&
8778 str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
8782 unsigned remaining_in_str = static_cast<unsigned>(str_len);
8783 const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
8784 for (i = 0; i < slen && remaining_in_str > 0; i++) {
8785 unsigned cursor = 0;
8786 uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
8787 DCHECK(cursor > 0 && cursor <= remaining_in_str);
8788 if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
8789 if (i > slen - 1) return false;
8790 if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
8791 if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
8793 if (Get(i) != r) return false;
8795 utf8_data += cursor;
8796 remaining_in_str -= cursor;
8798 return (allow_prefix_match || i == slen) && remaining_in_str == 0;
8802 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
8803 int slen = length();
8804 if (str.length() != slen) return false;
8805 DisallowHeapAllocation no_gc;
8806 FlatContent content = GetFlatContent();
8807 if (content.IsOneByte()) {
8808 return CompareChars(content.ToOneByteVector().start(),
8809 str.start(), slen) == 0;
8811 for (int i = 0; i < slen; i++) {
8812 if (Get(i) != static_cast<uint16_t>(str[i])) return false;
8818 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
8819 int slen = length();
8820 if (str.length() != slen) return false;
8821 DisallowHeapAllocation no_gc;
8822 FlatContent content = GetFlatContent();
8823 if (content.IsTwoByte()) {
8824 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
8826 for (int i = 0; i < slen; i++) {
8827 if (Get(i) != str[i]) return false;
8833 uint32_t String::ComputeAndSetHash() {
8834 // Should only be called if hash code has not yet been computed.
8835 DCHECK(!HasHashCode());
8837 // Store the hash code in the object.
8838 uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
8839 set_hash_field(field);
8841 // Check the hash code is there.
8842 DCHECK(HasHashCode());
8843 uint32_t result = field >> kHashShift;
8844 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
8849 bool String::ComputeArrayIndex(uint32_t* index) {
8850 int length = this->length();
8851 if (length == 0 || length > kMaxArrayIndexSize) return false;
8852 ConsStringIteratorOp op;
8853 StringCharacterStream stream(this, &op);
8854 return StringToArrayIndex(&stream, index);
8858 bool String::SlowAsArrayIndex(uint32_t* index) {
8859 if (length() <= kMaxCachedArrayIndexLength) {
8860 Hash(); // force computation of hash code
8861 uint32_t field = hash_field();
8862 if ((field & kIsNotArrayIndexMask) != 0) return false;
8863 // Isolate the array index form the full hash field.
8864 *index = ArrayIndexValueBits::decode(field);
8867 return ComputeArrayIndex(index);
8872 Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
8873 int new_size, old_size;
8874 int old_length = string->length();
8875 if (old_length <= new_length) return string;
8877 if (string->IsSeqOneByteString()) {
8878 old_size = SeqOneByteString::SizeFor(old_length);
8879 new_size = SeqOneByteString::SizeFor(new_length);
8881 DCHECK(string->IsSeqTwoByteString());
8882 old_size = SeqTwoByteString::SizeFor(old_length);
8883 new_size = SeqTwoByteString::SizeFor(new_length);
8886 int delta = old_size - new_size;
8888 Address start_of_string = string->address();
8889 DCHECK_OBJECT_ALIGNED(start_of_string);
8890 DCHECK_OBJECT_ALIGNED(start_of_string + new_size);
8892 Heap* heap = string->GetHeap();
8893 NewSpace* newspace = heap->new_space();
8894 if (newspace->Contains(start_of_string) &&
8895 newspace->top() == start_of_string + old_size) {
8896 // Last allocated object in new space. Simply lower allocation top.
8897 newspace->set_top(start_of_string + new_size);
8899 // Sizes are pointer size aligned, so that we can use filler objects
8900 // that are a multiple of pointer size.
8901 heap->CreateFillerObjectAt(start_of_string + new_size, delta);
8903 heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
8905 // We are storing the new length using release store after creating a filler
8906 // for the left-over space to avoid races with the sweeper thread.
8907 string->synchronized_set_length(new_length);
8909 if (new_length == 0) return heap->isolate()->factory()->empty_string();
8914 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
8915 // For array indexes mix the length into the hash as an array index could
8918 DCHECK(length <= String::kMaxArrayIndexSize);
8919 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
8920 (1 << String::kArrayIndexValueBits));
8922 value <<= String::ArrayIndexValueBits::kShift;
8923 value |= length << String::ArrayIndexLengthBits::kShift;
8925 DCHECK((value & String::kIsNotArrayIndexMask) == 0);
8926 DCHECK((length > String::kMaxCachedArrayIndexLength) ||
8927 (value & String::kContainsCachedArrayIndexMask) == 0);
8932 uint32_t StringHasher::GetHashField() {
8933 if (length_ <= String::kMaxHashCalcLength) {
8934 if (is_array_index_) {
8935 return MakeArrayIndexHash(array_index_, length_);
8937 return (GetHashCore(raw_running_hash_) << String::kHashShift) |
8938 String::kIsNotArrayIndexMask;
8940 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
8945 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
8947 int* utf16_length_out) {
8948 int vector_length = chars.length();
8949 // Handle some edge cases
8950 if (vector_length <= 1) {
8951 DCHECK(vector_length == 0 ||
8952 static_cast<uint8_t>(chars.start()[0]) <=
8953 unibrow::Utf8::kMaxOneByteChar);
8954 *utf16_length_out = vector_length;
8955 return HashSequentialString(chars.start(), vector_length, seed);
8957 // Start with a fake length which won't affect computation.
8958 // It will be updated later.
8959 StringHasher hasher(String::kMaxArrayIndexSize, seed);
8960 unsigned remaining = static_cast<unsigned>(vector_length);
8961 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
8962 int utf16_length = 0;
8963 bool is_index = true;
8964 DCHECK(hasher.is_array_index_);
8965 while (remaining > 0) {
8966 unsigned consumed = 0;
8967 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
8968 DCHECK(consumed > 0 && consumed <= remaining);
8970 remaining -= consumed;
8971 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
8972 utf16_length += is_two_characters ? 2 : 1;
8973 // No need to keep hashing. But we do need to calculate utf16_length.
8974 if (utf16_length > String::kMaxHashCalcLength) continue;
8975 if (is_two_characters) {
8976 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
8977 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
8978 hasher.AddCharacter(c1);
8979 hasher.AddCharacter(c2);
8980 if (is_index) is_index = hasher.UpdateIndex(c1);
8981 if (is_index) is_index = hasher.UpdateIndex(c2);
8983 hasher.AddCharacter(c);
8984 if (is_index) is_index = hasher.UpdateIndex(c);
8987 *utf16_length_out = static_cast<int>(utf16_length);
8988 // Must set length here so that hash computation is correct.
8989 hasher.length_ = utf16_length;
8990 return hasher.GetHashField();
8994 void String::PrintOn(FILE* file) {
8995 int length = this->length();
8996 for (int i = 0; i < length; i++) {
8997 PrintF(file, "%c", Get(i));
9003 // For performance reasons we only hash the 3 most variable fields of a map:
9004 // constructor, prototype and bit_field2.
9006 // Shift away the tag.
9007 int hash = (static_cast<uint32_t>(
9008 reinterpret_cast<uintptr_t>(constructor())) >> 2);
9010 // XOR-ing the prototype and constructor directly yields too many zero bits
9011 // when the two pointers are close (which is fairly common).
9012 // To avoid this we shift the prototype 4 bits relatively to the constructor.
9013 hash ^= (static_cast<uint32_t>(
9014 reinterpret_cast<uintptr_t>(prototype())) << 2);
9016 return hash ^ (hash >> 16) ^ bit_field2();
9020 static bool CheckEquivalent(Map* first, Map* second) {
9022 first->constructor() == second->constructor() &&
9023 first->prototype() == second->prototype() &&
9024 first->instance_type() == second->instance_type() &&
9025 first->bit_field() == second->bit_field() &&
9026 first->bit_field2() == second->bit_field2() &&
9027 first->is_frozen() == second->is_frozen() &&
9028 first->has_instance_call_handler() == second->has_instance_call_handler();
9032 bool Map::EquivalentToForTransition(Map* other) {
9033 return CheckEquivalent(this, other);
9037 bool Map::EquivalentToForNormalization(Map* other,
9038 PropertyNormalizationMode mode) {
9039 int properties = mode == CLEAR_INOBJECT_PROPERTIES
9040 ? 0 : other->inobject_properties();
9041 return CheckEquivalent(this, other) && inobject_properties() == properties;
9045 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9046 // Unfortunately the serializer relies on pointers within an object being
9047 // visited in-order, so we have to iterate both the code and heap pointers in
9048 // the small section before doing so in the extended section.
9049 for (int s = 0; s <= final_section(); ++s) {
9050 LayoutSection section = static_cast<LayoutSection>(s);
9051 ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR,
9053 while (!code_iter.is_finished()) {
9054 v->VisitCodeEntry(reinterpret_cast<Address>(
9055 RawFieldOfElementAt(code_iter.next_index())));
9058 ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR,
9060 while (!heap_iter.is_finished()) {
9061 v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
9067 void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
9068 Type type[] = { CODE_PTR, HEAP_PTR };
9069 Address default_value[] = {
9070 isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
9071 reinterpret_cast<Address>(isolate->heap()->undefined_value()) };
9073 for (int i = 0; i < 2; ++i) {
9074 for (int s = 0; s <= final_section(); ++s) {
9075 LayoutSection section = static_cast<LayoutSection>(s);
9076 if (number_of_entries(type[i], section) > 0) {
9077 int offset = OffsetOfElementAt(first_index(type[i], section));
9079 reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
9081 number_of_entries(type[i], section));
9088 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9089 // Iterate over all fields in the body but take care in dealing with
9091 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9092 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9093 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
9097 void JSFunction::MarkForOptimization() {
9098 DCHECK(!IsOptimized());
9099 DCHECK(shared()->allows_lazy_compilation() ||
9100 code()->optimizable());
9101 DCHECK(!shared()->is_generator());
9102 set_code_no_write_barrier(
9103 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9104 // No write barrier required, since the builtin is part of the root set.
9108 void JSFunction::MarkForConcurrentOptimization() {
9109 DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9110 DCHECK(!IsOptimized());
9111 DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
9112 DCHECK(!shared()->is_generator());
9113 DCHECK(GetIsolate()->concurrent_recompilation_enabled());
9114 if (FLAG_trace_concurrent_recompilation) {
9115 PrintF(" ** Marking ");
9117 PrintF(" for concurrent recompilation.\n");
9119 set_code_no_write_barrier(
9120 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9121 // No write barrier required, since the builtin is part of the root set.
9125 void JSFunction::MarkInOptimizationQueue() {
9126 // We can only arrive here via the concurrent-recompilation builtin. If
9127 // break points were set, the code would point to the lazy-compile builtin.
9128 DCHECK(!GetIsolate()->DebuggerHasBreakPoints());
9129 DCHECK(IsMarkedForConcurrentOptimization() && !IsOptimized());
9130 DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
9131 DCHECK(GetIsolate()->concurrent_recompilation_enabled());
9132 if (FLAG_trace_concurrent_recompilation) {
9133 PrintF(" ** Queueing ");
9135 PrintF(" for concurrent recompilation.\n");
9137 set_code_no_write_barrier(
9138 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9139 // No write barrier required, since the builtin is part of the root set.
9143 Handle<JSFunction> JSFunction::CloneClosure(Handle<JSFunction> function) {
9144 Isolate* isolate = function->GetIsolate();
9145 Handle<Map> map(function->map());
9146 Handle<SharedFunctionInfo> shared(function->shared());
9147 Handle<Context> context(function->context());
9148 Handle<JSFunction> clone =
9149 isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);
9151 if (shared->bound()) {
9152 clone->set_function_bindings(function->function_bindings());
9155 // In typical case, __proto__ of ``function`` is the default Function
9156 // prototype, which means that SetPrototype below is a no-op.
9157 // In rare cases when that is not true, we mutate the clone's __proto__.
9158 Handle<Object> original_prototype(map->prototype(), isolate);
9159 if (*original_prototype != clone->map()->prototype()) {
9160 JSObject::SetPrototype(clone, original_prototype, false).Assert();
// NOTE(review): the listing elides lines here (e.g. the `Handle<Code> code`
// parameter at original line 9170 and the `int old_length;` declaration at
// 9179 are missing); confirm against the upstream file.
// Appends an entry {native_context, code, literals, osr_ast_id} to this
// shared function's optimized-code cache, creating the FixedArray-backed
// map on first use (optimized_code_map() is Smi 0 when empty).
9167 void SharedFunctionInfo::AddToOptimizedCodeMap(
9168 Handle<SharedFunctionInfo> shared,
9169 Handle<Context> native_context,
9171 Handle<FixedArray> literals,
9172 BailoutId osr_ast_id) {
9173 Isolate* isolate = shared->GetIsolate();
9174 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
9175 DCHECK(native_context->IsNativeContext());
9176 STATIC_ASSERT(kEntryLength == 4);
9177 Handle<FixedArray> new_code_map;
9178 Handle<Object> value(shared->optimized_code_map(), isolate);
9180 if (value->IsSmi()) {
9181 // No optimized code map.
9182 DCHECK_EQ(0, Smi::cast(*value)->value());
9183 // Create 3 entries per context {context, code, literals}.
9184 new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
9185 old_length = kEntriesStart;
9187 // Copy old map and append one new entry.
9188 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
9189 DCHECK_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
9190 old_length = old_code_map->length();
9191 new_code_map = FixedArray::CopySize(
9192 old_code_map, old_length + kEntryLength);
9193 // Zap the old map for the sake of the heap verifier.
9194 if (Heap::ShouldZapGarbage()) {
9195 Object** data = old_code_map->data_start();
9196 MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
// Write the new entry's four slots at the end of the (possibly new) map.
9199 new_code_map->set(old_length + kContextOffset, *native_context);
9200 new_code_map->set(old_length + kCachedCodeOffset, *code);
9201 new_code_map->set(old_length + kLiteralsOffset, *literals);
9202 new_code_map->set(old_length + kOsrAstIdOffset,
9203 Smi::FromInt(osr_ast_id.ToInt()));
// Debug-only sanity sweep over every entry in the new map.
9206 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9207 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
9208 DCHECK(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9209 DCHECK(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9210 Code::OPTIMIZED_FUNCTION);
9211 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9212 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9215 shared->set_optimized_code_map(*new_code_map);
// Returns the cached literals array stored next to the code at `index`.
// The `index + 1` offset presumably corresponds to kLiteralsOffset relative
// to kCachedCodeOffset -- confirm against the header's slot constants.
9219 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9220 DCHECK(index > kEntriesStart);
9221 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9223 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9224 DCHECK_NE(NULL, cached_literals);
9225 return cached_literals;
// Returns the cached optimized Code object at `index` (as produced by
// SearchOptimizedCodeMap). The `return code;` line is elided in this listing.
9231 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9232 DCHECK(index > kEntriesStart);
9233 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9234 Code* code = Code::cast(code_map->get(index));
9235 DCHECK_NE(NULL, code);
// Drops the whole optimized-code map, first unlinking it from the code
// flusher's list if it had been enqueued there.
9240 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9241 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9243 // If the next map link slot is already used then the function was
9244 // enqueued with code flushing and we remove it now.
9245 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9246 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9247 flusher->EvictOptimizedCodeMap(this);
9250 DCHECK(code_map->get(kNextMapIndex)->IsUndefined());
9251 set_optimized_code_map(Smi::FromInt(0));
// Removes every entry whose cached code equals `optimized_code`, compacting
// surviving entries toward the front and right-trimming the array.
9255 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
9256 const char* reason) {
9257 DisallowHeapAllocation no_gc;
9258 if (optimized_code_map()->IsSmi()) return;
9260 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9261 int dst = kEntriesStart;
9262 int length = code_map->length();
9263 for (int src = kEntriesStart; src < length; src += kEntryLength) {
9264 DCHECK(code_map->get(src)->IsNativeContext());
9265 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
9266 // Evict the src entry by not copying it to the dst entry.
9267 if (FLAG_trace_opt) {
9268 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9270 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
9274 PrintF(" (osr ast id %d)]\n", osr.ToInt());
9278 // Keep the src entry by copying it to the dst entry.
9280 code_map->set(dst + kContextOffset,
9281 code_map->get(src + kContextOffset));
9282 code_map->set(dst + kCachedCodeOffset,
9283 code_map->get(src + kCachedCodeOffset));
9284 code_map->set(dst + kLiteralsOffset,
9285 code_map->get(src + kLiteralsOffset));
9286 code_map->set(dst + kOsrAstIdOffset,
9287 code_map->get(src + kOsrAstIdOffset));
9289 dst += kEntryLength;
9292 if (dst != length) {
9293 // Always trim even when array is cleared because of heap verifier.
9294 GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(code_map, length - dst);
9295 if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
// Shrinks the map by `shrink_by` slots (a whole number of entries); called
// during GC, hence the FROM_GC trimming mode.
9300 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9301 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9302 DCHECK(shrink_by % kEntryLength == 0);
9303 DCHECK(shrink_by <= code_map->length() - kEntriesStart);
9304 // Always trim even when array is cleared because of heap verifier.
9305 GetHeap()->RightTrimFixedArray<Heap::FROM_GC>(code_map, shrink_by);
9306 if (code_map->length() == kEntriesStart) {
9307 ClearOptimizedCodeMap();
// NOTE(review): closing braces are elided throughout this listing; verify
// exact structure against the upstream file.
// Puts `object` into the representation preferred for prototype objects:
// normalize (so JSFunction properties become CONSTANT), re-fasten, and tag
// the resulting map as a prototype map. Global objects/proxies are skipped.
9312 void JSObject::OptimizeAsPrototype(Handle<JSObject> object,
9313 PrototypeOptimizationMode mode) {
9314 if (object->IsGlobalObject()) return;
9315 if (object->IsJSGlobalProxy()) return;
9316 if (mode == FAST_PROTOTYPE && !object->map()->is_prototype_map()) {
9317 // First normalize to ensure all JSFunctions are CONSTANT.
9318 JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, 0);
9320 if (!object->HasFastProperties()) {
9321 JSObject::MigrateSlowToFast(object, 0);
9323 if (mode == FAST_PROTOTYPE && object->HasFastProperties() &&
9324 !object->map()->is_prototype_map()) {
9325 Handle<Map> new_map = Map::Copy(handle(object->map()));
9326 JSObject::MigrateToMap(object, new_map);
9327 object->map()->set_is_prototype_map(true);
// Re-runs prototype optimization on objects already marked as prototypes
// (e.g. after a property change de-optimized their representation).
9332 void JSObject::ReoptimizeIfPrototype(Handle<JSObject> object) {
9333 if (!object->map()->is_prototype_map()) return;
9334 OptimizeAsPrototype(object, FAST_PROTOTYPE);
// Builds the per-elements-kind chain of initial JSArray maps starting from
// `initial_map` and stores the table in the native context, reusing existing
// elements transitions where present.
9338 Handle<Object> CacheInitialJSArrayMaps(
9339 Handle<Context> native_context, Handle<Map> initial_map) {
9340 // Replace all of the cached initial array maps in the native context with
9341 // the appropriate transitioned elements kind maps.
9342 Factory* factory = native_context->GetIsolate()->factory();
9343 Handle<FixedArray> maps = factory->NewFixedArrayWithHoles(
9344 kElementsKindCount, TENURED);
9346 Handle<Map> current_map = initial_map;
9347 ElementsKind kind = current_map->elements_kind();
9348 DCHECK(kind == GetInitialFastElementsKind());
9349 maps->set(kind, *current_map);
9350 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
9351 i < kFastElementsKindCount; ++i) {
9352 Handle<Map> new_map;
9353 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
9354 if (current_map->HasElementsTransition()) {
9355 new_map = handle(current_map->elements_transition_map());
9356 DCHECK(new_map->elements_kind() == next_kind);
9358 new_map = Map::CopyAsElementsKind(
9359 current_map, next_kind, INSERT_TRANSITION);
9361 maps->set(next_kind, *new_map);
9362 current_map = new_map;
9364 native_context->set_js_array_maps(*maps);
// NOTE(review): several lines are elided in this listing (closing braces,
// else keywords); verify control flow against the upstream file.
// Installs `value` as the prototype of instances created by `function`.
// If an initial map already exists it is replaced by a copy with the new
// prototype (deoptimizing dependent code); otherwise the value is parked in
// the prototype_or_initial_map slot until an initial map is needed.
9369 void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
9370 Handle<Object> value) {
9371 Isolate* isolate = function->GetIsolate();
9373 DCHECK(value->IsJSReceiver());
9375 // Now some logic for the maps of the objects that are created by using this
9376 // function as a constructor.
9377 if (function->has_initial_map()) {
9378 // If the function has allocated the initial map replace it with a
9379 // copy containing the new prototype. Also complete any in-object
9380 // slack tracking that is in progress at this point because it is
9381 // still tracking the old copy.
9382 if (function->IsInobjectSlackTrackingInProgress()) {
9383 function->CompleteInobjectSlackTracking();
9386 Handle<Map> initial_map(function->initial_map(), isolate);
9388 if (!initial_map->GetIsolate()->bootstrapper()->IsActive() &&
9389 initial_map->instance_type() == JS_OBJECT_TYPE) {
9390 // Put the value in the initial map field until an initial map is needed.
9391 // At that point, a new initial map is created and the prototype is put
9392 // into the initial map where it belongs.
9393 function->set_prototype_or_initial_map(*value);
9395 Handle<Map> new_map = Map::Copy(initial_map);
9396 JSFunction::SetInitialMap(function, new_map, value);
9398 // If the function is used as the global Array function, cache the
9399 // initial map (and transitioned versions) in the native context.
9400 Context* native_context = function->context()->native_context();
9401 Object* array_function =
9402 native_context->get(Context::ARRAY_FUNCTION_INDEX);
9403 if (array_function->IsJSFunction() &&
9404 *function == JSFunction::cast(array_function)) {
9405 CacheInitialJSArrayMaps(handle(native_context, isolate), new_map);
9409 // Deoptimize all code that embeds the previous initial map.
9410 initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
9411 isolate, DependentCode::kInitialMapChangedGroup);
9413 // Put the value in the initial map field until an initial map is
9414 // needed. At that point, a new initial map is created and the
9415 // prototype is put into the initial map where it belongs.
9416 function->set_prototype_or_initial_map(*value);
// The instanceof stub caches function/map pairs, so it must be flushed
// whenever any function's prototype changes.
9418 isolate->heap()->ClearInstanceofCache();
// Sets function.prototype. Non-JSReceiver values are stored on a fresh map
// (non-instance prototype) while construction falls back to the initial
// object prototype, per ES5 13.2.2.
9422 void JSFunction::SetPrototype(Handle<JSFunction> function,
9423 Handle<Object> value) {
9424 DCHECK(function->should_have_prototype());
9425 Handle<Object> construct_prototype = value;
9427 // If the value is not a JSReceiver, store the value in the map's
9428 // constructor field so it can be accessed. Also, set the prototype
9429 // used for constructing objects to the original object prototype.
9430 // See ECMA-262 13.2.2.
9431 if (!value->IsJSReceiver()) {
9432 // Copy the map so this does not affect unrelated functions.
9433 // Remove map transitions because they point to maps with a
9434 // different prototype.
9435 Handle<Map> new_map = Map::Copy(handle(function->map()));
9437 JSObject::MigrateToMap(function, new_map);
9438 new_map->set_constructor(*value);
9439 new_map->set_non_instance_prototype(true);
9440 Isolate* isolate = new_map->GetIsolate();
9441 construct_prototype = handle(
9442 isolate->context()->native_context()->initial_object_prototype(),
9445 function->map()->set_non_instance_prototype(false);
9448 return SetInstancePrototype(function, construct_prototype);
// Switches this function to the without-prototype map for its strictness,
// clearing any stored prototype. Returns false (elided line) when the
// function is not on the plain sloppy/strict function map -- confirm.
9452 bool JSFunction::RemovePrototype() {
9453 Context* native_context = context()->native_context();
9454 Map* no_prototype_map = shared()->strict_mode() == SLOPPY
9455 ? native_context->sloppy_function_without_prototype_map()
9456 : native_context->strict_function_without_prototype_map();
9458 if (map() == no_prototype_map) return true;
9461 if (map() != (shared()->strict_mode() == SLOPPY
9462 ? native_context->sloppy_function_map()
9463 : native_context->strict_function_map())) {
9468 set_map(no_prototype_map);
9469 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
// Links map <-> function as each other's initial map / constructor, first
// optimizing a JSObject prototype for prototype use.
9474 void JSFunction::SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
9475 Handle<Object> prototype) {
9476 if (prototype->IsJSObject()) {
9477 Handle<JSObject> js_proto = Handle<JSObject>::cast(prototype);
9478 JSObject::OptimizeAsPrototype(js_proto, FAST_PROTOTYPE);
9480 map->set_prototype(*prototype);
9481 function->set_prototype_or_initial_map(*map);
9482 map->set_constructor(*function);
// Lazily creates the initial map used when this function is invoked as a
// constructor (generator objects get a fixed-size map, ordinary objects a
// size estimated from the shared info), then starts slack tracking.
9486 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
9487 if (function->has_initial_map()) return;
9488 Isolate* isolate = function->GetIsolate();
9490 // First create a new map with the size and number of in-object properties
9491 // suggested by the function.
9492 InstanceType instance_type;
9494 int in_object_properties;
9495 if (function->shared()->is_generator()) {
9496 instance_type = JS_GENERATOR_OBJECT_TYPE;
9497 instance_size = JSGeneratorObject::kSize;
9498 in_object_properties = 0;
9500 instance_type = JS_OBJECT_TYPE;
9501 instance_size = function->shared()->CalculateInstanceSize();
9502 in_object_properties = function->shared()->CalculateInObjectProperties();
9504 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
9506 // Fetch or allocate prototype.
9507 Handle<Object> prototype;
9508 if (function->has_instance_prototype()) {
9509 prototype = handle(function->instance_prototype(), isolate);
9511 prototype = isolate->factory()->NewFunctionPrototype(function);
9513 map->set_inobject_properties(in_object_properties);
9514 map->set_unused_property_fields(in_object_properties);
9515 DCHECK(map->has_fast_object_elements());
9517 // Finally link initial map and constructor function.
9518 JSFunction::SetInitialMap(function, map, Handle<JSReceiver>::cast(prototype));
9520 if (!function->shared()->is_generator()) {
9521 function->StartInobjectSlackTracking();
// Records the class name reported for instances of this function.
9526 void JSFunction::SetInstanceClassName(String* name) {
9527 shared()->set_instance_class_name(name);
// Prints this function's debug name to `out` (diagnostics only).
9531 void JSFunction::PrintName(FILE* out) {
9532 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9533 PrintF(out, "%s", name.get());
// Extracts the native context stored in a literals array.
9537 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
9538 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
9542 // The filter is a pattern that matches function names in this way:
9543 // "*" all; the default
9544 // "-" all but the top-level function
9545 // "-name" all but the function "name"
9546 // "" only the top-level function
9547 // "name" only the function "name"
9548 // "name*" only functions starting with "name"
9549 // "~" none; the tilde is not an identifier
// NOTE(review): several return statements / closing braces are elided from
// this listing (numbering jumps at 9556, 9560-61, 9564+); verify the exact
// match/negate logic against the upstream file.
9550 bool JSFunction::PassesFilter(const char* raw_filter) {
9551 if (*raw_filter == '*') return true;
9552 String* name = shared()->DebugName();
9553 Vector<const char> filter = CStrVector(raw_filter);
9554 if (filter.length() == 0) return name->length() == 0;
9555 if (filter[0] == '-') {
9557 if (filter.length() == 1) {
9558 return (name->length() != 0);
9559 } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
9562 if (filter[filter.length() - 1] == '*' &&
9563 name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
9568 } else if (name->IsUtf8EqualTo(filter)) {
9571 if (filter[filter.length() - 1] == '*' &&
9572 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
// Fills in an Oddball's string representation (internalized), numeric
// value, and kind. NOTE(review): the `byte kind` parameter line (original
// 9583) is elided from this listing -- confirm the full signature upstream.
9579 void Oddball::Initialize(Isolate* isolate,
9580 Handle<Oddball> oddball,
9581 const char* to_string,
9582 Handle<Object> to_number,
9584 Handle<String> internalized_to_string =
9585 isolate->factory()->InternalizeUtf8String(to_string);
9586 oddball->set_to_string(*internalized_to_string);
9587 oddball->set_to_number(*to_number);
9588 oddball->set_kind(kind);
// Lazily computes and caches the array of line-end positions for the
// script's source. Scripts without a string source get an empty array so
// the fast lookup path still works.
9592 void Script::InitLineEnds(Handle<Script> script) {
9593 if (!script->line_ends()->IsUndefined()) return;
9595 Isolate* isolate = script->GetIsolate();
9597 if (!script->source()->IsString()) {
9598 DCHECK(script->source()->IsUndefined());
9599 Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
9600 script->set_line_ends(*empty);
9601 DCHECK(script->line_ends()->IsFixedArray());
9605 Handle<String> src(String::cast(script->source()), isolate);
9607 Handle<FixedArray> array = String::CalculateLineEnds(src, true);
9609 if (*array != isolate->heap()->empty_fixed_array()) {
9610 array->set_map(isolate->heap()->fixed_cow_array_map());
9613 script->set_line_ends(*array);
9614 DCHECK(script->line_ends()->IsFixedArray());
// Converts a source position into a column number, adjusting by the
// script's column offset for positions on the first line.
9618 int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
9619 int line_number = GetLineNumber(script, code_pos);
9620 if (line_number == -1) return -1;
9622 DisallowHeapAllocation no_allocation;
9623 FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
9624 line_number = line_number - script->line_offset()->value();
9625 if (line_number == 0) return code_pos + script->column_offset()->value();
9626 int prev_line_end_pos =
9627 Smi::cast(line_ends_array->get(line_number - 1))->value();
9628 return code_pos - (prev_line_end_pos + 1);
// Binary search over the cached line-ends array for the line containing
// `code_pos`. NOTE(review): the loop body lines updating left/right
// (original 9647-9649) are elided here -- confirm upstream.
9632 int Script::GetLineNumberWithArray(int code_pos) {
9633 DisallowHeapAllocation no_allocation;
9634 DCHECK(line_ends()->IsFixedArray());
9635 FixedArray* line_ends_array = FixedArray::cast(line_ends());
9636 int line_ends_len = line_ends_array->length();
9637 if (line_ends_len == 0) return -1;
9639 if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
9640 return line_offset()->value();
9644 int right = line_ends_len;
9645 while (int half = (right - left) / 2) {
9646 if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
9652 return right + line_offset()->value();
// Handlified entry point: ensures line ends exist, then does the fast
// array-based lookup.
9656 int Script::GetLineNumber(Handle<Script> script, int code_pos) {
9657 InitLineEnds(script);
9658 return script->GetLineNumberWithArray(code_pos);
// GC-free variant: uses the cached array when available, otherwise counts
// newlines by scanning the source string up to `code_pos`.
9662 int Script::GetLineNumber(int code_pos) {
9663 DisallowHeapAllocation no_allocation;
9664 if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);
9666 // Slow mode: we do not have line_ends. We have to iterate through source.
9667 if (!source()->IsString()) return -1;
9669 String* source_string = String::cast(source());
9671 int len = source_string->length();
9672 for (int pos = 0; pos < len; pos++) {
9673 if (pos == code_pos) break;
9674 if (source_string->Get(pos) == '\n') line++;
// Invokes the JS-side nameOrSourceURL() accessor on the script's wrapper,
// returning undefined if the call throws (an exception may already be
// pending when this is reached).
9680 Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
9681 Isolate* isolate = script->GetIsolate();
9682 Handle<String> name_or_source_url_key =
9683 isolate->factory()->InternalizeOneByteString(
9684 STATIC_CHAR_VECTOR("nameOrSourceURL"));
9685 Handle<JSObject> script_wrapper = Script::GetWrapper(script);
9686 Handle<Object> property = Object::GetProperty(
9687 script_wrapper, name_or_source_url_key).ToHandleChecked();
9688 DCHECK(property->IsJSFunction());
9689 Handle<JSFunction> method = Handle<JSFunction>::cast(property);
9690 Handle<Object> result;
9691 // Do not check against pending exception, since this function may be called
9692 // when an exception has already been pending.
9693 if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
9694 return isolate->factory()->undefined_value();
9700 // Wrappers for scripts are kept alive and cached in weak global
9701 // handles referred from foreign objects held by the scripts as long as
9702 // they are used. When they are not used anymore, the garbage
9703 // collector will call the weak callback on the global handle
9704 // associated with the wrapper and get rid of both the wrapper and the
// Weak-handle callback: when the wrapper JSValue dies, drop the script's
// cache entry so a fresh wrapper is created on next access.
9706 static void ClearWrapperCacheWeakCallback(
9707 const v8::WeakCallbackData<v8::Value, void>& data) {
9708 Object** location = reinterpret_cast<Object**>(data.GetParameter());
9709 JSValue* wrapper = JSValue::cast(*location);
9710 Script::cast(wrapper->value())->ClearWrapperCache();
// Destroys the weak global handle stored (as a raw address) in the
// script's Foreign wrapper slot and zeroes the slot.
9714 void Script::ClearWrapperCache() {
9715 Foreign* foreign = wrapper();
9716 Object** location = reinterpret_cast<Object**>(foreign->foreign_address());
9717 DCHECK_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location));
9718 foreign->set_foreign_address(0);
9719 GlobalHandles::Destroy(location);
9720 GetIsolate()->counters()->script_wrappers()->Decrement();
// Returns the cached JSValue wrapper for `script`, or builds one, caching
// it behind a weak global handle whose address is stored in the script's
// Foreign wrapper slot.
9724 Handle<JSObject> Script::GetWrapper(Handle<Script> script) {
9725 if (script->wrapper()->foreign_address() != NULL) {
9726 // Return a handle for the existing script wrapper from the cache.
9727 return Handle<JSValue>(
9728 *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address()));
9730 Isolate* isolate = script->GetIsolate();
9731 // Construct a new script wrapper.
9732 isolate->counters()->script_wrappers()->Increment();
9733 Handle<JSFunction> constructor = isolate->script_function();
9734 Handle<JSValue> result =
9735 Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));
9737 result->set_value(*script);
9739 // Create a new weak global handle and use it to cache the wrapper
9740 // for future use. The cache will automatically be cleared by the
9741 // garbage collector when it is not used anymore.
9742 Handle<Object> handle = isolate->global_handles()->Create(*result);
9743 GlobalHandles::MakeWeak(handle.location(),
9744 reinterpret_cast<void*>(handle.location()),
9745 &ClearWrapperCacheWeakCallback);
9746 script->wrapper()->set_foreign_address(
9747 reinterpret_cast<Address>(handle.location()));
// Returns the function's name, falling back to the inferred name when the
// explicit name is absent or empty. NOTE(review): the declaration of `n`
// (original line 9753, presumably `Object* n = name();`) is elided here.
9752 String* SharedFunctionInfo::DebugName() {
9754 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
9755 return String::cast(n);
// True when both a script and its source string are present.
// reinterpret_cast avoids Script::cast checks during stack dumps.
9759 bool SharedFunctionInfo::HasSourceCode() const {
9760 return !script()->IsUndefined() &&
9761 !reinterpret_cast<Script*>(script())->source()->IsUndefined();
// Returns the substring of the script source spanning this function, or
// undefined when no source is available.
9765 Handle<Object> SharedFunctionInfo::GetSourceCode() {
9766 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
9767 Handle<String> source(String::cast(Script::cast(script())->source()));
9768 return GetIsolate()->factory()->NewSubString(
9769 source, start_position(), end_position());
// A function is inlineable if it has a script, optimization is not
// disabled, and its unoptimized code (if any) is still optimizable.
9773 bool SharedFunctionInfo::IsInlineable() {
9774 // Check that the function has a script associated with it.
9775 if (!script()->IsScript()) return false;
9776 if (optimization_disabled()) return false;
9777 // If we never ran this (unlikely) then lets try to optimize it.
9778 if (code()->kind() != Code::FUNCTION) return true;
9779 return code()->optimizable();
// Length of the function's source span in characters.
9783 int SharedFunctionInfo::SourceSize() {
9784 return end_position() - start_position();
// Estimates the instance size from the expected property count, clamped to
// the maximum JSObject size. NOTE(review): the `int instance_size =`
// declaration (original line 9789) is elided here.
9788 int SharedFunctionInfo::CalculateInstanceSize() {
9790 JSObject::kHeaderSize +
9791 expected_nof_properties() * kPointerSize;
9792 if (instance_size > JSObject::kMaxInstanceSize) {
9793 instance_size = JSObject::kMaxInstanceSize;
9795 return instance_size;
// Number of in-object property slots implied by the instance size.
9799 int SharedFunctionInfo::CalculateInObjectProperties() {
9800 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
9804 // Output the source code without any allocation in the heap.
// Streams a (possibly truncated) view of the function's source; used from
// stack dumps, hence the cast-without-check style.
9805 OStream& operator<<(OStream& os, const SourceCodeOf& v) {
9806 const SharedFunctionInfo* s = v.value;
9807 // For some native functions there is no source.
9808 if (!s->HasSourceCode()) return os << "<No Source>";
9810 // Get the source for the script which this function came from.
9811 // Don't use String::cast because we don't want more assertion errors while
9812 // we are already creating a stack dump.
9813 String* script_source =
9814 reinterpret_cast<String*>(Script::cast(s->script())->source());
9816 if (!script_source->LooksValid()) return os << "<Invalid Source>";
9818 if (!s->is_toplevel()) {
9820 Object* name = s->name();
9821 if (name->IsString() && String::cast(name)->length() > 0) {
9822 String::cast(name)->PrintUC16(os);
9826 int len = s->end_position() - s->start_position();
9827 if (len <= v.max_length || v.max_length < 0) {
9828 script_source->PrintUC16(os, s->start_position(), s->end_position());
9831 script_source->PrintUC16(os, s->start_position(),
9832 s->start_position() + v.max_length);
9833 return os << "...\n";
// Two Code objects are "equivalent" when instruction size and the full
// relocation info bytes match; used to decide whether deopt data can be
// copied across instead of replacing the code.
9838 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
9839 if (code->instruction_size() != recompiled->instruction_size()) return false;
9840 ByteArray* code_relocation = code->relocation_info();
9841 ByteArray* recompiled_relocation = recompiled->relocation_info();
9842 int length = code_relocation->length();
9843 if (length != recompiled_relocation->length()) return false;
9844 int compare = memcmp(code_relocation->GetDataStartAddress(),
9845 recompiled_relocation->GetDataStartAddress(),
9847 return compare == 0;
// Grafts deoptimization support from `recompiled` onto the current code if
// the two are byte-equivalent; otherwise replaces the code wholesale
// (losing IC feedback -- see TODO below).
9851 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
9852 DCHECK(!has_deoptimization_support());
9853 DisallowHeapAllocation no_allocation;
9854 Code* code = this->code();
9855 if (IsCodeEquivalent(code, recompiled)) {
9856 // Copy the deoptimization data from the recompiled code.
9857 code->set_deoptimization_data(recompiled->deoptimization_data());
9858 code->set_has_deoptimization_support(true);
9860 // TODO(3025757): In case the recompiled isn't equivalent to the
9861 // old code, we have to replace it. We should try to avoid this
9862 // altogether because it flushes valuable type feedback by
9863 // effectively resetting all IC state.
9864 ReplaceCode(recompiled);
9866 DCHECK(has_deoptimization_support());
// Permanently disables optimization for this function, recording `reason`
// and marking the unoptimized code non-optimizable.
9870 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
9871 // Disable optimization for the shared function info and mark the
9872 // code as non-optimizable. The marker on the shared function info
9873 // is there because we flush non-optimized code thereby loosing the
9874 // non-optimizable information for the code. When the code is
9875 // regenerated and set on the shared function info it is marked as
9876 // non-optimizable if optimization is disabled for the shared
9878 set_optimization_disabled(true);
9879 set_bailout_reason(reason);
9880 // Code should be the lazy compilation stub or else unoptimized. If the
9881 // latter, disable optimization for the code too.
9882 DCHECK(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
9883 if (code()->kind() == Code::FUNCTION) {
9884 code()->set_optimizable(false);
9886 PROFILE(GetIsolate(), CodeDisableOptEvent(code(), this));
9887 if (FLAG_trace_opt) {
9888 PrintF("[disabled optimization for ");
9890 PrintF(", reason: %s]\n", GetBailoutReason(reason));
// Debug-only: checks that `id` resolves in the unoptimized code's deopt
// output table (GetOutputInfo DCHECKs internally on a miss).
9895 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
9896 DCHECK(!id.IsNone());
9897 Code* unoptimized = code();
9898 DeoptimizationOutputData* data =
9899 DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
9900 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
9902 return true; // Return true if there was no DCHECK.
// Begins in-object slack tracking on the initial map: after
// kGenerousAllocationCount constructions, unused in-object slots will be
// reclaimed by CompleteInobjectSlackTracking below.
9906 void JSFunction::StartInobjectSlackTracking() {
9907 DCHECK(has_initial_map() && !IsInobjectSlackTrackingInProgress());
9909 if (!FLAG_clever_optimizations) return;
9910 Map* map = initial_map();
9912 // Only initiate the tracking the first time.
9913 if (map->done_inobject_slack_tracking()) return;
9914 map->set_done_inobject_slack_tracking(true);
9916 // No tracking during the snapshot construction phase.
9917 Isolate* isolate = GetIsolate();
9918 if (isolate->serializer_enabled()) return;
9920 if (map->unused_property_fields() == 0) return;
9922 map->set_construction_count(kGenerousAllocationCount);
// Per-context reset: clears ICs and type feedback, bumps the IC age, and
// re-enables optimization if it was disabled only by the opt-count limit.
9926 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
9927 code()->ClearInlineCaches();
9928 // If we clear ICs, we need to clear the type feedback vector too, since
9929 // CallICs are synced with a feedback vector slot.
9930 ClearTypeFeedbackInfo();
9931 set_ic_age(new_ic_age);
9932 if (code()->kind() == Code::FUNCTION) {
9933 code()->set_profiler_ticks(0);
9934 if (optimization_disabled() &&
9935 opt_count() >= FLAG_max_opt_count) {
9936 // Re-enable optimizations if they were disabled due to opt_count limit.
9937 set_optimization_disabled(false);
9938 code()->set_optimizable(true);
// Transition-tree visitor: folds the minimum unused-property slack of all
// maps into the int pointed to by `data`.
9946 static void GetMinInobjectSlack(Map* map, void* data) {
9947 int slack = map->unused_property_fields();
9948 if (*reinterpret_cast<int*>(data) > slack) {
9949 *reinterpret_cast<int*>(data) = slack;
// Transition-tree visitor: shrinks each map by the agreed slack (slots and
// bytes) and refreshes the size-dependent visitor id.
9954 static void ShrinkInstanceSize(Map* map, void* data) {
9955 int slack = *reinterpret_cast<int*>(data);
9956 map->set_inobject_properties(map->inobject_properties() - slack);
9957 map->set_unused_property_fields(map->unused_property_fields() - slack);
9958 map->set_instance_size(map->instance_size() - slack * kPointerSize);
9960 // Visitor id might depend on the instance size, recalculate it.
9961 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
// Ends slack tracking: computes the minimum slack across the initial map's
// transition tree, then shrinks every map in the tree by that amount.
9965 void JSFunction::CompleteInobjectSlackTracking() {
9966 DCHECK(has_initial_map());
9967 Map* map = initial_map();
9969 DCHECK(map->done_inobject_slack_tracking());
9970 map->set_construction_count(kNoSlackTracking);
9972 int slack = map->unused_property_fields();
9973 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
9975 // Resize the initial map and all maps in its transition tree.
9976 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
// Linear scan of the optimized-code map for an entry matching both the
// native context and the OSR AST id; returns the index of the cached code
// slot, or (via an elided `return -1;`) a miss. NOTE(review): trailing
// lines of this function are elided in the listing -- confirm upstream.
9981 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
9982 BailoutId osr_ast_id) {
9983 DisallowHeapAllocation no_gc;
9984 DCHECK(native_context->IsNativeContext());
9985 if (!FLAG_cache_optimized_code) return -1;
9986 Object* value = optimized_code_map();
9987 if (!value->IsSmi()) {
9988 FixedArray* optimized_code_map = FixedArray::cast(value);
9989 int length = optimized_code_map->length();
9990 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
9991 for (int i = kEntriesStart; i < length; i += kEntryLength) {
9992 if (optimized_code_map->get(i + kContextOffset) == native_context &&
9993 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
9994 return i + kCachedCodeOffset;
9997 if (FLAG_trace_opt) {
9998 PrintF("[didn't find optimized code in optimized code map for ");
// Tables mapping VisitorSynchronization sync tags to their string forms,
// generated from VISITOR_SYNCHRONIZATION_TAGS_LIST; the macro is redefined
// between the two tables to pick a different list column. The matching
// #undef lines are elided in this listing.
10007 #define DECLARE_TAG(ignore1, name, ignore2) name,
10008 const char* const VisitorSynchronization::kTags[
10009 VisitorSynchronization::kNumberOfSyncTags] = {
10010 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
10015 #define DECLARE_TAG(ignore1, ignore2, name) name,
10016 const char* const VisitorSynchronization::kTagNames[
10017 VisitorSynchronization::kNumberOfSyncTags] = {
10018 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Default ObjectVisitor handlers for the different relocation-info slot
// kinds. Each extracts the referenced object, runs VisitPointer on it,
// and (where the slot is writable) stores back a possibly-updated value.
// Visits a code-target slot; the target must not be moved by VisitPointer.
10023 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10024 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
10025 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10026 Object* old_target = target;
10027 VisitPointer(&target);
10028 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits the code-age stub referenced by a code-age sequence.
10032 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10033 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10034 Object* stub = rinfo->code_age_stub();
10036 VisitPointer(&stub);
// Visits a code-entry slot, rewriting the stored entry address if the
// visitor relocated the Code object.
10041 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10042 Object* code = Code::GetObjectFromEntryAddress(entry_address);
10043 Object* old_code = code;
10044 VisitPointer(&code);
10045 if (code != old_code) {
10046 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
// Visits a Cell slot, storing back the new cell if it moved.
10051 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10052 DCHECK(rinfo->rmode() == RelocInfo::CELL);
10053 Object* cell = rinfo->target_cell();
10054 Object* old_cell = cell;
10055 VisitPointer(&cell);
10056 if (cell != old_cell) {
10057 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
// Visits the call target of a patched debug break / JS return sequence;
// like code targets, it must not move.
10062 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10063 DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10064 rinfo->IsPatchedReturnSequence()) ||
10065 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10066 rinfo->IsPatchedDebugBreakSlotSequence()));
10067 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10068 Object* old_target = target;
10069 VisitPointer(&target);
10070 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits an embedded heap-object pointer. NOTE(review): the VisitPointer
// call (original line ~10077) is elided in this listing.
10074 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10075 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10076 Object* p = rinfo->target_object();
// Visits an external (non-heap) reference embedded in code.
10081 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10082 Address p = rinfo->target_reference();
10083 VisitExternalReference(&p);
// Invalidates this Code object: neutralizes embedded heap references and
// then drops the relocation info entirely.
10087 void Code::InvalidateRelocation() {
10088 InvalidateEmbeddedObjects();
10089 set_relocation_info(GetHeap()->empty_byte_array());
// Overwrites every embedded object/cell reference with undefined so the
// dead code no longer keeps those objects alive or points into the heap.
10093 void Code::InvalidateEmbeddedObjects() {
10094 Object* undefined = GetHeap()->undefined_value();
10095 Cell* undefined_cell = GetHeap()->undefined_cell();
10096 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10097 RelocInfo::ModeMask(RelocInfo::CELL);
10098 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10099 RelocInfo::Mode mode = it.rinfo()->rmode();
10100 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10101 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
10102 } else if (mode == RelocInfo::CELL) {
10103 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
// Applies a relocation delta to every applicable reloc entry after the
// code object moved, then flushes the instruction cache once at the end.
10109 void Code::Relocate(intptr_t delta) {
10110 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10111 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10113 CpuFeatures::FlushICache(instruction_start(), instruction_size());
// Copies freshly assembled code (instructions + reloc info) from 'desc' into
// this code object, then fixes up all relocation entries for the new base
// address: handles are unboxed into direct pointers and pc-relative entries
// are shifted by the relocation delta. Finishes with one icache flush.
10117 void Code::CopyFrom(const CodeDesc& desc) {
// The object must be white (newly allocated); no incremental-marking
// write barriers are needed for the stores below.
10118 DCHECK(Marking::Color(this) == Marking::WHITE_OBJECT);
10121 CopyBytes(instruction_start(), desc.buffer,
10122 static_cast<size_t>(desc.instr_size));
// Reloc info is emitted backwards at the end of the assembler buffer.
10125 CopyBytes(relocation_start(),
10126 desc.buffer + desc.buffer_size - desc.reloc_size,
10127 static_cast<size_t>(desc.reloc_size));
10129 // unbox handles and relocate
10130 intptr_t delta = instruction_start() - desc.buffer;
10131 int mode_mask = RelocInfo::kCodeTargetMask |
10132 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10133 RelocInfo::ModeMask(RelocInfo::CELL) |
10134 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
10135 RelocInfo::kApplyMask;
10136 // Needed to find target_object and runtime_entry on X64
10137 Assembler* origin = desc.origin;
10138 AllowDeferredHandleDereference embedding_raw_address;
10139 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10140 RelocInfo::Mode mode = it.rinfo()->rmode();
10141 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10142 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10143 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10144 } else if (mode == RelocInfo::CELL) {
10145 Handle<Cell> cell = it.rinfo()->target_cell_handle();
10146 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10147 } else if (RelocInfo::IsCodeTarget(mode)) {
10148 // rewrite code handles in inline cache targets to direct
10149 // pointers to the first instruction in the code object
10150 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10151 Code* code = Code::cast(*p);
10152 it.rinfo()->set_target_address(code->instruction_start(),
10153 SKIP_WRITE_BARRIER,
10154 SKIP_ICACHE_FLUSH);
10155 } else if (RelocInfo::IsRuntimeEntry(mode)) {
10156 Address p = it.rinfo()->target_runtime_entry(origin);
10157 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER,
10158 SKIP_ICACHE_FLUSH);
10159 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
10160 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
10161 Code* code = Code::cast(*p);
10162 it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH);
// Remaining modes (kApplyMask) are pc-relative; shift them by delta.
10164 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10167 CpuFeatures::FlushICache(instruction_start(), instruction_size());
10171 // Locate the source position which is closest to the address in the code. This
10172 // is using the source position information embedded in the relocation info.
10173 // The position returned is relative to the beginning of the script where the
10174 // source for this function is found.
10175 int Code::SourcePosition(Address pc) {
10176 int distance = kMaxInt;
10177 int position = RelocInfo::kNoPosition; // Initially no position found.
10178 // Run through all the relocation info to find the best matching source
10179 // position. All the code needs to be considered as the sequence of the
10180 // instructions in the code does not necessarily follow the same order as the
10182 RelocIterator it(this, RelocInfo::kPositionMask);
10183 while (!it.done()) {
10184 // Only look at positions after the current pc.
10185 if (it.rinfo()->pc() < pc) {
10186 // Get position and distance.
10188 int dist = static_cast<int>(pc - it.rinfo()->pc());
10189 int pos = static_cast<int>(it.rinfo()->data());
10190 // If this position is closer than the current candidate or if it has the
10191 // same distance as the current candidate and the position is higher then
10192 // this position is the new candidate.
10193 if ((dist < distance) ||
10194 (dist == distance && pos > position)) {
10205 // Same as Code::SourcePosition above except it only looks for statement
// positions: returns the closest statement position at or before the
// plain source position found for 'pc'.
10207 int Code::SourceStatementPosition(Address pc) {
10208 // First find the position as close as possible using all position
10210 int position = SourcePosition(pc);
10211 // Now find the closest statement position before the position.
10212 int statement_position = 0;
10213 RelocIterator it(this, RelocInfo::kPositionMask);
10214 while (!it.done()) {
10215 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10216 int p = static_cast<int>(it.rinfo()->data());
// Keep the largest statement position not exceeding 'position'.
10217 if (statement_position < p && p <= position) {
10218 statement_position = p;
10223 return statement_position;
// Looks up the safepoint entry for the given pc in this code's safepoint table.
10227 SafepointEntry Code::GetSafepointEntry(Address pc) {
10228 SafepointTable table(this);
10229 return table.FindEntry(pc);
// Returns the n-th embedded heap object whose map equals 'match_map',
// scanning the relocation info in order.
10233 Object* Code::FindNthObject(int n, Map* match_map) {
10234 DCHECK(is_inline_cache_stub());
10235 DisallowHeapAllocation no_allocation;
10236 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10237 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10238 RelocInfo* info = it.rinfo();
10239 Object* object = info->target_object();
10240 if (object->IsHeapObject()) {
10241 if (HeapObject::cast(object)->map() == match_map) {
10242 if (--n == 0) return object;
// Returns the first embedded AllocationSite, or NULL if there is none.
10250 AllocationSite* Code::FindFirstAllocationSite() {
10251 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10252 return (result != NULL) ? AllocationSite::cast(result) : NULL;
// Returns the first embedded Map (an object whose map is the meta map),
// or NULL if there is none.
10256 Map* Code::FindFirstMap() {
10257 Object* result = FindNthObject(1, GetHeap()->meta_map());
10258 return (result != NULL) ? Map::cast(result) : NULL;
// Replaces embedded objects in order according to 'pattern': the current
// find/replace pair advances each time a matching object is patched, and the
// scan stops once all pattern entries have been consumed.
10262 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
10263 DCHECK(is_inline_cache_stub() || is_handler());
10264 DisallowHeapAllocation no_allocation;
10265 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10266 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
10267 int current_pattern = 0;
10268 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10269 RelocInfo* info = it.rinfo();
10270 Object* object = info->target_object();
10271 if (object->IsHeapObject()) {
// Objects are matched by their map, not by identity.
10272 Map* map = HeapObject::cast(object)->map();
10273 if (map == *pattern.find_[current_pattern]) {
10274 info->set_target_object(*pattern.replace_[current_pattern]);
10275 if (++current_pattern == pattern.count_) return;
// Collects handles to all Maps embedded in this IC stub into 'maps'.
10283 void Code::FindAllMaps(MapHandleList* maps) {
10284 DCHECK(is_inline_cache_stub());
10285 DisallowHeapAllocation no_allocation;
10286 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10287 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10288 RelocInfo* info = it.rinfo();
10289 Object* object = info->target_object();
10290 if (object->IsMap()) maps->Add(handle(Map::cast(object)));
// Returns the first code target of kind HANDLER referenced by this IC stub.
10295 Code* Code::FindFirstHandler() {
10296 DCHECK(is_inline_cache_stub());
10297 DisallowHeapAllocation no_allocation;
10298 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10299 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10300 RelocInfo* info = it.rinfo();
10301 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10302 if (code->kind() == Code::HANDLER) return code;
// Collects up to 'length' handler code targets from this IC stub into
// 'code_list'. Returns true iff exactly 'length' handlers were found.
10308 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
10309 DCHECK(is_inline_cache_stub());
10310 DisallowHeapAllocation no_allocation;
10311 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10313 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10314 if (i == length) return true;
10315 RelocInfo* info = it.rinfo();
10316 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10317 // IC stubs with handlers never contain non-handler code objects before
10318 // handler targets.
10319 if (code->kind() != Code::HANDLER) break;
10320 code_list->Add(Handle<Code>(code));
10323 return i == length;
// Finds the handler associated with 'map' in this IC stub. Relies on the
// reloc layout pairing each embedded map with the code target that follows
// it; returns an empty MaybeHandle when no pairing for 'map' exists.
10327 MaybeHandle<Code> Code::FindHandlerForMap(Map* map) {
10328 DCHECK(is_inline_cache_stub());
10329 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10330 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10331 bool return_next = false;
10332 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10333 RelocInfo* info = it.rinfo();
10334 if (info->rmode() == RelocInfo::EMBEDDED_OBJECT) {
10335 Object* object = info->target_object();
// Found the map; the next code target is its handler.
10336 if (object == map) return_next = true;
10337 } else if (return_next) {
10338 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10339 DCHECK(code->kind() == Code::HANDLER);
10340 return handle(code);
10343 return MaybeHandle<Code>();
// Returns the first embedded Name object referenced by this IC stub.
10347 Name* Code::FindFirstName() {
10348 DCHECK(is_inline_cache_stub());
10349 DisallowHeapAllocation no_allocation;
10350 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10351 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10352 RelocInfo* info = it.rinfo();
10353 Object* object = info->target_object();
10354 if (object->IsName()) return Name::cast(object);
// Clears all inline caches in this code object, regardless of IC kind.
10360 void Code::ClearInlineCaches() {
10361 ClearInlineCaches(NULL);
// Clears only inline caches of the given kind.
10365 void Code::ClearInlineCaches(Code::Kind kind) {
10366 ClearInlineCaches(&kind);
// Worker for the public ClearInlineCaches overloads: walks all call-like
// reloc targets and clears each IC stub, optionally filtered by *kind
// (kind == NULL clears every IC).
10370 void Code::ClearInlineCaches(Code::Kind* kind) {
10371 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10372 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10373 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
10374 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10375 RelocInfo* info = it.rinfo();
10376 Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10377 if (target->is_inline_cache_stub()) {
10378 if (kind == NULL || *kind == target->kind()) {
10379 IC::Clear(this->GetIsolate(), info->pc(),
10380 info->host()->constant_pool());
// Resets the feedback vector slots to the uninitialized sentinel, except
// AllocationSites which are deliberately preserved.
10387 void SharedFunctionInfo::ClearTypeFeedbackInfo() {
10388 TypeFeedbackVector* vector = feedback_vector();
10389 Heap* heap = GetHeap();
10390 int length = vector->length();
10392 for (int i = 0; i < length; i++) {
10393 Object* obj = vector->get(i);
10394 if (obj->IsHeapObject()) {
10395 InstanceType instance_type =
10396 HeapObject::cast(obj)->map()->instance_type();
10397 switch (instance_type) {
10398 case ALLOCATION_SITE_TYPE:
10399 // AllocationSites are not cleared because they do not store
10400 // information that leaks.
// The sentinel is a root, so the write barrier can be skipped.
10404 vector->set(i, TypeFeedbackVector::RawUninitializedSentinel(heap),
10405 SKIP_WRITE_BARRIER);
// Maps a back-edge pc offset to its AST id via the back edge table;
// returns BailoutId::None() if the offset is not present.
10412 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
10413 DisallowHeapAllocation no_gc;
10414 DCHECK(kind() == FUNCTION);
10415 BackEdgeTable back_edges(this, &no_gc);
10416 for (uint32_t i = 0; i < back_edges.length(); i++) {
10417 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10419 return BailoutId::None();
// Inverse of TranslatePcOffsetToAstId: maps an AST id to its back-edge pc
// offset. The id is required to exist in the table.
10423 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10424 DisallowHeapAllocation no_gc;
10425 DCHECK(kind() == FUNCTION);
10426 BackEdgeTable back_edges(this, &no_gc);
10427 for (uint32_t i = 0; i < back_edges.length(); i++) {
10428 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10430 UNREACHABLE(); // We expect to find the back edge.
// Patches a code age sequence back to the "young" (no-age) state.
10435 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10436 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
// Patches a code age sequence to record that the code has been executed once.
10440 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
10441 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
10442 NO_MARKING_PARITY);
// Normalizes the special execution-count ages into the regular age scale
// used by the aging machinery.
10446 static Code::Age EffectiveAge(Code::Age age) {
10447 if (age == Code::kNotExecutedCodeAge) {
10448 // Treat code that's never been executed as old immediately.
10449 age = Code::kIsOldCodeAge;
10450 } else if (age == Code::kExecutedOnceCodeAge) {
10451 // Pre-age code that has only been executed once.
10452 age = Code::kPreAgedCodeAge;
// Advances this code object's age by one step, but only once per GC cycle:
// the marking parity check ensures the same cycle does not age code twice.
10458 void Code::MakeOlder(MarkingParity current_parity) {
10459 byte* sequence = FindCodeAgeSequence();
10460 if (sequence != NULL) {
10462 MarkingParity code_parity;
10463 Isolate* isolate = GetIsolate();
10464 GetCodeAgeAndParity(isolate, sequence, &age, &code_parity);
10465 age = EffectiveAge(age);
10466 if (age != kLastCodeAge && code_parity != current_parity) {
10467 PatchPlatformCodeAge(isolate,
10469 static_cast<Age>(age + 1),
// True when the code's effective age has reached the "old" threshold.
10476 bool Code::IsOld() {
10477 return GetAge() >= kIsOldCodeAge;
// Returns the address of the code age sequence (the patched prologue), or
// NULL when aging does not apply: aging must be enabled, a prologue offset
// recorded, and the code must be optimized or full-code without debug slots.
10481 byte* Code::FindCodeAgeSequence() {
10482 return FLAG_age_code &&
10483 prologue_offset() != Code::kPrologueOffsetNotSet &&
10484 (kind() == OPTIMIZED_FUNCTION ||
10485 (kind() == FUNCTION && !has_debug_break_slots()))
10486 ? instruction_start() + prologue_offset()
// Returns the normalized (effective) age of this code object.
10491 Code::Age Code::GetAge() {
10492 return EffectiveAge(GetRawAge());
// Reads the raw age encoded in the code age sequence; code without an age
// sequence reports kNoAgeCodeAge.
10496 Code::Age Code::GetRawAge() {
10497 byte* sequence = FindCodeAgeSequence();
10498 if (sequence == NULL) {
10499 return kNoAgeCodeAge;
10502 MarkingParity parity;
10503 GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity);
// Decodes age and marking parity by comparing 'code' against every known
// code-age builtin stub (one per age and parity, plus the two
// executed-once/twice markers).
10508 void Code::GetCodeAgeAndParity(Code* code, Age* age,
10509 MarkingParity* parity) {
10510 Isolate* isolate = code->GetIsolate();
10511 Builtins* builtins = isolate->builtins();
10513 #define HANDLE_CODE_AGE(AGE) \
10514 stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
10515 if (code == stub) { \
10516 *age = k##AGE##CodeAge; \
10517 *parity = EVEN_MARKING_PARITY; \
10520 stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10521 if (code == stub) { \
10522 *age = k##AGE##CodeAge; \
10523 *parity = ODD_MARKING_PARITY; \
10526 CODE_AGE_LIST(HANDLE_CODE_AGE)
10527 #undef HANDLE_CODE_AGE
10528 stub = *builtins->MarkCodeAsExecutedOnce();
10529 if (code == stub) {
10530 *age = kNotExecutedCodeAge;
10531 *parity = NO_MARKING_PARITY;
10534 stub = *builtins->MarkCodeAsExecutedTwice();
10535 if (code == stub) {
10536 *age = kExecutedOnceCodeAge;
10537 *parity = NO_MARKING_PARITY;
// Inverse of GetCodeAgeAndParity: returns the builtin stub that encodes the
// given age/parity combination.
10544 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
10545 Builtins* builtins = isolate->builtins();
10547 #define HANDLE_CODE_AGE(AGE) \
10548 case k##AGE##CodeAge: { \
10549 Code* stub = parity == EVEN_MARKING_PARITY \
10550 ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
10551 : *builtins->Make##AGE##CodeYoungAgainOddMarking() \
10554 CODE_AGE_LIST(HANDLE_CODE_AGE)
10555 #undef HANDLE_CODE_AGE
10556 case kNotExecutedCodeAge: {
10557 DCHECK(parity == NO_MARKING_PARITY);
10558 return *builtins->MarkCodeAsExecutedOnce();
10560 case kExecutedOnceCodeAge: {
10561 DCHECK(parity == NO_MARKING_PARITY);
10562 return *builtins->MarkCodeAsExecutedTwice();
// Prints the most recent reloc COMMENT preceding the runtime entry whose
// deoptimization id (eager, soft, or lazy) matches 'bailout_id'.
10572 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
10573 const char* last_comment = NULL;
10574 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
10575 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
10576 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10577 RelocInfo* info = it.rinfo();
10578 if (info->rmode() == RelocInfo::COMMENT) {
// COMMENT entries store a C string pointer in their data field.
10579 last_comment = reinterpret_cast<const char*>(info->data());
10580 } else if (last_comment != NULL) {
10581 if ((bailout_id == Deoptimizer::GetDeoptimizationId(
10582 GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
10583 (bailout_id == Deoptimizer::GetDeoptimizationId(
10584 GetIsolate(), info->target_address(), Deoptimizer::SOFT)) ||
10585 (bailout_id == Deoptimizer::GetDeoptimizationId(
10586 GetIsolate(), info->target_address(), Deoptimizer::LAZY))) {
10587 CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
10588 PrintF(out, " %s\n", last_comment);
// True if 'pc' is one of the recorded deoptimization points of this code.
// Entries with pc value -1 are placeholders and are skipped.
10596 bool Code::CanDeoptAt(Address pc) {
10597 DeoptimizationInputData* deopt_data =
10598 DeoptimizationInputData::cast(deoptimization_data());
10599 Address code_start_address = instruction_start();
10600 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
10601 if (deopt_data->Pc(i)->value() == -1) continue;
10602 Address address = code_start_address + deopt_data->Pc(i)->value();
10603 if (address == pc) return true;
10609 // Identify kind of code.
// Returns the string name of a Code::Kind enum value.
10610 const char* Code::Kind2String(Kind kind) {
10612 #define CASE(name) case name: return #name;
10613 CODE_KIND_LIST(CASE)
10615 case NUMBER_OF_KINDS: break;
10622 #ifdef ENABLE_DISASSEMBLER
// Pretty-prints the deoptimization input data: one row per deopt point with
// AST id, argument count, and pc; with --print-code-verbose each point's
// full frame translation is decoded opcode by opcode.
10624 void DeoptimizationInputData::DeoptimizationInputDataPrint(
10625 OStream& os) { // NOLINT
10626 disasm::NameConverter converter;
10627 int deopt_count = DeoptCount();
10628 os << "Deoptimization Input Data (deopt points = " << deopt_count << ")\n";
10629 if (0 != deopt_count) {
10630 os << " index ast id argc pc";
10631 if (FLAG_print_code_verbose) os << " commands";
10634 for (int i = 0; i < deopt_count; i++) {
10635 // TODO(svenpanne) Add some basic formatting to our streams.
10636 Vector<char> buf1 = Vector<char>::New(128);
10637 SNPrintF(buf1, "%6d %6d %6d %6d", i, AstId(i).ToInt(),
10638 ArgumentsStackHeight(i)->value(), Pc(i)->value());
10639 os << buf1.start();
10641 if (!FLAG_print_code_verbose) {
10645 // Print details of the frame translation.
10646 int translation_index = TranslationIndex(i)->value();
10647 TranslationIterator iterator(TranslationByteArray(), translation_index);
10648 Translation::Opcode opcode =
10649 static_cast<Translation::Opcode>(iterator.Next());
10650 DCHECK(Translation::BEGIN == opcode);
10651 int frame_count = iterator.Next();
10652 int jsframe_count = iterator.Next();
10653 os << " " << Translation::StringFor(opcode)
10654 << " {frame count=" << frame_count
10655 << ", js frame count=" << jsframe_count << "}\n";
// Decode each translation command until the next BEGIN (or end of stream).
10657 while (iterator.HasNext() &&
10658 Translation::BEGIN !=
10659 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
10660 Vector<char> buf2 = Vector<char>::New(128);
10661 SNPrintF(buf2, "%27s %s ", "", Translation::StringFor(opcode));
10662 os << buf2.start();
10665 case Translation::BEGIN:
10669 case Translation::JS_FRAME: {
10670 int ast_id = iterator.Next();
10671 int function_id = iterator.Next();
10672 unsigned height = iterator.Next();
10673 os << "{ast_id=" << ast_id << ", function=";
// kSelfLiteralId means the frame's function is the code's own function.
10674 if (function_id != Translation::kSelfLiteralId) {
10675 Object* function = LiteralArray()->get(function_id);
10676 os << Brief(JSFunction::cast(function)->shared()->DebugName());
10680 os << ", height=" << height << "}";
10684 case Translation::COMPILED_STUB_FRAME: {
10685 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
10686 os << "{kind=" << stub_kind << "}";
10690 case Translation::ARGUMENTS_ADAPTOR_FRAME:
10691 case Translation::CONSTRUCT_STUB_FRAME: {
10692 int function_id = iterator.Next();
10693 JSFunction* function =
10694 JSFunction::cast(LiteralArray()->get(function_id));
10695 unsigned height = iterator.Next();
10696 os << "{function=" << Brief(function->shared()->DebugName())
10697 << ", height=" << height << "}";
10701 case Translation::GETTER_STUB_FRAME:
10702 case Translation::SETTER_STUB_FRAME: {
10703 int function_id = iterator.Next();
10704 JSFunction* function =
10705 JSFunction::cast(LiteralArray()->get(function_id));
10706 os << "{function=" << Brief(function->shared()->DebugName()) << "}";
10710 case Translation::REGISTER: {
10711 int reg_code = iterator.Next();
10712 os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
10716 case Translation::INT32_REGISTER: {
10717 int reg_code = iterator.Next();
10718 os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
10722 case Translation::UINT32_REGISTER: {
10723 int reg_code = iterator.Next();
10724 os << "{input=" << converter.NameOfCPURegister(reg_code)
10729 case Translation::DOUBLE_REGISTER: {
10730 int reg_code = iterator.Next();
10731 os << "{input=" << DoubleRegister::AllocationIndexToString(reg_code)
10736 case Translation::STACK_SLOT: {
10737 int input_slot_index = iterator.Next();
10738 os << "{input=" << input_slot_index << "}";
10742 case Translation::INT32_STACK_SLOT: {
10743 int input_slot_index = iterator.Next();
10744 os << "{input=" << input_slot_index << "}";
10748 case Translation::UINT32_STACK_SLOT: {
10749 int input_slot_index = iterator.Next();
10750 os << "{input=" << input_slot_index << " (unsigned)}";
10754 case Translation::DOUBLE_STACK_SLOT: {
10755 int input_slot_index = iterator.Next();
10756 os << "{input=" << input_slot_index << "}";
10760 case Translation::LITERAL: {
10761 unsigned literal_index = iterator.Next();
10762 os << "{literal_id=" << literal_index << "}";
10766 case Translation::DUPLICATED_OBJECT: {
10767 int object_index = iterator.Next();
10768 os << "{object_index=" << object_index << "}";
10772 case Translation::ARGUMENTS_OBJECT:
10773 case Translation::CAPTURED_OBJECT: {
10774 int args_length = iterator.Next();
10775 os << "{length=" << args_length << "}";
// Pretty-prints the deoptimization output data: one row per deopt point
// with AST id, decoded pc, and full-codegen state.
10785 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(
10786 OStream& os) { // NOLINT
10787 os << "Deoptimization Output Data (deopt points = " << this->DeoptPoints()
10789 if (this->DeoptPoints() == 0) return;
10791 os << "ast id pc state\n";
10792 for (int i = 0; i < this->DeoptPoints(); i++) {
// pc and state are packed into one Smi; decode both fields below.
10793 int pc_and_state = this->PcAndState(i)->value();
10794 // TODO(svenpanne) Add some basic formatting to our streams.
10795 Vector<char> buf = Vector<char>::New(100);
10796 SNPrintF(buf, "%6d %8d %s\n", this->AstId(i).ToInt(),
10797 FullCodeGenerator::PcField::decode(pc_and_state),
10798 FullCodeGenerator::State2String(
10799 FullCodeGenerator::StateField::decode(pc_and_state)));
// Returns the string name of an InlineCacheState enum value.
10805 const char* Code::ICState2String(InlineCacheState state) {
10807 case UNINITIALIZED: return "UNINITIALIZED";
10808 case PREMONOMORPHIC: return "PREMONOMORPHIC";
10809 case MONOMORPHIC: return "MONOMORPHIC";
10810 case PROTOTYPE_FAILURE:
10811 return "PROTOTYPE_FAILURE";
10812 case POLYMORPHIC: return "POLYMORPHIC";
10813 case MEGAMORPHIC: return "MEGAMORPHIC";
10814 case GENERIC: return "GENERIC";
10815 case DEBUG_STUB: return "DEBUG_STUB";
// Returns the string name of a StubType enum value.
10824 const char* Code::StubType2String(StubType type) {
10826 case NORMAL: return "NORMAL";
10827 case FAST: return "FAST";
10829 UNREACHABLE(); // keep the compiler happy
// Prints the extra IC state; store ICs in strict mode get special handling,
// all other states are printed numerically.
10834 void Code::PrintExtraICState(OStream& os, // NOLINT
10835 Kind kind, ExtraICState extra) {
10836 os << "extra_ic_state = ";
10837 if ((kind == STORE_IC || kind == KEYED_STORE_IC) && (extra == STRICT)) {
10840 os << extra << "\n";
// Disassembles this code object to 'os': header metadata (kind, IC state,
// compare-IC details), decoded instructions, deopt data, safepoints or back
// edges depending on the code kind, and finally the raw reloc info.
10845 void Code::Disassemble(const char* name, OStream& os) { // NOLINT
10846 os << "kind = " << Kind2String(kind()) << "\n";
10847 if (IsCodeStubOrIC()) {
10848 const char* n = CodeStub::MajorName(CodeStub::GetMajorKey(this), true);
10849 os << "major_key = " << (n == NULL ? "null" : n) << "\n";
10851 if (is_inline_cache_stub()) {
10852 os << "ic_state = " << ICState2String(ic_state()) << "\n";
10853 PrintExtraICState(os, kind(), extra_ic_state());
10854 if (ic_state() == MONOMORPHIC) {
10855 os << "type = " << StubType2String(type()) << "\n";
10857 if (is_compare_ic_stub()) {
10858 DCHECK(CodeStub::GetMajorKey(this) == CodeStub::CompareIC);
10859 CompareICStub stub(stub_key(), GetIsolate());
10860 os << "compare_state = " << CompareICState::GetStateName(stub.left())
10861 << "*" << CompareICState::GetStateName(stub.right()) << " -> "
10862 << CompareICState::GetStateName(stub.state()) << "\n";
10863 os << "compare_operation = " << Token::Name(stub.op()) << "\n";
10866 if ((name != NULL) && (name[0] != '\0')) {
10867 os << "name = " << name << "\n";
10869 if (kind() == OPTIMIZED_FUNCTION) {
10870 os << "stack_slots = " << stack_slots() << "\n";
10873 os << "Instructions (size = " << instruction_size() << ")\n";
10874 // TODO(svenpanne) The Disassembler should use streams, too!
10876 CodeTracer::Scope trace_scope(GetIsolate()->GetCodeTracer());
10877 Disassembler::Decode(trace_scope.file(), this);
// Deoptimization data: output data for full codegen, input data for
// optimized code.
10881 if (kind() == FUNCTION) {
10882 DeoptimizationOutputData* data =
10883 DeoptimizationOutputData::cast(this->deoptimization_data());
10884 data->DeoptimizationOutputDataPrint(os);
10885 } else if (kind() == OPTIMIZED_FUNCTION) {
10886 DeoptimizationInputData* data =
10887 DeoptimizationInputData::cast(this->deoptimization_data());
10888 data->DeoptimizationInputDataPrint(os);
10892 if (is_crankshafted()) {
10893 SafepointTable table(this);
10894 os << "Safepoints (size = " << table.size() << ")\n";
10895 for (unsigned i = 0; i < table.length(); i++) {
10896 unsigned pc_offset = table.GetPcOffset(i);
10897 os << (instruction_start() + pc_offset) << " ";
10898 // TODO(svenpanne) Add some basic formatting to our streams.
10899 Vector<char> buf1 = Vector<char>::New(30);
10900 SNPrintF(buf1, "%4d", pc_offset);
10901 os << buf1.start() << " ";
10902 table.PrintEntry(i, os);
10903 os << " (sp -> fp) ";
10904 SafepointEntry entry = table.GetEntry(i);
10905 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
10906 Vector<char> buf2 = Vector<char>::New(30);
10907 SNPrintF(buf2, "%6d", entry.deoptimization_index());
10908 os << buf2.start();
10912 if (entry.argument_count() > 0) {
10913 os << " argc: " << entry.argument_count();
10918 } else if (kind() == FUNCTION) {
10919 unsigned offset = back_edge_table_offset();
10920 // If there is no back edge table, the "table start" will be at or after
10921 // (due to alignment) the end of the instruction stream.
10922 if (static_cast<int>(offset) < instruction_size()) {
10923 DisallowHeapAllocation no_gc;
10924 BackEdgeTable back_edges(this, &no_gc);
10926 os << "Back edges (size = " << back_edges.length() << ")\n";
10927 os << "ast_id pc_offset loop_depth\n";
10929 for (uint32_t i = 0; i < back_edges.length(); i++) {
10930 Vector<char> buf = Vector<char>::New(100);
10931 SNPrintF(buf, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
10932 back_edges.pc_offset(i), back_edges.loop_depth(i));
10938 #ifdef OBJECT_PRINT
10939 if (!type_feedback_info()->IsUndefined()) {
10940 OFStream os(stdout);
10941 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(os);
10947 os << "RelocInfo (size = " << relocation_size() << ")\n";
10948 for (RelocIterator it(this); !it.done(); it.next()) {
10949 it.rinfo()->Print(GetIsolate(), os);
10953 #endif // ENABLE_DISASSEMBLER
// Grows (or shrinks) 'object's fast elements backing store to 'capacity',
// choosing an appropriate SMI/object, holey/packed elements kind, copying
// the old elements, and transitioning the map when the kind changes.
// Returns the new backing store.
10956 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
10957 Handle<JSObject> object,
10960 SetFastElementsCapacitySmiMode smi_mode) {
10961 // We should never end in here with a pixel or external array.
10962 DCHECK(!object->HasExternalArrayElements());
10964 // Allocate a new fast elements backing store.
10965 Handle<FixedArray> new_elements =
10966 object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
10968 ElementsKind elements_kind = object->GetElementsKind();
10969 ElementsKind new_elements_kind;
10970 // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
10971 // or if it's allowed and the old elements array contained only SMIs.
10972 bool has_fast_smi_elements =
10973 (smi_mode == kForceSmiElements) ||
10974 ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
10975 if (has_fast_smi_elements) {
10976 if (IsHoleyElementsKind(elements_kind)) {
10977 new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
10979 new_elements_kind = FAST_SMI_ELEMENTS;
10982 if (IsHoleyElementsKind(elements_kind)) {
10983 new_elements_kind = FAST_HOLEY_ELEMENTS;
10985 new_elements_kind = FAST_ELEMENTS;
10988 Handle<FixedArrayBase> old_elements(object->elements());
10989 ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
10990 accessor->CopyElements(object, new_elements, elements_kind);
10992 if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
10993 Handle<Map> new_map = (new_elements_kind != elements_kind)
10994 ? GetElementsTransitionMap(object, new_elements_kind)
10995 : handle(object->map());
10996 JSObject::ValidateElements(object);
10997 JSObject::SetMapAndElements(object, new_map, new_elements);
10999 // Transition through the allocation site as well if present.
11000 JSObject::UpdateAllocationSite(object, new_elements_kind);
// Sloppy-arguments objects keep their parameter map; only slot 1 (the
// arguments backing store) is replaced.
11002 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11003 parameter_map->set(1, *new_elements);
11006 if (FLAG_trace_elements_transitions) {
11007 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11008 object->GetElementsKind(), new_elements);
11011 if (object->IsJSArray()) {
11012 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11014 return new_elements;
// Like SetFastElementsCapacityAndLength, but transitions the object to a
// FAST_(HOLEY_)DOUBLE_ELEMENTS backing store of the given capacity.
11018 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11021 // We should never end in here with a pixel or external array.
11022 DCHECK(!object->HasExternalArrayElements());
11024 Handle<FixedArrayBase> elems =
11025 object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11027 ElementsKind elements_kind = object->GetElementsKind();
11028 CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11029 ElementsKind new_elements_kind = elements_kind;
// Preserve holeyness in the target double elements kind.
11030 if (IsHoleyElementsKind(elements_kind)) {
11031 new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11033 new_elements_kind = FAST_DOUBLE_ELEMENTS;
11036 Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11038 Handle<FixedArrayBase> old_elements(object->elements());
11039 ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11040 accessor->CopyElements(object, elems, elements_kind);
11042 JSObject::ValidateElements(object);
11043 JSObject::SetMapAndElements(object, new_map, elems);
11045 if (FLAG_trace_elements_transitions) {
11046 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11047 object->GetElementsKind(), elems);
11050 if (object->IsJSArray()) {
11051 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
// Allocates hole-initialized element storage of 'capacity' for the array
// and sets its length to 'length'.
11057 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
11058 DCHECK(capacity >= 0);
11059 array->GetIsolate()->factory()->NewJSArrayStorage(
11060 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
// Grows the array so that both capacity and length are 'required_size'.
11064 void JSArray::Expand(Handle<JSArray> array, int required_size) {
11065 ElementsAccessor* accessor = array->GetElementsAccessor();
11066 accessor->SetCapacityAndLength(array, required_size, required_size);
11070 // Returns false if the passed-in index is marked non-configurable,
11071 // which will cause the ES5 truncation operation to halt, and thus
11072 // no further old values need be collected.
11073 static bool GetOldValue(Isolate* isolate,
11074 Handle<JSObject> object,
11076 List<Handle<Object> >* old_values,
11077 List<uint32_t>* indices) {
11078 Maybe<PropertyAttributes> maybe =
11079 JSReceiver::GetOwnElementAttribute(object, index);
11080 DCHECK(maybe.has_value);
11081 DCHECK(maybe.value != ABSENT);
11082 if (maybe.value == DONT_DELETE) return false;
11083 Handle<Object> value;
// Accessor-backed elements are recorded as the hole rather than invoking
// the getter.
11084 if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) {
11085 value = Handle<Object>::cast(isolate->factory()->the_hole_value());
11087 value = Object::GetElement(isolate, object, index).ToHandleChecked();
11089 old_values->Add(value);
11090 indices->Add(index);
// Enqueues an Object.observe "splice" change record for 'object' describing
// removal of 'deleted' and insertion of 'add_count' elements at 'index'.
11094 static void EnqueueSpliceRecord(Handle<JSArray> object,
11096 Handle<JSArray> deleted,
11097 uint32_t add_count) {
11098 Isolate* isolate = object->GetIsolate();
11099 HandleScope scope(isolate);
11100 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
11101 Handle<Object> add_count_object =
11102 isolate->factory()->NewNumberFromUint(add_count);
11104 Handle<Object> args[] =
11105 { object, index_object, deleted, add_count_object };
// Delegates to the JS observers_enqueue_splice helper.
11107 Execution::Call(isolate,
11108 Handle<JSFunction>(isolate->observers_enqueue_splice()),
11109 isolate->factory()->undefined_value(),
// Notifies the observation machinery that a compound splice operation on
// 'object' is starting (change records are batched until EndPerformSplice).
11115 static void BeginPerformSplice(Handle<JSArray> object) {
11116 Isolate* isolate = object->GetIsolate();
11117 HandleScope scope(isolate);
11118 Handle<Object> args[] = { object };
11120 Execution::Call(isolate,
11121 Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11122 isolate->factory()->undefined_value(),
// Counterpart of BeginPerformSplice: marks the end of the compound splice
// operation on 'object'.
11128 static void EndPerformSplice(Handle<JSArray> object) {
11129 Isolate* isolate = object->GetIsolate();
11130 HandleScope scope(isolate);
11131 Handle<Object> args[] = { object };
11133 Execution::Call(isolate,
11134 Handle<JSFunction>(isolate->observers_end_perform_splice()),
11135 isolate->factory()->undefined_value(),
// Sets the |length| property of |array| to |new_length_handle|.
// Fast path: delegate directly to the elements accessor when the array is
// not observed.  Observed path: capture old values for all to-be-removed
// elements, perform the resize, then emit "delete"/"update" change records
// and a synthetic splice record for Object.observe.
11141 MaybeHandle<Object> JSArray::SetElementsLength(
11142 Handle<JSArray> array,
11143 Handle<Object> new_length_handle) {
11144 if (array->HasFastElements()) {
11145 // If the new array won't fit in some non-trivial fraction of the max old
11146 // space size, then force it to go dictionary mode.
11147 int max_fast_array_size = static_cast<int>(
11148 (array->GetHeap()->MaxOldGenerationSize() / kDoubleSize) / 4);
11149 if (new_length_handle->IsNumber() &&
11150 NumberToInt32(*new_length_handle) >= max_fast_array_size) {
11151 NormalizeElements(array);
11155 // We should never end in here with a pixel or external array.
11156 DCHECK(array->AllowsSetElementsLength());
// Unobserved arrays take the fast path: no change records needed.
11157 if (!array->map()->is_observed()) {
11158 return array->GetElementsAccessor()->SetLength(array, new_length_handle);
11161 Isolate* isolate = array->GetIsolate();
11162 List<uint32_t> indices;
11163 List<Handle<Object> > old_values;
11164 Handle<Object> old_length_handle(array->length(), isolate);
11165 uint32_t old_length = 0;
11166 CHECK(old_length_handle->ToArrayIndex(&old_length));
11167 uint32_t new_length = 0;
11168 CHECK(new_length_handle->ToArrayIndex(&new_length));
11170 static const PropertyAttributes kNoAttrFilter = NONE;
11171 int num_elements = array->NumberOfOwnElements(kNoAttrFilter);
11172 if (num_elements > 0) {
11173 if (old_length == static_cast<uint32_t>(num_elements)) {
11174 // Simple case for arrays without holes.
11175 for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
11176 if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
11179 // For sparse arrays, only iterate over existing elements.
11180 // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
11181 // the to-be-removed indices twice.
11182 Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
11183 array->GetOwnElementKeys(*keys, kNoAttrFilter);
11184 while (num_elements-- > 0) {
11185 uint32_t index = NumberToUint32(keys->get(num_elements));
11186 if (index < new_length) break;
11187 if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
// Perform the actual length change, propagating any exception.
11192 Handle<Object> hresult;
11193 ASSIGN_RETURN_ON_EXCEPTION(
11195 array->GetElementsAccessor()->SetLength(array, new_length_handle),
// Re-read the resulting length; the accessor may have clamped it.
11198 CHECK(array->length()->ToArrayIndex(&new_length));
11199 if (old_length == new_length) return hresult;
11201 BeginPerformSplice(array);
11203 for (int i = 0; i < indices.length(); ++i) {
11204 // For deletions where the property was an accessor, old_values[i]
11205 // will be the hole, which instructs EnqueueChangeRecord to elide
11206 // the "oldValue" property.
11207 JSObject::EnqueueChangeRecord(
11208 array, "delete", isolate->factory()->Uint32ToString(indices[i]),
11211 JSObject::EnqueueChangeRecord(
11212 array, "update", isolate->factory()->length_string(),
11213 old_length_handle);
11215 EndPerformSplice(array);
// Build the "deleted" array for the splice record from the captured values.
11217 uint32_t index = Min(old_length, new_length);
11218 uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
11219 uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
11220 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
11221 if (delete_count > 0) {
11222 for (int i = indices.length() - 1; i >= 0; i--) {
11223 // Skip deletions where the property was an accessor, leaving holes
11224 // in the array of old values.
11225 if (old_values[i]->IsTheHole()) continue;
11226 JSObject::SetElement(
11227 deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert();
11230 SetProperty(deleted, isolate->factory()->length_string(),
11231 isolate->factory()->NewNumberFromUint(delete_count),
11235 EnqueueSpliceRecord(array, index, deleted, add_count);
// Looks up the cached transition target for setting |prototype| on |map|.
// Scans the prototype-transition cache linearly and returns the cached map,
// or a null handle if no transition for this prototype has been recorded.
11241 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11242 Handle<Object> prototype) {
11243 FixedArray* cache = map->GetPrototypeTransitions();
11244 int number_of_transitions = map->NumberOfProtoTransitions();
// Cache layout: a header, then (prototype, map) pairs of |step| slots each.
11245 const int proto_offset =
11246 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11247 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11248 const int step = kProtoTransitionElementsPerEntry;
11249 for (int i = 0; i < number_of_transitions; i++) {
11250 if (cache->get(proto_offset + i * step) == *prototype) {
11251 Object* result = cache->get(map_offset + i * step);
11252 return Handle<Map>(Map::cast(result));
// Not found: callers must check is_null().
11255 return Handle<Map>();
// Records |target_map| as the cached transition for setting |prototype| on
// |map|, growing the prototype-transition cache if needed.  Returns |map|
// unchanged (caching is skipped for prototype maps, dictionary maps, when
// the flag is off, or when the cache is already at its maximum size).
11259 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
11260 Handle<Object> prototype,
11261 Handle<Map> target_map) {
11262 DCHECK(target_map->IsMap());
11263 DCHECK(HeapObject::cast(*prototype)->map()->IsMap());
11264 // Don't cache prototype transition if this map is either shared, or a map of
11266 if (map->is_prototype_map()) return map;
11267 if (map->is_dictionary_map() || !FLAG_cache_prototype_transitions) return map;
11269 const int step = kProtoTransitionElementsPerEntry;
11270 const int header = kProtoTransitionHeaderSize;
11272 Handle<FixedArray> cache(map->GetPrototypeTransitions());
11273 int capacity = (cache->length() - header) / step;
11274 int transitions = map->NumberOfProtoTransitions() + 1;
11276 if (transitions > capacity) {
11277 if (capacity > kMaxCachedPrototypeTransitions) return map;
11279 // Grow array by factor 2 over and above what we need.
11280 cache = FixedArray::CopySize(cache, transitions * 2 * step + header);
11282 SetPrototypeTransitions(map, cache);
11285 // Reload number of transitions as GC might shrink them.
11286 int last = map->NumberOfProtoTransitions();
11287 int entry = header + last * step;
// Append the new (prototype, target_map) pair and bump the count.
11289 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
11290 cache->set(entry + kProtoTransitionMapOffset, *target_map);
11291 map->SetNumberOfProtoTransitions(last + 1);
// Overwrites every slot of this map's transition array with the hole value,
// so stale pointers are caught by the heap verifier.
11297 void Map::ZapTransitions() {
11298 TransitionArray* transition_array = transitions();
11299 // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11300 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11301 Object** data = transition_array->data_start();
11302 Object* the_hole = GetHeap()->the_hole_value();
11303 int length = transition_array->length();
11304 for (int i = 0; i < length; i++) {
11305 data[i] = the_hole;
// Fills the prototype-transition cache with the hole value (same intent as
// ZapTransitions, but using the fast MemsetPointer).
11310 void Map::ZapPrototypeTransitions() {
11311 FixedArray* proto_transitions = GetPrototypeTransitions();
11312 MemsetPointer(proto_transitions->data_start(),
11313 GetHeap()->the_hole_value(),
11314 proto_transitions->length());
// Registers an in-progress compilation (|info|) as dependent on |map| under
// |group|, and records the dependency on the CompilationInfo side as well so
// it can be retracted if compilation is aborted.
11319 void Map::AddDependentCompilationInfo(Handle<Map> map,
11320 DependentCode::DependencyGroup group,
11321 CompilationInfo* info) {
11322 Handle<DependentCode> codes =
11323 DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
11324 group, info->object_wrapper());
// Insert may have reallocated the array; only write back if it changed.
11325 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
11326 info->dependencies(group)->Add(map, info->zone());
// Registers finished |code| as dependent on |map| under |group|, so the code
// can be deoptimized when the map-related assumption is invalidated.
11331 void Map::AddDependentCode(Handle<Map> map,
11332 DependentCode::DependencyGroup group,
11333 Handle<Code> code) {
11334 Handle<DependentCode> codes = DependentCode::Insert(
11335 Handle<DependentCode>(map->dependent_code()), group, code);
11336 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
// Adds an IC |stub| to this map's weak-IC dependent-code list.  The stubs in
// the kWeakICGroup form a linked list through next_code_link().
11341 void Map::AddDependentIC(Handle<Map> map,
11342 Handle<Code> stub) {
11343 DCHECK(stub->next_code_link()->IsUndefined());
11344 int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
11346 // Slow path: insert the head of the list with possible heap allocation.
11347 Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
11349 // Fast path: link the stub to the existing head of the list without any
11350 // heap allocation.
11352 map->dependent_code()->AddToDependentICList(stub);
// Computes the start index of each dependency group within |entries|.
11357 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
11358 Recompute(entries);
// Rebuilds the prefix sums of per-group entry counts: start_indexes_[g] is
// the index of the first entry of group g; start_indexes_[kGroupCount] is
// the total number of entries.
11362 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11363 start_indexes_[0] = 0;
11364 for (int g = 1; g <= kGroupCount; g++) {
11365 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11366 start_indexes_[g] = start_indexes_[g - 1] + count;
// Returns the DependentCode list attached to |object|, dispatching on the
// dependency |group|: PropertyCell for property-cell changes, AllocationSite
// for tenuring/transition changes, and Map for everything else.
11371 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11372 DependencyGroup group) {
11373 AllowDeferredHandleDereference dependencies_are_safe;
11374 if (group == DependentCode::kPropertyCellChangedGroup) {
11375 return Handle<PropertyCell>::cast(object)->dependent_code();
11376 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
11377 group == DependentCode::kAllocationSiteTransitionChangedGroup) {
11378 return Handle<AllocationSite>::cast(object)->dependent_code();
11380 return Handle<Map>::cast(object)->dependent_code();
// Inserts |object| (a Code or a CompilationInfo wrapper) into |entries| under
// |group|, deduplicating, and growing the backing fixed array when full.
// Returns the (possibly reallocated) DependentCode array.
11384 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
11385 DependencyGroup group,
11386 Handle<Object> object) {
11387 GroupStartIndexes starts(*entries);
11388 int start = starts.at(group);
11389 int end = starts.at(group + 1);
11390 int number_of_entries = starts.number_of_entries();
11391 // Check for existing entry to avoid duplicates.
11392 for (int i = start; i < end; i++) {
11393 if (entries->object_at(i) == *object) return entries;
// Grow when the array is full; capacity grows by ~25% beyond 5 entries.
11395 if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
11396 int capacity = kCodesStartIndex + number_of_entries + 1;
11397 if (capacity > 5) capacity = capacity * 5 / 4;
11398 Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
11399 FixedArray::CopySize(entries, capacity, TENURED));
11400 // The number of codes can change after GC.
11401 starts.Recompute(*entries);
11402 start = starts.at(group);
11403 end = starts.at(group + 1);
11404 number_of_entries = starts.number_of_entries();
// Clear the old array so stale entries are not kept alive.
11405 for (int i = 0; i < number_of_entries; i++) {
11406 entries->clear_at(i);
11408 // If the old fixed array was empty, we need to reset counters of the
11410 if (number_of_entries == 0) {
11411 for (int g = 0; g < kGroupCount; g++) {
11412 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
11415 entries = new_entries;
// Shift later groups up by one slot and store the new entry at |end|.
11417 entries->ExtendGroup(group);
11418 entries->set_object_at(end, *object);
11419 entries->set_number_of_entries(group, end + 1 - start);
// Replaces the CompilationInfo wrapper entry for |info| in |group| with the
// finished code object, once compilation has completed.  The trailing loop
// asserts no other wrapper for |info| remains in the group.
11424 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11425 CompilationInfo* info,
11427 DisallowHeapAllocation no_gc;
11428 AllowDeferredHandleDereference get_object_wrapper;
11429 Foreign* info_wrapper = *info->object_wrapper();
11430 GroupStartIndexes starts(this);
11431 int start = starts.at(group);
11432 int end = starts.at(group + 1);
11433 for (int i = start; i < end; i++) {
11434 if (object_at(i) == info_wrapper) {
11435 set_object_at(i, code);
11441 for (int i = start; i < end; i++) {
11442 DCHECK(is_code_at(i) || compilation_info_at(i) != info);
// Removes the entry for aborted compilation |info| from |group|, compacting
// the array by cascading the last element of each subsequent group into the
// gap so all groups stay contiguous.
11448 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
11449 CompilationInfo* info) {
11450 DisallowHeapAllocation no_allocation;
11451 AllowDeferredHandleDereference get_object_wrapper;
11452 Foreign* info_wrapper = *info->object_wrapper();
11453 GroupStartIndexes starts(this);
11454 int start = starts.at(group);
11455 int end = starts.at(group + 1);
11456 // Find compilation info wrapper.
11458 for (int i = start; i < end; i++) {
11459 if (object_at(i) == info_wrapper) {
11464 if (info_pos == -1) return; // Not found.
11465 int gap = info_pos;
11466 // Use the last of each group to fill the gap in the previous group.
11467 for (int i = group; i < kGroupCount; i++) {
11468 int last_of_group = starts.at(i + 1) - 1;
11469 DCHECK(last_of_group >= gap);
11470 if (last_of_group == gap) continue;
11471 copy(last_of_group, gap);
11472 gap = last_of_group;
11474 DCHECK(gap == starts.number_of_entries() - 1);
11475 clear_at(gap); // Clear last gap.
11476 set_number_of_entries(group, end - start - 1);
// Verify no other wrapper for |info| remains in the shrunken group.
11479 for (int i = start; i < end - 1; i++) {
11480 DCHECK(is_code_at(i) || compilation_info_at(i) != info);
// Walks a next_code_link()-chained list starting at |head| and returns true
// if |code| is on it (the list is terminated by undefined).
11486 static bool CodeListContains(Object* head, Code* code) {
11487 while (!head->IsUndefined()) {
11488 if (head == code) return true;
11489 head = Code::cast(head)->next_code_link();
// Returns true if |code| is registered in |group|.  The weak-IC group stores
// a linked list in a single slot, so it is searched via CodeListContains;
// all other groups are scanned slot by slot.
11495 bool DependentCode::Contains(DependencyGroup group, Code* code) {
11496 GroupStartIndexes starts(this);
11497 int start = starts.at(group);
11498 int end = starts.at(group + 1);
11499 if (group == kWeakICGroup) {
11500 return CodeListContains(object_at(start), code);
11502 for (int i = start; i < end; i++) {
11503 if (object_at(i) == code) return true;
// Marks every code object in |group| for deoptimization (and aborts any
// still-compiling entries), then removes the group's entries and compacts
// the array.  Returns true if at least one code object was newly marked.
11509 bool DependentCode::MarkCodeForDeoptimization(
11511 DependentCode::DependencyGroup group) {
11512 DisallowHeapAllocation no_allocation_scope;
11513 DependentCode::GroupStartIndexes starts(this);
11514 int start = starts.at(group);
11515 int end = starts.at(group + 1);
11516 int code_entries = starts.number_of_entries();
11517 if (start == end) return false;
11519 // Mark all the code that needs to be deoptimized.
11520 bool marked = false;
11521 for (int i = start; i < end; i++) {
11522 if (is_code_at(i)) {
11523 Code* code = code_at(i);
11524 if (!code->marked_for_deoptimization()) {
11525 SetMarkedForDeoptimization(code, group);
// Entry is a CompilationInfo wrapper: abort the in-flight compilation.
11529 CompilationInfo* info = compilation_info_at(i);
11530 info->AbortDueToDependencyChange();
11533 // Compact the array by moving all subsequent groups to fill in the new holes.
11534 for (int src = end, dst = start; src < code_entries; src++, dst++) {
11537 // Now the holes are at the end of the array, zap them for heap-verifier.
11538 int removed = end - start;
11539 for (int i = code_entries - removed; i < code_entries; i++) {
11542 set_number_of_entries(group, 0);
// Marks all code in |group| for deoptimization and, if anything was marked,
// immediately deoptimizes it via Deoptimizer::DeoptimizeMarkedCode.
11547 void DependentCode::DeoptimizeDependentCodeGroup(
11549 DependentCode::DependencyGroup group) {
11550 DCHECK(AllowCodeDependencyChange::IsAllowed());
11551 DisallowHeapAllocation no_allocation_scope;
11552 bool marked = MarkCodeForDeoptimization(isolate, group);
11554 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
// Links |stub| into the weak-IC linked list stored in the kWeakICGroup slot.
// Inserting after the existing head avoids a write to the array when one is
// present.
11558 void DependentCode::AddToDependentICList(Handle<Code> stub) {
11559 DisallowHeapAllocation no_heap_allocation;
11560 GroupStartIndexes starts(this);
11561 int i = starts.at(kWeakICGroup);
11562 Object* head = object_at(i);
11563 // Try to insert the stub after the head of the list to minimize number of
11564 // writes to the DependentCode array, since a write to the array can make it
11565 // strong if it was already marked by incremental marker.
11566 if (head->IsCode()) {
11567 stub->set_next_code_link(Code::cast(head)->next_code_link());
11568 Code::cast(head)->set_next_code_link(*stub);
// No head yet: the stub becomes the new head of the list.
11570 stub->set_next_code_link(head);
11571 set_object_at(i, *stub);
// Flags |code| as marked-for-deoptimization and, under --trace-deopt, logs
// the code address, optimization id, and the dependency group responsible.
11576 void DependentCode::SetMarkedForDeoptimization(Code* code,
11577 DependencyGroup group) {
11578 code->set_marked_for_deoptimization(true);
11579 if (FLAG_trace_deopt &&
11580 (code->deoptimization_data() != code->GetHeap()->empty_fixed_array())) {
11581 DeoptimizationInputData* deopt_data =
11582 DeoptimizationInputData::cast(code->deoptimization_data());
11583 CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
11584 PrintF(scope.file(), "[marking dependent code 0x%08" V8PRIxPTR
11585 " (opt #%d) for deoptimization, reason: %s]\n",
11586 reinterpret_cast<intptr_t>(code),
11587 deopt_data->OptimizationId()->value(), DependencyGroupName(group));
// Maps a DependencyGroup enumerator to its human-readable name, used in
// deoptimization trace output.
11592 const char* DependentCode::DependencyGroupName(DependencyGroup group) {
11596 case kWeakCodeGroup:
11597 return "weak-code";
11598 case kTransitionGroup:
11599 return "transition";
11600 case kPrototypeCheckGroup:
11601 return "prototype-check";
11602 case kElementsCantBeAddedGroup:
11603 return "elements-cant-be-added";
11604 case kPropertyCellChangedGroup:
11605 return "property-cell-changed";
11606 case kFieldTypeGroup:
11607 return "field-type";
11608 case kInitialMapChangedGroup:
11609 return "initial-map-changed";
11610 case kAllocationSiteTenuringChangedGroup:
11611 return "allocation-site-tenuring-changed";
11612 case kAllocationSiteTransitionChangedGroup:
11613 return "allocation-site-transition-changed";
// Returns a map equal to |map| but with |prototype| as its prototype,
// reusing a cached prototype transition when one exists and otherwise
// copying the map, caching the transition, and setting the new prototype.
11620 Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
11621 Handle<Object> prototype) {
11622 Handle<Map> new_map = GetPrototypeTransition(map, prototype);
11623 if (new_map.is_null()) {
11624 new_map = Copy(map);
11625 PutPrototypeTransition(map, prototype, new_map);
11626 new_map->set_prototype(*prototype);
// Changes the [[Prototype]] of |object| to |value|.  Non-receiver, non-null
// values are silently ignored (SpiderMonkey compatibility).  Throws a
// TypeError for non-extensible objects and an error for prototype cycles.
// When |from_javascript| is set, the prototype is installed on the first
// object in the hidden-prototype chain whose prototype is not hidden.
11632 MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
11633 Handle<Object> value,
11634 bool from_javascript) {
// |size| is captured for the DCHECK at the end: the map transition below
// must not change the object's size.
11636 int size = object->Size();
11639 Isolate* isolate = object->GetIsolate();
11640 Heap* heap = isolate->heap();
11641 // Silently ignore the change if value is not a JSObject or null.
11642 // SpiderMonkey behaves this way.
11643 if (!value->IsJSReceiver() && !value->IsNull()) return value;
11645 // From 8.6.2 Object Internal Methods
11647 // In addition, if [[Extensible]] is false the value of the [[Class]] and
11648 // [[Prototype]] internal properties of the object may not be modified.
11650 // Implementation specific extensions that modify [[Class]], [[Prototype]]
11651 // or [[Extensible]] must not violate the invariants defined in the preceding
11653 if (!object->map()->is_extensible()) {
11654 Handle<Object> args[] = { object };
11655 THROW_NEW_ERROR(isolate, NewTypeError("non_extensible_proto",
11656 HandleVector(args, arraysize(args))),
11660 // Before we can set the prototype we need to be sure
11661 // prototype cycles are prevented.
11662 // It is sufficient to validate that the receiver is not in the new prototype
11664 for (PrototypeIterator iter(isolate, *value,
11665 PrototypeIterator::START_AT_RECEIVER);
11666 !iter.IsAtEnd(); iter.Advance()) {
11667 if (JSReceiver::cast(iter.GetCurrent()) == *object) {
11669 THROW_NEW_ERROR(isolate,
11670 NewError("cyclic_proto", HandleVector<Object>(NULL, 0)),
// Remember whether the chain already required element callbacks so we can
// detect a transition into that state after the prototype change.
11675 bool dictionary_elements_in_chain =
11676 object->map()->DictionaryElementsInPrototypeChainOnly();
11677 Handle<JSObject> real_receiver = object;
11679 if (from_javascript) {
11680 // Find the first object in the chain whose prototype object is not
11681 // hidden and set the new prototype on that object.
11682 PrototypeIterator iter(isolate, real_receiver);
11683 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
11685 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
11690 // Set the new prototype of the object.
11691 Handle<Map> map(real_receiver->map());
11693 // Nothing to do if prototype is already set.
11694 if (map->prototype() == *value) return value;
11696 if (value->IsJSObject()) {
11697 PrototypeOptimizationMode mode =
11698 from_javascript ? REGULAR_PROTOTYPE : FAST_PROTOTYPE;
11699 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value), mode);
11702 Handle<Map> new_map = Map::TransitionToPrototype(map, value);
11703 DCHECK(new_map->prototype() == *value);
11704 JSObject::MigrateToMap(real_receiver, new_map);
11706 if (!dictionary_elements_in_chain &&
11707 new_map->DictionaryElementsInPrototypeChainOnly()) {
11708 // If the prototype chain didn't previously have element callbacks, then
11709 // KeyedStoreICs need to be cleared to ensure any that involve this
11711 object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
// The instanceof cache may hold results that depend on the old prototype.
11714 heap->ClearInstanceofCache();
11715 DCHECK(size == object->Size());
// Adapter for stack-based |Arguments|: converts the backwards, stack-ordered
// argument slice [first_arg, first_arg + arg_count) into a forward pointer
// and forwards to the pointer-based EnsureCanContainElements overload.
11720 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
11722 uint32_t first_arg,
11723 uint32_t arg_count,
11724 EnsureElementsMode mode) {
11725 // Elements in |Arguments| are ordered backwards (because they're on the
11726 // stack), but the method that's called here iterates over them in forward
11728 return EnsureCanContainElements(
11729 object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
// Returns the AccessorPair for element |index| on |object| if one exists,
// or an empty MaybeHandle otherwise.  Global proxies delegate to the global
// object behind them; objects with indexed interceptors never report one.
11733 MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair(
11734 Handle<JSObject> object,
11736 if (object->IsJSGlobalProxy()) {
11737 PrototypeIterator iter(object->GetIsolate(), object);
11738 if (iter.IsAtEnd()) return MaybeHandle<AccessorPair>();
11739 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
11740 return GetOwnElementAccessorPair(
11741 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
11744 // Check for lookup interceptor.
11745 if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();
11747 return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
// Stores an element through the object's indexed interceptor.  If the
// interceptor's setter callback handles the store (returns a non-empty
// result), |value| is returned; otherwise the store falls through to
// SetElementWithoutInterceptor.
11751 MaybeHandle<Object> JSObject::SetElementWithInterceptor(
11752 Handle<JSObject> object,
11754 Handle<Object> value,
11755 PropertyAttributes attributes,
11756 StrictMode strict_mode,
11757 bool check_prototype,
11758 SetPropertyMode set_mode) {
11759 Isolate* isolate = object->GetIsolate();
11761 // Make sure that the top context does not change when doing
11762 // callbacks or interceptor calls.
11763 AssertNoContextChange ncc(isolate);
11765 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
11766 if (!interceptor->setter()->IsUndefined()) {
11767 v8::IndexedPropertySetterCallback setter =
11768 v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
11770 ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
11771 PropertyCallbackArguments args(isolate, interceptor->data(), *object,
11773 v8::Handle<v8::Value> result =
11774 args.Call(setter, index, v8::Utils::ToLocal(value));
11775 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// A non-empty result means the interceptor intercepted the store.
11776 if (!result.IsEmpty()) return value;
11779 return SetElementWithoutInterceptor(object, index, value, attributes,
// Reads element |index| through an accessor |structure|:
//  - ExecutableAccessorInfo: invokes the C++ API getter callback;
//  - AccessorPair: calls the JS getter (undefined if the getter is not a
//    function);
//  - DeclaredAccessorInfo: evaluates the declared accessor descriptor.
11786 MaybeHandle<Object> JSObject::GetElementWithCallback(
11787 Handle<JSObject> object,
11788 Handle<Object> receiver,
11789 Handle<Object> structure,
11791 Handle<Object> holder) {
11792 Isolate* isolate = object->GetIsolate();
11793 DCHECK(!structure->IsForeign());
11794 // api style callbacks.
11795 if (structure->IsExecutableAccessorInfo()) {
11796 Handle<ExecutableAccessorInfo> data =
11797 Handle<ExecutableAccessorInfo>::cast(structure);
11798 Object* fun_obj = data->getter();
11799 v8::AccessorNameGetterCallback call_fun =
11800 v8::ToCData<v8::AccessorNameGetterCallback>(fun_obj);
11801 if (call_fun == NULL) return isolate->factory()->undefined_value();
11802 Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
// The API callback takes a property name, so stringify the element index.
11803 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11804 Handle<String> key = isolate->factory()->NumberToString(number);
11805 LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
11806 PropertyCallbackArguments
11807 args(isolate, data->data(), *receiver, *holder_handle);
11808 v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
11809 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
11810 if (result.IsEmpty()) return isolate->factory()->undefined_value();
11811 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
11812 result_internal->VerifyApiCallResultType();
11813 // Rebox handle before return.
11814 return handle(*result_internal, isolate);
11817 // __defineGetter__ callback
11818 if (structure->IsAccessorPair()) {
11819 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
11821 if (getter->IsSpecFunction()) {
11822 // TODO(rossberg): nicer would be to cast to some JSCallable here...
11823 return GetPropertyWithDefinedGetter(
11824 receiver, Handle<JSReceiver>::cast(getter));
11826 // Getter is not a function.
11827 return isolate->factory()->undefined_value();
11830 if (structure->IsDeclaredAccessorInfo()) {
11831 return GetDeclaredAccessorProperty(
11832 receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
11836 return MaybeHandle<Object>();
// Writes element |index| through an accessor |structure|:
//  - ExecutableAccessorInfo: invokes the C++ API setter callback;
//  - AccessorPair: calls the JS setter, or throws "no_setter_in_callback"
//    in strict mode (returns |value| silently in sloppy mode);
//  - DeclaredAccessorInfo: not yet handled; the value is returned as-is.
11840 MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
11841 Handle<Object> structure,
11843 Handle<Object> value,
11844 Handle<JSObject> holder,
11845 StrictMode strict_mode) {
11846 Isolate* isolate = object->GetIsolate();
11848 // We should never get here to initialize a const with the hole
11849 // value since a const declaration would conflict with the setter.
11850 DCHECK(!value->IsTheHole());
11851 DCHECK(!structure->IsForeign());
11852 if (structure->IsExecutableAccessorInfo()) {
11853 // api style callbacks
11854 Handle<ExecutableAccessorInfo> data =
11855 Handle<ExecutableAccessorInfo>::cast(structure);
11856 Object* call_obj = data->setter();
11857 v8::AccessorNameSetterCallback call_fun =
11858 v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
11859 if (call_fun == NULL) return value;
// The API callback takes a property name, so stringify the element index.
11860 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11861 Handle<String> key(isolate->factory()->NumberToString(number));
11862 LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
11863 PropertyCallbackArguments
11864 args(isolate, data->data(), *object, *holder);
11865 args.Call(call_fun,
11866 v8::Utils::ToLocal(key),
11867 v8::Utils::ToLocal(value));
11868 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
11872 if (structure->IsAccessorPair()) {
11873 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
11874 if (setter->IsSpecFunction()) {
11875 // TODO(rossberg): nicer would be to cast to some JSCallable here...
11876 return SetPropertyWithDefinedSetter(
11877 object, Handle<JSReceiver>::cast(setter), value);
// No setter: sloppy mode ignores the store, strict mode throws.
11879 if (strict_mode == SLOPPY) return value;
11880 Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
11881 Handle<Object> args[2] = { key, holder };
11883 isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
11888 // TODO(dcarney): Handle correctly.
11889 if (structure->IsDeclaredAccessorInfo()) return value;
11892 return MaybeHandle<Object>();
// True if this object has sloppy-arguments elements whose backing "arguments"
// store (slot 1 of the elements array) is a plain FixedArray, not a
// dictionary.
11896 bool JSObject::HasFastArgumentsElements() {
11897 Heap* heap = GetHeap();
11898 if (!elements()->IsFixedArray()) return false;
11899 FixedArray* elements = FixedArray::cast(this->elements());
11900 if (elements->map() != heap->sloppy_arguments_elements_map()) {
11903 FixedArray* arguments = FixedArray::cast(elements->get(1));
11904 return !arguments->IsDictionary();
// True if this object has sloppy-arguments elements whose backing "arguments"
// store (slot 1 of the elements array) is a dictionary.  Mirror image of
// HasFastArgumentsElements.
11908 bool JSObject::HasDictionaryArgumentsElements() {
11909 Heap* heap = GetHeap();
11910 if (!elements()->IsFixedArray()) return false;
11911 FixedArray* elements = FixedArray::cast(this->elements());
11912 if (elements->map() != heap->sloppy_arguments_elements_map()) {
11915 FixedArray* arguments = FixedArray::cast(elements->get(1));
11916 return arguments->IsDictionary();
11920 // Adding n elements in fast case is O(n*n).
11921 // Note: revisit design to have dual undefined values to capture absent
// Stores |value| at element |index| of an object with fast (or fast sloppy
// arguments) elements.  Handles prototype-chain accessor setters, elements
// kind transitions (smi -> double -> object, packed -> holey), capacity
// growth, conversion to dictionary elements when growth is too sparse, and
// JSArray length updates.
11923 MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
11925 Handle<Object> value,
11926 StrictMode strict_mode,
11927 bool check_prototype) {
11928 DCHECK(object->HasFastSmiOrObjectElements() ||
11929 object->HasFastArgumentsElements());
11931 Isolate* isolate = object->GetIsolate();
11933 // Array optimizations rely on the prototype lookups of Array objects always
11934 // returning undefined. If there is a store to the initial prototype object,
11935 // make sure all of these optimizations are invalidated.
11936 if (isolate->is_initial_object_prototype(*object) ||
11937 isolate->is_initial_array_prototype(*object)) {
11938 object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
11939 DependentCode::kElementsCantBeAddedGroup);
11942 Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
11943 if (backing_store->map() ==
11944 isolate->heap()->sloppy_arguments_elements_map()) {
// Sloppy arguments: the real backing store lives in slot 1.
11945 backing_store = handle(FixedArray::cast(backing_store->get(1)));
11947 backing_store = EnsureWritableFastElements(object);
11949 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
// A store to a hole (or past the end) may hit an accessor setter somewhere
// on the prototype chain; if one handles it, we are done.
11951 if (check_prototype &&
11952 (index >= capacity || backing_store->get(index)->IsTheHole())) {
11954 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
11955 object, index, value, &found, strict_mode);
11956 if (found) return result;
11959 uint32_t new_capacity = capacity;
11960 // Check if the length property of this object needs to be updated.
11961 uint32_t array_length = 0;
11962 bool must_update_array_length = false;
11963 bool introduces_holes = true;
11964 if (object->IsJSArray()) {
11965 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
11966 introduces_holes = index > array_length;
11967 if (index >= array_length) {
11968 must_update_array_length = true;
11969 array_length = index + 1;
11972 introduces_holes = index >= capacity;
11975 // If the array is growing, and the growth is not by a single element at the
11976 // end, make sure that the ElementsKind is HOLEY.
11977 ElementsKind elements_kind = object->GetElementsKind();
11978 if (introduces_holes &&
11979 IsFastElementsKind(elements_kind) &&
11980 !IsFastHoleyElementsKind(elements_kind)) {
11981 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
11982 TransitionElementsKind(object, transitioned_kind);
11985 // Check if the capacity of the backing store needs to be increased, or if
11986 // a transition to slow elements is necessary.
11987 if (index >= capacity) {
11988 bool convert_to_slow = true;
11989 if ((index - capacity) < kMaxGap) {
11990 new_capacity = NewElementsCapacity(index + 1);
11991 DCHECK(new_capacity > index);
11992 if (!object->ShouldConvertToSlowElements(new_capacity)) {
11993 convert_to_slow = false;
11996 if (convert_to_slow) {
11997 NormalizeElements(object);
11998 return SetDictionaryElement(object, index, value, NONE, strict_mode,
12002 // Convert to fast double elements if appropriate.
12003 if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12004 // Consider fixing the boilerplate as well if we have one.
12005 ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12006 ? FAST_HOLEY_DOUBLE_ELEMENTS
12007 : FAST_DOUBLE_ELEMENTS;
12009 UpdateAllocationSite(object, to_kind);
12011 SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12012 FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12013 JSObject::ValidateElements(object);
12016 // Change elements kind from Smi-only to generic FAST if necessary.
12017 if (object->HasFastSmiElements() && !value->IsSmi()) {
12018 ElementsKind kind = object->HasFastHoleyElements()
12019 ? FAST_HOLEY_ELEMENTS
12022 UpdateAllocationSite(object, kind);
12023 Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12024 JSObject::MigrateToMap(object, new_map);
12025 DCHECK(IsFastObjectElementsKind(object->GetElementsKind()));
12027 // Increase backing store capacity if that's been decided previously.
12028 if (new_capacity != capacity) {
12029 SetFastElementsCapacitySmiMode smi_mode =
12030 value->IsSmi() && object->HasFastSmiElements()
12031 ? kAllowSmiElements
12032 : kDontAllowSmiElements;
12033 Handle<FixedArray> new_elements =
12034 SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12036 new_elements->set(index, *value);
12037 JSObject::ValidateElements(object);
12041 // Finally, set the new element and length.
12042 DCHECK(object->elements()->IsFixedArray());
12043 backing_store->set(index, *value);
12044 if (must_update_array_length) {
12045 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
// Stores |value| at |index| in an object whose elements are in dictionary
// (slow) mode, including the dictionary backing a sloppy-arguments object.
// Handles accessor callbacks, read-only entries, aliased arguments slots,
// non-extensible receivers, JSArray length updates and an opportunistic
// transition back to fast elements.
MaybeHandle<Object> JSObject::SetDictionaryElement(
    Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  Isolate* isolate = object->GetIsolate();

  // Insert element in the dictionary.
  Handle<FixedArray> elements(FixedArray::cast(object->elements()));
  // Sloppy-arguments objects keep their dictionary at slot 1 of the
  // parameter map; otherwise the elements array is the dictionary itself.
  bool is_arguments =
      (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
  Handle<SeededNumberDictionary> dictionary(is_arguments
    ? SeededNumberDictionary::cast(elements->get(1))
    : SeededNumberDictionary::cast(*elements));

  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    // The index already has an entry: update it in place.
    Handle<Object> element(dictionary->ValueAt(entry), isolate);
    PropertyDetails details = dictionary->DetailsAt(entry);
    // A plain store into an accessor property dispatches to the callback.
    if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
      return SetElementWithCallback(object, element, index, value, object,
    dictionary->UpdateMaxNumberKey(index);
    // If a value has not been initialized we allow writing to it even if it
    // is read-only (a declared const that has not been initialized). If a
    // value is being defined we skip attribute checks completely.
    if (set_mode == DEFINE_PROPERTY) {
      details = PropertyDetails(
          attributes, NORMAL, details.dictionary_index());
      dictionary->DetailsAtPut(entry, details);
    } else if (details.IsReadOnly() && !element->IsTheHole()) {
      // Read-only and initialized: sloppy mode ignores the write silently,
      // strict mode throws a TypeError.
      if (strict_mode == SLOPPY) {
        return isolate->factory()->undefined_value();
      Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
      Handle<Object> args[2] = { number, object };
      THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
                                            HandleVector(args, 2)),
    // Elements of the arguments object in slow mode might be slow aliases.
    if (is_arguments && element->IsAliasedArgumentsEntry()) {
      // The entry aliases a context slot: write through to the context.
      Handle<AliasedArgumentsEntry> entry =
          Handle<AliasedArgumentsEntry>::cast(element);
      Handle<Context> context(Context::cast(elements->get(0)));
      int context_index = entry->aliased_context_slot();
      DCHECK(!context->get(context_index)->IsTheHole());
      context->set(context_index, *value);
      // For elements that are still writable we keep slow aliasing.
      if (!details.IsReadOnly()) value = element;
    dictionary->ValueAtPut(entry, *value);
  // Index not already used. Look for an accessor in the prototype chain.
  if (check_prototype) {
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  // When we set the is_extensible flag to false we always force the
  // element into dictionary mode (and force them to stay there).
  if (!object->map()->is_extensible()) {
    // Adding to a non-extensible object: ignored in sloppy mode, TypeError
    // in strict mode.
    if (strict_mode == SLOPPY) {
      return isolate->factory()->undefined_value();
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> name = isolate->factory()->NumberToString(number);
    Handle<Object> args[1] = { name };
    THROW_NEW_ERROR(isolate, NewTypeError("object_not_extensible",
                                          HandleVector(args, 1)),
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Handle<SeededNumberDictionary> new_dictionary =
      SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
  // AddNumberEntry may have grown the dictionary; re-wire the backing store.
  if (*dictionary != *new_dictionary) {
    if (is_arguments) {
      elements->set(1, *new_dictionary);
    object->set_elements(*new_dictionary);
    dictionary = new_dictionary;

  // Update the array length if this JSObject is an array.
  if (object->IsJSArray()) {
    JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,

  // Attempt to put this object back in fast case.
  if (object->ShouldConvertToFastElements()) {
    uint32_t new_length = 0;
    if (object->IsJSArray()) {
      CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
      new_length = dictionary->max_number_key() + 1;
    bool has_smi_only_elements = false;
    bool should_convert_to_fast_double_elements =
        object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
    // Dictionaries holding only smis can go straight to smi elements.
    SetFastElementsCapacitySmiMode smi_mode =
        has_smi_only_elements ? kForceSmiElements : kAllowSmiElements;

    if (should_convert_to_fast_double_elements) {
      SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
      SetFastElementsCapacityAndLength(object, new_length, new_length,
    JSObject::ValidateElements(object);
    if (FLAG_trace_normalization) {
      OFStream os(stdout);
      os << "Object elements are fast case again:\n";
// Stores |value| at |index| in an object with FAST_DOUBLE elements.
// Non-number values force a transition to object elements; growing stores
// may enlarge the backing store, introduce holes (HOLEY kind), or fall
// back to dictionary mode when the gap is too large.
MaybeHandle<Object> JSObject::SetFastDoubleElement(
    Handle<JSObject> object,
    Handle<Object> value,
    StrictMode strict_mode,
    bool check_prototype) {
  DCHECK(object->HasFastDoubleElements());

  Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
  uint32_t elms_length = static_cast<uint32_t>(base_elms->length());

  // If storing to an element that isn't in the array, pass the store request
  // up the prototype chain before storing in the receiver's elements.
  if (check_prototype &&
      (index >= elms_length ||
       Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;

  // If the value object is not a heap number, switch to fast elements and try
  bool value_is_smi = value->IsSmi();
  bool introduces_holes = true;
  uint32_t length = elms_length;
  if (object->IsJSArray()) {
    // For arrays a hole appears when writing past the current length.
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
    introduces_holes = index > length;
    introduces_holes = index >= elms_length;

  if (!value->IsNumber()) {
    // Non-number value: convert to (non-smi) object elements and retry the
    // store through the generic fast-element path.
    SetFastElementsCapacityAndLength(object, elms_length, length,
                                     kDontAllowSmiElements);
    Handle<Object> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        object->GetIsolate(), result,
        SetFastElement(object, index, value, strict_mode, check_prototype),
    JSObject::ValidateElements(object);

  // Unbox the value: smis widen to double, heap numbers read directly.
  double double_value = value_is_smi
      ? static_cast<double>(Handle<Smi>::cast(value)->value())
      : Handle<HeapNumber>::cast(value)->value();

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);

  // Check whether there is extra space in the fixed array.
  if (index < elms_length) {
    // In-bounds store: write the raw double directly.
    Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
    elms->set(index, double_value);
    if (object->IsJSArray()) {
      // Update the length of the array if needed.
      uint32_t array_length = 0;
          Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
      if (index >= array_length) {
        Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));

  // Allow gap in fast case.
  if ((index - elms_length) < kMaxGap) {
    // Try allocating extra space.
    int new_capacity = NewElementsCapacity(index+1);
    if (!object->ShouldConvertToSlowElements(new_capacity)) {
      DCHECK(static_cast<uint32_t>(new_capacity) > index);
      SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
      FixedDoubleArray::cast(object->elements())->set(index, double_value);
      JSObject::ValidateElements(object);

  // Otherwise default to slow case.
  DCHECK(object->HasFastDoubleElements());
  DCHECK(object->map()->has_fast_double_elements());
  DCHECK(object->elements()->IsFixedDoubleArray() ||
         object->elements()->length() == 0);

  NormalizeElements(object);
  DCHECK(object->HasDictionaryElements());
  return SetElement(object, index, value, NONE, strict_mode, check_prototype);
// Dispatches an indexed store on any receiver: proxies go through their
// handler trap, everything else through JSObject::SetElement.
MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
                                           Handle<Object> value,
                                           PropertyAttributes attributes,
                                           StrictMode strict_mode) {
  if (object->IsJSProxy()) {
    return JSProxy::SetElementWithHandler(
        Handle<JSProxy>::cast(object), object, index, value, strict_mode);
  return JSObject::SetElement(
      Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
// Stores an element on the object itself: prototype setters are not
// consulted (check_prototype == false) and no attributes are applied.
MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
                                            Handle<Object> value,
                                            StrictMode strict_mode) {
  DCHECK(!object->HasExternalArrayElements());
  return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
// Central entry point for indexed stores on JSObjects. Performs typed-array
// value coercion, access checks, global-proxy forwarding and attribute
// normalization, then either stores directly or — for observed objects —
// captures before/after state and enqueues Object.observe change records.
MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
                                         Handle<Object> value,
                                         PropertyAttributes attributes,
                                         StrictMode strict_mode,
                                         bool check_prototype,
                                         SetPropertyMode set_mode) {
  Isolate* isolate = object->GetIsolate();

  // Typed-array stores coerce any non-number, non-undefined value with
  // ToNumber first (this may run arbitrary JS and throw).
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    if (!value->IsNumber() && !value->IsUndefined()) {
      ASSIGN_RETURN_ON_EXCEPTION(
          Execution::ToNumber(isolate, value), Object);

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
      RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);

  // A global proxy forwards the store to the global object it wraps.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return value;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
        value, attributes, strict_mode, check_prototype, set_mode);

  // Don't allow element properties to be redefined for external arrays.
  if ((object->HasExternalArrayElements() ||
       object->HasFixedTypedArrayElements()) &&
      set_mode == DEFINE_PROPERTY) {
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<Object> args[] = { object, number };
    THROW_NEW_ERROR(isolate, NewTypeError("redef_external_array_element",
                                          HandleVector(args, arraysize(args))),

  // Normalize the elements to enable attributes on the property.
  if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
    Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
    // Make sure that we never go back to fast case.
    dictionary->set_requires_slow_elements();

  // Fast path: unobserved objects store directly, with or without an
  // indexed interceptor.
  if (!object->map()->is_observed()) {
    return object->HasIndexedInterceptor()
      ? SetElementWithInterceptor(object, index, value, attributes,
                                  strict_mode, check_prototype, set_mode)
      : SetElementWithoutInterceptor(object, index, value, attributes,
                                     strict_mode, check_prototype, set_mode);

  // Observed path: capture the old state so change records can be emitted.
  Maybe<PropertyAttributes> maybe =
      JSReceiver::GetOwnElementAttribute(object, index);
  if (!maybe.has_value) return MaybeHandle<Object>();
  PropertyAttributes old_attributes = maybe.value;

  Handle<Object> old_value = isolate->factory()->the_hole_value();
  Handle<Object> old_length_handle;
  Handle<Object> new_length_handle;

  if (old_attributes != ABSENT) {
    // Data property: remember the old value (accessors keep the hole).
    if (GetOwnElementAccessorPair(object, index).is_null()) {
      old_value = Object::GetElement(isolate, object, index).ToHandleChecked();
  } else if (object->IsJSArray()) {
    // Store old array length in case adding an element grows the array.
    old_length_handle = handle(Handle<JSArray>::cast(object)->length(),

  // Check for lookup interceptor
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      object->HasIndexedInterceptor()
        ? SetElementWithInterceptor(
              object, index, value, attributes,
              strict_mode, check_prototype, set_mode)
        : SetElementWithoutInterceptor(
              object, index, value, attributes,
              strict_mode, check_prototype, set_mode),

  Handle<String> name = isolate->factory()->Uint32ToString(index);
  maybe = GetOwnElementAttribute(object, index);
  if (!maybe.has_value) return MaybeHandle<Object>();
  PropertyAttributes new_attributes = maybe.value;

  if (old_attributes == ABSENT) {
    // A new element appeared. If the array length grew as well, report the
    // whole mutation as one splice record bracketed by Begin/EndPerformSplice.
    if (object->IsJSArray() &&
        !old_length_handle->SameValue(
            Handle<JSArray>::cast(object)->length())) {
      new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
      uint32_t old_length = 0;
      uint32_t new_length = 0;
      CHECK(old_length_handle->ToArrayIndex(&old_length));
      CHECK(new_length_handle->ToArrayIndex(&new_length));

      BeginPerformSplice(Handle<JSArray>::cast(object));
      EnqueueChangeRecord(object, "add", name, old_value);
      EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
                          old_length_handle);
      EndPerformSplice(Handle<JSArray>::cast(object));
      Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
      EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
                          new_length - old_length);
      EnqueueChangeRecord(object, "add", name, old_value);
  } else if (old_value->IsTheHole()) {
    // Previous entry was an accessor (hole sentinel): reconfigure.
    EnqueueChangeRecord(object, "reconfigure", name, old_value);
    Handle<Object> new_value =
        Object::GetElement(isolate, object, index).ToHandleChecked();
    bool value_changed = !old_value->SameValue(*new_value);
    if (old_attributes != new_attributes) {
      if (!value_changed) old_value = isolate->factory()->the_hole_value();
      EnqueueChangeRecord(object, "reconfigure", name, old_value);
    } else if (value_changed) {
      EnqueueChangeRecord(object, "update", name, old_value);
// Performs the actual store after interceptors and observation have been
// handled, dispatching on the receiver's elements kind.
MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
    Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  // Non-trivial attributes are only representable in dictionary mode.
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements() ||
         (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
  Isolate* isolate = object->GetIsolate();
  if (FLAG_trace_external_array_abuse &&
      IsExternalArrayElementsKind(object->GetElementsKind())) {
    CheckArrayAbuse(object, "external elements write", index);
  if (FLAG_trace_js_array_abuse &&
      !IsExternalArrayElementsKind(object->GetElementsKind())) {
    if (object->IsJSArray()) {
      CheckArrayAbuse(object, "elements write", index, true);
  // Writing past a read-only array length: ignored in sloppy mode,
  // TypeError in strict mode.
  if (object->IsJSArray() && JSArray::WouldChangeReadOnlyLength(
      Handle<JSArray>::cast(object), index)) {
    if (strict_mode == SLOPPY) {
    return JSArray::ReadOnlyLengthError(Handle<JSArray>::cast(object));
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      return SetFastElement(object, index, value, strict_mode, check_prototype);
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      return SetFastDoubleElement(object, index, value, strict_mode,
// One case pair per typed-array type: the external and fixed variants both
// delegate to the array class's SetValue.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS: {                                        \
      Handle<External##Type##Array> array(                                    \
          External##Type##Array::cast(object->elements()));                   \
      return External##Type##Array::SetValue(array, index, value);            \
    case TYPE##_ELEMENTS: {                                                   \
      Handle<Fixed##Type##Array> array(                                       \
          Fixed##Type##Array::cast(object->elements()));                      \
      return Fixed##Type##Array::SetValue(array, index, value);               \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)

#undef TYPED_ARRAY_CASE

    case DICTIONARY_ELEMENTS:
      return SetDictionaryElement(object, index, value, attributes, strict_mode,
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
      uint32_t length = parameter_map->length();
      // Slots 0 and 1 of the parameter map hold the context and the
      // arguments backing store; mapped entries start at offset 2.
      Handle<Object> probe = index < length - 2 ?
          Handle<Object>(parameter_map->get(index + 2), isolate) :
      if (!probe.is_null() && !probe->IsTheHole()) {
        // Mapped parameter: write through to the aliased context slot.
        Handle<Context> context(Context::cast(parameter_map->get(0)));
        int context_index = Handle<Smi>::cast(probe)->value();
        DCHECK(!context->get(context_index)->IsTheHole());
        context->set(context_index, *value);
        // Redefining attributes of an aliased element destroys fast aliasing.
        if (set_mode == SET_PROPERTY || attributes == NONE) return value;
        parameter_map->set_the_hole(index + 2);
        // For elements that are still writable we re-establish slow aliasing.
        if ((attributes & READ_ONLY) == 0) {
          value = Handle<Object>::cast(
              isolate->factory()->NewAliasedArgumentsEntry(context_index));
      Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
      if (arguments->IsDictionary()) {
        return SetDictionaryElement(object, index, value, attributes,
        return SetFastElement(object, index, value, strict_mode,
  // All possible cases have been handled above. Add a return to avoid the
  // complaints from the compiler.
  return isolate->factory()->null_value();
// Presumably the fraction of found mementos required before a site decides
// to pretenure — TODO(review): confirm against the pretenuring heuristics.
const double AllocationSite::kPretenureRatio = 0.85;
// Clears all pretenuring feedback: decision back to kUndecided and both
// memento counters to zero.
void AllocationSite::ResetPretenureDecision() {
  set_pretenure_decision(kUndecided);
  set_memento_found_count(0);
  set_memento_create_count(0);
12559 PretenureFlag AllocationSite::GetPretenureMode() {
12560 PretenureDecision mode = pretenure_decision();
12561 // Zombie objects "decide" to be untenured.
12562 return mode == kTenure ? TENURED : NOT_TENURED;
// Returns whether this site is recorded as the nested_site of any site on
// the heap's allocation-sites list. Used only under the tracing flag.
bool AllocationSite::IsNestedSite() {
  DCHECK(FLAG_trace_track_allocation_sites);
  Object* current = GetHeap()->allocation_sites_list();
  // Walk the weak list of all allocation sites.
  while (current->IsAllocationSite()) {
    AllocationSite* current_site = AllocationSite::cast(current);
    if (current_site->nested_site() == this) {
    current = current_site->weak_next();
// Records an elements-kind transition observed for objects created from
// this site. Boilerplate-backed (literal) sites transition the boilerplate
// array itself; plain sites just update the stored elements kind. Either
// way, code depending on the old kind is deoptimized.
void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
                                              ElementsKind to_kind) {
  Isolate* isolate = site->GetIsolate();

  if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
    Handle<JSArray> transition_info =
        handle(JSArray::cast(site->transition_info()));
    ElementsKind kind = transition_info->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      // If the array is huge, it's not likely to be defined in a local
      // function, so we shouldn't make new instances of it very often.
      uint32_t length = 0;
      CHECK(transition_info->length()->ToArrayIndex(&length));
      if (length <= kMaximumArrayBytesToPretransition) {
        if (FLAG_trace_track_allocation_sites) {
          bool is_nested = site->IsNestedSite();
              "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
              reinterpret_cast<void*>(*site),
              is_nested ? "(nested)" : "",
              ElementsKindToString(kind),
              ElementsKindToString(to_kind));
        JSObject::TransitionElementsKind(transition_info, to_kind);
        // Invalidate optimized code specialized on the old kind.
        site->dependent_code()->DeoptimizeDependentCodeGroup(
            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
    ElementsKind kind = site->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
               reinterpret_cast<void*>(*site),
               ElementsKindToString(kind),
               ElementsKindToString(to_kind));
      site->SetElementsKind(to_kind);
      site->dependent_code()->DeoptimizeDependentCodeGroup(
          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Registers the given compilation as dependent on this allocation site so
// it can be deoptimized when the site's feedback changes.
void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
                                                 CompilationInfo* info) {
  DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
  Handle<DependentCode> dep(site->dependent_code());
  Handle<DependentCode> codes =
      DependentCode::Insert(dep, group, info->object_wrapper());
  // Insert may have reallocated the DependentCode array.
  if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
  info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
// Human-readable name for a PretenureDecision, used in tracing output.
const char* AllocationSite::PretenureDecisionName(PretenureDecision decision) {
  switch (decision) {
    case kUndecided: return "undecided";
    case kDontTenure: return "don't tenure";
    case kMaybeTenure: return "maybe tenure";
    case kTenure: return "tenure";
    case kZombie: return "zombie";
    default: UNREACHABLE();
// Feeds an observed elements-kind transition back to the AllocationSite
// that created this object, if one can still be found. Only new-space
// JSArrays can carry an allocation memento.
void JSObject::UpdateAllocationSite(Handle<JSObject> object,
                                    ElementsKind to_kind) {
  if (!object->IsJSArray()) return;

  Heap* heap = object->GetHeap();
  if (!heap->InNewSpace(*object)) return;

  Handle<AllocationSite> site;
    // Raw-pointer memento lookup must not be interrupted by allocation.
    DisallowHeapAllocation no_allocation;

    AllocationMemento* memento = heap->FindAllocationMemento(*object);
    if (memento == NULL) return;

    // Walk through to the Allocation Site
    site = handle(memento->GetAllocationSite());
  AllocationSite::DigestTransitionFeedback(site, to_kind);
// Transitions the object's elements to to_kind, changing the backing-store
// representation (smi/object/double) when the kinds require it.
void JSObject::TransitionElementsKind(Handle<JSObject> object,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = object->map()->elements_kind();

  // Holeyness is sticky: never transition back to a packed kind.
  if (IsFastHoleyElementsKind(from_kind)) {
    to_kind = GetHoleyElementsKind(to_kind);

  if (from_kind == to_kind) return;
  // Don't update the site if to_kind isn't fast
  if (IsFastElementsKind(to_kind)) {
    UpdateAllocationSite(object, to_kind);

  Isolate* isolate = object->GetIsolate();
  if (object->elements() == isolate->heap()->empty_fixed_array() ||
      (IsFastSmiOrObjectElementsKind(from_kind) &&
       IsFastSmiOrObjectElementsKind(to_kind)) ||
      (from_kind == FAST_DOUBLE_ELEMENTS &&
       to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
    DCHECK(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
    // No change is needed to the elements() buffer, the transition
    // only requires a map change.
    Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
    MigrateToMap(object, new_map);
    if (FLAG_trace_elements_transitions) {
      Handle<FixedArrayBase> elms(object->elements());
      PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);

  Handle<FixedArrayBase> elms(object->elements());
  uint32_t capacity = static_cast<uint32_t>(elms->length());
  uint32_t length = capacity;

  if (object->IsJSArray()) {
    Object* raw_length = Handle<JSArray>::cast(object)->length();
    if (raw_length->IsUndefined()) {
      // If length is undefined, then JSArray is being initialized and has no
      // elements, assume a length of zero.
      CHECK(raw_length->ToArrayIndex(&length));

  // Smi -> double requires re-boxing each element into a raw double.
  if (IsFastSmiElementsKind(from_kind) &&
      IsFastDoubleElementsKind(to_kind)) {
    SetFastDoubleElementsCapacityAndLength(object, capacity, length);
    JSObject::ValidateElements(object);

  // Double -> object requires boxing raw doubles into heap numbers.
  if (IsFastDoubleElementsKind(from_kind) &&
      IsFastObjectElementsKind(to_kind)) {
    SetFastElementsCapacityAndLength(object, capacity, length,
                                     kDontAllowSmiElements);
    JSObject::ValidateElements(object);

  // This method should never be called for any other case than the ones
12749 bool Map::IsValidElementsTransition(ElementsKind from_kind,
12750 ElementsKind to_kind) {
12751 // Transitions can't go backwards.
12752 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
12756 // Transitions from HOLEY -> PACKED are not allowed.
12757 return !IsFastHoleyElementsKind(from_kind) ||
12758 IsFastHoleyElementsKind(to_kind);
// Bumps the array's length to index + 1 after a store past the current
// length. 0xffffffff is not a valid array index, so it is skipped.
void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
                                           Handle<Object> value) {
  uint32_t old_len = 0;
  CHECK(array->length()->ToArrayIndex(&old_len));
  // Check to see if we need to update the length. For now, we make
  // sure that the length stays within 32-bits (unsigned).
  if (index >= old_len && index != 0xffffffff) {
    Handle<Object> len = array->GetIsolate()->factory()->NewNumber(
        static_cast<double>(index) + 1);
    array->set_length(*len);
12777 bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) {
12778 Isolate* isolate = jsarray_map->GetIsolate();
12779 DCHECK(!jsarray_map->is_dictionary_map());
12780 LookupResult lookup(isolate);
12781 Handle<Name> length_string = isolate->factory()->length_string();
12782 jsarray_map->LookupDescriptor(NULL, *length_string, &lookup);
12783 return lookup.IsReadOnly();
// Returns true if storing at index would have to grow the array's length
// while that length is marked read-only.
bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array,
  uint32_t length = 0;
  CHECK(array->length()->ToArrayIndex(&length));
  if (length <= index) {
    // Only a store past the current length touches "length": look up its
    // descriptor directly, skipping interceptors and access checks.
    LookupIterator it(array, array->GetIsolate()->factory()->length_string(),
                      LookupIterator::OWN_SKIP_INTERCEPTOR);
    CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
    CHECK(it.IsFound());
    CHECK_EQ(LookupIterator::ACCESSOR, it.state());
    return it.IsReadOnly();
// Builds the strict-mode TypeError thrown when a read-only array length
// would have to be modified.
MaybeHandle<Object> JSArray::ReadOnlyLengthError(Handle<JSArray> array) {
  Isolate* isolate = array->GetIsolate();
  Handle<Name> length = isolate->factory()->length_string();
  Handle<Object> args[2] = { length, array };
  THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
                                        HandleVector(args, arraysize(args))),
// Reads an element through the object's indexed interceptor. Falls back to
// the ordinary elements accessor and then the prototype chain when the
// interceptor's getter yields nothing.
MaybeHandle<Object> JSObject::GetElementWithInterceptor(
    Handle<JSObject> object,
    Handle<Object> receiver,
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
  if (!interceptor->getter()->IsUndefined()) {
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
        ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), *receiver, *object);
    v8::Handle<v8::Value> result = args.Call(getter, index);
    // The callback may have scheduled an exception.
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!result.IsEmpty()) {
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox handle before return.
      return handle(*result_internal, isolate);

  // Interceptor did not answer: use the regular elements accessor.
  ElementsAccessor* handler = object->GetElementsAccessor();
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result, handler->Get(receiver, object, index),
  if (!result->IsTheHole()) return result;

  // Not found locally: continue the lookup on the prototype chain.
  PrototypeIterator iter(isolate, object);
  if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
  return Object::GetElementWithReceiver(
      isolate, PrototypeIterator::GetCurrent(iter), receiver, index);
// True when the elements backing store is empty or more than half used,
// i.e. dense enough to justify a fast (non-dictionary) representation.
bool JSObject::HasDenseElements() {
  GetElementsCapacityAndUsage(&capacity, &used);
  return (capacity == 0) || (used > (capacity / 2));
// Reports the elements backing store's capacity and how many slots are
// actually occupied, used for density-based representation decisions.
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
  FixedArray* backing_store = NULL;
  switch (GetElementsKind()) {
    case SLOPPY_ARGUMENTS_ELEMENTS:
      // The real backing store sits at slot 1 of the parameter map and may
      // itself be a dictionary.
      backing_store_base =
          FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
      backing_store = FixedArray::cast(backing_store_base);
      if (backing_store->IsDictionary()) {
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(backing_store);
        *capacity = dictionary->Capacity();
        *used = dictionary->NumberOfElements();
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
      // Packed JSArrays: every slot up to length is in use.
      *capacity = backing_store_base->length();
      *used = Smi::cast(JSArray::cast(this)->length())->value();
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      // Holey kinds must be scanned slot by slot for holes.
      backing_store = FixedArray::cast(backing_store_base);
      *capacity = backing_store->length();
      for (int i = 0; i < *capacity; ++i) {
        if (!backing_store->get(i)->IsTheHole()) ++(*used);
    case DICTIONARY_ELEMENTS: {
      SeededNumberDictionary* dictionary = element_dictionary();
      *capacity = dictionary->Capacity();
      *used = dictionary->NumberOfElements();
    case FAST_DOUBLE_ELEMENTS:
      *capacity = backing_store_base->length();
      *used = Smi::cast(JSArray::cast(this)->length())->value();
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      *capacity = elements()->length();
      if (*capacity == 0) break;
      FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
      for (int i = 0; i < *capacity; i++) {
        if (!elms->is_the_hole(i)) ++(*used);
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS:                                          \
    case TYPE##_ELEMENTS:                                                     \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
      // External arrays are considered 100% used.
      FixedArrayBase* external_array = FixedArrayBase::cast(elements());
      *capacity = external_array->length();
      *used = external_array->length();
// Predicts whether storing at the array index held by |key| would push a
// fast-elements object into dictionary (slow) mode.
bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
  if (HasFastElements() && key->ToArrayIndex(&index)) {
    Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
    uint32_t capacity = static_cast<uint32_t>(backing_store->length());
    if (index >= capacity) {
      // A gap larger than kMaxGap always forces dictionary mode.
      if ((index - capacity) >= kMaxGap) return true;
      uint32_t new_capacity = NewElementsCapacity(index + 1);
      return ShouldConvertToSlowElements(new_capacity);
// Decides whether growing the backing store to new_capacity justifies
// dictionary mode. Small stores (and medium new-space stores) stay fast
// unconditionally; beyond that, space usage decides.
bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
  STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
                kMaxUncheckedFastElementsLength);
  if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
      (new_capacity <= kMaxUncheckedFastElementsLength &&
       GetHeap()->InNewSpace(this))) {
  // If the fast-case backing storage takes up roughly three times as
  // much space (in machine words) as a dictionary backing storage
  // would, the object should have slow elements.
  int old_capacity = 0;
  int used_elements = 0;
  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
      SeededNumberDictionary::kEntrySize;
  return 3 * dictionary_size <= new_capacity;
// Decides whether a dictionary-mode object is dense and well-behaved
// enough to be converted back to fast elements.
bool JSObject::ShouldConvertToFastElements() {
  DCHECK(HasDictionaryElements() || HasDictionaryArgumentsElements());
  // If the elements are sparse, we should not go back to fast case.
  if (!HasDenseElements()) return false;
  // An object requiring access checks is never allowed to have fast
  // elements. If it had fast elements we would skip security checks.
  if (IsAccessCheckNeeded()) return false;
  // Observed objects may not go to fast mode because they rely on map checks,
  // and for fast element accesses we sometimes check element kinds only.
  if (map()->is_observed()) return false;

  // Sloppy-arguments objects keep their dictionary at slot 1.
  FixedArray* elements = FixedArray::cast(this->elements());
  SeededNumberDictionary* dictionary = NULL;
  if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
    dictionary = SeededNumberDictionary::cast(elements->get(1));
    dictionary = SeededNumberDictionary::cast(elements);
  // If an element has been added at a very high index in the elements
  // dictionary, we cannot go back to fast case.
  if (dictionary->requires_slow_elements()) return false;
  // If the dictionary backing storage takes up roughly half as much
  // space (in machine words) as a fast-case backing storage would,
  // the object should have fast elements.
  uint32_t array_size = 0;
    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
    array_size = dictionary->max_number_key();
  uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
      SeededNumberDictionary::kEntrySize;
  return 2 * dictionary_size >= array_size;
// Returns true when every numeric-keyed value in the element dictionary is
// a number but at least one is not a smi, so an unboxed double backing
// store would pay off. Also reports whether all values were smis.
bool JSObject::ShouldConvertToFastDoubleElements(
    bool* has_smi_only_elements) {
  *has_smi_only_elements = false;
  if (HasSloppyArgumentsElements()) return false;
  if (FLAG_unbox_double_arrays) {
    DCHECK(HasDictionaryElements());
    SeededNumberDictionary* dictionary = element_dictionary();
    bool found_double = false;
    for (int i = 0; i < dictionary->Capacity(); i++) {
      Object* key = dictionary->KeyAt(i);
      if (key->IsNumber()) {
        Object* value = dictionary->ValueAt(i);
        // Any non-number value rules out a double representation.
        if (!value->IsNumber()) return false;
        if (!value->IsSmi()) {
          found_double = true;
    *has_smi_only_elements = !found_double;
    return found_double;
13035 // Certain compilers request function template instantiation when they
13036 // see the definition of the other template functions in the
13037 // class. This requires us to have the template functions put
13038 // together, so even though this function belongs in objects-debug.cc,
13039 // we keep it here instead to satisfy certain compilers.
#ifdef OBJECT_PRINT
// Debug printing of a dictionary: one "key: value" line per occupied entry.
template <typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::Print(OStream& os) {  // NOLINT
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k)) {
      os << " ";
      if (k->IsString()) {
        String::cast(k)->StringPrint(os);
      } else {
        os << Brief(k);
      }
      os << ": " << Brief(ValueAt(i)) << "\n";
    }
  }
}
#endif  // OBJECT_PRINT
13060 template<typename Derived, typename Shape, typename Key>
13061 void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
13063 int capacity = DerivedHashTable::Capacity();
13064 DisallowHeapAllocation no_gc;
13065 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13066 for (int i = 0; i < capacity; i++) {
13067 Object* k = Dictionary::KeyAt(i);
13068 if (Dictionary::IsKey(k)) {
13069 elements->set(pos++, ValueAt(i), mode);
13072 DCHECK(pos == elements->length());
13076 InterceptorInfo* JSObject::GetNamedInterceptor() {
13077 DCHECK(map()->has_named_interceptor());
13078 JSFunction* constructor = JSFunction::cast(map()->constructor());
13079 DCHECK(constructor->shared()->IsApiFunction());
13081 constructor->shared()->get_api_func_data()->named_property_handler();
13082 return InterceptorInfo::cast(result);
13086 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13087 DCHECK(map()->has_indexed_interceptor());
13088 JSFunction* constructor = JSFunction::cast(map()->constructor());
13089 DCHECK(constructor->shared()->IsApiFunction());
13091 constructor->shared()->get_api_func_data()->indexed_property_handler();
13092 return InterceptorInfo::cast(result);
13096 MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
13097 Handle<JSObject> holder,
13098 Handle<Object> receiver,
13099 Handle<Name> name) {
13100 Isolate* isolate = holder->GetIsolate();
13102 // TODO(rossberg): Support symbols in the API.
13103 if (name->IsSymbol()) return isolate->factory()->undefined_value();
13105 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor(), isolate);
13106 Handle<String> name_string = Handle<String>::cast(name);
13108 if (interceptor->getter()->IsUndefined()) return MaybeHandle<Object>();
13110 v8::NamedPropertyGetterCallback getter =
13111 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
13113 ApiNamedPropertyAccess("interceptor-named-get", *holder, *name));
13114 PropertyCallbackArguments
13115 args(isolate, interceptor->data(), *receiver, *holder);
13116 v8::Handle<v8::Value> result =
13117 args.Call(getter, v8::Utils::ToLocal(name_string));
13118 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
13119 if (result.IsEmpty()) return MaybeHandle<Object>();
13121 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13122 result_internal->VerifyApiCallResultType();
13123 // Rebox handle before return
13124 return handle(*result_internal, isolate);
13128 // Compute the property keys from the interceptor.
13129 // TODO(rossberg): support symbols in API, and filter here if needed.
13130 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor(
13131 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13132 Isolate* isolate = receiver->GetIsolate();
13133 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
13134 PropertyCallbackArguments
13135 args(isolate, interceptor->data(), *receiver, *object);
13136 v8::Handle<v8::Object> result;
13137 if (!interceptor->enumerator()->IsUndefined()) {
13138 v8::NamedPropertyEnumeratorCallback enum_fun =
13139 v8::ToCData<v8::NamedPropertyEnumeratorCallback>(
13140 interceptor->enumerator());
13141 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
13142 result = args.Call(enum_fun);
13144 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13145 #if ENABLE_EXTRA_CHECKS
13146 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13147 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13149 // Rebox before returning.
13150 return handle(*v8::Utils::OpenHandle(*result), isolate);
13154 // Compute the element keys from the interceptor.
13155 MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor(
13156 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13157 Isolate* isolate = receiver->GetIsolate();
13158 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
13159 PropertyCallbackArguments
13160 args(isolate, interceptor->data(), *receiver, *object);
13161 v8::Handle<v8::Object> result;
13162 if (!interceptor->enumerator()->IsUndefined()) {
13163 v8::IndexedPropertyEnumeratorCallback enum_fun =
13164 v8::ToCData<v8::IndexedPropertyEnumeratorCallback>(
13165 interceptor->enumerator());
13166 LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
13167 result = args.Call(enum_fun);
13169 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13170 #if ENABLE_EXTRA_CHECKS
13171 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13172 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13174 // Rebox before returning.
13175 return handle(*v8::Utils::OpenHandle(*result), isolate);
13179 Maybe<bool> JSObject::HasRealNamedProperty(Handle<JSObject> object,
13180 Handle<Name> key) {
13181 LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
13182 Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
13183 if (!maybe_result.has_value) return Maybe<bool>();
13184 return maybe(it.IsFound());
13188 Maybe<bool> JSObject::HasRealElementProperty(Handle<JSObject> object,
13190 Isolate* isolate = object->GetIsolate();
13191 HandleScope scope(isolate);
13192 // Check access rights if needed.
13193 if (object->IsAccessCheckNeeded()) {
13194 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
13195 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
13196 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<bool>());
13197 return maybe(false);
13201 if (object->IsJSGlobalProxy()) {
13202 HandleScope scope(isolate);
13203 PrototypeIterator iter(isolate, object);
13204 if (iter.IsAtEnd()) return maybe(false);
13205 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
13206 return HasRealElementProperty(
13207 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
13210 Maybe<PropertyAttributes> result =
13211 GetElementAttributeWithoutInterceptor(object, object, index, false);
13212 if (!result.has_value) return Maybe<bool>();
13213 return maybe(result.value != ABSENT);
13217 Maybe<bool> JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
13218 Handle<Name> key) {
13219 LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
13220 Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
13221 if (!maybe_result.has_value) return Maybe<bool>();
13222 return maybe(it.state() == LookupIterator::ACCESSOR);
13226 int JSObject::NumberOfOwnProperties(PropertyAttributes filter) {
13227 if (HasFastProperties()) {
13228 Map* map = this->map();
13229 if (filter == NONE) return map->NumberOfOwnDescriptors();
13230 if (filter & DONT_ENUM) {
13231 int result = map->EnumLength();
13232 if (result != kInvalidEnumCacheSentinel) return result;
13234 return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
13236 return property_dictionary()->NumberOfElementsFilterAttributes(filter);
13240 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
13241 Object* temp = get(i);
13244 if (this != numbers) {
13245 temp = numbers->get(i);
13246 numbers->set(i, Smi::cast(numbers->get(j)));
13247 numbers->set(j, Smi::cast(temp));
13252 static void InsertionSortPairs(FixedArray* content,
13253 FixedArray* numbers,
13255 for (int i = 1; i < len; i++) {
13258 (NumberToUint32(numbers->get(j - 1)) >
13259 NumberToUint32(numbers->get(j)))) {
13260 content->SwapPairs(numbers, j - 1, j);
13267 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
13268 // In-place heap sort.
13269 DCHECK(content->length() == numbers->length());
13271 // Bottom-up max-heap construction.
13272 for (int i = 1; i < len; ++i) {
13273 int child_index = i;
13274 while (child_index > 0) {
13275 int parent_index = ((child_index + 1) >> 1) - 1;
13276 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13277 uint32_t child_value = NumberToUint32(numbers->get(child_index));
13278 if (parent_value < child_value) {
13279 content->SwapPairs(numbers, parent_index, child_index);
13283 child_index = parent_index;
13287 // Extract elements and create sorted array.
13288 for (int i = len - 1; i > 0; --i) {
13289 // Put max element at the back of the array.
13290 content->SwapPairs(numbers, 0, i);
13291 // Sift down the new top element.
13292 int parent_index = 0;
13294 int child_index = ((parent_index + 1) << 1) - 1;
13295 if (child_index >= i) break;
13296 uint32_t child1_value = NumberToUint32(numbers->get(child_index));
13297 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
13298 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13299 if (child_index + 1 >= i || child1_value > child2_value) {
13300 if (parent_value > child1_value) break;
13301 content->SwapPairs(numbers, parent_index, child_index);
13302 parent_index = child_index;
13304 if (parent_value > child2_value) break;
13305 content->SwapPairs(numbers, parent_index, child_index + 1);
13306 parent_index = child_index + 1;
13313 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
13314 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13315 DCHECK(this->length() == numbers->length());
13316 // For small arrays, simply use insertion sort.
13318 InsertionSortPairs(this, numbers, len);
13321 // Check the range of indices.
13322 uint32_t min_index = NumberToUint32(numbers->get(0));
13323 uint32_t max_index = min_index;
13325 for (i = 1; i < len; i++) {
13326 if (NumberToUint32(numbers->get(i)) < min_index) {
13327 min_index = NumberToUint32(numbers->get(i));
13328 } else if (NumberToUint32(numbers->get(i)) > max_index) {
13329 max_index = NumberToUint32(numbers->get(i));
13332 if (max_index - min_index + 1 == len) {
13333 // Indices form a contiguous range, unless there are duplicates.
13334 // Do an in-place linear time sort assuming distinct numbers, but
13335 // avoid hanging in case they are not.
13336 for (i = 0; i < len; i++) {
13339 // While the current element at i is not at its correct position p,
13340 // swap the elements at these two positions.
13341 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13343 SwapPairs(numbers, i, p);
13347 HeapSortPairs(this, numbers, len);
13353 // Fill in the names of own properties into the supplied storage. The main
13354 // purpose of this function is to provide reflection information for the object
13356 void JSObject::GetOwnPropertyNames(
13357 FixedArray* storage, int index, PropertyAttributes filter) {
13358 DCHECK(storage->length() >= (NumberOfOwnProperties(filter) - index));
13359 if (HasFastProperties()) {
13360 int real_size = map()->NumberOfOwnDescriptors();
13361 DescriptorArray* descs = map()->instance_descriptors();
13362 for (int i = 0; i < real_size; i++) {
13363 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
13364 !FilterKey(descs->GetKey(i), filter)) {
13365 storage->set(index++, descs->GetKey(i));
13369 property_dictionary()->CopyKeysTo(storage,
13372 NameDictionary::UNSORTED);
13377 int JSObject::NumberOfOwnElements(PropertyAttributes filter) {
13378 return GetOwnElementKeys(NULL, filter);
13382 int JSObject::NumberOfEnumElements() {
13383 // Fast case for objects with no elements.
13384 if (!IsJSValue() && HasFastObjectElements()) {
13385 uint32_t length = IsJSArray() ?
13386 static_cast<uint32_t>(
13387 Smi::cast(JSArray::cast(this)->length())->value()) :
13388 static_cast<uint32_t>(FixedArray::cast(elements())->length());
13389 if (length == 0) return 0;
13391 // Compute the number of enumerable elements.
13392 return NumberOfOwnElements(static_cast<PropertyAttributes>(DONT_ENUM));
13396 int JSObject::GetOwnElementKeys(FixedArray* storage,
13397 PropertyAttributes filter) {
13399 switch (GetElementsKind()) {
13400 case FAST_SMI_ELEMENTS:
13401 case FAST_ELEMENTS:
13402 case FAST_HOLEY_SMI_ELEMENTS:
13403 case FAST_HOLEY_ELEMENTS: {
13404 int length = IsJSArray() ?
13405 Smi::cast(JSArray::cast(this)->length())->value() :
13406 FixedArray::cast(elements())->length();
13407 for (int i = 0; i < length; i++) {
13408 if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
13409 if (storage != NULL) {
13410 storage->set(counter, Smi::FromInt(i));
13415 DCHECK(!storage || storage->length() >= counter);
13418 case FAST_DOUBLE_ELEMENTS:
13419 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13420 int length = IsJSArray() ?
13421 Smi::cast(JSArray::cast(this)->length())->value() :
13422 FixedArrayBase::cast(elements())->length();
13423 for (int i = 0; i < length; i++) {
13424 if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
13425 if (storage != NULL) {
13426 storage->set(counter, Smi::FromInt(i));
13431 DCHECK(!storage || storage->length() >= counter);
13435 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13436 case EXTERNAL_##TYPE##_ELEMENTS: \
13437 case TYPE##_ELEMENTS: \
13439 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13440 #undef TYPED_ARRAY_CASE
13442 int length = FixedArrayBase::cast(elements())->length();
13443 while (counter < length) {
13444 if (storage != NULL) {
13445 storage->set(counter, Smi::FromInt(counter));
13449 DCHECK(!storage || storage->length() >= counter);
13453 case DICTIONARY_ELEMENTS: {
13454 if (storage != NULL) {
13455 element_dictionary()->CopyKeysTo(storage,
13457 SeededNumberDictionary::SORTED);
13459 counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
13462 case SLOPPY_ARGUMENTS_ELEMENTS: {
13463 FixedArray* parameter_map = FixedArray::cast(elements());
13464 int mapped_length = parameter_map->length() - 2;
13465 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
13466 if (arguments->IsDictionary()) {
13467 // Copy the keys from arguments first, because Dictionary::CopyKeysTo
13468 // will insert in storage starting at index 0.
13469 SeededNumberDictionary* dictionary =
13470 SeededNumberDictionary::cast(arguments);
13471 if (storage != NULL) {
13472 dictionary->CopyKeysTo(
13473 storage, filter, SeededNumberDictionary::UNSORTED);
13475 counter += dictionary->NumberOfElementsFilterAttributes(filter);
13476 for (int i = 0; i < mapped_length; ++i) {
13477 if (!parameter_map->get(i + 2)->IsTheHole()) {
13478 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13482 if (storage != NULL) storage->SortPairs(storage, counter);
13485 int backing_length = arguments->length();
13487 for (; i < mapped_length; ++i) {
13488 if (!parameter_map->get(i + 2)->IsTheHole()) {
13489 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13491 } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
13492 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13496 for (; i < backing_length; ++i) {
13497 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13505 if (this->IsJSValue()) {
13506 Object* val = JSValue::cast(this)->value();
13507 if (val->IsString()) {
13508 String* str = String::cast(val);
13510 for (int i = 0; i < str->length(); i++) {
13511 storage->set(counter + i, Smi::FromInt(i));
13514 counter += str->length();
13517 DCHECK(!storage || storage->length() == counter);
13522 int JSObject::GetEnumElementKeys(FixedArray* storage) {
13523 return GetOwnElementKeys(storage, static_cast<PropertyAttributes>(DONT_ENUM));
13527 // StringSharedKeys are used as keys in the eval cache.
13528 class StringSharedKey : public HashTableKey {
13530 StringSharedKey(Handle<String> source,
13531 Handle<SharedFunctionInfo> shared,
13532 StrictMode strict_mode,
13533 int scope_position)
13536 strict_mode_(strict_mode),
13537 scope_position_(scope_position) { }
13539 bool IsMatch(Object* other) OVERRIDE {
13540 DisallowHeapAllocation no_allocation;
13541 if (!other->IsFixedArray()) return false;
13542 FixedArray* other_array = FixedArray::cast(other);
13543 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13544 if (shared != *shared_) return false;
13545 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13546 DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13547 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13548 if (strict_mode != strict_mode_) return false;
13549 int scope_position = Smi::cast(other_array->get(3))->value();
13550 if (scope_position != scope_position_) return false;
13551 String* source = String::cast(other_array->get(1));
13552 return source->Equals(*source_);
13555 static uint32_t StringSharedHashHelper(String* source,
13556 SharedFunctionInfo* shared,
13557 StrictMode strict_mode,
13558 int scope_position) {
13559 uint32_t hash = source->Hash();
13560 if (shared->HasSourceCode()) {
13561 // Instead of using the SharedFunctionInfo pointer in the hash
13562 // code computation, we use a combination of the hash of the
13563 // script source code and the start position of the calling scope.
13564 // We do this to ensure that the cache entries can survive garbage
13566 Script* script(Script::cast(shared->script()));
13567 hash ^= String::cast(script->source())->Hash();
13568 if (strict_mode == STRICT) hash ^= 0x8000;
13569 hash += scope_position;
13574 uint32_t Hash() OVERRIDE {
13575 return StringSharedHashHelper(*source_, *shared_, strict_mode_,
13579 uint32_t HashForObject(Object* obj) OVERRIDE {
13580 DisallowHeapAllocation no_allocation;
13581 FixedArray* other_array = FixedArray::cast(obj);
13582 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13583 String* source = String::cast(other_array->get(1));
13584 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13585 DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13586 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13587 int scope_position = Smi::cast(other_array->get(3))->value();
13588 return StringSharedHashHelper(
13589 source, shared, strict_mode, scope_position);
13593 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13594 Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
13595 array->set(0, *shared_);
13596 array->set(1, *source_);
13597 array->set(2, Smi::FromInt(strict_mode_));
13598 array->set(3, Smi::FromInt(scope_position_));
13603 Handle<String> source_;
13604 Handle<SharedFunctionInfo> shared_;
13605 StrictMode strict_mode_;
13606 int scope_position_;
13610 // RegExpKey carries the source and flags of a regular expression as key.
13611 class RegExpKey : public HashTableKey {
13613 RegExpKey(Handle<String> string, JSRegExp::Flags flags)
13615 flags_(Smi::FromInt(flags.value())) { }
13617 // Rather than storing the key in the hash table, a pointer to the
13618 // stored value is stored where the key should be. IsMatch then
13619 // compares the search key to the found object, rather than comparing
13621 bool IsMatch(Object* obj) OVERRIDE {
13622 FixedArray* val = FixedArray::cast(obj);
13623 return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
13624 && (flags_ == val->get(JSRegExp::kFlagsIndex));
13627 uint32_t Hash() OVERRIDE { return RegExpHash(*string_, flags_); }
13629 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13630 // Plain hash maps, which is where regexp keys are used, don't
13631 // use this function.
13633 return MaybeHandle<Object>().ToHandleChecked();
13636 uint32_t HashForObject(Object* obj) OVERRIDE {
13637 FixedArray* val = FixedArray::cast(obj);
13638 return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
13639 Smi::cast(val->get(JSRegExp::kFlagsIndex)));
13642 static uint32_t RegExpHash(String* string, Smi* flags) {
13643 return string->Hash() + flags->value();
13646 Handle<String> string_;
13651 Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
13652 if (hash_field_ == 0) Hash();
13653 return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
13657 Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
13658 if (hash_field_ == 0) Hash();
13659 return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
13663 Handle<Object> SeqOneByteSubStringKey::AsHandle(Isolate* isolate) {
13664 if (hash_field_ == 0) Hash();
13665 return isolate->factory()->NewOneByteInternalizedSubString(
13666 string_, from_, length_, hash_field_);
13670 bool SeqOneByteSubStringKey::IsMatch(Object* string) {
13671 Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
13672 return String::cast(string)->IsOneByteEqualTo(chars);
13676 // InternalizedStringKey carries a string/internalized-string object as key.
13677 class InternalizedStringKey : public HashTableKey {
13679 explicit InternalizedStringKey(Handle<String> string)
13680 : string_(string) { }
13682 virtual bool IsMatch(Object* string) OVERRIDE {
13683 return String::cast(string)->Equals(*string_);
13686 virtual uint32_t Hash() OVERRIDE { return string_->Hash(); }
13688 virtual uint32_t HashForObject(Object* other) OVERRIDE {
13689 return String::cast(other)->Hash();
13692 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13693 // Internalize the string if possible.
13694 MaybeHandle<Map> maybe_map =
13695 isolate->factory()->InternalizedStringMapForString(string_);
13697 if (maybe_map.ToHandle(&map)) {
13698 string_->set_map_no_write_barrier(*map);
13699 DCHECK(string_->IsInternalizedString());
13702 // Otherwise allocate a new internalized string.
13703 return isolate->factory()->NewInternalizedStringImpl(
13704 string_, string_->length(), string_->hash_field());
13707 static uint32_t StringHash(Object* obj) {
13708 return String::cast(obj)->Hash();
13711 Handle<String> string_;
13715 template<typename Derived, typename Shape, typename Key>
13716 void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
13717 IteratePointers(v, 0, kElementsStartOffset);
13721 template<typename Derived, typename Shape, typename Key>
13722 void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
13724 kElementsStartOffset,
13725 kHeaderSize + length() * kPointerSize);
13729 template<typename Derived, typename Shape, typename Key>
13730 Handle<Derived> HashTable<Derived, Shape, Key>::New(
13732 int at_least_space_for,
13733 MinimumCapacity capacity_option,
13734 PretenureFlag pretenure) {
13735 DCHECK(0 <= at_least_space_for);
13736 DCHECK(!capacity_option || base::bits::IsPowerOfTwo32(at_least_space_for));
13737 int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
13738 ? at_least_space_for
13739 : ComputeCapacity(at_least_space_for);
13740 if (capacity > HashTable::kMaxCapacity) {
13741 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
13744 Factory* factory = isolate->factory();
13745 int length = EntryToIndex(capacity);
13746 Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
13747 array->set_map_no_write_barrier(*factory->hash_table_map());
13748 Handle<Derived> table = Handle<Derived>::cast(array);
13750 table->SetNumberOfElements(0);
13751 table->SetNumberOfDeletedElements(0);
13752 table->SetCapacity(capacity);
13757 // Find entry for key otherwise return kNotFound.
13758 int NameDictionary::FindEntry(Handle<Name> key) {
13759 if (!key->IsUniqueName()) {
13760 return DerivedHashTable::FindEntry(key);
13763 // Optimized for unique names. Knowledge of the key type allows:
13764 // 1. Move the check if the key is unique out of the loop.
13765 // 2. Avoid comparing hash codes in unique-to-unique comparison.
13766 // 3. Detect a case when a dictionary key is not unique but the key is.
13767 // In case of positive result the dictionary key may be replaced by the
13768 // internalized string with minimal performance penalty. It gives a chance
13769 // to perform further lookups in code stubs (and significant performance
13770 // boost a certain style of code).
13772 // EnsureCapacity will guarantee the hash table is never full.
13773 uint32_t capacity = Capacity();
13774 uint32_t entry = FirstProbe(key->Hash(), capacity);
13775 uint32_t count = 1;
13778 int index = EntryToIndex(entry);
13779 Object* element = get(index);
13780 if (element->IsUndefined()) break; // Empty entry.
13781 if (*key == element) return entry;
13782 if (!element->IsUniqueName() &&
13783 !element->IsTheHole() &&
13784 Name::cast(element)->Equals(*key)) {
13785 // Replace a key that is a non-internalized string by the equivalent
13786 // internalized string for faster further lookups.
13790 DCHECK(element->IsTheHole() || !Name::cast(element)->Equals(*key));
13791 entry = NextProbe(entry, count++, capacity);
13797 template<typename Derived, typename Shape, typename Key>
13798 void HashTable<Derived, Shape, Key>::Rehash(
13799 Handle<Derived> new_table,
13801 DCHECK(NumberOfElements() < new_table->Capacity());
13803 DisallowHeapAllocation no_gc;
13804 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
13806 // Copy prefix to new array.
13807 for (int i = kPrefixStartIndex;
13808 i < kPrefixStartIndex + Shape::kPrefixSize;
13810 new_table->set(i, get(i), mode);
13813 // Rehash the elements.
13814 int capacity = Capacity();
13815 for (int i = 0; i < capacity; i++) {
13816 uint32_t from_index = EntryToIndex(i);
13817 Object* k = get(from_index);
13819 uint32_t hash = HashTable::HashForObject(key, k);
13820 uint32_t insertion_index =
13821 EntryToIndex(new_table->FindInsertionEntry(hash));
13822 for (int j = 0; j < Shape::kEntrySize; j++) {
13823 new_table->set(insertion_index + j, get(from_index + j), mode);
13827 new_table->SetNumberOfElements(NumberOfElements());
13828 new_table->SetNumberOfDeletedElements(0);
13832 template<typename Derived, typename Shape, typename Key>
13833 uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
13837 uint32_t expected) {
13838 uint32_t hash = HashTable::HashForObject(key, k);
13839 uint32_t capacity = Capacity();
13840 uint32_t entry = FirstProbe(hash, capacity);
13841 for (int i = 1; i < probe; i++) {
13842 if (entry == expected) return expected;
13843 entry = NextProbe(entry, i, capacity);
13849 template<typename Derived, typename Shape, typename Key>
13850 void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
13852 WriteBarrierMode mode) {
13853 int index1 = EntryToIndex(entry1);
13854 int index2 = EntryToIndex(entry2);
13855 Object* temp[Shape::kEntrySize];
13856 for (int j = 0; j < Shape::kEntrySize; j++) {
13857 temp[j] = get(index1 + j);
13859 for (int j = 0; j < Shape::kEntrySize; j++) {
13860 set(index1 + j, get(index2 + j), mode);
13862 for (int j = 0; j < Shape::kEntrySize; j++) {
13863 set(index2 + j, temp[j], mode);
13868 template<typename Derived, typename Shape, typename Key>
13869 void HashTable<Derived, Shape, Key>::Rehash(Key key) {
13870 DisallowHeapAllocation no_gc;
13871 WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
13872 uint32_t capacity = Capacity();
13874 for (int probe = 1; !done; probe++) {
13875 // All elements at entries given by one of the first _probe_ probes
13876 // are placed correctly. Other elements might need to be moved.
13878 for (uint32_t current = 0; current < capacity; current++) {
13879 Object* current_key = get(EntryToIndex(current));
13880 if (IsKey(current_key)) {
13881 uint32_t target = EntryForProbe(key, current_key, probe, current);
13882 if (current == target) continue;
13883 Object* target_key = get(EntryToIndex(target));
13884 if (!IsKey(target_key) ||
13885 EntryForProbe(key, target_key, probe, target) != target) {
13886 // Put the current element into the correct position.
13887 Swap(current, target, mode);
13888 // The other element will be processed on the next iteration.
13891 // The place for the current element is occupied. Leave the element
13892 // for the next probe.
13901 template<typename Derived, typename Shape, typename Key>
13902 Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
13903 Handle<Derived> table,
13906 PretenureFlag pretenure) {
13907 Isolate* isolate = table->GetIsolate();
13908 int capacity = table->Capacity();
13909 int nof = table->NumberOfElements() + n;
13910 int nod = table->NumberOfDeletedElements();
13912 // 50% is still free after adding n elements and
13913 // at most 50% of the free elements are deleted elements.
13914 if (nod <= (capacity - nof) >> 1) {
13915 int needed_free = nof >> 1;
13916 if (nof + needed_free <= capacity) return table;
13919 const int kMinCapacityForPretenure = 256;
13920 bool should_pretenure = pretenure == TENURED ||
13921 ((capacity > kMinCapacityForPretenure) &&
13922 !isolate->heap()->InNewSpace(*table));
13923 Handle<Derived> new_table = HashTable::New(
13926 USE_DEFAULT_MINIMUM_CAPACITY,
13927 should_pretenure ? TENURED : NOT_TENURED);
13929 table->Rehash(new_table, key);
13934 template<typename Derived, typename Shape, typename Key>
13935 Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
13937 int capacity = table->Capacity();
13938 int nof = table->NumberOfElements();
13940 // Shrink to fit the number of elements if only a quarter of the
13941 // capacity is filled with elements.
13942 if (nof > (capacity >> 2)) return table;
13943 // Allocate a new dictionary with room for at least the current
13944 // number of elements. The allocation method will make sure that
13945 // there is extra room in the dictionary for additions. Don't go
13946 // lower than room for 16 elements.
13947 int at_least_room_for = nof;
13948 if (at_least_room_for < 16) return table;
13950 Isolate* isolate = table->GetIsolate();
13951 const int kMinCapacityForPretenure = 256;
13953 (at_least_room_for > kMinCapacityForPretenure) &&
13954 !isolate->heap()->InNewSpace(*table);
13955 Handle<Derived> new_table = HashTable::New(
13958 USE_DEFAULT_MINIMUM_CAPACITY,
13959 pretenure ? TENURED : NOT_TENURED);
13961 table->Rehash(new_table, key);
13966 template<typename Derived, typename Shape, typename Key>
13967 uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
13968 uint32_t capacity = Capacity();
13969 uint32_t entry = FirstProbe(hash, capacity);
13970 uint32_t count = 1;
13971 // EnsureCapacity will guarantee the hash table is never full.
13973 Object* element = KeyAt(entry);
13974 if (element->IsUndefined() || element->IsTheHole()) break;
13975 entry = NextProbe(entry, count++, capacity);
13981 // Force instantiation of template instances class.
13982 // Please note this list is compiler dependent.
// These explicit instantiations ensure the out-of-line template member
// definitions in this translation unit are emitted for every concrete
// HashTable/Dictionary specialization used elsewhere in V8.
13984 template class HashTable<StringTable, StringTableShape, HashTableKey*>;
13986 template class HashTable<CompilationCacheTable,
13987 CompilationCacheShape,
13990 template class HashTable<MapCache, MapCacheShape, HashTableKey*>;
13992 template class HashTable<ObjectHashTable,
13993 ObjectHashTableShape,
13996 template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;
13998 template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;
14000 template class Dictionary<SeededNumberDictionary,
14001 SeededNumberDictionaryShape,
14004 template class Dictionary<UnseededNumberDictionary,
14005 UnseededNumberDictionaryShape,
// Explicit instantiations of individual member functions follow.
14008 template Handle<SeededNumberDictionary>
14009 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14010 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14012 template Handle<UnseededNumberDictionary>
14013 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14014 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14016 template Handle<NameDictionary>
14017 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14018 New(Isolate*, int n, PretenureFlag pretenure);
14020 template Handle<SeededNumberDictionary>
14021 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14022 AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);
14024 template Handle<UnseededNumberDictionary>
14025 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14026 AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);
14029 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14030 SlowReverseLookup(Object* value);
14033 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14034 SlowReverseLookup(Object* value);
14037 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14040 PropertyAttributes,
14041 Dictionary<SeededNumberDictionary,
14042 SeededNumberDictionaryShape,
14043 uint32_t>::SortMode);
14045 template Handle<Object>
14046 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
14047 Handle<NameDictionary>, int, JSObject::DeleteMode);
14049 template Handle<Object>
14050 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14051 DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);
14053 template Handle<NameDictionary>
14054 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14055 New(Isolate*, int, MinimumCapacity, PretenureFlag);
14057 template Handle<NameDictionary>
14058 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14059 Shrink(Handle<NameDictionary>, Handle<Name>);
14061 template Handle<SeededNumberDictionary>
14062 HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14063 Shrink(Handle<SeededNumberDictionary>, uint32_t);
14065 template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14069 PropertyAttributes,
14071 NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);
14074 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14075 NumberOfElementsFilterAttributes(PropertyAttributes);
14077 template Handle<NameDictionary>
14078 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
14079 Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);
14082 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14083 GenerateNewEnumerationIndices(Handle<NameDictionary>);
14086 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14087 NumberOfElementsFilterAttributes(PropertyAttributes);
14089 template Handle<SeededNumberDictionary>
14090 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14091 Add(Handle<SeededNumberDictionary>,
14096 template Handle<UnseededNumberDictionary>
14097 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14098 Add(Handle<UnseededNumberDictionary>,
14103 template Handle<SeededNumberDictionary>
14104 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14105 EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);
14107 template Handle<UnseededNumberDictionary>
14108 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14109 EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);
14111 template Handle<NameDictionary>
14112 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14113 EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);
14116 int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14117 NumberOfEnumElements();
14120 int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14121 NumberOfEnumElements();
14124 int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14125 FindEntry(uint32_t);
// Compacts the dictionary elements of |object| in preparation for sorting:
// defined values are renumbered to consecutive keys starting at 0, counted
// undefineds are appended after them, and the number of defined values is
// returned.  Returns Smi -1 (|bailout|) when the work must instead be done
// in JS (read-only/CALLBACKS entries, or keys beyond Smi range that would
// require allocation under DisallowHeapAllocation).
// NOTE(review): several lines (the `else` branches, `pos`/`undefs` updates
// and bailout returns) are elided in this dump.
14128 Handle<Object> JSObject::PrepareSlowElementsForSort(
14129 Handle<JSObject> object, uint32_t limit) {
14130 DCHECK(object->HasDictionaryElements());
14131 Isolate* isolate = object->GetIsolate();
14132 // Must stay in dictionary mode, either because of requires_slow_elements,
14133 // or because we are not going to sort (and therefore compact) all of the
14135 Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
14136 Handle<SeededNumberDictionary> new_dict =
14137 SeededNumberDictionary::New(isolate, dict->NumberOfElements());
14140 uint32_t undefs = 0;
14141 int capacity = dict->Capacity();
14142 Handle<Smi> bailout(Smi::FromInt(-1), isolate);
14143 // Entry to the new dictionary does not cause it to grow, as we have
14144 // allocated one that is large enough for all entries.
14145 DisallowHeapAllocation no_gc;
14146 for (int i = 0; i < capacity; i++) {
14147 Object* k = dict->KeyAt(i);
14148 if (!dict->IsKey(k)) continue;
// Element-dictionary keys are non-negative numbers within uint32 range.
14150 DCHECK(k->IsNumber());
14151 DCHECK(!k->IsSmi() || Smi::cast(k)->value() >= 0);
14152 DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
14153 DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
14155 HandleScope scope(isolate);
14156 Handle<Object> value(dict->ValueAt(i), isolate);
14157 PropertyDetails details = dict->DetailsAt(i);
14158 if (details.type() == CALLBACKS || details.IsReadOnly()) {
14159 // Bail out and do the sorting of undefineds and array holes in JS.
14160 // Also bail out if the element is not supposed to be moved.
14164 uint32_t key = NumberToUint32(k);
14166 if (value->IsUndefined()) {
14168 } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14169 // Adding an entry with the key beyond smi-range requires
14170 // allocation. Bailout.
14173 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14174 new_dict, pos, value, details);
// Adding with a Smi key must not have grown the dictionary (no_gc holds).
14175 DCHECK(result.is_identical_to(new_dict));
14179 } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
14180 // Adding an entry with the key beyond smi-range requires
14181 // allocation. Bailout.
14184 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14185 new_dict, key, value, details);
14186 DCHECK(result.is_identical_to(new_dict));
// |result| is the count of defined values; undefineds are appended next.
14191 uint32_t result = pos;
14192 PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
14193 while (undefs > 0) {
14194 if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14195 // Adding an entry with the key beyond smi-range requires
14196 // allocation. Bailout.
14199 HandleScope scope(isolate);
14200 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14201 new_dict, pos, isolate->factory()->undefined_value(), no_details);
14202 DCHECK(result.is_identical_to(new_dict));
14208 object->set_elements(*new_dict);
// NewNumberFromUint may allocate, so re-enable allocation for the return.
14210 AllowHeapAllocation allocate_return_value;
14211 return isolate->factory()->NewNumberFromUint(result);
14215 // Collects all defined (non-hole) and non-undefined (array) elements at
14216 // the start of the elements array.
14217 // If the object is in dictionary mode, it is converted to fast elements
// Returns the number of defined, non-undefined elements (as a Number), or
// Smi -1 when sorting must be handled in JS (sloppy arguments, observed
// objects, or a dictionary that must stay slow).
// NOTE(review): this dump elides various closing braces and the increment/
// decrement lines of the compaction loops — consult the full source.
14219 Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
14221 Isolate* isolate = object->GetIsolate();
14222 if (object->HasSloppyArgumentsElements() ||
14223 object->map()->is_observed()) {
14224 return handle(Smi::FromInt(-1), isolate);
14227 if (object->HasDictionaryElements()) {
14228 // Convert to fast elements containing only the existing properties.
14229 // Ordering is irrelevant, since we are going to sort anyway.
14230 Handle<SeededNumberDictionary> dict(object->element_dictionary());
14231 if (object->IsJSArray() || dict->requires_slow_elements() ||
14232 dict->max_number_key() >= limit) {
14233 return JSObject::PrepareSlowElementsForSort(object, limit);
14235 // Convert to fast elements.
14237 Handle<Map> new_map =
14238 JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);
14240 PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
14241 NOT_TENURED: TENURED;
14242 Handle<FixedArray> fast_elements =
14243 isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
14244 dict->CopyValuesTo(*fast_elements);
14245 JSObject::ValidateElements(object);
14247 JSObject::SetMapAndElements(object, new_map, fast_elements);
14248 } else if (object->HasExternalArrayElements() ||
14249 object->HasFixedTypedArrayElements()) {
14250 // Typed arrays cannot have holes or undefined elements.
14251 return handle(Smi::FromInt(
14252 FixedArrayBase::cast(object->elements())->length()), isolate);
14253 } else if (!object->HasFastDoubleElements()) {
14254 EnsureWritableFastElements(object);
14256 DCHECK(object->HasFastSmiOrObjectElements() ||
14257 object->HasFastDoubleElements());
14259 // Collect holes at the end, undefined before that and the rest at the
14260 // start, and return the number of non-hole, non-undefined values.
14262 Handle<FixedArrayBase> elements_base(object->elements());
14263 uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
14264 if (limit > elements_length) {
14265 limit = elements_length ;
14268 return handle(Smi::FromInt(0), isolate);
14271 uint32_t result = 0;
14272 if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
// Double arrays cannot contain undefined, so only partition out holes.
14273 FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
14274 // Split elements into defined and the_hole, in that order.
14275 unsigned int holes = limit;
14276 // Assume most arrays contain no holes and undefined values, so minimize the
14277 // number of stores of non-undefined, non-the-hole values.
14278 for (unsigned int i = 0; i < holes; i++) {
14279 if (elements->is_the_hole(i)) {
14284 // Position i needs to be filled.
14285 while (holes > i) {
14286 if (elements->is_the_hole(holes)) {
14289 elements->set(i, elements->get_scalar(holes));
14295 while (holes < limit) {
14296 elements->set_the_hole(holes);
14300 FixedArray* elements = FixedArray::cast(*elements_base);
14301 DisallowHeapAllocation no_gc;
14303 // Split elements into defined, undefined and the_hole, in that order. Only
14304 // count locations for undefined and the hole, and fill them afterwards.
14305 WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
14306 unsigned int undefs = limit;
14307 unsigned int holes = limit;
14308 // Assume most arrays contain no holes and undefined values, so minimize the
14309 // number of stores of non-undefined, non-the-hole values.
14310 for (unsigned int i = 0; i < undefs; i++) {
14311 Object* current = elements->get(i);
14312 if (current->IsTheHole()) {
14315 } else if (current->IsUndefined()) {
14320 // Position i needs to be filled.
14321 while (undefs > i) {
14322 current = elements->get(undefs);
14323 if (current->IsTheHole()) {
14326 } else if (current->IsUndefined()) {
14329 elements->set(i, current, write_barrier);
// Back-fill the tail: undefineds first, then holes up to |limit|.
14335 while (undefs < holes) {
14336 elements->set_undefined(undefs);
14339 while (holes < limit) {
14340 elements->set_the_hole(holes);
14345 return isolate->factory()->NewNumberFromUint(result);
// Maps this typed array's backing-store instance type (external or fixed)
// to the corresponding ExternalArrayType enum value; -1 for unknown types.
14349 ExternalArrayType JSTypedArray::type() {
14350 switch (elements()->map()->instance_type()) {
14351 #define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size) \
14352 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14353 case FIXED_##TYPE##_ARRAY_TYPE: \
14354 return kExternal##Type##Array;
14356 TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
14357 #undef INSTANCE_TYPE_TO_ARRAY_TYPE
// Unreachable for well-formed typed arrays; placate the compiler.
14361 return static_cast<ExternalArrayType>(-1);
// Returns the per-element byte size for this typed array, dispatching on
// the backing store's instance type via the TYPED_ARRAYS macro list.
// NOTE(review): the macro's `return size;` line and the default case are
// elided in this dump.
14366 size_t JSTypedArray::element_size() {
14367 switch (elements()->map()->instance_type()) {
14368 #define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size) \
14369 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14372 TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
14373 #undef INSTANCE_TYPE_TO_ELEMENT_SIZE
// Stores |value| at |index| with Uint8Clamped semantics: Smis and heap
// numbers are clamped to [0, 255] (doubles rounded to nearest via lrint),
// undefined stores 0.  Out-of-bounds writes are silently ignored.  Returns
// the value actually written, as a Smi.
14382 Handle<Object> ExternalUint8ClampedArray::SetValue(
14383 Handle<ExternalUint8ClampedArray> array,
14385 Handle<Object> value) {
14386 uint8_t clamped_value = 0;
14387 if (index < static_cast<uint32_t>(array->length())) {
14388 if (value->IsSmi()) {
14389 int int_value = Handle<Smi>::cast(value)->value();
14390 if (int_value < 0) {
14392 } else if (int_value > 255) {
14393 clamped_value = 255;
14395 clamped_value = static_cast<uint8_t>(int_value);
14397 } else if (value->IsHeapNumber()) {
14398 double double_value = Handle<HeapNumber>::cast(value)->value();
// Negated comparison so NaN also takes the clamp-to-zero branch.
14399 if (!(double_value > 0)) {
14400 // NaN and less than zero clamp to zero.
14402 } else if (double_value > 255) {
14403 // Greater than 255 clamp to 255.
14404 clamped_value = 255;
14406 // Other doubles are rounded to the nearest integer.
14407 clamped_value = static_cast<uint8_t>(lrint(double_value));
14410 // Clamp undefined to zero (default). All other types have been
14411 // converted to a number type further up in the call chain.
14412 DCHECK(value->IsUndefined());
14414 array->set(index, clamped_value);
14416 return handle(Smi::FromInt(clamped_value), array->GetIsolate());
// Shared implementation for the integer external-array SetValue methods:
// converts |value| (Smi, HeapNumber via DoubleToInt32, or undefined -> 0)
// to ValueType, stores it when |index| is in bounds, and returns the
// stored value boxed as a Number.
14420 template<typename ExternalArrayClass, typename ValueType>
14421 static Handle<Object> ExternalArrayIntSetter(
14423 Handle<ExternalArrayClass> receiver,
14425 Handle<Object> value) {
14426 ValueType cast_value = 0;
14427 if (index < static_cast<uint32_t>(receiver->length())) {
14428 if (value->IsSmi()) {
14429 int int_value = Handle<Smi>::cast(value)->value();
14430 cast_value = static_cast<ValueType>(int_value);
14431 } else if (value->IsHeapNumber()) {
14432 double double_value = Handle<HeapNumber>::cast(value)->value();
// DoubleToInt32 applies JS ToInt32 wrapping before the narrowing cast.
14433 cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
14435 // Clamp undefined to zero (default). All other types have been
14436 // converted to a number type further up in the call chain.
14437 DCHECK(value->IsUndefined());
14439 receiver->set(index, cast_value);
14441 return isolate->factory()->NewNumberFromInt(cast_value);
// Thin wrapper: delegates to ExternalArrayIntSetter with int8_t semantics.
14445 Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
14447 Handle<Object> value) {
14448 return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
14449 array->GetIsolate(), array, index, value);
// Thin wrapper: delegates to ExternalArrayIntSetter with uint8_t semantics.
14453 Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
14455 Handle<Object> value) {
14456 return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
14457 array->GetIsolate(), array, index, value);
// Thin wrapper: delegates to ExternalArrayIntSetter with int16_t semantics.
14461 Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
14463 Handle<Object> value) {
14464 return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
14465 array->GetIsolate(), array, index, value);
// Thin wrapper: delegates to ExternalArrayIntSetter with uint16_t semantics.
14469 Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
14471 Handle<Object> value) {
14472 return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
14473 array->GetIsolate(), array, index, value);
// Thin wrapper: delegates to ExternalArrayIntSetter with int32_t semantics.
14477 Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
14479 Handle<Object> value) {
14480 return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
14481 array->GetIsolate(), array, index, value);
// Uint32 variant of the external-array setter.  Separate from
// ExternalArrayIntSetter because it converts via DoubleToUint32 and boxes
// the result with NewNumberFromUint.  Undefined stores 0; out-of-bounds
// writes are ignored.
14485 Handle<Object> ExternalUint32Array::SetValue(
14486 Handle<ExternalUint32Array> array,
14488 Handle<Object> value) {
14489 uint32_t cast_value = 0;
14490 if (index < static_cast<uint32_t>(array->length())) {
14491 if (value->IsSmi()) {
14492 int int_value = Handle<Smi>::cast(value)->value();
14493 cast_value = static_cast<uint32_t>(int_value);
14494 } else if (value->IsHeapNumber()) {
14495 double double_value = Handle<HeapNumber>::cast(value)->value();
14496 cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
14498 // Clamp undefined to zero (default). All other types have been
14499 // converted to a number type further up in the call chain.
14500 DCHECK(value->IsUndefined());
14502 array->set(index, cast_value);
14504 return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
// Float32 external-array setter: Smis and heap numbers are narrowed to
// float; undefined stores NaN (the initial value of |cast_value|).
// Out-of-bounds writes are ignored.  Returns the stored value as a Number.
14508 Handle<Object> ExternalFloat32Array::SetValue(
14509 Handle<ExternalFloat32Array> array,
14511 Handle<Object> value) {
14512 float cast_value = static_cast<float>(base::OS::nan_value());
14513 if (index < static_cast<uint32_t>(array->length())) {
14514 if (value->IsSmi()) {
14515 int int_value = Handle<Smi>::cast(value)->value();
14516 cast_value = static_cast<float>(int_value);
14517 } else if (value->IsHeapNumber()) {
14518 double double_value = Handle<HeapNumber>::cast(value)->value();
14519 cast_value = static_cast<float>(double_value);
14521 // Clamp undefined to NaN (default). All other types have been
14522 // converted to a number type further up in the call chain.
14523 DCHECK(value->IsUndefined());
14525 array->set(index, cast_value);
14527 return array->GetIsolate()->factory()->NewNumber(cast_value);
// Float64 external-array setter: any Number is stored verbatim; undefined
// stores NaN.  Out-of-bounds writes are ignored.  Returns the stored value.
14531 Handle<Object> ExternalFloat64Array::SetValue(
14532 Handle<ExternalFloat64Array> array,
14534 Handle<Object> value) {
14535 double double_value = base::OS::nan_value();
14536 if (index < static_cast<uint32_t>(array->length())) {
14537 if (value->IsNumber()) {
14538 double_value = value->Number();
14540 // Clamp undefined to NaN (default). All other types have been
14541 // converted to a number type further up in the call chain.
14542 DCHECK(value->IsUndefined());
14544 array->set(index, double_value);
14546 return array->GetIsolate()->factory()->NewNumber(double_value);
// Returns the PropertyCell for |name| on |global|, creating one if absent.
// A freshly created cell holds the hole and its details are marked deleted,
// signalling "present in the dictionary but not yet a live property".
14550 Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
14551 Handle<JSGlobalObject> global,
14552 Handle<Name> name) {
// Global objects store properties in a NameDictionary, never fast mode.
14553 DCHECK(!global->HasFastProperties());
14554 int entry = global->property_dictionary()->FindEntry(name);
14555 if (entry == NameDictionary::kNotFound) {
14556 Isolate* isolate = global->GetIsolate();
14557 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
14558 isolate->factory()->the_hole_value());
14559 PropertyDetails details(NONE, NORMAL, 0);
14560 details = details.AsDeleted();
14561 Handle<NameDictionary> dictionary = NameDictionary::Add(
14562 handle(global->property_dictionary()), name, cell, details);
// Add may have reallocated the dictionary; install the new backing store.
14563 global->set_properties(*dictionary);
14566 Object* value = global->property_dictionary()->ValueAt(entry);
14567 DCHECK(value->IsPropertyCell());
14568 return handle(PropertyCell::cast(value));
14573 // This class is used for looking up two character strings in the string table.
14574 // If we don't have a hit we don't want to waste much time so we unroll the
14575 // string hash calculation loop here for speed. Doesn't work if the two
14576 // characters form a decimal integer, since such strings have a different hash
// (array-index hash) and would not match the unrolled computation below.
// NOTE(review): the member declarations (c1_, c2_, hash_) and parts of the
// hash mixing sequence are elided in this dump.
14578 class TwoCharHashTableKey : public HashTableKey {
// Precomputes the hash of the two-character string (c1, c2) using the
// same add/shift/xor mixing as StringHasher, seeded with |seed|.
14580 TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
14581 : c1_(c1), c2_(c2) {
14583 uint32_t hash = seed;
14585 hash += hash << 10;
14589 hash += hash << 10;
14593 hash ^= hash >> 11;
14594 hash += hash << 15;
// A zero hash field is reserved; substitute the canonical non-zero value.
14595 if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
14598 // If this assert fails then we failed to reproduce the two-character
14599 // version of the string hashing algorithm above. One reason could be
14600 // that we were passed two digits as characters, since the hash
14601 // algorithm is different in that case.
14602 uint16_t chars[2] = {c1, c2};
14603 uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
14604 hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
14605 DCHECK_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
// Matches only two-character strings with exactly these code units.
14609 bool IsMatch(Object* o) OVERRIDE {
14610 if (!o->IsString()) return false;
14611 String* other = String::cast(o);
14612 if (other->length() != 2) return false;
14613 if (other->Get(0) != c1_) return false;
14614 return other->Get(1) == c2_;
14617 uint32_t Hash() OVERRIDE { return hash_; }
14618 uint32_t HashForObject(Object* key) OVERRIDE {
14619 if (!key->IsString()) return 0;
14620 return String::cast(key)->Hash();
14623 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
14624 // The TwoCharHashTableKey is only used for looking in the string
14625 // table, not for adding to it.
// Deliberately crashes: converting an empty MaybeHandle is a fatal error.
14627 return MaybeHandle<Object>().ToHandleChecked();
// Returns the internalized version of |string| if one exists.  Strings that
// are already internalized are returned as-is (on an elided line); otherwise
// falls through to a string-table lookup without ever inserting.
14637 MaybeHandle<String> StringTable::InternalizeStringIfExists(
14639 Handle<String> string) {
14640 if (string->IsInternalizedString()) {
14643 return LookupStringIfExists(isolate, string);
// Looks |string| up in the isolate's string table.  Returns the canonical
// internalized string on a hit, or an empty MaybeHandle on a miss.  Never
// mutates the table.
14647 MaybeHandle<String> StringTable::LookupStringIfExists(
14649 Handle<String> string) {
14650 Handle<StringTable> string_table = isolate->factory()->string_table();
14651 InternalizedStringKey key(string);
14652 int entry = string_table->FindEntry(&key);
14653 if (entry == kNotFound) {
14654 return MaybeHandle<String>();
14656 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
14657 DCHECK(StringShape(*result).IsInternalized());
// Fast-path lookup of a two-character string by its code units (parameters
// elided in this dump), using TwoCharHashTableKey to avoid building a
// temporary string.  Lookup only; never inserts.
14663 MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
14667 Handle<StringTable> string_table = isolate->factory()->string_table();
14668 TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
14669 int entry = string_table->FindEntry(&key);
14670 if (entry == kNotFound) {
14671 return MaybeHandle<String>();
14673 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
14674 DCHECK(StringShape(*result).IsInternalized());
// Internalizes |string|: returns the canonical table entry, inserting it
// (via LookupKey) if not already present.
14680 Handle<String> StringTable::LookupString(Isolate* isolate,
14681 Handle<String> string) {
14682 InternalizedStringKey key(string);
14683 return LookupKey(isolate, &key);
// Core string-table lookup/insert.  Returns the existing entry matching
// |key|, or materializes the key's string (key->AsHandle), inserts it, and
// publishes the possibly-reallocated table back on the factory.
14687 Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
14688 Handle<StringTable> table = isolate->factory()->string_table();
14689 int entry = table->FindEntry(key);
14691 // String already in table.
14692 if (entry != kNotFound) {
14693 return handle(String::cast(table->KeyAt(entry)), isolate);
14696 // Adding new string. Grow table if needed.
14697 table = StringTable::EnsureCapacity(table, 1, key);
14699 // Create string object.
14700 Handle<Object> string = key->AsHandle(isolate);
14701 // There must be no attempts to internalize strings that could throw
14702 // InvalidStringLength error.
14703 CHECK(!string.is_null());
14705 // Add the new string and return it along with the string table.
14706 entry = table->FindInsertionEntry(key->Hash());
14707 table->set(EntryToIndex(entry), *string);
14708 table->ElementAdded();
// EnsureCapacity may have produced a new table; make it the canonical one.
14710 isolate->factory()->set_string_table(table);
14711 return Handle<String>::cast(string);
// Looks up a cached script compilation for |src| in |context|, keyed by
// source, the context's closure's SharedFunctionInfo, and strict mode.
// Returns undefined on a miss; otherwise the value slot (entry index + 1).
14715 Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
14716 Handle<Context> context) {
14717 Isolate* isolate = GetIsolate();
14718 Handle<SharedFunctionInfo> shared(context->closure()->shared());
14719 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
14720 RelocInfo::kNoPosition);
14721 int entry = FindEntry(&key);
14722 if (entry == kNotFound) return isolate->factory()->undefined_value();
14723 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Looks up a cached eval compilation.  Unlike Lookup(), strict mode and the
// eval's scope position are part of the key, since they affect the
// generated code.  Returns undefined on a miss.
14727 Handle<Object> CompilationCacheTable::LookupEval(Handle<String> src,
14728 Handle<Context> context,
14729 StrictMode strict_mode,
14730 int scope_position) {
14731 Isolate* isolate = GetIsolate();
14732 Handle<SharedFunctionInfo> shared(context->closure()->shared());
14733 StringSharedKey key(src, shared, strict_mode, scope_position);
14734 int entry = FindEntry(&key);
14735 if (entry == kNotFound) return isolate->factory()->undefined_value();
14736 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Looks up cached compilation data for a regexp keyed by source and flags.
// Allocation is disallowed because RegExpKey holds raw references.
14740 Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
14741 JSRegExp::Flags flags) {
14742 Isolate* isolate = GetIsolate();
14743 DisallowHeapAllocation no_allocation;
14744 RegExpKey key(src, flags);
14745 int entry = FindEntry(&key);
14746 if (entry == kNotFound) return isolate->factory()->undefined_value();
14747 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Inserts a script compilation result: grows the cache if needed, stores
// the key object in the entry's key slot and |value| in the adjacent value
// slot, and returns the (possibly reallocated) cache on an elided line.
14751 Handle<CompilationCacheTable> CompilationCacheTable::Put(
14752 Handle<CompilationCacheTable> cache, Handle<String> src,
14753 Handle<Context> context, Handle<Object> value) {
14754 Isolate* isolate = cache->GetIsolate();
14755 Handle<SharedFunctionInfo> shared(context->closure()->shared());
14756 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
14757 RelocInfo::kNoPosition);
14758 cache = EnsureCapacity(cache, 1, &key);
14759 Handle<Object> k = key.AsHandle(isolate);
14760 int entry = cache->FindInsertionEntry(key.Hash());
14761 cache->set(EntryToIndex(entry), *k);
14762 cache->set(EntryToIndex(entry) + 1, *value);
14763 cache->ElementAdded();
// Inserts an eval compilation result, keyed like LookupEval (the value's
// own strict mode plus the scope position participate in the key).
14768 Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
14769 Handle<CompilationCacheTable> cache, Handle<String> src,
14770 Handle<Context> context, Handle<SharedFunctionInfo> value,
14771 int scope_position) {
14772 Isolate* isolate = cache->GetIsolate();
14773 Handle<SharedFunctionInfo> shared(context->closure()->shared());
14774 StringSharedKey key(src, shared, value->strict_mode(), scope_position);
14775 cache = EnsureCapacity(cache, 1, &key);
14776 Handle<Object> k = key.AsHandle(isolate);
14777 int entry = cache->FindInsertionEntry(key.Hash());
14778 cache->set(EntryToIndex(entry), *k);
14779 cache->set(EntryToIndex(entry) + 1, *value);
14780 cache->ElementAdded();
// Inserts compiled regexp data.  Note the key slot intentionally receives
// |value| (not a key object) — see the comment below.
14785 Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
14786 Handle<CompilationCacheTable> cache, Handle<String> src,
14787 JSRegExp::Flags flags, Handle<FixedArray> value) {
14788 RegExpKey key(src, flags);
14789 cache = EnsureCapacity(cache, 1, &key);
14790 int entry = cache->FindInsertionEntry(key.Hash());
14791 // We store the value in the key slot, and compare the search key
14792 // to the stored value with a custom IsMatch function during lookups.
14793 cache->set(EntryToIndex(entry), *value);
14794 cache->set(EntryToIndex(entry) + 1, *value);
14795 cache->ElementAdded();
// Removes every entry whose value slot holds |value| by scanning all
// entries and overwriting both key and value slots with the hole.  Write
// barriers are skipped because the hole is an immortal immovable object.
14800 void CompilationCacheTable::Remove(Object* value) {
14801 DisallowHeapAllocation no_allocation;
14802 Object* the_hole_value = GetHeap()->the_hole_value();
14803 for (int entry = 0, size = Capacity(); entry < size; entry++) {
14804 int entry_index = EntryToIndex(entry);
14805 int value_index = entry_index + 1;
14806 if (get(value_index) == value) {
14807 NoWriteBarrierSet(this, entry_index, the_hole_value);
14808 NoWriteBarrierSet(this, value_index, the_hole_value);
14816 // StringsKey used for HashTable where key is array of internalized strings.
// Two keys match iff the arrays have identical length and identical string
// pointers (identity comparison is sufficient for internalized strings).
14817 class StringsKey : public HashTableKey {
14819 explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }
14821 bool IsMatch(Object* strings) OVERRIDE {
14822 FixedArray* o = FixedArray::cast(strings);
14823 int len = strings_->length();
14824 if (o->length() != len) return false;
14825 for (int i = 0; i < len; i++) {
// Pointer equality: internalized strings are canonical, so this is exact.
14826 if (o->get(i) != strings_->get(i)) return false;
14831 uint32_t Hash() OVERRIDE { return HashForObject(*strings_); }
// Hash is the xor of the element strings' hashes (initializer elided).
14833 uint32_t HashForObject(Object* obj) OVERRIDE {
14834 FixedArray* strings = FixedArray::cast(obj);
14835 int len = strings->length();
14837 for (int i = 0; i < len; i++) {
14838 hash ^= String::cast(strings->get(i))->Hash();
14843 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE { return strings_; }
14846 Handle<FixedArray> strings_;
// Looks up the cached Map for a property-name array; returns undefined on
// a miss.  Raw Object* interface, hence the DisallowHeapAllocation scope.
14850 Object* MapCache::Lookup(FixedArray* array) {
14851 DisallowHeapAllocation no_alloc;
14852 StringsKey key(handle(array));
14853 int entry = FindEntry(&key);
14854 if (entry == kNotFound) return GetHeap()->undefined_value();
14855 return get(EntryToIndex(entry) + 1);
// Caches |value| (a Map) under the property-name |array| key, growing the
// cache if needed; the new cache is returned on an elided line.
14859 Handle<MapCache> MapCache::Put(
14860 Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
14861 StringsKey key(array);
14863 Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
14864 int entry = new_cache->FindInsertionEntry(key.Hash());
14865 new_cache->set(EntryToIndex(entry), *array);
14866 new_cache->set(EntryToIndex(entry) + 1, *value);
14867 new_cache->ElementAdded();
// Allocates a new dictionary via the base HashTable::New and initializes
// the enumeration-index counter that dictionaries add on top of plain
// hash tables.
14872 template<typename Derived, typename Shape, typename Key>
14873 Handle<Derived> Dictionary<Derived, Shape, Key>::New(
14875 int at_least_space_for,
14876 PretenureFlag pretenure) {
14877 DCHECK(0 <= at_least_space_for);
14878 Handle<Derived> dict = DerivedHashTable::New(isolate,
14879 at_least_space_for,
14880 USE_DEFAULT_MINIMUM_CAPACITY,
14883 // Initialize the next enumeration index.
14884 dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
// Renumbers all properties' enumeration indices to a dense range starting
// at kInitialIndex while preserving the existing enumeration order.  Called
// when the running index counter is about to exceed its representable
// range (see EnsureCapacity).
14889 template<typename Derived, typename Shape, typename Key>
14890 void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
14891 Handle<Derived> dictionary) {
14892 Factory* factory = dictionary->GetIsolate()->factory();
14893 int length = dictionary->NumberOfElements();
14895 // Allocate and initialize iteration order array.
14896 Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
14897 for (int i = 0; i < length; i++) {
14898 iteration_order->set(i, Smi::FromInt(i));
14901 // Allocate array with enumeration order.
14902 Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);
14904 // Fill the enumeration order array with property details.
14905 int capacity = dictionary->Capacity();
14907 for (int i = 0; i < capacity; i++) {
14908 if (dictionary->IsKey(dictionary->KeyAt(i))) {
14909 int index = dictionary->DetailsAt(i).dictionary_index();
14910 enumeration_order->set(pos++, Smi::FromInt(index));
14914 // Sort the arrays wrt. enumeration order.
14915 iteration_order->SortPairs(*enumeration_order, enumeration_order->length());
14917 // Overwrite the enumeration_order with the enumeration indices.
14918 for (int i = 0; i < length; i++) {
14919 int index = Smi::cast(iteration_order->get(i))->value();
14920 int enum_index = PropertyDetails::kInitialIndex + i;
14921 enumeration_order->set(index, Smi::FromInt(enum_index));
14924 // Update the dictionary with new indices.
14925 capacity = dictionary->Capacity();
14927 for (int i = 0; i < capacity; i++) {
14928 if (dictionary->IsKey(dictionary->KeyAt(i))) {
14929 int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
14930 PropertyDetails details = dictionary->DetailsAt(i);
// Rewrite only the index bits; attributes and type are preserved.
14931 PropertyDetails new_details = PropertyDetails(
14932 details.attributes(), details.type(), enum_index);
14933 dictionary->DetailsAtPut(i, new_details);
14937 // Set the next enumeration index.
14938 dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
// Dictionary-specific EnsureCapacity: before delegating to the hash-table
// growth logic, renumbers enumeration indices if adding |n| entries would
// overflow the index field in PropertyDetails.
14942 template<typename Derived, typename Shape, typename Key>
14943 Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
14944 Handle<Derived> dictionary, int n, Key key) {
14945 // Check whether there are enough enumeration indices to add n elements.
14946 if (Shape::kIsEnumerable &&
14947 !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
14948 // If not, we generate new indices for the properties.
14949 GenerateNewEnumerationIndices(dictionary);
14951 return DerivedHashTable::EnsureCapacity(dictionary, n, key);
// Deletes the property at |entry|.  Returns false_value when the property
// is non-configurable and the deletion is not forced; otherwise replaces
// both key and value with the hole and returns true_value.
14955 template<typename Derived, typename Shape, typename Key>
14956 Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
14957 Handle<Derived> dictionary,
14959 JSObject::DeleteMode mode) {
14960 Factory* factory = dictionary->GetIsolate()->factory();
14961 PropertyDetails details = dictionary->DetailsAt(entry);
14962 // Ignore attributes if forcing a deletion.
14963 if (!details.IsConfigurable() && mode != JSReceiver::FORCE_DELETION) {
14964 return factory->false_value();
14967 dictionary->SetEntry(
14968 entry, factory->the_hole_value(), factory->the_hole_value());
14969 dictionary->ElementRemoved();
14970 return factory->true_value();
// Sets |key| to |value|: overwrites in place when the key exists, else
// grows if needed and adds a fresh entry with default (NONE, NORMAL)
// details.  Returns the (possibly reallocated) dictionary.
14974 template<typename Derived, typename Shape, typename Key>
14975 Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
14976 Handle<Derived> dictionary, Key key, Handle<Object> value) {
14977 int entry = dictionary->FindEntry(key);
14979 // If the entry is present set the value;
14980 if (entry != Dictionary::kNotFound) {
14981 dictionary->ValueAtPut(entry, *value);
14985 // Check whether the dictionary should be extended.
14986 dictionary = EnsureCapacity(dictionary, 1, key);
// AsHandle is invoked for its side effects only (USE silences the result).
14988 USE(Shape::AsHandle(dictionary->GetIsolate(), key));
14990 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
14992 AddEntry(dictionary, key, value, details, dictionary->Hash(key));
14997 template<typename Derived, typename Shape, typename Key>
14998 Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
14999 Handle<Derived> dictionary,
15001 Handle<Object> value,
15002 PropertyDetails details) {
15003 // Valdate key is absent.
15004 SLOW_DCHECK((dictionary->FindEntry(key) == Dictionary::kNotFound));
15005 // Check whether the dictionary should be extended.
15006 dictionary = EnsureCapacity(dictionary, 1, key);
15008 AddEntry(dictionary, key, value, details, dictionary->Hash(key));
15013 // Add a key, value pair to the dictionary.
15014 template<typename Derived, typename Shape, typename Key>
15015 void Dictionary<Derived, Shape, Key>::AddEntry(
15016 Handle<Derived> dictionary,
15018 Handle<Object> value,
15019 PropertyDetails details,
15021 // Compute the key object.
15022 Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);
15024 uint32_t entry = dictionary->FindInsertionEntry(hash);
15025 // Insert element at empty or deleted entry
15026 if (!details.IsDeleted() &&
15027 details.dictionary_index() == 0 &&
15028 Shape::kIsEnumerable) {
15029 // Assign an enumeration index to the property and update
15030 // SetNextEnumerationIndex.
15031 int index = dictionary->NextEnumerationIndex();
15032 details = PropertyDetails(details.attributes(), details.type(), index);
15033 dictionary->SetNextEnumerationIndex(index + 1);
15035 dictionary->SetEntry(entry, k, value, details);
15036 DCHECK((dictionary->KeyAt(entry)->IsNumber() ||
15037 dictionary->KeyAt(entry)->IsName()));
15038 dictionary->ElementAdded();
15042 void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
15043 DisallowHeapAllocation no_allocation;
15044 // If the dictionary requires slow elements an element has already
15045 // been added at a high index.
15046 if (requires_slow_elements()) return;
15047 // Check if this index is high enough that we should require slow
15049 if (key > kRequiresSlowElementsLimit) {
15050 set_requires_slow_elements();
15053 // Update max key value.
15054 Object* max_index_object = get(kMaxNumberKeyIndex);
15055 if (!max_index_object->IsSmi() || max_number_key() < key) {
15056 FixedArray::set(kMaxNumberKeyIndex,
15057 Smi::FromInt(key << kRequiresSlowElementsTagSize));
15062 Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
15063 Handle<SeededNumberDictionary> dictionary,
15065 Handle<Object> value,
15066 PropertyDetails details) {
15067 dictionary->UpdateMaxNumberKey(key);
15068 SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
15069 return Add(dictionary, key, value, details);
15073 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
15074 Handle<UnseededNumberDictionary> dictionary,
15076 Handle<Object> value) {
15077 SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
15078 return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
15082 Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
15083 Handle<SeededNumberDictionary> dictionary,
15085 Handle<Object> value) {
15086 dictionary->UpdateMaxNumberKey(key);
15087 return AtPut(dictionary, key, value);
15091 Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
15092 Handle<UnseededNumberDictionary> dictionary,
15094 Handle<Object> value) {
15095 return AtPut(dictionary, key, value);
15099 Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
15100 Handle<SeededNumberDictionary> dictionary,
15102 Handle<Object> value,
15103 PropertyDetails details) {
15104 int entry = dictionary->FindEntry(key);
15105 if (entry == kNotFound) {
15106 return AddNumberEntry(dictionary, key, value, details);
15108 // Preserve enumeration index.
15109 details = PropertyDetails(details.attributes(),
15111 dictionary->DetailsAt(entry).dictionary_index());
15112 Handle<Object> object_key =
15113 SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
15114 dictionary->SetEntry(entry, object_key, value, details);
15119 Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
15120 Handle<UnseededNumberDictionary> dictionary,
15122 Handle<Object> value) {
15123 int entry = dictionary->FindEntry(key);
15124 if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
15125 Handle<Object> object_key =
15126 UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
15127 dictionary->SetEntry(entry, object_key, value);
15133 template<typename Derived, typename Shape, typename Key>
15134 int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
15135 PropertyAttributes filter) {
15136 int capacity = DerivedHashTable::Capacity();
15138 for (int i = 0; i < capacity; i++) {
15139 Object* k = DerivedHashTable::KeyAt(i);
15140 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15141 PropertyDetails details = DetailsAt(i);
15142 if (details.IsDeleted()) continue;
15143 PropertyAttributes attr = details.attributes();
15144 if ((attr & filter) == 0) result++;
15151 template<typename Derived, typename Shape, typename Key>
15152 int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
15153 return NumberOfElementsFilterAttributes(
15154 static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
15158 template<typename Derived, typename Shape, typename Key>
15159 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
15160 FixedArray* storage,
15161 PropertyAttributes filter,
15162 typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
15163 DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
15164 int capacity = DerivedHashTable::Capacity();
15166 for (int i = 0; i < capacity; i++) {
15167 Object* k = DerivedHashTable::KeyAt(i);
15168 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15169 PropertyDetails details = DetailsAt(i);
15170 if (details.IsDeleted()) continue;
15171 PropertyAttributes attr = details.attributes();
15172 if ((attr & filter) == 0) storage->set(index++, k);
15175 if (sort_mode == Dictionary::SORTED) {
15176 storage->SortPairs(storage, index);
15178 DCHECK(storage->length() >= index);
15182 struct EnumIndexComparator {
15183 explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
15184 bool operator() (Smi* a, Smi* b) {
15185 PropertyDetails da(dict->DetailsAt(a->value()));
15186 PropertyDetails db(dict->DetailsAt(b->value()));
15187 return da.dictionary_index() < db.dictionary_index();
15189 NameDictionary* dict;
15193 void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
15194 int length = storage->length();
15195 int capacity = Capacity();
15196 int properties = 0;
15197 for (int i = 0; i < capacity; i++) {
15198 Object* k = KeyAt(i);
15199 if (IsKey(k) && !k->IsSymbol()) {
15200 PropertyDetails details = DetailsAt(i);
15201 if (details.IsDeleted() || details.IsDontEnum()) continue;
15202 storage->set(properties, Smi::FromInt(i));
15204 if (properties == length) break;
15207 CHECK_EQ(length, properties);
15208 EnumIndexComparator cmp(this);
15209 Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
15210 std::sort(start, start + length, cmp);
15211 for (int i = 0; i < length; i++) {
15212 int index = Smi::cast(storage->get(i))->value();
15213 storage->set(i, KeyAt(index));
15218 template<typename Derived, typename Shape, typename Key>
15219 void Dictionary<Derived, Shape, Key>::CopyKeysTo(
15220 FixedArray* storage,
15222 PropertyAttributes filter,
15223 typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
15224 DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
15225 int capacity = DerivedHashTable::Capacity();
15226 for (int i = 0; i < capacity; i++) {
15227 Object* k = DerivedHashTable::KeyAt(i);
15228 if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
15229 PropertyDetails details = DetailsAt(i);
15230 if (details.IsDeleted()) continue;
15231 PropertyAttributes attr = details.attributes();
15232 if ((attr & filter) == 0) storage->set(index++, k);
15235 if (sort_mode == Dictionary::SORTED) {
15236 storage->SortPairs(storage, index);
15238 DCHECK(storage->length() >= index);
15242 // Backwards lookup (slow).
15243 template<typename Derived, typename Shape, typename Key>
15244 Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
15245 int capacity = DerivedHashTable::Capacity();
15246 for (int i = 0; i < capacity; i++) {
15247 Object* k = DerivedHashTable::KeyAt(i);
15248 if (Dictionary::IsKey(k)) {
15249 Object* e = ValueAt(i);
15250 if (e->IsPropertyCell()) {
15251 e = PropertyCell::cast(e)->value();
15253 if (e == value) return k;
15256 Heap* heap = Dictionary::GetHeap();
15257 return heap->undefined_value();
15261 Object* ObjectHashTable::Lookup(Handle<Object> key) {
15262 DisallowHeapAllocation no_gc;
15263 DCHECK(IsKey(*key));
15265 // If the object does not have an identity hash, it was never used as a key.
15266 Object* hash = key->GetHash();
15267 if (hash->IsUndefined()) {
15268 return GetHeap()->the_hole_value();
15270 int entry = FindEntry(key);
15271 if (entry == kNotFound) return GetHeap()->the_hole_value();
15272 return get(EntryToIndex(entry) + 1);
15276 Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
15277 Handle<Object> key,
15278 Handle<Object> value) {
15279 DCHECK(table->IsKey(*key));
15280 DCHECK(!value->IsTheHole());
15282 Isolate* isolate = table->GetIsolate();
15284 // Make sure the key object has an identity hash code.
15285 Handle<Smi> hash = Object::GetOrCreateHash(isolate, key);
15287 int entry = table->FindEntry(key);
15289 // Key is already in table, just overwrite value.
15290 if (entry != kNotFound) {
15291 table->set(EntryToIndex(entry) + 1, *value);
15295 // Check whether the hash table should be extended.
15296 table = EnsureCapacity(table, 1, key);
15297 table->AddEntry(table->FindInsertionEntry(hash->value()),
15304 Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table,
15305 Handle<Object> key,
15306 bool* was_present) {
15307 DCHECK(table->IsKey(*key));
15309 Object* hash = key->GetHash();
15310 if (hash->IsUndefined()) {
15311 *was_present = false;
15315 int entry = table->FindEntry(key);
15316 if (entry == kNotFound) {
15317 *was_present = false;
15321 *was_present = true;
15322 table->RemoveEntry(entry);
15323 return Shrink(table, key);
15327 void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
15328 set(EntryToIndex(entry), key);
15329 set(EntryToIndex(entry) + 1, value);
15334 void ObjectHashTable::RemoveEntry(int entry) {
15335 set_the_hole(EntryToIndex(entry));
15336 set_the_hole(EntryToIndex(entry) + 1);
15341 Object* WeakHashTable::Lookup(Handle<Object> key) {
15342 DisallowHeapAllocation no_gc;
15343 DCHECK(IsKey(*key));
15344 int entry = FindEntry(key);
15345 if (entry == kNotFound) return GetHeap()->the_hole_value();
15346 return get(EntryToValueIndex(entry));
15350 Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
15351 Handle<Object> key,
15352 Handle<Object> value) {
15353 DCHECK(table->IsKey(*key));
15354 int entry = table->FindEntry(key);
15355 // Key is already in table, just overwrite value.
15356 if (entry != kNotFound) {
15357 // TODO(ulan): Skipping write barrier is a temporary solution to avoid
15358 // memory leaks. Remove this once we have special visitor for weak fixed
15360 table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
15364 // Check whether the hash table should be extended.
15365 table = EnsureCapacity(table, 1, key, TENURED);
15367 table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
15372 void WeakHashTable::AddEntry(int entry,
15373 Handle<Object> key,
15374 Handle<Object> value) {
15375 DisallowHeapAllocation no_allocation;
15376 // TODO(ulan): Skipping write barrier is a temporary solution to avoid
15377 // memory leaks. Remove this once we have special visitor for weak fixed
15379 set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
15380 set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
15385 template<class Derived, class Iterator, int entrysize>
15386 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
15387 Isolate* isolate, int capacity, PretenureFlag pretenure) {
15388 // Capacity must be a power of two, since we depend on being able
15389 // to divide and multiple by 2 (kLoadFactor) to derive capacity
15390 // from number of buckets. If we decide to change kLoadFactor
15391 // to something other than 2, capacity should be stored as another
15392 // field of this object.
15393 capacity = base::bits::RoundUpToPowerOfTwo32(Max(kMinCapacity, capacity));
15394 if (capacity > kMaxCapacity) {
15395 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
15397 int num_buckets = capacity / kLoadFactor;
15398 Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
15399 kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
15400 backing_store->set_map_no_write_barrier(
15401 isolate->heap()->ordered_hash_table_map());
15402 Handle<Derived> table = Handle<Derived>::cast(backing_store);
15403 for (int i = 0; i < num_buckets; ++i) {
15404 table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
15406 table->SetNumberOfBuckets(num_buckets);
15407 table->SetNumberOfElements(0);
15408 table->SetNumberOfDeletedElements(0);
15413 template<class Derived, class Iterator, int entrysize>
15414 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
15415 Handle<Derived> table) {
15416 DCHECK(!table->IsObsolete());
15418 int nof = table->NumberOfElements();
15419 int nod = table->NumberOfDeletedElements();
15420 int capacity = table->Capacity();
15421 if ((nof + nod) < capacity) return table;
15422 // Don't need to grow if we can simply clear out deleted entries instead.
15423 // Note that we can't compact in place, though, so we always allocate
15425 return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
15429 template<class Derived, class Iterator, int entrysize>
15430 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
15431 Handle<Derived> table) {
15432 DCHECK(!table->IsObsolete());
15434 int nof = table->NumberOfElements();
15435 int capacity = table->Capacity();
15436 if (nof >= (capacity >> 2)) return table;
15437 return Rehash(table, capacity / 2);
15441 template<class Derived, class Iterator, int entrysize>
15442 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
15443 Handle<Derived> table) {
15444 DCHECK(!table->IsObsolete());
15446 Handle<Derived> new_table =
15447 Allocate(table->GetIsolate(),
15449 table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
15451 table->SetNextTable(*new_table);
15452 table->SetNumberOfDeletedElements(-1);
15458 template<class Derived, class Iterator, int entrysize>
15459 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove(
15460 Handle<Derived> table, Handle<Object> key, bool* was_present) {
15461 int entry = table->FindEntry(key);
15462 if (entry == kNotFound) {
15463 *was_present = false;
15466 *was_present = true;
15467 table->RemoveEntry(entry);
15468 return Shrink(table);
15472 template<class Derived, class Iterator, int entrysize>
15473 Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
15474 Handle<Derived> table, int new_capacity) {
15475 DCHECK(!table->IsObsolete());
15477 Handle<Derived> new_table =
15478 Allocate(table->GetIsolate(),
15480 table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
15481 int nof = table->NumberOfElements();
15482 int nod = table->NumberOfDeletedElements();
15483 int new_buckets = new_table->NumberOfBuckets();
15485 int removed_holes_index = 0;
15487 for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
15488 Object* key = table->KeyAt(old_entry);
15489 if (key->IsTheHole()) {
15490 table->SetRemovedIndexAt(removed_holes_index++, old_entry);
15494 Object* hash = key->GetHash();
15495 int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
15496 Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
15497 new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
15498 int new_index = new_table->EntryToIndex(new_entry);
15499 int old_index = table->EntryToIndex(old_entry);
15500 for (int i = 0; i < entrysize; ++i) {
15501 Object* value = table->get(old_index + i);
15502 new_table->set(new_index + i, value);
15504 new_table->set(new_index + kChainOffset, chain_entry);
15508 DCHECK_EQ(nod, removed_holes_index);
15510 new_table->SetNumberOfElements(nof);
15511 table->SetNextTable(*new_table);
15517 template <class Derived, class Iterator, int entrysize>
15518 int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
15519 Handle<Object> key, int hash) {
15520 DCHECK(!IsObsolete());
15522 DisallowHeapAllocation no_gc;
15523 DCHECK(!key->IsTheHole());
15524 for (int entry = HashToEntry(hash); entry != kNotFound;
15525 entry = ChainAt(entry)) {
15526 Object* candidate = KeyAt(entry);
15527 if (candidate->SameValueZero(*key))
15534 template <class Derived, class Iterator, int entrysize>
15535 int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
15536 Handle<Object> key) {
15537 DisallowHeapAllocation no_gc;
15538 Object* hash = key->GetHash();
15539 if (!hash->IsSmi()) return kNotFound;
15540 return FindEntry(key, Smi::cast(hash)->value());
15544 template <class Derived, class Iterator, int entrysize>
15545 int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) {
15546 DCHECK(!IsObsolete());
15548 int entry = UsedCapacity();
15549 int bucket = HashToBucket(hash);
15550 int index = EntryToIndex(entry);
15551 Object* chain_entry = get(kHashTableStartIndex + bucket);
15552 set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
15553 set(index + kChainOffset, chain_entry);
15554 SetNumberOfElements(NumberOfElements() + 1);
15559 template<class Derived, class Iterator, int entrysize>
15560 void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
15561 DCHECK(!IsObsolete());
15563 int index = EntryToIndex(entry);
15564 for (int i = 0; i < entrysize; ++i) {
15565 set_the_hole(index + i);
15567 SetNumberOfElements(NumberOfElements() - 1);
15568 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
15572 template Handle<OrderedHashSet>
15573 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
15574 Isolate* isolate, int capacity, PretenureFlag pretenure);
15576 template Handle<OrderedHashSet>
15577 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
15578 Handle<OrderedHashSet> table);
15580 template Handle<OrderedHashSet>
15581 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
15582 Handle<OrderedHashSet> table);
15584 template Handle<OrderedHashSet>
15585 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
15586 Handle<OrderedHashSet> table);
15588 template Handle<OrderedHashSet>
15589 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Remove(
15590 Handle<OrderedHashSet> table, Handle<Object> key, bool* was_present);
15592 template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
15593 Handle<Object> key, int hash);
15594 template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
15595 Handle<Object> key);
15598 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);
15601 OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);
15604 template Handle<OrderedHashMap>
15605 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
15606 Isolate* isolate, int capacity, PretenureFlag pretenure);
15608 template Handle<OrderedHashMap>
15609 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
15610 Handle<OrderedHashMap> table);
15612 template Handle<OrderedHashMap>
15613 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
15614 Handle<OrderedHashMap> table);
15616 template Handle<OrderedHashMap>
15617 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
15618 Handle<OrderedHashMap> table);
15620 template Handle<OrderedHashMap>
15621 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Remove(
15622 Handle<OrderedHashMap> table, Handle<Object> key, bool* was_present);
15624 template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
15625 Handle<Object> key, int hash);
15626 template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
15627 Handle<Object> key);
15630 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);
15633 OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);
15636 bool OrderedHashSet::Contains(Handle<Object> key) {
15637 return FindEntry(key) != kNotFound;
15641 Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
15642 Handle<Object> key) {
15643 int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
15644 if (table->FindEntry(key, hash) != kNotFound) return table;
15646 table = EnsureGrowable(table);
15648 int index = table->AddEntry(hash);
15649 table->set(index, *key);
15654 Object* OrderedHashMap::Lookup(Handle<Object> key) {
15655 DisallowHeapAllocation no_gc;
15656 int entry = FindEntry(key);
15657 if (entry == kNotFound) return GetHeap()->the_hole_value();
15658 return ValueAt(entry);
15662 Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table,
15663 Handle<Object> key,
15664 Handle<Object> value) {
15665 DCHECK(!key->IsTheHole());
15667 int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
15668 int entry = table->FindEntry(key, hash);
15670 if (entry != kNotFound) {
15671 table->set(table->EntryToIndex(entry) + kValueOffset, *value);
15675 table = EnsureGrowable(table);
15677 int index = table->AddEntry(hash);
15678 table->set(index, *key);
15679 table->set(index + kValueOffset, *value);
15684 template<class Derived, class TableType>
15685 void OrderedHashTableIterator<Derived, TableType>::Transition() {
15686 DisallowHeapAllocation no_allocation;
15687 TableType* table = TableType::cast(this->table());
15688 if (!table->IsObsolete()) return;
15690 int index = Smi::cast(this->index())->value();
15691 while (table->IsObsolete()) {
15692 TableType* next_table = table->NextTable();
15695 int nod = table->NumberOfDeletedElements();
15697 // When we clear the table we set the number of deleted elements to -1.
15701 int old_index = index;
15702 for (int i = 0; i < nod; ++i) {
15703 int removed_index = table->RemovedIndexAt(i);
15704 if (removed_index >= old_index) break;
15710 table = next_table;
15714 set_index(Smi::FromInt(index));
15718 template<class Derived, class TableType>
15719 bool OrderedHashTableIterator<Derived, TableType>::HasMore() {
15720 DisallowHeapAllocation no_allocation;
15721 if (this->table()->IsUndefined()) return false;
15725 TableType* table = TableType::cast(this->table());
15726 int index = Smi::cast(this->index())->value();
15727 int used_capacity = table->UsedCapacity();
15729 while (index < used_capacity && table->KeyAt(index)->IsTheHole()) {
15733 set_index(Smi::FromInt(index));
15735 if (index < used_capacity) return true;
15737 set_table(GetHeap()->undefined_value());
15742 template<class Derived, class TableType>
15743 Smi* OrderedHashTableIterator<Derived, TableType>::Next(JSArray* value_array) {
15744 DisallowHeapAllocation no_allocation;
15746 FixedArray* array = FixedArray::cast(value_array->elements());
15747 static_cast<Derived*>(this)->PopulateValueArray(array);
15749 return Smi::cast(kind());
15751 return Smi::FromInt(0);
15756 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next(
15757 JSArray* value_array);
15760 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::HasMore();
15763 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::MoveNext();
15766 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::CurrentKey();
15769 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Transition();
15773 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
15774 JSArray* value_array);
15777 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::HasMore();
15780 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::MoveNext();
15783 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::CurrentKey();
15786 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Transition();
15789 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
15790 DeclaredAccessorDescriptor* descriptor)
15791 : array_(descriptor->serialized_data()->GetDataStartAddress()),
15792 length_(descriptor->serialized_data()->length()),
15797 const DeclaredAccessorDescriptorData*
15798 DeclaredAccessorDescriptorIterator::Next() {
15799 DCHECK(offset_ < length_);
15800 uint8_t* ptr = &array_[offset_];
15801 DCHECK(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
15802 const DeclaredAccessorDescriptorData* data =
15803 reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
15804 offset_ += sizeof(*data);
15805 DCHECK(offset_ <= length_);
15810 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
15812 const DeclaredAccessorDescriptorData& descriptor,
15813 Handle<DeclaredAccessorDescriptor> previous) {
15814 int previous_length =
15815 previous.is_null() ? 0 : previous->serialized_data()->length();
15816 int length = sizeof(descriptor) + previous_length;
15817 Handle<ByteArray> serialized_descriptor =
15818 isolate->factory()->NewByteArray(length);
15819 Handle<DeclaredAccessorDescriptor> value =
15820 isolate->factory()->NewDeclaredAccessorDescriptor();
15821 value->set_serialized_data(*serialized_descriptor);
15822 // Copy in the data.
15824 DisallowHeapAllocation no_allocation;
15825 uint8_t* array = serialized_descriptor->GetDataStartAddress();
15826 if (previous_length != 0) {
15827 uint8_t* previous_array =
15828 previous->serialized_data()->GetDataStartAddress();
15829 MemCopy(array, previous_array, previous_length);
15830 array += previous_length;
15832 DCHECK(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
15833 DeclaredAccessorDescriptorData* data =
15834 reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
15835 *data = descriptor;
15841 // Check if there is a break point at this code position.
15842 bool DebugInfo::HasBreakPoint(int code_position) {
15843 // Get the break point info object for this code position.
15844 Object* break_point_info = GetBreakPointInfo(code_position);
15846 // If there is no break point info object or no break points in the break
15847 // point info object there is no break point at this code position.
15848 if (break_point_info->IsUndefined()) return false;
15849 return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
15853 // Get the break point info object for this code position.
15854 Object* DebugInfo::GetBreakPointInfo(int code_position) {
15855 // Find the index of the break point info object for this code position.
15856 int index = GetBreakPointInfoIndex(code_position);
15858 // Return the break point info object if any.
15859 if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
15860 return BreakPointInfo::cast(break_points()->get(index));
15864 // Clear a break point at the specified code position.
15865 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
15867 Handle<Object> break_point_object) {
15868 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
15869 debug_info->GetIsolate());
15870 if (break_point_info->IsUndefined()) return;
15871 BreakPointInfo::ClearBreakPoint(
15872 Handle<BreakPointInfo>::cast(break_point_info),
15873 break_point_object);
15877 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
15879 int source_position,
15880 int statement_position,
15881 Handle<Object> break_point_object) {
15882 Isolate* isolate = debug_info->GetIsolate();
15883 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
15885 if (!break_point_info->IsUndefined()) {
15886 BreakPointInfo::SetBreakPoint(
15887 Handle<BreakPointInfo>::cast(break_point_info),
15888 break_point_object);
15892 // Adding a new break point for a code position which did not have any
15893 // break points before. Try to find a free slot.
15894 int index = kNoBreakPointInfo;
15895 for (int i = 0; i < debug_info->break_points()->length(); i++) {
15896 if (debug_info->break_points()->get(i)->IsUndefined()) {
15901 if (index == kNoBreakPointInfo) {
15902 // No free slot - extend break point info array.
15903 Handle<FixedArray> old_break_points =
15904 Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
15905 Handle<FixedArray> new_break_points =
15906 isolate->factory()->NewFixedArray(
15907 old_break_points->length() +
15908 DebugInfo::kEstimatedNofBreakPointsInFunction);
15910 debug_info->set_break_points(*new_break_points);
15911 for (int i = 0; i < old_break_points->length(); i++) {
15912 new_break_points->set(i, old_break_points->get(i));
15914 index = old_break_points->length();
15916 DCHECK(index != kNoBreakPointInfo);
15918 // Allocate new BreakPointInfo object and set the break point.
15919 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
15920 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
15921 new_break_point_info->set_code_position(Smi::FromInt(code_position));
15922 new_break_point_info->set_source_position(Smi::FromInt(source_position));
15923 new_break_point_info->
15924 set_statement_position(Smi::FromInt(statement_position));
15925 new_break_point_info->set_break_point_objects(
15926 isolate->heap()->undefined_value());
15927 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
15928 debug_info->break_points()->set(index, *new_break_point_info);
15932 // Get the break point objects for a code position.
15933 Object* DebugInfo::GetBreakPointObjects(int code_position) {
15934 Object* break_point_info = GetBreakPointInfo(code_position);
15935 if (break_point_info->IsUndefined()) {
15936 return GetHeap()->undefined_value();
15938 return BreakPointInfo::cast(break_point_info)->break_point_objects();
15942 // Get the total number of break points.
15943 int DebugInfo::GetBreakPointCount() {
15944 if (break_points()->IsUndefined()) return 0;
15946 for (int i = 0; i < break_points()->length(); i++) {
15947 if (!break_points()->get(i)->IsUndefined()) {
15948 BreakPointInfo* break_point_info =
15949 BreakPointInfo::cast(break_points()->get(i));
15950 count += break_point_info->GetBreakPointCount();
15957 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
15958 Handle<Object> break_point_object) {
15959 Heap* heap = debug_info->GetHeap();
15960 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
15961 for (int i = 0; i < debug_info->break_points()->length(); i++) {
15962 if (!debug_info->break_points()->get(i)->IsUndefined()) {
15963 Handle<BreakPointInfo> break_point_info =
15964 Handle<BreakPointInfo>(BreakPointInfo::cast(
15965 debug_info->break_points()->get(i)));
15966 if (BreakPointInfo::HasBreakPointObject(break_point_info,
15967 break_point_object)) {
15968 return *break_point_info;
15972 return heap->undefined_value();
15976 // Find the index of the break point info object for the specified code
// position. Returns kNoBreakPointInfo when the position has no entry.
15978 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
15979 if (break_points()->IsUndefined()) return kNoBreakPointInfo;
15980 for (int i = 0; i < break_points()->length(); i++) {
// Skip unused (undefined) slots in the break_points() array.
15981 if (!break_points()->get(i)->IsUndefined()) {
15982 BreakPointInfo* break_point_info =
15983 BreakPointInfo::cast(break_points()->get(i));
15984 if (break_point_info->code_position()->value() == code_position) {
15989 return kNoBreakPointInfo;
15993 // Remove the specified break point object.
// The break_point_objects slot is one of: undefined (no break points), a
// single object, or a FixedArray of objects. Clearing handles each shape.
15994 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
15995 Handle<Object> break_point_object) {
15996 Isolate* isolate = break_point_info->GetIsolate();
15997 // If there are no break points just ignore.
15998 if (break_point_info->break_point_objects()->IsUndefined()) return;
15999 // If there is a single break point clear it if it is the same.
16000 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16001 if (break_point_info->break_point_objects() == *break_point_object) {
16002 break_point_info->set_break_point_objects(
16003 isolate->heap()->undefined_value());
16007 // If there are multiple break points shrink the array
// by copying everything except the cleared object into a new, smaller array.
16008 DCHECK(break_point_info->break_point_objects()->IsFixedArray());
16009 Handle<FixedArray> old_array =
16010 Handle<FixedArray>(
16011 FixedArray::cast(break_point_info->break_point_objects()));
16012 Handle<FixedArray> new_array =
16013 isolate->factory()->NewFixedArray(old_array->length() - 1);
16014 int found_count = 0;
16015 for (int i = 0; i < old_array->length(); i++) {
16016 if (old_array->get(i) == *break_point_object) {
// A break point object is stored at most once per position.
16017 DCHECK(found_count == 0);
16020 new_array->set(i - found_count, old_array->get(i));
16023 // If the break point was found in the list change it.
16024 if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
16028 // Add the specified break point object.
// The break_point_objects slot grows from undefined → single object →
// FixedArray as break points are added; adding a duplicate is a no-op.
16029 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
16030 Handle<Object> break_point_object) {
16031 Isolate* isolate = break_point_info->GetIsolate();
16033 // If there was no break point objects before just set it.
16034 if (break_point_info->break_point_objects()->IsUndefined()) {
16035 break_point_info->set_break_point_objects(*break_point_object);
16038 // If the break point object is the same as before just ignore.
16039 if (break_point_info->break_point_objects() == *break_point_object) return;
16040 // If there was one break point object before replace with array.
16041 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16042 Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
16043 array->set(0, break_point_info->break_point_objects());
16044 array->set(1, *break_point_object);
16045 break_point_info->set_break_point_objects(*array);
16048 // If there was more than one break point before extend array.
16049 Handle<FixedArray> old_array =
16050 Handle<FixedArray>(
16051 FixedArray::cast(break_point_info->break_point_objects()));
16052 Handle<FixedArray> new_array =
16053 isolate->factory()->NewFixedArray(old_array->length() + 1);
16054 for (int i = 0; i < old_array->length(); i++) {
16055 // If the break point was there before just ignore.
16056 if (old_array->get(i) == *break_point_object) return;
16057 new_array->set(i, old_array->get(i));
16059 // Add the new break point.
16060 new_array->set(old_array->length(), *break_point_object);
16061 break_point_info->set_break_point_objects(*new_array);
// Check if |break_point_object| is registered on this BreakPointInfo.
// Handles all three storage shapes of break_point_objects: undefined,
// a single object, or a FixedArray of objects.
16065 bool BreakPointInfo::HasBreakPointObject(
16066 Handle<BreakPointInfo> break_point_info,
16067 Handle<Object> break_point_object) {
16069 if (break_point_info->break_point_objects()->IsUndefined()) return false;
16070 // Single break point.
16071 if (!break_point_info->break_point_objects()->IsFixedArray()) {
// Identity comparison — break points are matched by object, not by value.
16072 return break_point_info->break_point_objects() == *break_point_object;
16074 // Multiple break points.
16075 FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
16076 for (int i = 0; i < array->length(); i++) {
16077 if (array->get(i) == *break_point_object) {
16085 // Get the number of break points registered at this code position.
16086 int BreakPointInfo::GetBreakPointCount() {
// No break points.
16088 if (break_point_objects()->IsUndefined()) return 0;
16089 // Single break point.
16090 if (!break_point_objects()->IsFixedArray()) return 1;
16091 // Multiple break points.
16092 return FixedArray::cast(break_point_objects())->length();
// Static entry point used by generated code: unwraps the Smi field index
// and dispatches to DoGetField on the receiver.
16096 Object* JSDate::GetField(Object* object, Smi* index) {
16097 return JSDate::cast(object)->DoGetField(
16098 static_cast<FieldIndex>(index->value()));
// Return one component (year, month, ...) of this date. Components below
// kFirstUncachedField are served from per-object cached fields, refreshed
// when the DateCache stamp has changed; UTC components are computed on the
// fly by GetUTCField; remaining local-time components are computed here.
16102 Object* JSDate::DoGetField(FieldIndex index) {
// The raw time value is read via value(); kDateValue is handled elsewhere.
16103 DCHECK(index != kDateValue);
16105 DateCache* date_cache = GetIsolate()->date_cache();
16107 if (index < kFirstUncachedField) {
16108 Object* stamp = cache_stamp();
// A stale (but valid Smi) stamp means the cached fields must be recomputed.
16109 if (stamp != date_cache->stamp() && stamp->IsSmi()) {
16110 // Since the stamp is not NaN, the value is also not NaN.
16111 int64_t local_time_ms =
16112 date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
16113 SetCachedFields(local_time_ms, date_cache);
// Serve the request from the cached per-component fields.
16116 case kYear: return year();
16117 case kMonth: return month();
16118 case kDay: return day();
16119 case kWeekday: return weekday();
16120 case kHour: return hour();
16121 case kMinute: return min();
16122 case kSecond: return sec();
16123 default: UNREACHABLE();
// UTC components are never cached — compute them directly.
16127 if (index >= kFirstUTCField) {
16128 return GetUTCField(index, value()->Number(), date_cache);
16131 double time = value()->Number();
// An invalid date propagates as NaN.
16132 if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
16134 int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
16135 int days = DateCache::DaysFromTime(local_time_ms);
16137 if (index == kDays) return Smi::FromInt(days);
16139 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16140 if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
16141 DCHECK(index == kTimeInDay);
16142 return Smi::FromInt(time_in_day_ms);
// Compute a UTC component of |value| (time in ms since the epoch) without
// touching the per-object cache. Returns NaN for an invalid date value.
16146 Object* JSDate::GetUTCField(FieldIndex index,
16148 DateCache* date_cache) {
16149 DCHECK(index >= kFirstUTCField);
16151 if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
16153 int64_t time_ms = static_cast<int64_t>(value);
// Timezone offset is derived from the absolute time, not from a day split.
16155 if (index == kTimezoneOffset) {
16156 return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
16159 int days = DateCache::DaysFromTime(time_ms);
16161 if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
// Year/month/day share one days-to-calendar conversion.
16163 if (index <= kDayUTC) {
16164 int year, month, day;
16165 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16166 if (index == kYearUTC) return Smi::FromInt(year);
16167 if (index == kMonthUTC) return Smi::FromInt(month);
16168 DCHECK(index == kDayUTC);
16169 return Smi::FromInt(day);
// Remaining components are all derived from the time within the day.
16172 int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
16174 case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
16175 case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
16176 case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
16177 case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
16178 case kDaysUTC: return Smi::FromInt(days);
16179 case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
16180 default: UNREACHABLE();
// Store a new time value and reset the component cache. For NaN the cached
// fields are all set to NaN (so reads return NaN without recomputation);
// otherwise the stamp is invalidated so the next read refills the cache.
16188 void JSDate::SetValue(Object* value, bool is_value_nan) {
16190 if (is_value_nan) {
16191 HeapNumber* nan = GetIsolate()->heap()->nan_value();
// nan_value() is an immortal immovable heap object, so the write barrier
// can be skipped for these stores.
16192 set_cache_stamp(nan, SKIP_WRITE_BARRIER);
16193 set_year(nan, SKIP_WRITE_BARRIER);
16194 set_month(nan, SKIP_WRITE_BARRIER);
16195 set_day(nan, SKIP_WRITE_BARRIER);
16196 set_hour(nan, SKIP_WRITE_BARRIER);
16197 set_min(nan, SKIP_WRITE_BARRIER);
16198 set_sec(nan, SKIP_WRITE_BARRIER);
16199 set_weekday(nan, SKIP_WRITE_BARRIER);
// Mark the cache stale; DoGetField refills it on the next cached-field read.
16201 set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
// Recompute and store all cached local-time components from
// |local_time_ms|, then stamp the cache with the current DateCache stamp.
16206 void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) {
16207 int days = DateCache::DaysFromTime(local_time_ms);
16208 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16209 int year, month, day;
16210 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16211 int weekday = date_cache->Weekday(days);
16212 int hour = time_in_day_ms / (60 * 60 * 1000);
16213 int min = (time_in_day_ms / (60 * 1000)) % 60;
16214 int sec = (time_in_day_ms / 1000) % 60;
16215 set_cache_stamp(date_cache->stamp());
// Smis never need a write barrier.
16216 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
16217 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
16218 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
16219 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
16220 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
16221 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
16222 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
// Detach this array buffer: drop the backing store pointer and zero the
// byte length. Only externalized buffers may be neutered.
16226 void JSArrayBuffer::Neuter() {
16227 DCHECK(is_external());
16228 set_backing_store(NULL);
16229 set_byte_length(Smi::FromInt(0));
// Common view-neutering step: zero out the view's byte offset and length.
16233 void JSArrayBufferView::NeuterView() {
16234 set_byte_offset(Smi::FromInt(0));
16235 set_byte_length(Smi::FromInt(0));
// Neuter a DataView; body elided here — presumably delegates to
// NeuterView() like JSTypedArray::Neuter does. TODO confirm against full source.
16239 void JSDataView::Neuter() {
// Neuter a typed array: zero its length and swap in the canonical empty
// external array matching this array's map, detaching it from its buffer.
16244 void JSTypedArray::Neuter() {
16246 set_length(Smi::FromInt(0));
16247 set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
// Map a fixed typed-array elements kind (TYPE_ELEMENTS) to the
// corresponding external elements kind (EXTERNAL_TYPE_ELEMENTS), covering
// every entry of the TYPED_ARRAYS list via the macro expansion below.
16251 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
16252 switch (elements_kind) {
16253 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
16254 case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
16256 TYPED_ARRAYS(TYPED_ARRAY_CASE)
16257 #undef TYPED_ARRAY_CASE
// Unreachable fallback value for non-typed-array kinds.
16261 return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
// Convert a typed array with on-heap (fixed) backing into one backed by a
// real JSArrayBuffer: allocates the buffer, copies the element data out of
// the heap, and transitions the array to external elements. Returns the
// newly created buffer.
16266 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
16267 Handle<JSTypedArray> typed_array) {
16269 Handle<Map> map(typed_array->map());
16270 Isolate* isolate = typed_array->GetIsolate();
// Only arrays still using fixed (on-heap) storage are materialized.
16272 DCHECK(IsFixedTypedArrayElementsKind(map->elements_kind()));
16274 Handle<Map> new_map = Map::TransitionElementsTo(
16276 FixedToExternalElementsKind(map->elements_kind()));
16278 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
16279 Handle<FixedTypedArrayBase> fixed_typed_array(
16280 FixedTypedArrayBase::cast(typed_array->elements()));
// Allocate off-heap storage sized to the existing data, then copy it over.
16281 Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
16282 fixed_typed_array->DataSize(), false);
16283 memcpy(buffer->backing_store(),
16284 fixed_typed_array->DataPtr(),
16285 fixed_typed_array->DataSize());
// External elements alias the buffer's backing store directly.
16286 Handle<ExternalArray> new_elements =
16287 isolate->factory()->NewExternalArray(
16288 fixed_typed_array->length(), typed_array->type(),
16289 static_cast<uint8_t*>(buffer->backing_store()));
// Link buffer and view both ways before installing the new map/elements.
16291 buffer->set_weak_first_view(*typed_array);
16292 DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
16293 typed_array->set_buffer(*buffer);
16294 JSObject::SetMapAndElements(typed_array, new_map, new_elements);
// Return the backing JSArrayBuffer, materializing one on demand: a buffer()
// slot of Smi 0 marks an on-heap (fixed) typed array with no buffer yet.
16300 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
16301 Handle<Object> result(buffer(), GetIsolate());
16302 if (*result != Smi::FromInt(0)) {
// A real buffer exists only once the array uses external elements.
16303 DCHECK(IsExternalArrayElementsKind(map()->elements_kind()));
16304 return Handle<JSArrayBuffer>::cast(result);
16306 Handle<JSTypedArray> self(this);
16307 return MaterializeArrayBuffer(self);
// Typed accessor over the raw type slot of this property cell.
16311 HeapType* PropertyCell::type() {
16312 return static_cast<HeapType*>(type_raw());
// Typed setter over the raw type slot; the write-barrier mode is forwarded
// unchanged to the raw setter.
16316 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
16317 DCHECK(IsPropertyCell());
16318 set_type_raw(type, ignored);
// Compute the cell's new type after storing |value|. If the constant type
// of |value| is already within the old type nothing changes; otherwise code
// depending on this cell is deoptimized and a widened type is returned.
16322 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
16323 Handle<Object> value) {
16324 Isolate* isolate = cell->GetIsolate();
16325 Handle<HeapType> old_type(cell->type(), isolate);
16326 Handle<HeapType> new_type = HeapType::Constant(value, isolate);
16328 if (new_type->Is(old_type)) return old_type;
// The cell's type changed — invalidate optimized code that embedded it.
16330 cell->dependent_code()->DeoptimizeDependentCodeGroup(
16331 isolate, DependentCode::kPropertyCellChangedGroup);
16333 if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
// Once the type has been generalized beyond a constant, give up tracking.
16337 return HeapType::Any(isolate);
// Store |value| into the cell and update the cell's tracked type, skipping
// the update when the type is already Any (nothing further to learn).
16341 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
16342 Handle<Object> value) {
16343 cell->set_value(*value);
16344 if (!HeapType::Any()->Is(cell->type())) {
16345 Handle<HeapType> new_type = UpdatedType(cell, value);
16346 cell->set_type(*new_type);
// Register the compilation described by |info| as dependent on this cell,
// so the generated code is deoptimized if the cell's value/type changes.
16352 void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
16353 CompilationInfo* info) {
16354 Handle<DependentCode> codes =
16355 DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
16356 DependentCode::kPropertyCellChangedGroup,
16357 info->object_wrapper());
// Insert may return the same list; only write back when it actually grew.
16358 if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
// Record the reverse edge so the compilation can commit/abort cleanly.
16359 info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
16360 cell, info->zone());
16363 } } // namespace v8::internal