1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/accessors.h"
10 #include "src/allocation-site-scopes.h"
12 #include "src/arguments.h"
13 #include "src/base/bits.h"
14 #include "src/bootstrapper.h"
15 #include "src/code-stubs.h"
16 #include "src/codegen.h"
17 #include "src/cpu-profiler.h"
19 #include "src/debug.h"
20 #include "src/deoptimizer.h"
21 #include "src/elements.h"
22 #include "src/execution.h"
23 #include "src/field-index-inl.h"
24 #include "src/field-index.h"
25 #include "src/full-codegen.h"
26 #include "src/heap/mark-compact.h"
27 #include "src/heap/objects-visiting-inl.h"
28 #include "src/hydrogen.h"
29 #include "src/ic/ic.h"
30 #include "src/isolate-inl.h"
32 #include "src/lookup.h"
33 #include "src/macro-assembler.h"
34 #include "src/objects-inl.h"
35 #include "src/prototype.h"
36 #include "src/safepoint-table.h"
37 #include "src/string-search.h"
38 #include "src/string-stream.h"
39 #include "src/utils.h"
41 #ifdef ENABLE_DISASSEMBLER
42 #include "src/disasm.h"
43 #include "src/disassembler.h"
// Computes the most precise HeapType usable for field-type tracking of this
// value under the given representation.  Only JS objects with stable maps in
// the non-callable spec-object range get a precise Class type; everything
// else degrades to Any (or None for the None representation).
Handle<HeapType> Object::OptimalType(Isolate* isolate,
                                     Representation representation) {
  if (representation.IsNone()) return HeapType::None(isolate);
  if (FLAG_track_field_types) {
    if (representation.IsHeapObject() && IsHeapObject()) {
      // We can track only JavaScript objects with stable maps.
      Handle<Map> map(HeapObject::cast(this)->map(), isolate);
      if (map->is_stable() &&
          map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
          map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
        return HeapType::Class(map, isolate);
  // NOTE(review): closing braces for the nested ifs appear to be missing from
  // this excerpt (source numbering jumps 59 -> 63) -- verify against upstream.
  // Fallback: no precise class type could be derived.
  return HeapType::Any(isolate);
// ES ToObject: returns JSReceivers unchanged and wraps number/boolean/
// string/symbol primitives in a fresh JSValue built from the corresponding
// native-context wrapper constructor.  Values with no wrapper (undefined,
// null) yield an empty MaybeHandle.
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         Handle<Context> native_context) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  Handle<JSFunction> constructor;
  if (object->IsNumber()) {
    constructor = handle(native_context->number_function(), isolate);
  } else if (object->IsBoolean()) {
    constructor = handle(native_context->boolean_function(), isolate);
  } else if (object->IsString()) {
    constructor = handle(native_context->string_function(), isolate);
  } else if (object->IsSymbol()) {
    constructor = handle(native_context->symbol_function(), isolate);
  // NOTE(review): the `else` arm / braces around the failure return appear to
  // be missing from this excerpt (numbering jumps 79 -> 81) -- the empty
  // MaybeHandle is presumably the no-wrapper (undefined/null) path; confirm.
  return MaybeHandle<JSReceiver>();
  Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
  Handle<JSValue>::cast(result)->set_value(*object);
// ES ToBoolean: booleans return themselves; Smi 0, undefined, null,
// undetectable objects, empty strings, and (per HeapNumberBooleanValue)
// zero/NaN heap numbers are false.
bool Object::BooleanValue() {
  if (IsBoolean()) return IsTrue();
  if (IsSmi()) return Smi::cast(this)->value() != 0;
  if (IsUndefined() || IsNull()) return false;
  if (IsUndetectableObject()) return false;   // Undetectable object is false.
  if (IsString()) return String::cast(this)->length() != 0;
  if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
// True if this object can be invoked as a function: resolves function
// proxies through their call trap, then accepts JSFunctions and heap
// objects whose map carries an instance call handler.
bool Object::IsCallable() const {
  const Object* fun = this;
  while (fun->IsJSFunctionProxy()) {
    fun = JSFunctionProxy::cast(fun)->call_trap();
  return fun->IsJSFunction() ||
         (fun->IsHeapObject() &&
          HeapObject::cast(fun)->map()->has_instance_call_handler());
// Generic property load driven by a LookupIterator: walks the lookup states
// (proxy handler, interceptor, access check, accessor, plain data) and
// returns undefined if the property is not found anywhere on the chain.
MaybeHandle<Object> Object::GetProperty(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
      case LookupIterator::JSPROXY:
        return JSProxy::GetPropertyWithHandler(it->GetHolder<JSProxy>(),
                                               it->GetReceiver(), it->name());
      case LookupIterator::INTERCEPTOR: {
        MaybeHandle<Object> maybe_result = JSObject::GetPropertyWithInterceptor(
            it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
        // A non-null result means the interceptor handled the load.
        if (!maybe_result.is_null()) return maybe_result;
        // Propagate exceptions thrown by the interceptor callback.
        if (it->isolate()->has_pending_exception()) return maybe_result;
      case LookupIterator::ACCESS_CHECK:
        if (it->HasAccess(v8::ACCESS_GET)) break;
        return JSObject::GetPropertyWithFailedAccessCheck(it);
      case LookupIterator::ACCESSOR:
        return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
                                       it->GetHolder<JSObject>(),
      case LookupIterator::DATA:
        return it->GetDataValue();
  // Property not found on the whole chain.
  return it->factory()->undefined_value();
// Convenience wrapper: looks up |key| on |object| along the prototype chain,
// skipping interceptors, and delegates to the LookupIterator overload below.
Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
  LookupIterator it(object, key,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  return GetDataProperty(&it);
// Side-effect-free property read: returns the plain data value if the lookup
// lands on DATA, and undefined for anything that would require running user
// code (proxies, accessors) or that fails an access check.
Handle<Object> JSObject::GetDataProperty(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::INTERCEPTOR:
      case LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
      case LookupIterator::ACCESS_CHECK:
        // Keep walking only while access is granted; otherwise fall through
        // to the undefined result.
        if (it->HasAccess(v8::ACCESS_GET)) continue;
      case LookupIterator::JSPROXY:
        return it->isolate()->factory()->undefined_value();
      case LookupIterator::ACCESSOR:
        // TODO(verwaest): For now this doesn't call into
        // ExecutableAccessorInfo, since clients don't need it. Update once
        return it->isolate()->factory()->undefined_value();
      case LookupIterator::DATA:
        return it->GetDataValue();
  return it->isolate()->factory()->undefined_value();
// Attempts a lossless conversion of this object to int32.  On success the
// result is stored in |value| and true is returned.
bool Object::ToInt32(int32_t* value) {
  // NOTE(review): the `if (IsSmi())` guard line appears to be missing from
  // this excerpt (numbering jumps 177 -> 179) -- verify against upstream.
  *value = Smi::cast(this)->value();
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    // Round-trip check: the double must survive double->int->double exactly.
    if (FastI2D(FastD2I(num)) == num) {
      *value = FastD2I(num);
// Attempts a lossless conversion of this object to uint32.  On success the
// result is stored in |value| and true is returned.
bool Object::ToUint32(uint32_t* value) {
  int num = Smi::cast(this)->value();
  *value = static_cast<uint32_t>(num);
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    // Non-negative and round-trips double->uint->double exactly.
    if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
      *value = FastD2UI(num);
// Object overload: non-heap objects can never match a template; otherwise
// delegates to the Map overload below.
bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
  if (!object->IsHeapObject()) return false;
  return IsTemplateFor(HeapObject::cast(object)->map());
// True if objects with the given map were created from this function
// template or from a template that inherits from it.
bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
  // There is a constraint on the object; check.
  if (!map->IsJSObjectMap()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = map->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == this) return true;
  // Didn't find the required type in the inheritance chain.
// Reinterprets a raw pointer as To*, asserting (debug only) that the address
// is suitably aligned for To.  Used by the declared-accessor machinery below.
template<typename To>
static inline To* CheckedCast(void *from) {
  uintptr_t temp = reinterpret_cast<uintptr_t>(from);
  DCHECK(temp % sizeof(To) == 0);
  return reinterpret_cast<To*>(temp);
// Declared-accessor bitmask compare: loads a value of the descriptor's size
// (8/16/32 bits) from |ptr|, masks both it and the compare value, and returns
// the boolean result of the masked equality.  Unknown sizes yield undefined.
static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
  uint32_t bitmask = descriptor.bitmask;
  uint32_t compare_value = descriptor.compare_value;
  switch (descriptor.size) {
      value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
      compare_value &= 0xff;  // Only the low byte is meaningful.
      value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
      compare_value &= 0xffff;  // Only the low half-word is meaningful.
      value = *CheckedCast<uint32_t>(ptr);
      // Unsupported size: signal with undefined.
      return isolate->factory()->undefined_value();
  return isolate->factory()->ToBoolean(
      (bitmask & value) == (bitmask & compare_value));
// Declared-accessor pointer compare: loads a pointer-sized word from |ptr|
// and returns whether it equals the descriptor's compare value.
static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
  uintptr_t compare_value =
      reinterpret_cast<uintptr_t>(descriptor.compare_value);
  uintptr_t value = *CheckedCast<uintptr_t>(ptr);
  return isolate->factory()->ToBoolean(compare_value == value);
// Declared-accessor primitive load: reads a raw value of the descriptor's
// data type from |ptr| and boxes it as a V8 number/boolean.  Integral types
// up to int32 share the int32_value path; uint32/float/double/bool each
// return directly (allocation is re-enabled just before boxing).
static Handle<Object> GetPrimitiveValue(
    const PrimitiveValueDescriptor& descriptor,
  int32_t int32_value = 0;
  switch (descriptor.data_type) {
    case kDescriptorInt8Type:
      int32_value = *CheckedCast<int8_t>(ptr);
    case kDescriptorUint8Type:
      int32_value = *CheckedCast<uint8_t>(ptr);
    case kDescriptorInt16Type:
      int32_value = *CheckedCast<int16_t>(ptr);
    case kDescriptorUint16Type:
      int32_value = *CheckedCast<uint16_t>(ptr);
    case kDescriptorInt32Type:
      int32_value = *CheckedCast<int32_t>(ptr);
    case kDescriptorUint32Type: {
      uint32_t value = *CheckedCast<uint32_t>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumberFromUint(value);
    case kDescriptorBoolType: {
      // The bool is a single bit within the byte, selected by bool_offset.
      uint8_t byte = *CheckedCast<uint8_t>(ptr);
      return isolate->factory()->ToBoolean(
          byte & (0x1 << descriptor.bool_offset));
    case kDescriptorFloatType: {
      float value = *CheckedCast<float>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumber(value);
    case kDescriptorDoubleType: {
      double value = *CheckedCast<double>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumber(value);
  // Common exit for the int32-sized cases above.
  AllowHeapAllocation allow_gc;
  return isolate->factory()->NewNumberFromInt(int32_value);
// Interprets a DeclaredAccessorInfo descriptor program against |receiver|:
// starting from the receiver's address, each descriptor step either moves
// the cursor (pointer dereference, byte shift, internal-field dereference)
// or terminates by producing a value (object load, bitmask/pointer compare,
// primitive load).  Runs with allocation disabled since it walks raw memory.
static Handle<Object> GetDeclaredAccessorProperty(
    Handle<Object> receiver,
    Handle<DeclaredAccessorInfo> info,
  DisallowHeapAllocation no_gc;
  char* current = reinterpret_cast<char*>(*receiver);
  DeclaredAccessorDescriptorIterator iterator(info->descriptor());
  const DeclaredAccessorDescriptorData* data = iterator.Next();
  switch (data->type) {
    case kDescriptorReturnObject: {
      DCHECK(iterator.Complete());  // Terminal step.
      current = *CheckedCast<char*>(current);
      return handle(*CheckedCast<Object*>(current), isolate);
    case kDescriptorPointerDereference:
      DCHECK(!iterator.Complete());  // Cursor move; more steps follow.
      current = *reinterpret_cast<char**>(current);
    case kDescriptorPointerShift:
      DCHECK(!iterator.Complete());
      current += data->pointer_shift_descriptor.byte_offset;
    case kDescriptorObjectDereference: {
      DCHECK(!iterator.Complete());
      Object* object = CheckedCast<Object>(current);
      int field = data->object_dereference_descriptor.internal_field;
      Object* smi = JSObject::cast(object)->GetInternalField(field);
      DCHECK(smi->IsSmi());
      current = reinterpret_cast<char*>(smi);
    case kDescriptorBitmaskCompare:
      DCHECK(iterator.Complete());
      return PerformCompare(data->bitmask_compare_descriptor,
    case kDescriptorPointerCompare:
      DCHECK(iterator.Complete());
      return PerformCompare(data->pointer_compare_descriptor,
    case kDescriptorPrimitiveValue:
      DCHECK(iterator.Complete());
      return GetPrimitiveValue(data->primitive_value_descriptor,
  return isolate->factory()->undefined_value();
// Ensures the object's fast elements backing store is writable: if it is the
// shared copy-on-write array, copies it into a regular FixedArray and
// installs the copy on the object.  Returns the (possibly new) store.
Handle<FixedArray> JSObject::EnsureWritableFastElements(
    Handle<JSObject> object) {
  DCHECK(object->HasFastSmiOrObjectElements());
  Isolate* isolate = object->GetIsolate();
  Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
  // Already writable (not the COW map): nothing to do.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
      elems, isolate->factory()->fixed_array_map());
  object->set_elements(*writable_elems);
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
// Property load through a harmony proxy: invokes the proxy's "get" trap
// (falling back to the derived trap) with (receiver, name).  Symbol-named
// properties short-circuit to undefined.
MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
                                                    Handle<Object> receiver,
  Isolate* isolate = proxy->GetIsolate();
  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return isolate->factory()->undefined_value();
  Handle<Object> args[] = { receiver, name };
      proxy, "get", isolate->derived_get_trap(), arraysize(args), args);
// Property load through an accessor |structure|: dispatches on its kind --
// API-style AccessorInfo (declared or executable callback) vs. an
// AccessorPair installed by __defineGetter__.  May run user code and can
// therefore throw; schedules/propagates exceptions accordingly.
MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver,
                                                    Handle<JSObject> holder,
                                                    Handle<Object> structure) {
  Isolate* isolate = name->GetIsolate();
  DCHECK(!structure->IsForeign());
  // api style callbacks.
  if (structure->IsAccessorInfo()) {
    Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(structure);
    // Receiver signature mismatch is a TypeError, matching the API contract.
    if (!info->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      THROW_NEW_ERROR(isolate,
                      NewTypeError("incompatible_method_receiver",
                                   HandleVector(args, arraysize(args))),
    // Declared accessors read raw memory; no user callback is run.
    if (structure->IsDeclaredAccessorInfo()) {
      return GetDeclaredAccessorProperty(
          Handle<DeclaredAccessorInfo>::cast(structure),
    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    v8::AccessorNameGetterCallback call_fun =
        v8::ToCData<v8::AccessorNameGetterCallback>(data->getter());
    // Absent getter callback: treated as undefined, not an error.
    if (call_fun == NULL) return isolate->factory()->undefined_value();
    LOG(isolate, ApiNamedPropertyAccess("load", *holder, *name));
    PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
    v8::Handle<v8::Value> result =
        args.Call(call_fun, v8::Utils::ToLocal(name));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (result.IsEmpty()) {
      return isolate->factory()->undefined_value();
    Handle<Object> return_value = v8::Utils::OpenHandle(*result);
    return_value->VerifyApiCallResultType();
    // Rebox handle before return.
    return handle(*return_value, isolate);
  // __defineGetter__ callback
  Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
  if (getter->IsSpecFunction()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return Object::GetPropertyWithDefinedGetter(
        receiver, Handle<JSReceiver>::cast(getter));
  // Getter is not a function.
  return isolate->factory()->undefined_value();
// True if receivers of the given HeapType satisfy the accessor's expected
// receiver signature (a FunctionTemplateInfo); trivially true when no
// signature is set, false for non-JSObject maps.
bool AccessorInfo::IsCompatibleReceiverType(Isolate* isolate,
                                            Handle<AccessorInfo> info,
                                            Handle<HeapType> type) {
  if (!info->HasExpectedReceiverType()) return true;
  Handle<Map> map = IC::TypeToMap(*type, isolate);
  if (!map->IsJSObjectMap()) return false;
  return FunctionTemplateInfo::cast(info->expected_receiver_type())
      ->IsTemplateFor(*map);
// Property store through an accessor |structure|: dispatches on executable
// API callbacks vs. AccessorPair setters.  A missing setter returns the
// value in sloppy mode and throws a TypeError in strict mode.  May run user
// code and can therefore throw.
MaybeHandle<Object> Object::SetPropertyWithAccessor(
    Handle<Object> receiver, Handle<Name> name, Handle<Object> value,
    Handle<JSObject> holder, Handle<Object> structure, StrictMode strict_mode) {
  Isolate* isolate = name->GetIsolate();
  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  DCHECK(!structure->IsForeign());
  if (structure->IsExecutableAccessorInfo()) {
    // Don't call executable accessor setters with non-JSObject receivers.
    if (!receiver->IsJSObject()) return value;
    // api style callbacks
    ExecutableAccessorInfo* info = ExecutableAccessorInfo::cast(*structure);
    if (!info->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      THROW_NEW_ERROR(isolate,
                      NewTypeError("incompatible_method_receiver",
                                   HandleVector(args, arraysize(args))),
    Object* call_obj = info->setter();
    v8::AccessorNameSetterCallback call_fun =
        v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
    // Absent setter callback: the store is silently accepted.
    if (call_fun == NULL) return value;
    LOG(isolate, ApiNamedPropertyAccess("store", *holder, *name));
    PropertyCallbackArguments args(isolate, info->data(), *receiver, *holder);
        v8::Utils::ToLocal(name),
        v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          receiver, Handle<JSReceiver>::cast(setter), value);
    // No setter: sloppy mode ignores the store, strict mode throws.
    if (strict_mode == SLOPPY) return value;
    Handle<Object> args[2] = { name, holder };
        isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) {
  return MaybeHandle<Object>();
// Invokes a JS-level getter (from an AccessorPair) on |receiver|.  Notifies
// the debugger first so "step into" can enter the getter.
MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
    Handle<Object> receiver,
    Handle<JSReceiver> getter) {
  Isolate* isolate = getter->GetIsolate();
  Debug* debug = isolate->debug();
  // Handle stepping into a getter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && getter->IsJSFunction()) {
        Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
  return Execution::Call(isolate, getter, receiver, 0, NULL, true);
// Invokes a JS-level setter (from an AccessorPair) on |receiver| with
// |value| as its single argument.  Notifies the debugger first so
// "step into" can enter the setter; propagates call exceptions.
MaybeHandle<Object> Object::SetPropertyWithDefinedSetter(
    Handle<Object> receiver,
    Handle<JSReceiver> setter,
    Handle<Object> value) {
  Isolate* isolate = setter->GetIsolate();
  Debug* debug = isolate->debug();
  // Handle stepping into a setter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && setter->IsJSFunction()) {
        Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
  Handle<Object> argv[] = { value };
  RETURN_ON_EXCEPTION(isolate, Execution::Call(isolate, setter, receiver,
                                               arraysize(argv), argv, true),
// Advances the iterator to the first accessor whose AccessorInfo is flagged
// all_can_read (readable despite failed access checks); true if one exists.
static bool FindAllCanReadHolder(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    if (it->state() == LookupIterator::ACCESSOR) {
      Handle<Object> accessors = it->GetAccessors();
      if (accessors->IsAccessorInfo()) {
        if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
// Load path after an access check failed: only all_can_read accessors are
// still honored; otherwise the failure is reported to the embedder and
// undefined is returned (or a scheduled exception propagated).
MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
    LookupIterator* it) {
  Handle<JSObject> checked = it->GetHolder<JSObject>();
  if (FindAllCanReadHolder(it)) {
    return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
                                   it->GetHolder<JSObject>(),
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_GET);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
  return it->factory()->undefined_value();
// Attribute query after a failed access check: all_can_read accessors still
// report their real attributes; otherwise the failure is reported and the
// property appears ABSENT.
Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithFailedAccessCheck(
    LookupIterator* it) {
  Handle<JSObject> checked = it->GetHolder<JSObject>();
  if (FindAllCanReadHolder(it))
    return maybe(it->property_details().attributes());
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_HAS);
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(it->isolate(),
                                      Maybe<PropertyAttributes>());
  return maybe(ABSENT);
// Advances the iterator to the first accessor whose AccessorInfo is flagged
// all_can_write (writable despite failed access checks); true if one exists.
static bool FindAllCanWriteHolder(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    if (it->state() == LookupIterator::ACCESSOR) {
      Handle<Object> accessors = it->GetAccessors();
      if (accessors->IsAccessorInfo()) {
        if (AccessorInfo::cast(*accessors)->all_can_write()) return true;
// Store path after an access check failed: only all_can_write accessors are
// still honored; otherwise the failure is reported to the embedder.
MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
    LookupIterator* it, Handle<Object> value, StrictMode strict_mode) {
  Handle<JSObject> checked = it->GetHolder<JSObject>();
  if (FindAllCanWriteHolder(it)) {
    return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
                                   it->GetHolder<JSObject>(),
                                   it->GetAccessors(), strict_mode);
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_SET);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
// Stores a property on a dictionary-mode (non-fast) object.  Adds a new
// dictionary entry when absent (wrapping the value in a PropertyCell for
// global objects); otherwise updates the existing entry in place, preserving
// its enumeration index unless the slot was previously deleted.
void JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                     Handle<Object> value,
                                     PropertyDetails details) {
  DCHECK(!object->HasFastProperties());
  Handle<NameDictionary> property_dictionary(object->property_dictionary());
  // Dictionary keys must be unique names; internalize plain strings first.
  if (!name->IsUniqueName()) {
    name = object->GetIsolate()->factory()->InternalizeString(
        Handle<String>::cast(name));
  int entry = property_dictionary->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    Handle<Object> store_value = value;
    if (object->IsGlobalObject()) {
      // Globals store values indirectly through a PropertyCell so ICs can
      // cache the cell.
      store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
    property_dictionary = NameDictionary::Add(
        property_dictionary, name, store_value, details);
    object->set_properties(*property_dictionary);
  PropertyDetails original_details = property_dictionary->DetailsAt(entry);
  int enumeration_index;
  // Preserve the enumeration index unless the property was deleted.
  if (original_details.IsDeleted()) {
    enumeration_index = property_dictionary->NextEnumerationIndex();
    property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
    enumeration_index = original_details.dictionary_index();
    DCHECK(enumeration_index > 0);
  details = PropertyDetails(
      details.attributes(), details.type(), enumeration_index);
  if (object->IsGlobalObject()) {
    Handle<PropertyCell> cell(
        PropertyCell::cast(property_dictionary->ValueAt(entry)));
    PropertyCell::SetValueInferType(cell, value);
    // Please note we have to update the property details.
    property_dictionary->DetailsAtPut(entry, details);
    property_dictionary->SetEntry(entry, name, value, details);
// Deletes a property from a dictionary-mode object.  Global-object
// properties are "deleted" by storing the hole into their PropertyCell and
// marking the details deleted (with a map change under FORCE_DELETION to
// invalidate ICs on non-configurable cells); ordinary entries are removed
// and the dictionary shrunk.  Returns true/false per JS delete semantics.
Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
  DCHECK(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dictionary(object->property_dictionary());
  int entry = dictionary->FindEntry(name);
  if (entry != NameDictionary::kNotFound) {
    // If we have a global object set the cell to the hole.
    if (object->IsGlobalObject()) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (!details.IsConfigurable()) {
        if (mode != FORCE_DELETION) return isolate->factory()->false_value();
        // When forced to delete global properties, we have to make a
        // map change to invalidate any ICs that think they can load
        // from the non-configurable cell without checking if it contains
        Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
        DCHECK(new_map->is_dictionary_map());
        JSObject::MigrateToMap(object, new_map);
      Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
      Handle<Object> value = isolate->factory()->the_hole_value();
      PropertyCell::SetValueInferType(cell, value);
      dictionary->DetailsAtPut(entry, details.AsDeleted());
      Handle<Object> deleted(
          NameDictionary::DeleteProperty(dictionary, entry, mode));
      if (*deleted == isolate->heap()->true_value()) {
        // Reclaim space if the deletion actually removed an entry.
        Handle<NameDictionary> new_properties =
            NameDictionary::Shrink(dictionary, name);
        object->set_properties(*new_properties);
  return isolate->factory()->true_value();
// Indexed load with explicit receiver: walks the prototype chain starting at
// |object| (or its prototype for primitives), consulting proxies, access
// checks, indexed interceptors, and elements accessors in order.  Returns
// undefined when no element is found anywhere on the chain.
MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
                                                   Handle<Object> object,
                                                   Handle<Object> receiver,
  if (object->IsUndefined()) {
    // TODO(verwaest): Why is this check here?
    return isolate->factory()->undefined_value();
  // Iterate up the prototype chain until an element is found or the null
  // prototype is encountered.
  for (PrototypeIterator iter(isolate, object,
                              object->IsJSProxy() || object->IsJSObject()
                                  ? PrototypeIterator::START_AT_RECEIVER
                                  : PrototypeIterator::START_AT_PROTOTYPE);
       !iter.IsAtEnd(); iter.Advance()) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return JSProxy::GetElementWithHandler(
          Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
    // Inline the case for JSObjects. Doing so significantly improves the
    // performance of fetching elements where checking the prototype chain is
    Handle<JSObject> js_object =
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
    // Check access rights if needed.
    if (js_object->IsAccessCheckNeeded()) {
      if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
        isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
    if (js_object->HasIndexedInterceptor()) {
      return JSObject::GetElementWithInterceptor(js_object, receiver, index);
    // Skip objects with an empty backing store; the hole means "not here".
    if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
      Handle<Object> result;
      ASSIGN_RETURN_ON_EXCEPTION(
          js_object->GetElementsAccessor()->Get(receiver, js_object, index),
      if (!result->IsTheHole()) return result;
  return isolate->factory()->undefined_value();
// Indexed store with explicit receiver: walks the prototype chain looking
// for an interceptor, read-only element, or accessor pair that intercepts
// the store; if none does, the store lands on the receiver itself.  Proxies
// on the chain are not yet implemented and throw.
MaybeHandle<Object> Object::SetElementWithReceiver(
    Isolate* isolate, Handle<Object> object, Handle<Object> receiver,
    uint32_t index, Handle<Object> value, StrictMode strict_mode) {
  // Iterate up the prototype chain until an element is found or the null
  // prototype is encountered.
  for (PrototypeIterator iter(isolate, object,
                              object->IsJSProxy() || object->IsJSObject()
                                  ? PrototypeIterator::START_AT_RECEIVER
                                  : PrototypeIterator::START_AT_PROTOTYPE);
       !iter.IsAtEnd() && !done; iter.Advance()) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      // TODO(dslomov): implement.
      isolate->ThrowIllegalOperation();
      return MaybeHandle<Object>();
    Handle<JSObject> js_object =
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
    // Check access rights if needed.
    if (js_object->IsAccessCheckNeeded()) {
      if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_SET)) {
        isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_SET);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
    if (js_object->HasIndexedInterceptor()) {
      Maybe<PropertyAttributes> from_interceptor =
          JSObject::GetElementAttributeFromInterceptor(js_object, receiver,
      if (!from_interceptor.has_value) return MaybeHandle<Object>();
      if ((from_interceptor.value & READ_ONLY) != 0) {
        return WriteToReadOnlyElement(isolate, receiver, index, value,
      // A present element stops the chain walk after this holder.
      done = from_interceptor.value != ABSENT;
        js_object->elements() != isolate->heap()->empty_fixed_array()) {
      ElementsAccessor* accessor = js_object->GetElementsAccessor();
      PropertyAttributes attrs =
          accessor->GetAttributes(receiver, js_object, index);
      if ((attrs & READ_ONLY) != 0) {
        return WriteToReadOnlyElement(isolate, receiver, index, value,
      Handle<AccessorPair> accessor_pair;
      if (accessor->GetAccessorPair(receiver, js_object, index)
              .ToHandle(&accessor_pair)) {
        return JSObject::SetElementWithCallback(receiver, accessor_pair, index,
                                                value, js_object, strict_mode);
      done = attrs != ABSENT;
  // Nothing on the chain intercepted the store: write to the receiver.
  if (!receiver->IsJSObject()) {
    return WriteToReadOnlyElement(isolate, receiver, index, value, strict_mode);
  Handle<JSObject> target = Handle<JSObject>::cast(receiver);
  ElementsAccessor* accessor = target->GetElementsAccessor();
  PropertyAttributes attrs = accessor->GetAttributes(receiver, target, index);
  if ((attrs & READ_ONLY) != 0) {
    return WriteToReadOnlyElement(isolate, receiver, index, value, strict_mode);
  // New elements get NONE attributes; existing ones keep theirs.
  PropertyAttributes new_attrs = attrs != ABSENT ? attrs : NONE;
  return JSObject::SetElement(target, index, value, new_attrs, strict_mode,
// Returns the map at the root of this object's "transition tree" for IC
// purposes: a JSReceiver's own map, or the initial map of the wrapper
// constructor for primitives (number/string/symbol/boolean); null/undefined
// fall through to the null value's map.
Map* Object::GetRootMap(Isolate* isolate) {
  DisallowHeapAllocation no_alloc;
  // NOTE(review): lines are missing between here and the HeapObject cast
  // (source numbering jumps 870 -> 873) -- presumably the Smi fast path;
  // verify against upstream.
  Context* context = isolate->context()->native_context();
  return context->number_function()->initial_map();
  HeapObject* heap_object = HeapObject::cast(this);
  // The object is either a number, a string, a boolean,
  // a real JS object, or a Harmony proxy.
  if (heap_object->IsJSReceiver()) {
    return heap_object->map();
  Context* context = isolate->context()->native_context();
  if (heap_object->IsHeapNumber()) {
    return context->number_function()->initial_map();
  if (heap_object->IsString()) {
    return context->string_function()->initial_map();
  if (heap_object->IsSymbol()) {
    return context->symbol_function()->initial_map();
  if (heap_object->IsBoolean()) {
    return context->boolean_function()->initial_map();
  return isolate->heap()->null_value()->map();
// Returns a stable hash for this object as a Smi: numbers hash their double
// bits, names use their string hash, oddballs hash their string form, and
// JS receivers defer to their identity hash (which may not exist yet).
Object* Object::GetHash() {
  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  // NOTE(review): the IsNumber()/IsName()/IsOddball() guard lines appear to
  // be missing from this excerpt -- verify against upstream.
  uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
  return Smi::FromInt(hash & Smi::kMaxValue);
  uint32_t hash = Name::cast(this)->Hash();
  return Smi::FromInt(hash);
  uint32_t hash = Oddball::cast(this)->to_string()->Hash();
  return Smi::FromInt(hash);
  DCHECK(IsJSReceiver());
  return JSReceiver::cast(this)->GetIdentityHash();
// Like GetHash(), but lazily creates the identity hash for JS receivers
// that do not have one yet, so a Smi is always returned.
Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) {
  Handle<Object> hash(object->GetHash(), isolate);
  if (hash->IsSmi()) return Handle<Smi>::cast(hash);
  DCHECK(object->IsJSReceiver());
  return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
// ES6 SameValue: identity for most values, but NaN equals NaN and +0 is
// distinguished from -0; strings compare by content.
bool Object::SameValue(Object* other) {
  if (other == this) return true;
  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    bool equal = this_value == other_value;
    // SameValue(NaN, NaN) is true.
    if (!equal) return std::isnan(this_value) && std::isnan(other_value);
    // SameValue(0.0, -0.0) is false.
    return (this_value != 0) || ((1 / this_value) == (1 / other_value));
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
// ES6 SameValueZero: like SameValue, except +0 and -0 are considered equal
// (used by Map/Set key comparison).
bool Object::SameValueZero(Object* other) {
  if (other == this) return true;
  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    // +0 == -0 is fine here; only NaN needs special casing.
    return this_value == other_value
        || (std::isnan(this_value) && std::isnan(other_value));
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
// Prints a short, single-line description of this object to |out|.
// NOTE(review): the body lines are missing from this excerpt.
void Object::ShortPrint(FILE* out) {
// Accumulator overload: formats the short description into an ostringstream
// and appends the resulting C string to |accumulator|.
void Object::ShortPrint(StringStream* accumulator) {
  std::ostringstream os;
  accumulator->Add(os.str().c_str());
// Stream insertion for the Brief wrapper: Smis print their value directly;
// heap objects use their short-print form.
std::ostream& operator<<(std::ostream& os, const Brief& v) {
  if (v.value->IsSmi()) {
    Smi::cast(v.value)->SmiPrint(os);
    // TODO(svenpanne) Const-correct HeapObjectShortPrint!
    HeapObject* obj = const_cast<HeapObject*>(HeapObject::cast(v.value));
    obj->HeapObjectShortPrint(os);
// Prints this Smi's integer value to the stream.
// NOTE(review): the body line is missing from this excerpt.
void Smi::SmiPrint(std::ostream& os) const {  // NOLINT
998 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
999 // English? Returns false for non-ASCII or words that don't start with
1000 // a capital letter. The a/an rule follows pronunciation in English.
1001 // We don't use the BBC's overcorrect "an historic occasion" though if
1002 // you speak a dialect you may well say "an 'istoric occasion".
// Returns true when |str| should be preceded by "an" rather than "a".
// Decision is based on the first one or two characters (see the comment
// block above for the pronunciation rules being approximated).
static bool AnWord(String* str) {
  if (str->length() == 0) return false;  // A nothing.
  int c0 = str->Get(0);
  int c1 = str->length() > 1 ? str->Get(1) : 0;
    return true;  // An Umpire, but a UTF8String, a U.
  } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
    return true;  // An Ape, an ABCBook.
  } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
      (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
       c0 == 'S' || c0 == 'X')) {
    return true;  // An MP3File, an M.
// Flattens a ConsString into a fresh sequential string (one- or two-byte to
// match the cons's representation), then rewires the cons to point at the
// flat result with an empty second half so future reads are O(1).
Handle<String> String::SlowFlatten(Handle<ConsString> cons,
                                   PretenureFlag pretenure) {
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(cons->second()->length() != 0);
  Isolate* isolate = cons->GetIsolate();
  int length = cons->length();
  // Old-space cons strings always produce old-space flat strings.
  PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure
  Handle<SeqString> result;
  if (cons->IsOneByteRepresentation()) {
    Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
        length, tenure).ToHandleChecked();
    DisallowHeapAllocation no_gc;
    WriteToFlat(*cons, flat->GetChars(), 0, length);
    Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString(
        length, tenure).ToHandleChecked();
    DisallowHeapAllocation no_gc;
    WriteToFlat(*cons, flat->GetChars(), 0, length);
  // Redirect the cons string at the flat result; second becomes empty.
  cons->set_first(*result);
  cons->set_second(isolate->heap()->empty_string());
  DCHECK(result->IsFlat());
// Morphs this string in place into an external two-byte string whose payload
// lives in |resource|.  Returns false when the existing object is too small
// to be converted in place.
bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
  // Externalizing twice leaks the external resource, so it's
  // prohibited by the API.
  DCHECK(!this->IsExternalString());
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    DCHECK(static_cast<size_t>(this->length()) == resource->length());
    ScopedVector<uc16> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    DCHECK(memcmp(smart_chars.start(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  int size = this->Size();  // Byte size of the original string.
  // Abort if size does not allow in-place conversion.
  if (size < ExternalString::kShortSize) return false;
  Heap* heap = GetHeap();
  bool is_one_byte = this->IsOneByteRepresentation();
  bool is_internalized = this->IsInternalizedString();
  // Morph the string to an external string by replacing the map and
  // reinitializing the fields. This won't work if the space the existing
  // string occupies is too small for a regular external string.
  // Instead, we resort to a short external string instead, omitting
  // the field caching the address of the backing store. When we encounter
  // short external strings in generated code, we need to bailout to runtime.
  // Select the replacement map according to size, internalization status and
  // character width of the original string.
  if (size < ExternalString::kSize) {
    new_map = is_internalized
        ? heap->short_external_internalized_string_with_one_byte_data_map()
        : heap->short_external_internalized_string_map())
        : (is_one_byte ? heap->short_external_string_with_one_byte_data_map()
                       : heap->short_external_string_map());
    new_map = is_internalized
        ? heap->external_internalized_string_with_one_byte_data_map()
        : heap->external_internalized_string_map())
        : (is_one_byte ? heap->external_string_with_one_byte_data_map()
                       : heap->external_string_map());
  // Byte size of the external String object.
  int new_size = this->SizeFromMap(new_map);
  // Fill the now-unused tail of the object so the heap remains iterable.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
  // We are storing the new map using release store after creating a filler for
  // the left-over space to avoid races with the sweeper thread.
  this->synchronized_set_map(new_map);
  ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.
  heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// One-byte counterpart of MakeExternal above: morphs this string in place
// into an external one-byte string backed by |resource|.  Returns false when
// the existing object is too small for the in-place conversion.
bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
  // Externalizing twice leaks the external resource, so it's
  // prohibited by the API.
  DCHECK(!this->IsExternalString());
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    DCHECK(static_cast<size_t>(this->length()) == resource->length());
    if (this->IsTwoByteRepresentation()) {
      // A two-byte string may still be externalized to one byte if all its
      // characters fit; verify that here.
      ScopedVector<uint16_t> smart_chars(this->length());
      String::WriteToFlat(this, smart_chars.start(), 0, this->length());
      DCHECK(String::IsOneByte(smart_chars.start(), this->length()));
    ScopedVector<char> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    DCHECK(memcmp(smart_chars.start(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  int size = this->Size();  // Byte size of the original string.
  // Abort if size does not allow in-place conversion.
  if (size < ExternalString::kShortSize) return false;
  Heap* heap = GetHeap();
  bool is_internalized = this->IsInternalizedString();
  // Morph the string to an external string by replacing the map and
  // reinitializing the fields. This won't work if the space the existing
  // string occupies is too small for a regular external string.
  // Instead, we resort to a short external string instead, omitting
  // the field caching the address of the backing store. When we encounter
  // short external strings in generated code, we need to bailout to runtime.
  // Select the replacement map according to size and internalization status.
  if (size < ExternalString::kSize) {
    new_map = is_internalized
        ? heap->short_external_one_byte_internalized_string_map()
        : heap->short_external_one_byte_string_map();
    new_map = is_internalized
        ? heap->external_one_byte_internalized_string_map()
        : heap->external_one_byte_string_map();
  // Byte size of the external String object.
  int new_size = this->SizeFromMap(new_map);
  // Fill the now-unused tail of the object so the heap remains iterable.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
  // We are storing the new map using release store after creating a filler for
  // the left-over space to avoid races with the sweeper thread.
  this->synchronized_set_map(new_map);
  ExternalOneByteString* self = ExternalOneByteString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.
  heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
// Appends an abbreviated, human-readable description of this string to
// |accumulator|, truncating at kMaxShortPrintLength characters and escaping
// control characters when present.
void String::StringShortPrint(StringStream* accumulator) {
  if (len > kMaxShortPrintLength) {
    accumulator->Add("<Very long string[%u]>", len);
  if (!LooksValid()) {
    accumulator->Add("<Invalid String>");
  StringCharacterStream stream(this);
  bool truncated = false;
  if (len > kMaxShortPrintLength) {
    len = kMaxShortPrintLength;
  // First pass: scan for characters outside printable ASCII to decide which
  // output format to use.
  bool one_byte = true;
  for (int i = 0; i < len; i++) {
    uint16_t c = stream.GetNext();
    if (c < 32 || c >= 127) {
  // Plain printable-ASCII case: emit characters verbatim.
  accumulator->Add("<String[%u]: ", length());
  for (int i = 0; i < len; i++) {
    accumulator->Put(static_cast<char>(stream.GetNext()));
  accumulator->Put('>');
  // Backslash indicates that the string contains control
  // characters and that backslashes are therefore escaped.
  accumulator->Add("<String[%u]\\: ", length());
  for (int i = 0; i < len; i++) {
    uint16_t c = stream.GetNext();
      accumulator->Add("\\n");
    } else if (c == '\r') {
      accumulator->Add("\\r");
    } else if (c == '\\') {
      accumulator->Add("\\\\");
    } else if (c < 32 || c > 126) {
      // Any other non-printable character is emitted as a hex escape.
      accumulator->Add("\\x%02x", c);
      accumulator->Put(static_cast<char>(c));
  // An ellipsis marks output truncated at kMaxShortPrintLength.
  accumulator->Put('.');
  accumulator->Put('.');
  accumulator->Put('.');
  accumulator->Put('>');
// Streams the characters in [start, end) to |os| as UC16; a negative |end|
// means "to the end of the string".
void String::PrintUC16(std::ostream& os, int start, int end) {  // NOLINT
  if (end < 0) end = length();
  StringCharacterStream stream(this, start);
  for (int i = start; i < end && stream.HasMore(); i++) {
    os << AsUC16(stream.GetNext());
// Appends a one-line description of this JSObject to |accumulator|,
// dispatching on the instance type.  For generic objects it prints the
// constructor name (with correct a/an article) and map pointer, flagging
// constructors that point outside the heap as invalid.
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
  switch (map()->instance_type()) {
    case JS_ARRAY_TYPE: {
      double length = JSArray::cast(this)->length()->IsUndefined()
          : JSArray::cast(this)->length()->Number();
      accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
    case JS_WEAK_MAP_TYPE: {
      accumulator->Add("<JS WeakMap>");
    case JS_WEAK_SET_TYPE: {
      accumulator->Add("<JS WeakSet>");
    case JS_REGEXP_TYPE: {
      accumulator->Add("<JS RegExp>");
    case JS_FUNCTION_TYPE: {
      JSFunction* function = JSFunction::cast(this);
      Object* fun_name = function->shared()->DebugName();
      bool printed = false;
      if (fun_name->IsString()) {
        String* str = String::cast(fun_name);
        if (str->length() > 0) {
          accumulator->Add("<JS Function ");
          accumulator->Put(str);
        // Anonymous function: no name to print.
        accumulator->Add("<JS Function");
      accumulator->Add(" (SharedFunctionInfo %p)",
                       reinterpret_cast<void*>(function->shared()));
      accumulator->Put('>');
    case JS_GENERATOR_OBJECT_TYPE: {
      accumulator->Add("<JS Generator>");
    case JS_MODULE_TYPE: {
      accumulator->Add("<JS Module>");
    // All other JSObjects are rather similar to each other (JSObject,
    // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
      Map* map_of_this = map();
      Heap* heap = GetHeap();
      Object* constructor = map_of_this->constructor();
      bool printed = false;
      // A constructor outside the heap indicates a corrupted object;
      // print a loud marker instead of dereferencing it.
      if (constructor->IsHeapObject() &&
          !heap->Contains(HeapObject::cast(constructor))) {
        accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
        bool global_object = IsJSGlobalProxy();
        if (constructor->IsJSFunction()) {
          if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
            accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
            Object* constructor_name =
                JSFunction::cast(constructor)->shared()->name();
            if (constructor_name->IsString()) {
              String* str = String::cast(constructor_name);
              if (str->length() > 0) {
                // Choose "a"/"an" to read naturally, e.g. "<an Array ...>".
                bool vowel = AnWord(str);
                accumulator->Add("<%sa%s ",
                       global_object ? "Global Object: " : "",
                accumulator->Put(str);
                accumulator->Add(" with %smap %p",
                    map_of_this->is_deprecated() ? "deprecated " : "",
        accumulator->Add("<JS %sObject", global_object ? "Global " : "");
      // JSValue wrappers additionally print the wrapped primitive.
      accumulator->Add(" value = ");
      JSValue::cast(this)->value()->ShortPrint(accumulator);
      accumulator->Put('>');
// Logs an elements-kind transition (from_kind -> to_kind) for |object| to
// |file|, including the top JS frame and short prints of both backing stores.
// No-op when the kinds are equal.
void JSObject::PrintElementsTransition(
    FILE* file, Handle<JSObject> object,
    ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
    ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
  if (from_kind != to_kind) {
    os << "elements transition [" << ElementsKindToString(from_kind) << " -> "
       << ElementsKindToString(to_kind) << "] in ";
    // Identify where the transition happened by printing the topmost
    // JavaScript stack frame.
    JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
    PrintF(file, " for ");
    object->ShortPrint(file);
    PrintF(file, " from ");
    from_elements->ShortPrint(file);
    PrintF(file, " to ");
    to_elements->ShortPrint(file);
// Logs a field-representation generalization on this map to |file|:
// which property changed, the old and new representation mnemonics and
// field types, and the JS frame that triggered it.
void Map::PrintGeneralization(FILE* file,
                              bool constant_to_field,
                              Representation old_representation,
                              Representation new_representation,
                              HeapType* old_field_type,
                              HeapType* new_field_type) {
  os << "[generalizing ";
  constructor_name()->PrintOn(file);
  Name* name = instance_descriptors()->GetKey(modify_index);
  if (name->IsString()) {
    String::cast(name)->PrintOn(file);
    // Symbols have no printable string; print their address instead.
    os << "{symbol " << static_cast<void*>(name) << "}";
  if (constant_to_field) {
    os << old_representation.Mnemonic() << "{";
    old_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
  os << "->" << new_representation.Mnemonic() << "{";
  new_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
  if (strlen(reason) > 0) {
  os << "+" << (descriptors - split) << " maps";
  JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
// Logs an instance migration from |original_map| to |new_map| to |file|,
// listing each own descriptor whose representation changed
// (old->new mnemonics) or that went from CONSTANT to FIELD.
void JSObject::PrintInstanceMigration(FILE* file,
  PrintF(file, "[migrating ");
  map()->constructor_name()->PrintOn(file);
  DescriptorArray* o = original_map->instance_descriptors();
  DescriptorArray* n = new_map->instance_descriptors();
  for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
    Representation o_r = o->GetDetails(i).representation();
    Representation n_r = n->GetDetails(i).representation();
    if (!o_r.Equals(n_r)) {
      // Representation changed: print "name:old->new".
      String::cast(o->GetKey(i))->PrintOn(file);
      PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
    } else if (o->GetDetails(i).type() == CONSTANT &&
               n->GetDetails(i).type() == FIELD) {
      Name* name = o->GetKey(i);
      if (name->IsString()) {
        String::cast(name)->PrintOn(file);
        // Symbols have no printable string; print their address instead.
        PrintF(file, "{symbol %p}", static_cast<void*>(name));
// Appends a one-line description of an arbitrary heap object to |os|,
// dispatching on the instance type.  Pointers outside the heap (object or
// map) are flagged loudly instead of being dereferenced.
void HeapObject::HeapObjectShortPrint(std::ostream& os) {  // NOLINT
  Heap* heap = GetHeap();
  if (!heap->Contains(this)) {
    os << "!!!INVALID POINTER!!!";
  if (!heap->Contains(map())) {
    os << "!!!INVALID MAP!!!";
  // Strings are delegated to StringShortPrint via a temporary accumulator.
  HeapStringAllocator allocator;
  StringStream accumulator(&allocator);
  String::cast(this)->StringShortPrint(&accumulator);
  os << accumulator.ToCString().get();
  // JSObjects are delegated to JSObjectShortPrint the same way.
  HeapStringAllocator allocator;
  StringStream accumulator(&allocator);
  JSObject::cast(this)->JSObjectShortPrint(&accumulator);
  os << accumulator.ToCString().get();
  switch (map()->instance_type()) {
      os << "<Map(elements=" << Map::cast(this)->elements_kind() << ")>";
    case FIXED_ARRAY_TYPE:
      os << "<FixedArray[" << FixedArray::cast(this)->length() << "]>";
    case FIXED_DOUBLE_ARRAY_TYPE:
      os << "<FixedDoubleArray[" << FixedDoubleArray::cast(this)->length()
    case BYTE_ARRAY_TYPE:
      os << "<ByteArray[" << ByteArray::cast(this)->length() << "]>";
    case FREE_SPACE_TYPE:
      os << "<FreeSpace[" << FreeSpace::cast(this)->Size() << "]>";
// Expands to a printing case for each external and fixed typed-array kind.
#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size)                \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:                                          \
    os << "<External" #Type "Array["                                          \
       << External##Type##Array::cast(this)->length() << "]>";                \
  case FIXED_##TYPE##_ARRAY_TYPE:                                             \
    os << "<Fixed" #Type "Array[" << Fixed##Type##Array::cast(this)->length() \

      TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
#undef TYPED_ARRAY_SHORT_PRINT
    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
      SmartArrayPointer<char> debug_name =
          shared->DebugName()->ToCString();
      if (debug_name[0] != 0) {
        os << "<SharedFunctionInfo " << debug_name.get() << ">";
        // Anonymous shared function info.
        os << "<SharedFunctionInfo>";
    case JS_MESSAGE_OBJECT_TYPE:
      os << "<JSMessageObject>";
// Expands to a "<StructName>" case for every struct in STRUCT_LIST.
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    os << "<" #Name ">";                   \
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      Code* code = Code::cast(this);
      os << "<Code: " << Code::Kind2String(code->kind()) << ">";
    case ODDBALL_TYPE: {
      // Distinguish the oddball singletons by identity check.
      if (IsUndefined()) {
        os << "<undefined>";
      } else if (IsTheHole()) {
      } else if (IsNull()) {
      } else if (IsTrue()) {
      } else if (IsFalse()) {
        os << "<Odd Oddball>";
      Symbol* symbol = Symbol::cast(this);
      symbol->SymbolShortPrint(os);
    case HEAP_NUMBER_TYPE: {
      HeapNumber::cast(this)->HeapNumberPrint(os);
    case MUTABLE_HEAP_NUMBER_TYPE: {
      os << "<MutableNumber: ";
      HeapNumber::cast(this)->HeapNumberPrint(os);
    case JS_FUNCTION_PROXY_TYPE:
      os << "<JSFunctionProxy>";
      // Cells print their contained value via ShortPrint.
      HeapStringAllocator allocator;
      StringStream accumulator(&allocator);
      Cell::cast(this)->value()->ShortPrint(&accumulator);
      os << accumulator.ToCString().get();
    case PROPERTY_CELL_TYPE: {
      os << "PropertyCell for ";
      HeapStringAllocator allocator;
      StringStream accumulator(&allocator);
      PropertyCell::cast(this)->value()->ShortPrint(&accumulator);
      os << accumulator.ToCString().get();
      // Fallback for types with no dedicated printer.
      os << "<Other heap object (" << map()->instance_type() << ")>";
// Visits all pointer fields of this object with |v|: first the map slot,
// then the body as determined by the instance type and size.
void HeapObject::Iterate(ObjectVisitor* v) {
  IteratePointer(v, kMapOffset);
  // Handle object body
  IterateBody(m->instance_type(), SizeFromMap(m), v);
// Visits the pointer fields in this object's body with |v|, dispatching on
// |type| (and |object_size| for variable-sized bodies).  Safe to call during
// GC: the type is passed in rather than read through a typed cast.
void HeapObject::IterateBody(InstanceType type, int object_size,
  // Avoiding <Type>::cast(this) because it accesses the map pointer field.
  // During GC, the map pointer field is encoded.
  if (type < FIRST_NONSTRING_TYPE) {
    // Strings: dispatch on the representation bits of the type.
    switch (type & kStringRepresentationMask) {
      case kConsStringTag:
        ConsString::BodyDescriptor::IterateBody(this, v);
      case kSlicedStringTag:
        SlicedString::BodyDescriptor::IterateBody(this, v);
      case kExternalStringTag:
        if ((type & kStringEncodingMask) == kOneByteStringTag) {
          reinterpret_cast<ExternalOneByteString*>(this)
              ->ExternalOneByteStringIterateBody(v);
          reinterpret_cast<ExternalTwoByteString*>(this)->
              ExternalTwoByteStringIterateBody(v);
    case FIXED_ARRAY_TYPE:
      FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
    case CONSTANT_POOL_ARRAY_TYPE:
      reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
    case FIXED_DOUBLE_ARRAY_TYPE:
    // All plain JSObject-layout types share one body descriptor.
    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_ARRAY_BUFFER_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
    case JS_REGEXP_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
      JSObject::BodyDescriptor::IterateBody(this, object_size, v);
    case JS_FUNCTION_TYPE:
      reinterpret_cast<JSFunction*>(this)
          ->JSFunctionIterateBody(object_size, v);
      Oddball::BodyDescriptor::IterateBody(this, v);
      JSProxy::BodyDescriptor::IterateBody(this, v);
    case JS_FUNCTION_PROXY_TYPE:
      JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
      reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
      Map::BodyDescriptor::IterateBody(this, v);
      reinterpret_cast<Code*>(this)->CodeIterateBody(v);
      Cell::BodyDescriptor::IterateBody(this, v);
    case PROPERTY_CELL_TYPE:
      PropertyCell::BodyDescriptor::IterateBody(this, v);
    case WEAK_CELL_TYPE:
      WeakCell::BodyDescriptor::IterateBody(this, v);
      Symbol::BodyDescriptor::IterateBody(this, v);
    // Types below contain no heap pointers in their bodies: nothing to visit.
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case BYTE_ARRAY_TYPE:
    case FREE_SPACE_TYPE:
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:                    \
  case FIXED_##TYPE##_ARRAY_TYPE:                       \

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
#define MAKE_STRUCT_CASE(NAME, Name, name) \
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      // AllocationSite has extra fields beyond the generic struct layout.
      if (type == ALLOCATION_SITE_TYPE) {
        AllocationSite::BodyDescriptor::IterateBody(this, v);
        StructBodyDescriptor::IterateBody(this, object_size, v);
      PrintF("Unknown type: %d\n", type);
// Converts this number to a boolean per the ES ToBoolean rules for doubles.
bool HeapNumber::HeapNumberBooleanValue() {
  return DoubleToBoolean(value());
// Prints this number's double value to |os|.
void HeapNumber::HeapNumberPrint(std::ostream& os) {  // NOLINT
// Returns the class name of this receiver: "Function" for (proxied)
// functions, the constructor's instance_class_name when the constructor is a
// JSFunction, and "Object" otherwise.
String* JSReceiver::class_name() {
  if (IsJSFunction() || IsJSFunctionProxy()) {
    return GetHeap()->Function_string();
  if (map()->constructor()->IsJSFunction()) {
    JSFunction* constructor = JSFunction::cast(map()->constructor());
    return String::cast(constructor->shared()->instance_class_name());
  // If the constructor is not present, return "Object".
  return GetHeap()->Object_string();
// Returns a printable constructor name for this map: the constructor
// function's name, its inferred name, the prototype chain's constructor
// name, or "Object" as a last resort.
String* Map::constructor_name() {
  if (constructor()->IsJSFunction()) {
    JSFunction* constructor = JSFunction::cast(this->constructor());
    String* name = String::cast(constructor->shared()->name());
    if (name->length() > 0) return name;
    // Fall back to the name the parser inferred from the assignment site.
    String* inferred_name = constructor->shared()->inferred_name();
    if (inferred_name->length() > 0) return inferred_name;
    // Still anonymous: try the prototype's constructor name.
    Object* proto = prototype();
    if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
  // TODO(rossberg): what about proxies?
  // If the constructor is not present, return "Object".
  return GetHeap()->Object_string();
// Convenience forwarder: the constructor name is a property of the map.
String* JSReceiver::constructor_name() {
  return map()->constructor_name();
// Returns a copy of |map| with a new in-object/backing-store field named
// |name| added, or an empty MaybeHandle when the descriptor array is full.
MaybeHandle<Map> Map::CopyWithField(Handle<Map> map,
                                    Handle<HeapType> type,
                                    PropertyAttributes attributes,
                                    Representation representation,
                                    TransitionFlag flag) {
  DCHECK(DescriptorArray::kNotFound ==
         map->instance_descriptors()->Search(
             *name, map->NumberOfOwnDescriptors()));
  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  Isolate* isolate = map->GetIsolate();
  // Compute the new index for new field.
  int index = map->NextFreePropertyIndex();
  if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
    // Context extension objects bypass representation tracking: always use
    // tagged values of any type.
    representation = Representation::Tagged();
    type = HeapType::Any(isolate);
  FieldDescriptor new_field_desc(name, index, type, attributes, representation);
  Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag);
  // Account for the slot the new field consumes; replenish the backing store
  // budget in chunks of kFieldsAdded when it runs out.
  int unused_property_fields = new_map->unused_property_fields() - 1;
  if (unused_property_fields < 0) {
    unused_property_fields += JSObject::kFieldsAdded;
  new_map->set_unused_property_fields(unused_property_fields);
// Returns a copy of |map| with a (name, constant) descriptor added, or an
// empty MaybeHandle when the descriptor array is full.
MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map,
                                       Handle<Object> constant,
                                       PropertyAttributes attributes,
                                       TransitionFlag flag) {
  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  // Allocate new instance descriptors with (name, constant) added.
  ConstantDescriptor new_constant_desc(name, constant, attributes);
  return Map::CopyAddDescriptor(map, &new_constant_desc, flag);
// Adds a property to a dictionary-mode (slow) object.  Global objects store
// values boxed in PropertyCells and may reuse the cell of a previously
// deleted (orphaned) property with the same name.
void JSObject::AddSlowProperty(Handle<JSObject> object,
                               Handle<Object> value,
                               PropertyAttributes attributes) {
  DCHECK(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dict(object->property_dictionary());
  if (object->IsGlobalObject()) {
    // In case name is an orphaned property reuse the cell.
    int entry = dict->FindEntry(name);
    if (entry != NameDictionary::kNotFound) {
      Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
      PropertyCell::SetValueInferType(cell, value);
      // Assign an enumeration index to the property and update
      // SetNextEnumerationIndex.
      int index = dict->NextEnumerationIndex();
      PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
      dict->SetNextEnumerationIndex(index + 1);
      dict->SetEntry(entry, name, cell, details);
    // No reusable cell: allocate a fresh one for the new global property.
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
    PropertyCell::SetValueInferType(cell, value);
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Handle<NameDictionary> result =
      NameDictionary::Add(dict, name, value, details);
  // Adding may reallocate the dictionary; install the new backing store.
  if (*dict != *result) object->set_properties(*result);
// Returns the native context this object was created in, derived from its
// constructor's context (or, for functions themselves, their own context).
Context* JSObject::GetCreationContext() {
  Object* constructor = this->map()->constructor();
  JSFunction* function;
  if (!constructor->IsJSFunction()) {
    // Functions have null as a constructor,
    // but any JSFunction knows its context immediately.
    function = JSFunction::cast(this);
    function = JSFunction::cast(constructor);
  return function->context()->native_context();
// Delivers an Object.observe change record of kind |type_str| for |object|
// by calling the isolate's observers_notify_change function.  The argument
// count is trimmed when |name| or |old_value| is absent.
MaybeHandle<Object> JSObject::EnqueueChangeRecord(Handle<JSObject> object,
                                                  const char* type_str,
                                                  Handle<Object> old_value) {
  DCHECK(!object->IsJSGlobalProxy());
  DCHECK(!object->IsJSGlobalObject());
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
  Handle<Object> args[] = { type, object, name, old_value };
  // Pass 2 args without a name, 3 without an old value, 4 with everything.
  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
  return Execution::Call(isolate,
                         Handle<JSFunction>(isolate->observers_notify_change()),
                         isolate->factory()->undefined_value(), argc, args);
// Returns a one-letter mnemonic for this representation, used in the
// generalization/migration trace output above.
const char* Representation::Mnemonic() const {
    case kNone: return "v";
    case kTagged: return "t";
    case kSmi: return "s";
    case kDouble: return "d";
    case kInteger32: return "i";
    case kHeapObject: return "h";
    case kExternal: return "x";
// Decides whether migrating an instance from this map to |target| requires
// rewriting the object (copying fields into a new layout) or whether simply
// installing the new map suffices.  Also reports the current field count
// through |old_number_of_fields|.
bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields,
                                 int target_inobject, int target_unused,
                                 int* old_number_of_fields) {
  // If fields were added (or removed), rewrite the instance.
  *old_number_of_fields = NumberOfFields();
  DCHECK(target_number_of_fields >= *old_number_of_fields);
  if (target_number_of_fields != *old_number_of_fields) return true;
  // If smi descriptors were replaced by double descriptors, rewrite.
  DescriptorArray* old_desc = instance_descriptors();
  DescriptorArray* new_desc = target->instance_descriptors();
  int limit = NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if (new_desc->GetDetails(i).representation().IsDouble() !=
        old_desc->GetDetails(i).representation().IsDouble()) {
  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == inobject_properties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  DCHECK(target_inobject < inobject_properties());
  if (target_number_of_fields <= target_inobject) {
    DCHECK(target_number_of_fields + target_unused == target_inobject);
  // Otherwise, properties will need to be moved to the backing store.
// Links |child| into |parent|'s transition tree under the dedicated
// elements-transition symbol.
void Map::ConnectElementsTransition(Handle<Map> parent, Handle<Map> child) {
  Isolate* isolate = parent->GetIsolate();
  Handle<Name> name = isolate->factory()->elements_transition_symbol();
  ConnectTransition(parent, child, name, FULL_TRANSITION);
// Migrates |object| to |new_map|, picking fast-to-fast, fast-to-slow or
// slow-to-slow migration as appropriate.  Slow-to-fast must go through
// TransformToFastProperties instead.
void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
  if (object->map() == *new_map) return;  // Nothing to migrate.
  if (object->HasFastProperties()) {
    if (!new_map->is_dictionary_map()) {
      Handle<Map> old_map(object->map());
      MigrateFastToFast(object, new_map);
      if (old_map->is_prototype_map()) {
        // Clear out the old descriptor array to avoid problems with sharing
        // the descriptor array without an explicit ownership transition.
        old_map->InitializeDescriptors(
            old_map->GetHeap()->empty_descriptor_array());
        // Ensure that no transition was inserted for prototype migrations.
        DCHECK(!old_map->HasTransitionArray());
        DCHECK(new_map->GetBackPointer()->IsUndefined());
      MigrateFastToSlow(object, new_map, 0);
    // For slow-to-fast migrations JSObject::TransformToFastProperties()
    // must be used instead.
    CHECK(new_map->is_dictionary_map());
    // Slow-to-slow migration is trivial.
    object->set_map(*new_map);
1970 // To migrate a fast instance to a fast map:
1971 // - First check whether the instance needs to be rewritten. If not, simply
1973 // - Otherwise, allocate a fixed array large enough to hold all fields, in
1974 // addition to unused space.
1975 // - Copy all existing properties in, in the following order: backing store
1976 // properties, unused fields, inobject properties.
1977 // - If all allocation succeeded, commit the state atomically:
1978 // * Copy inobject properties from the backing store back into the object.
1979 // * Trim the difference in instance size of the object. This also cleanly
1980 // frees inobject properties that moved to the backing store.
// * If there are properties left in the backing store, trim off the space used
1982 // to temporarily store the inobject properties.
1983 // * If there are properties left in the backing store, install the backing
// Migrates a fast-mode |object| to the fast map |new_map|, following the
// algorithm described in the comment above: check whether a rewrite is
// needed, build the new backing store off to the side, then commit
// atomically under DisallowHeapAllocation.
void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
  Isolate* isolate = object->GetIsolate();
  Handle<Map> old_map(object->map());
  int old_number_of_fields;
  int number_of_fields = new_map->NumberOfFields();
  int inobject = new_map->inobject_properties();
  int unused = new_map->unused_property_fields();
  // Nothing to do if no functions were converted to fields and no smis were
  // converted to doubles.
  if (!old_map->InstancesNeedRewriting(*new_map, number_of_fields, inobject,
                                       unused, &old_number_of_fields)) {
    object->synchronized_set_map(*new_map);
  int total_size = number_of_fields + unused;
  int external = total_size - inobject;  // Slots in the properties array.
  // Fast path: |new_map| adds exactly one field on top of |old_map|.
  if (number_of_fields != old_number_of_fields &&
      new_map->GetBackPointer() == *old_map) {
    PropertyDetails details = new_map->GetLastDescriptorDetails();
    if (old_map->unused_property_fields() > 0) {
      // The new field fits into the existing layout.  Doubles need a fresh
      // mutable HeapNumber as a box before the map is switched.
      if (details.representation().IsDouble()) {
        Handle<Object> value = isolate->factory()->NewHeapNumber(0, MUTABLE);
            FieldIndex::ForDescriptor(*new_map, new_map->LastAdded());
        object->FastPropertyAtPut(index, *value);
      object->synchronized_set_map(*new_map);
    DCHECK(number_of_fields == old_number_of_fields + 1);
    // This migration is a transition from a map that has run out of property
    // space. Therefore it could be done by extending the backing store.
    Handle<FixedArray> old_storage = handle(object->properties(), isolate);
    Handle<FixedArray> new_storage =
        FixedArray::CopySize(old_storage, external);
    // Properly initialize newly added property.
    Handle<Object> value;
    if (details.representation().IsDouble()) {
      value = isolate->factory()->NewHeapNumber(0, MUTABLE);
      value = isolate->factory()->uninitialized_value();
    DCHECK(details.type() == FIELD);
    int target_index = details.field_index() - inobject;
    DCHECK(target_index >= 0);  // Must be a backing store index.
    new_storage->set(target_index, *value);
    // From here on we cannot fail and we shouldn't GC anymore.
    DisallowHeapAllocation no_allocation;
    // Set the new property value and do the map transition.
    object->set_properties(*new_storage);
    object->synchronized_set_map(*new_map);
  // General path: stage every field value in a scratch array laid out as
  // [backing-store fields | inobject fields], then commit.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
  Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
  int old_nof = old_map->NumberOfOwnDescriptors();
  int new_nof = new_map->NumberOfOwnDescriptors();
  // This method only supports generalizing instances to at least the same
  // number of properties.
  DCHECK(old_nof <= new_nof);
  // Copy existing property values, boxing/unboxing doubles as the old and
  // new representations require.
  for (int i = 0; i < old_nof; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == CALLBACKS) {
      DCHECK(details.representation().IsTagged());
    DCHECK(old_details.type() == CONSTANT ||
           old_details.type() == FIELD);
    Object* raw_value = old_details.type() == CONSTANT
        ? old_descriptors->GetValue(i)
        : object->RawFastPropertyAt(FieldIndex::ForDescriptor(*old_map, i));
    Handle<Object> value(raw_value, isolate);
    if (!old_details.representation().IsDouble() &&
        details.representation().IsDouble()) {
      if (old_details.representation().IsNone()) {
        value = handle(Smi::FromInt(0), isolate);
      value = Object::NewStorageFor(isolate, value, details.representation());
    } else if (old_details.representation().IsDouble() &&
               !details.representation().IsDouble()) {
      value = Object::WrapForRead(isolate, value, old_details.representation());
    DCHECK(!(details.representation().IsDouble() && value->IsSmi()));
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  // Initialize fields that are new in |new_map|.
  for (int i = old_nof; i < new_nof; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    Handle<Object> value;
    if (details.representation().IsDouble()) {
      value = isolate->factory()->NewHeapNumber(0, MUTABLE);
      value = isolate->factory()->uninitialized_value();
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  // From here on we cannot fail and we shouldn't GC anymore.
  DisallowHeapAllocation no_allocation;
  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
  // avoid overwriting |one_pointer_filler_map|.
  int limit = Min(inobject, number_of_fields);
  for (int i = 0; i < limit; i++) {
    FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
    object->FastPropertyAtPut(index, array->get(external + i));
  Heap* heap = isolate->heap();
  // If there are properties in the new backing store, trim it to the correct
  // size and install the backing store into the object.
    heap->RightTrimFixedArray<Heap::FROM_MUTATOR>(*array, inobject);
    object->set_properties(*array);
  // Create filler object past the new instance size.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = old_map->instance_size() - new_instance_size;
  DCHECK(instance_size_delta >= 0);
  if (instance_size_delta > 0) {
    Address address = object->address();
    heap->CreateFillerObjectAt(
        address + new_instance_size, instance_size_delta);
    heap->AdjustLiveBytes(address, -instance_size_delta, Heap::FROM_MUTATOR);
  // We are storing the new map using release store after creating a filler for
  // the left-over space to avoid races with the sweeper thread.
  object->synchronized_set_map(*new_map);
// Returns the number of own descriptors of type FIELD, i.e. how many real
// property slots (in-object or in the properties backing store) instances of
// this map use.
// NOTE(review): the accumulator |result| is declared/returned on lines elided
// from this view.
2139 int Map::NumberOfFields() {
2140 DescriptorArray* descriptors = instance_descriptors();
2142 for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
2143 if (descriptors->GetDetails(i).type() == FIELD) result++;
// Copies |map| and pessimizes every descriptor in the copy to the most general
// state: Representation::Tagged() for all properties and HeapType::Any() for
// all field types. Used as the bail-out path when an in-place generalization
// of the transition tree is not possible. |reason| is only used for tracing.
// If |store_mode| is FORCE_FIELD, additionally guarantees that the descriptor
// at |modify_index| is a FIELD with the requested |attributes|.
2149 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2151 StoreMode store_mode,
2152 PropertyAttributes attributes,
2153 const char* reason) {
2154 Isolate* isolate = map->GetIsolate();
2155 Handle<Map> new_map = Copy(map);
2157 DescriptorArray* descriptors = new_map->instance_descriptors();
2158 int length = descriptors->number_of_descriptors();
// Generalize every descriptor: tagged representation, and Any field type for
// fields (non-field descriptors keep their value).
2159 for (int i = 0; i < length; i++) {
2160 descriptors->SetRepresentation(i, Representation::Tagged());
2161 if (descriptors->GetDetails(i).type() == FIELD) {
2162 descriptors->SetValue(i, HeapType::Any());
2166 // Unless the instance is being migrated, ensure that modify_index is a field.
2167 PropertyDetails details = descriptors->GetDetails(modify_index);
2168 if (store_mode == FORCE_FIELD &&
2169 (details.type() != FIELD || details.attributes() != attributes)) {
// Reuse the existing field slot if the descriptor already was a field;
// otherwise append a new field at the end of the field array.
2170 int field_index = details.type() == FIELD ? details.field_index()
2171 : new_map->NumberOfFields();
2172 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
2173 field_index, attributes, Representation::Tagged());
2174 descriptors->Replace(modify_index, &d);
2175 if (details.type() != FIELD) {
// A new field consumes one unused slot; if none are left, account for the
// kFieldsAdded slots that object migration will grow the object by.
2176 int unused_property_fields = new_map->unused_property_fields() - 1;
2177 if (unused_property_fields < 0) {
2178 unused_property_fields += JSObject::kFieldsAdded;
2180 new_map->set_unused_property_fields(unused_property_fields);
2183 DCHECK(details.attributes() == attributes);
// Optional tracing of the generalization for debugging/flag-driven logging.
2186 if (FLAG_trace_generalization) {
2187 HeapType* field_type = (details.type() == FIELD)
2188 ? map->instance_descriptors()->GetFieldType(modify_index)
2190 map->PrintGeneralization(stdout, reason, modify_index,
2191 new_map->NumberOfOwnDescriptors(),
2192 new_map->NumberOfOwnDescriptors(),
2193 details.type() == CONSTANT && store_mode == FORCE_FIELD,
2194 details.representation(), Representation::Tagged(),
2195 field_type, HeapType::Any());
// Convenience overload: generalizes all representations while keeping the
// attributes that the descriptor at |modify_index| already has.
2202 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2204 StoreMode store_mode,
2205 const char* reason) {
2206 PropertyDetails details =
2207 map->instance_descriptors()->GetDetails(modify_index);
2208 return CopyGeneralizeAllRepresentations(map, modify_index, store_mode,
2209 details.attributes(), reason);
// Recursively marks this map and every map reachable through its transition
// array as deprecated, deoptimizes code that depends on the transitions, and
// notifies that the leaf-map layout changed. Idempotent: returns immediately
// if this map is already deprecated.
2213 void Map::DeprecateTransitionTree() {
2214 if (is_deprecated()) return;
2215 if (HasTransitionArray()) {
2216 TransitionArray* transitions = this->transitions();
2217 for (int i = 0; i < transitions->number_of_transitions(); i++) {
// Depth-first deprecation of every transition target.
2218 transitions->GetTarget(i)->DeprecateTransitionTree();
// Code compiled against these transitions can no longer be trusted.
2222 dependent_code()->DeoptimizeDependentCodeGroup(
2223 GetIsolate(), DependentCode::kTransitionGroup);
2224 NotifyLeafMapLayoutChange();
2228 // Invalidates a transition target at |key|, and installs |new_descriptors| over
2229 // the current instance_descriptors to ensure proper sharing of descriptor
2231 // Returns true if the transition target at given key was deprecated.
2232 bool Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
2233 bool transition_target_deprecated = false;
// If a transition for |key| exists, deprecate its whole subtree.
2234 if (HasTransitionArray()) {
2235 TransitionArray* transitions = this->transitions();
2236 int transition = transitions->Search(key);
2237 if (transition != TransitionArray::kNotFound) {
2238 transitions->GetTarget(transition)->DeprecateTransitionTree();
2239 transition_target_deprecated = true;
2243 // Don't overwrite the empty descriptor array.
2244 if (NumberOfOwnDescriptors() == 0) return transition_target_deprecated;
2246 DescriptorArray* to_replace = instance_descriptors();
2247 Map* current = this;
// Tell the incremental marker about the old array before it is unlinked.
2248 GetHeap()->incremental_marking()->RecordWrites(to_replace);
// Walk up the back-pointer chain, swapping in |new_descriptors| on every map
// that still shares the old descriptor array, so sharing stays consistent.
2249 while (current->instance_descriptors() == to_replace) {
2250 current->SetEnumLength(kInvalidEnumCacheSentinel);
2251 current->set_instance_descriptors(new_descriptors);
2252 Object* next = current->GetBackPointer();
2253 if (next->IsUndefined()) break;
2254 current = Map::cast(next);
2257 set_owns_descriptors(false);
2258 return transition_target_deprecated;
// Walks the back-pointer chain to the root of this map's transition tree
// (the map whose back pointer is undefined) and returns it.
// NOTE(review): the loop header and |result| initialization are on lines
// elided from this view.
2262 Map* Map::FindRootMap() {
2265 Object* back = result->GetBackPointer();
2266 if (back->IsUndefined()) return result;
2267 result = Map::cast(back);
// Starting from this root map, follows transitions keyed by |descriptors|'
// names from index |verbatim| onward and returns the deepest map whose
// descriptors still match (same type, attributes, representation, and
// compatible field type / identical value). Must be called on a transition
// tree root. Allocation-free by construction.
2272 Map* Map::FindLastMatchMap(int verbatim,
2274 DescriptorArray* descriptors) {
2275 DisallowHeapAllocation no_allocation;
2277 // This can only be called on roots of transition trees.
2278 DCHECK(GetBackPointer()->IsUndefined());
2280 Map* current = this;
2282 for (int i = verbatim; i < length; i++) {
2283 if (!current->HasTransitionArray()) break;
2284 Name* name = descriptors->GetKey(i);
2285 TransitionArray* transitions = current->transitions();
2286 int transition = transitions->Search(name);
2287 if (transition == TransitionArray::kNotFound) break;
2289 Map* next = transitions->GetTarget(transition);
2290 DescriptorArray* next_descriptors = next->instance_descriptors();
// Stop at the first descriptor that no longer matches the expected one.
2292 PropertyDetails details = descriptors->GetDetails(i);
2293 PropertyDetails next_details = next_descriptors->GetDetails(i);
2294 if (details.type() != next_details.type()) break;
2295 if (details.attributes() != next_details.attributes()) break;
2296 if (!details.representation().Equals(next_details.representation())) break;
2297 if (next_details.type() == FIELD) {
// For fields, the expected type must be at least as specific as the
// transition target's current field type.
2298 if (!descriptors->GetFieldType(i)->NowIs(
2299 next_descriptors->GetFieldType(i))) break;
2301 if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break;
// Finds the map in the back-pointer chain that introduced the FIELD at
// |descriptor|: the last ancestor that still owns at least |descriptor| + 1
// own descriptors. Allocation-free.
// NOTE(review): the loop header, |result| updates and return are on lines
// elided from this view.
2310 Map* Map::FindFieldOwner(int descriptor) {
2311 DisallowHeapAllocation no_allocation;
2312 DCHECK_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type());
2315 Object* back = result->GetBackPointer();
2316 if (back->IsUndefined()) break;
2317 Map* parent = Map::cast(back);
2318 if (parent->NumberOfOwnDescriptors() <= descriptor) break;
// Installs |new_type| as the field type of the FIELD descriptor at
// |descriptor| on this map and, recursively, on every transition target
// (descriptor arrays may be shared across the tree). No-op for non-FIELD
// descriptors. Allocation-free.
2325 void Map::UpdateFieldType(int descriptor, Handle<Name> name,
2326 Handle<HeapType> new_type) {
2327 DisallowHeapAllocation no_allocation;
2328 PropertyDetails details = instance_descriptors()->GetDetails(descriptor);
2329 if (details.type() != FIELD) return;
// Propagate the update down the transition tree first.
2330 if (HasTransitionArray()) {
2331 TransitionArray* transitions = this->transitions();
2332 for (int i = 0; i < transitions->number_of_transitions(); ++i) {
2333 transitions->GetTarget(i)->UpdateFieldType(descriptor, name, new_type);
2336 // Skip if already updated the shared descriptor.
2337 if (instance_descriptors()->GetFieldType(descriptor) == *new_type) return;
2338 FieldDescriptor d(name, instance_descriptors()->GetFieldIndex(descriptor),
2339 new_type, details.attributes(), details.representation());
2340 instance_descriptors()->Replace(descriptor, &d);
// Computes the most specific field type that covers both |type1| and |type2|.
// Returns one of the inputs when one already subsumes the other; otherwise
// unions stable types as long as the union stays small (at most
// kMaxClassesPerFieldType classes), and falls back to Any() when it does not.
2345 Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1,
2346 Handle<HeapType> type2,
2348 static const int kMaxClassesPerFieldType = 5;
2349 if (type1->NowIs(type2)) return type2;
2350 if (type2->NowIs(type1)) return type1;
2351 if (type1->NowStable() && type2->NowStable()) {
2352 Handle<HeapType> type = HeapType::Union(type1, type2, isolate);
// Only keep the precise union if it stays small enough to be useful.
2353 if (type->NumClasses() <= kMaxClassesPerFieldType) {
2354 DCHECK(type->NowStable());
2355 DCHECK(type1->NowIs(type));
2356 DCHECK(type2->NowIs(type));
2360 return HeapType::Any(isolate);
// Generalizes the field type of the descriptor at |modify_index| on |map| so
// that it also covers |new_field_type|. The update is performed on the map
// that owns the field (and propagated through its transition tree), and code
// depending on the old field type is deoptimized.
2365 void Map::GeneralizeFieldType(Handle<Map> map,
2367 Handle<HeapType> new_field_type) {
2368 Isolate* isolate = map->GetIsolate();
2370 // Check if we actually need to generalize the field type at all.
2371 Handle<HeapType> old_field_type(
2372 map->instance_descriptors()->GetFieldType(modify_index), isolate);
2373 if (new_field_type->NowIs(old_field_type)) {
2374 DCHECK(Map::GeneralizeFieldType(old_field_type,
2376 isolate)->NowIs(old_field_type));
2380 // Determine the field owner.
2381 Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
2382 Handle<DescriptorArray> descriptors(
2383 field_owner->instance_descriptors(), isolate);
2384 DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
2386 // Determine the generalized new field type.
2387 new_field_type = Map::GeneralizeFieldType(
2388 old_field_type, new_field_type, isolate);
2390 PropertyDetails details = descriptors->GetDetails(modify_index);
2391 Handle<Name> name(descriptors->GetKey(modify_index));
2392 field_owner->UpdateFieldType(modify_index, name, new_field_type);
// Optimized code specialized on the old field type must be thrown away.
2393 field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
2394 isolate, DependentCode::kFieldTypeGroup);
2396 if (FLAG_trace_generalization) {
2397 map->PrintGeneralization(
2398 stdout, "field type generalization",
2399 modify_index, map->NumberOfOwnDescriptors(),
2400 map->NumberOfOwnDescriptors(), false,
2401 details.representation(), details.representation(),
2402 *old_field_type, *new_field_type);
2407 // Generalize the representation of the descriptor at |modify_index|.
2408 // This method rewrites the transition tree to reflect the new change. To avoid
2409 // high degrees over polymorphism, and to stabilize quickly, on every rewrite
2410 // the new type is deduced by merging the current type with any potential new
2411 // (partial) version of the type in the transition tree.
2412 // To do this, on each rewrite:
2413 // - Search the root of the transition tree using FindRootMap.
2414 // - Find |target_map|, the newest matching version of this map using the keys
2415 //   in the |old_map|'s descriptor array to walk the transition tree.
2416 // - Merge/generalize the descriptor array of the |old_map| and |target_map|.
2417 // - Generalize the |modify_index| descriptor using |new_representation| and
2418 //   |new_field_type|.
2419 // - Walk the tree again starting from the root towards |target_map|. Stop at
2420 //   |split_map|, the first map who's descriptor array does not match the merged
2421 //   descriptor array.
2422 // - If |target_map| == |split_map|, |target_map| is in the expected state.
2424 // - Otherwise, invalidate the outdated transition target from |target_map|, and
2425 //   replace its transition tree with a new branch for the updated descriptors.
2426 Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
2428 Representation new_representation,
2429 Handle<HeapType> new_field_type,
2430 StoreMode store_mode) {
2431 Isolate* isolate = old_map->GetIsolate();
2433 Handle<DescriptorArray> old_descriptors(
2434 old_map->instance_descriptors(), isolate);
2435 int old_nof = old_map->NumberOfOwnDescriptors();
2436 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2437 Representation old_representation = old_details.representation();
2439 // It's fine to transition from None to anything but double without any
2440 // modification to the object, because the default uninitialized value for
2441 // representation None can be overwritten by both smi and tagged values.
2442 // Doubles, however, would require a box allocation.
2443 if (old_representation.IsNone() &&
2444 !new_representation.IsNone() &&
2445 !new_representation.IsDouble()) {
2446 DCHECK(old_details.type() == FIELD);
2447 DCHECK(old_descriptors->GetFieldType(modify_index)->NowIs(
// Fast path: mutate the descriptor in place, no tree rewrite needed.
2449 if (FLAG_trace_generalization) {
2450 old_map->PrintGeneralization(
2451 stdout, "uninitialized field",
2452 modify_index, old_map->NumberOfOwnDescriptors(),
2453 old_map->NumberOfOwnDescriptors(), false,
2454 old_representation, new_representation,
2455 old_descriptors->GetFieldType(modify_index), *new_field_type);
2457 old_descriptors->SetRepresentation(modify_index, new_representation);
2458 old_descriptors->SetValue(modify_index, *new_field_type);
2462 // Check the state of the root map.
2463 Handle<Map> root_map(old_map->FindRootMap(), isolate);
2464 if (!old_map->EquivalentToForTransition(*root_map)) {
2465 return CopyGeneralizeAllRepresentations(
2466 old_map, modify_index, store_mode, "not equivalent");
2468 int root_nof = root_map->NumberOfOwnDescriptors();
// Modifying a property owned by the root map itself cannot be expressed as a
// transition-tree rewrite; bail out to full generalization if incompatible.
2469 if (modify_index < root_nof) {
2470 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2471 if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) ||
2472 (old_details.type() == FIELD &&
2473 (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) ||
2474 !new_representation.fits_into(old_details.representation())))) {
2475 return CopyGeneralizeAllRepresentations(
2476 old_map, modify_index, store_mode, "root modification");
// Phase 1: walk transitions from the root along |old_descriptors|' keys,
// generalizing field types along the way, to find the newest compatible map.
2480 Handle<Map> target_map = root_map;
2481 for (int i = root_nof; i < old_nof; ++i) {
2482 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2483 if (j == TransitionArray::kNotFound) break;
2484 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2485 Handle<DescriptorArray> tmp_descriptors = handle(
2486 tmp_map->instance_descriptors(), isolate);
2488 // Check if target map is incompatible.
2489 PropertyDetails old_details = old_descriptors->GetDetails(i);
2490 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2491 PropertyType old_type = old_details.type();
2492 PropertyType tmp_type = tmp_details.type();
// CALLBACKS descriptors cannot be merged; any mismatch forces a full copy.
2493 if (tmp_details.attributes() != old_details.attributes() ||
2494 ((tmp_type == CALLBACKS || old_type == CALLBACKS) &&
2495 (tmp_type != old_type ||
2496 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2497 return CopyGeneralizeAllRepresentations(
2498 old_map, modify_index, store_mode, "incompatible");
2500 Representation old_representation = old_details.representation();
2501 Representation tmp_representation = tmp_details.representation();
2502 if (!old_representation.fits_into(tmp_representation) ||
2503 (!new_representation.fits_into(tmp_representation) &&
2504 modify_index == i)) {
2507 if (tmp_type == FIELD) {
2508 // Generalize the field type as necessary.
2509 Handle<HeapType> old_field_type = (old_type == FIELD)
2510 ? handle(old_descriptors->GetFieldType(i), isolate)
2511 : old_descriptors->GetValue(i)->OptimalType(
2512 isolate, tmp_representation);
2513 if (modify_index == i) {
2514 old_field_type = GeneralizeFieldType(
2515 new_field_type, old_field_type, isolate);
2517 GeneralizeFieldType(tmp_map, i, old_field_type);
2518 } else if (tmp_type == CONSTANT) {
2519 if (old_type != CONSTANT ||
2520 old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) {
2524 DCHECK_EQ(tmp_type, old_type);
2525 DCHECK_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i));
2527 target_map = tmp_map;
2530 // Directly change the map if the target map is more general.
2531 Handle<DescriptorArray> target_descriptors(
2532 target_map->instance_descriptors(), isolate);
2533 int target_nof = target_map->NumberOfOwnDescriptors();
2534 if (target_nof == old_nof &&
2535 (store_mode != FORCE_FIELD ||
2536 target_descriptors->GetDetails(modify_index).type() == FIELD)) {
2537 DCHECK(modify_index < target_nof);
2538 DCHECK(new_representation.fits_into(
2539 target_descriptors->GetDetails(modify_index).representation()));
2540 DCHECK(target_descriptors->GetDetails(modify_index).type() != FIELD ||
2541 new_field_type->NowIs(
2542 target_descriptors->GetFieldType(modify_index)));
2546 // Find the last compatible target map in the transition tree.
2547 for (int i = target_nof; i < old_nof; ++i) {
2548 int j = target_map->SearchTransition(old_descriptors->GetKey(i));
2549 if (j == TransitionArray::kNotFound) break;
2550 Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
2551 Handle<DescriptorArray> tmp_descriptors(
2552 tmp_map->instance_descriptors(), isolate);
2554 // Check if target map is compatible.
2555 PropertyDetails old_details = old_descriptors->GetDetails(i);
2556 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
2557 if (tmp_details.attributes() != old_details.attributes() ||
2558 ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) &&
2559 (tmp_details.type() != old_details.type() ||
2560 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
2561 return CopyGeneralizeAllRepresentations(
2562 old_map, modify_index, store_mode, "incompatible");
2564 target_map = tmp_map;
2566 target_nof = target_map->NumberOfOwnDescriptors();
2567 target_descriptors = handle(target_map->instance_descriptors(), isolate);
2569 // Allocate a new descriptor array large enough to hold the required
2570 // descriptors, with minimally the exact same size as the old descriptor
2572 int new_slack = Max(
2573 old_nof, old_descriptors->number_of_descriptors()) - old_nof;
2574 Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
2575 isolate, old_nof, new_slack);
2576 DCHECK(new_descriptors->length() > target_descriptors->length() ||
2577 new_descriptors->NumberOfSlackDescriptors() > 0 ||
2578 new_descriptors->number_of_descriptors() ==
2579 old_descriptors->number_of_descriptors());
2580 DCHECK(new_descriptors->number_of_descriptors() == old_nof);
// Phase 2: build the merged descriptor array.
// 0 -> |root_nof|: copy the root's descriptors verbatim.
2583 int current_offset = 0;
2584 for (int i = 0; i < root_nof; ++i) {
2585 PropertyDetails old_details = old_descriptors->GetDetails(i);
2586 if (old_details.type() == FIELD) current_offset++;
2587 Descriptor d(handle(old_descriptors->GetKey(i), isolate),
2588 handle(old_descriptors->GetValue(i), isolate),
2590 new_descriptors->Set(i, &d);
2593 // |root_nof| -> |target_nof|
2594 for (int i = root_nof; i < target_nof; ++i) {
2595 Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
2596 PropertyDetails old_details = old_descriptors->GetDetails(i);
2597 PropertyDetails target_details = target_descriptors->GetDetails(i);
// Merge representations of the old and target descriptors; at
// |modify_index| additionally fold in |new_representation|.
2598 target_details = target_details.CopyWithRepresentation(
2599 old_details.representation().generalize(
2600 target_details.representation()));
2601 if (modify_index == i) {
2602 target_details = target_details.CopyWithRepresentation(
2603 new_representation.generalize(target_details.representation()));
2605 DCHECK_EQ(old_details.attributes(), target_details.attributes());
2606 if (old_details.type() == FIELD ||
2607 target_details.type() == FIELD ||
2608 (modify_index == i && store_mode == FORCE_FIELD) ||
2609 (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
2610 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2611 ? handle(old_descriptors->GetFieldType(i), isolate)
2612 : old_descriptors->GetValue(i)->OptimalType(
2613 isolate, target_details.representation());
2614 Handle<HeapType> target_field_type = (target_details.type() == FIELD)
2615 ? handle(target_descriptors->GetFieldType(i), isolate)
2616 : target_descriptors->GetValue(i)->OptimalType(
2617 isolate, target_details.representation());
2618 target_field_type = GeneralizeFieldType(
2619 target_field_type, old_field_type, isolate);
2620 if (modify_index == i) {
2621 target_field_type = GeneralizeFieldType(
2622 target_field_type, new_field_type, isolate);
2624 FieldDescriptor d(target_key,
2627 target_details.attributes(),
2628 target_details.representation());
2629 new_descriptors->Set(i, &d);
2631 DCHECK_NE(FIELD, target_details.type());
2632 Descriptor d(target_key,
2633 handle(target_descriptors->GetValue(i), isolate),
2635 new_descriptors->Set(i, &d);
2639 // |target_nof| -> |old_nof|
2640 for (int i = target_nof; i < old_nof; ++i) {
2641 PropertyDetails old_details = old_descriptors->GetDetails(i);
2642 Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
2643 if (modify_index == i) {
2644 old_details = old_details.CopyWithRepresentation(
2645 new_representation.generalize(old_details.representation()));
2647 if (old_details.type() == FIELD) {
2648 Handle<HeapType> old_field_type(
2649 old_descriptors->GetFieldType(i), isolate);
2650 if (modify_index == i) {
2651 old_field_type = GeneralizeFieldType(
2652 old_field_type, new_field_type, isolate);
2654 FieldDescriptor d(old_key,
2657 old_details.attributes(),
2658 old_details.representation());
2659 new_descriptors->Set(i, &d);
2661 DCHECK(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
// A CONSTANT being force-stored as a field is converted to a FIELD here.
2662 if (modify_index == i && store_mode == FORCE_FIELD) {
2663 FieldDescriptor d(old_key,
2665 GeneralizeFieldType(
2666 old_descriptors->GetValue(i)->OptimalType(
2667 isolate, old_details.representation()),
2668 new_field_type, isolate),
2669 old_details.attributes(),
2670 old_details.representation());
2671 new_descriptors->Set(i, &d);
2673 DCHECK_NE(FIELD, old_details.type());
2674 Descriptor d(old_key,
2675 handle(old_descriptors->GetValue(i), isolate),
2677 new_descriptors->Set(i, &d);
2682 new_descriptors->Sort();
2684 DCHECK(store_mode != FORCE_FIELD ||
2685 new_descriptors->GetDetails(modify_index).type() == FIELD);
// Phase 3: find |split_map|, deprecate the stale branch, and graft a new
// branch carrying |new_descriptors| onto the tree.
2687 Handle<Map> split_map(root_map->FindLastMatchMap(
2688 root_nof, old_nof, *new_descriptors), isolate);
2689 int split_nof = split_map->NumberOfOwnDescriptors();
2690 DCHECK_NE(old_nof, split_nof);
2692 bool transition_target_deprecated =
2693 split_map->DeprecateTarget(old_descriptors->GetKey(split_nof),
2696 // If |transition_target_deprecated| is true then the transition array
2697 // already contains entry for given descriptor. This means that the transition
2698 // could be inserted regardless of whether transitions array is full or not.
2699 if (!transition_target_deprecated && !split_map->CanHaveMoreTransitions()) {
2700 return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
2701 "can't have more transitions");
2703 if (FLAG_trace_generalization) {
2704 PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2705 PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
2706 Handle<HeapType> old_field_type = (old_details.type() == FIELD)
2707 ? handle(old_descriptors->GetFieldType(modify_index), isolate)
2708 : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
2710 Handle<HeapType> new_field_type = (new_details.type() == FIELD)
2711 ? handle(new_descriptors->GetFieldType(modify_index), isolate)
2712 : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
2714 old_map->PrintGeneralization(
2715 stdout, "", modify_index, split_nof, old_nof,
2716 old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
2717 old_details.representation(), new_details.representation(),
2718 *old_field_type, *new_field_type);
2721 // Add missing transitions.
2722 Handle<Map> new_map = split_map;
2723 for (int i = split_nof; i < old_nof; ++i) {
2724 new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
2726 new_map->set_owns_descriptors(true);
2731 // Generalize the representation of all FIELD descriptors.
// Walks this map's own descriptors and fully generalizes every FIELD
// (tagged representation, Any field type) via GeneralizeRepresentation,
// re-reading the (possibly updated) map after each step.
2732 Handle<Map> Map::GeneralizeAllFieldRepresentations(
2734 Handle<DescriptorArray> descriptors(map->instance_descriptors());
2735 for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
2736 if (descriptors->GetDetails(i).type() == FIELD) {
2737 map = GeneralizeRepresentation(map, i, Representation::Tagged(),
2738 HeapType::Any(map->GetIsolate()),
// Attempts to find an up-to-date, non-deprecated version of |map| without
// allocating. First opportunistically migrates deprecated maps found along
// the prototype chain, then delegates to TryUpdateInternal for |map| itself.
// Returns an empty MaybeHandle when no updated map can be found.
2747 MaybeHandle<Map> Map::TryUpdate(Handle<Map> map) {
2748 Handle<Map> proto_map(map);
2749 while (proto_map->prototype()->IsJSObject()) {
2750 Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
2751 proto_map = Handle<Map>(holder->map());
// Migrating the prototype instance refreshes its map in place.
2752 if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
2753 proto_map = Handle<Map>(holder->map());
2756 return TryUpdateInternal(map);
// Returns |map| itself if it is not deprecated; otherwise computes the
// updated map by running a no-op generalization (index 0, Representation
// None, HeapType None), which replays the transition-tree merge.
2761 Handle<Map> Map::Update(Handle<Map> map) {
2762 if (!map->is_deprecated()) return map;
2763 return GeneralizeRepresentation(map, 0, Representation::None(),
2764 HeapType::None(map->GetIsolate()),
// Allocation- and deoptimization-free attempt to locate the non-deprecated
// replacement of |old_map| by replaying its descriptors' keys through the
// transition tree from the root. Returns an empty MaybeHandle when any step
// of the walk finds a missing or incompatible transition, in which case the
// caller must fall back to the allocating update path.
2770 MaybeHandle<Map> Map::TryUpdateInternal(Handle<Map> old_map) {
2771 DisallowHeapAllocation no_allocation;
2772 DisallowDeoptimization no_deoptimization(old_map->GetIsolate());
2774 if (!old_map->is_deprecated()) return old_map;
2776 // Check the state of the root map.
2777 Map* root_map = old_map->FindRootMap();
2778 if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
2779 int root_nof = root_map->NumberOfOwnDescriptors();
2781 int old_nof = old_map->NumberOfOwnDescriptors();
2782 DescriptorArray* old_descriptors = old_map->instance_descriptors();
2784 Map* new_map = root_map;
2785 for (int i = root_nof; i < old_nof; ++i) {
2786 int j = new_map->SearchTransition(old_descriptors->GetKey(i));
2787 if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
2788 new_map = new_map->GetTransition(j);
2789 DescriptorArray* new_descriptors = new_map->instance_descriptors();
2791 PropertyDetails new_details = new_descriptors->GetDetails(i);
2792 PropertyDetails old_details = old_descriptors->GetDetails(i);
// Attributes must match exactly; representations must only widen.
2793 if (old_details.attributes() != new_details.attributes() ||
2794 !old_details.representation().fits_into(new_details.representation())) {
2795 return MaybeHandle<Map>();
2797 PropertyType new_type = new_details.type();
2798 PropertyType old_type = old_details.type();
2799 Object* new_value = new_descriptors->GetValue(i);
2800 Object* old_value = old_descriptors->GetValue(i);
// Per-type compatibility: field types must only widen, constants must be
// contained in the new field type, callbacks are never compatible here.
2803 if ((old_type == FIELD &&
2804 !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
2805 (old_type == CONSTANT &&
2806 !HeapType::cast(new_value)->NowContains(old_value)) ||
2807 (old_type == CALLBACKS &&
2808 !HeapType::Any()->Is(HeapType::cast(new_value)))) {
2809 return MaybeHandle<Map>();
2815 if (old_type != new_type || old_value != new_value) {
2816 return MaybeHandle<Map>();
// Only accept maps that account for every one of the old descriptors.
2824 if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
2825 return handle(new_map);
// Invokes the holder's named-property setter interceptor for the property
// |it| points at. Returns |value| when the interceptor handled the store,
// and an empty MaybeHandle when there is no applicable interceptor or the
// interceptor declined, so the caller continues with the default path.
2829 MaybeHandle<Object> JSObject::SetPropertyWithInterceptor(LookupIterator* it,
2830 Handle<Object> value) {
2831 // TODO(rossberg): Support symbols in the API.
2832 if (it->name()->IsSymbol()) return value;
2834 Handle<String> name_string = Handle<String>::cast(it->name());
2835 Handle<JSObject> holder = it->GetHolder<JSObject>();
2836 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
2837 if (interceptor->setter()->IsUndefined()) return MaybeHandle<Object>();
2840 ApiNamedPropertyAccess("interceptor-named-set", *holder, *name_string));
2841 PropertyCallbackArguments args(it->isolate(), interceptor->data(), *holder,
2843 v8::NamedPropertySetterCallback setter =
2844 v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
2845 v8::Handle<v8::Value> result = args.Call(
2846 setter, v8::Utils::ToLocal(name_string), v8::Utils::ToLocal(value));
// The interceptor may have thrown into the embedder API; propagate it.
2847 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
// A non-empty result means the interceptor intercepted the store.
2848 if (!result.IsEmpty()) return value;
2850 return MaybeHandle<Object>();
// Convenience overload: resolves |name| on |object| with a fresh
// LookupIterator and forwards to the iterator-based SetProperty.
2854 MaybeHandle<Object> Object::SetProperty(Handle<Object> object,
2855 Handle<Name> name, Handle<Object> value,
2856 StrictMode strict_mode,
2857 StoreFromKeyed store_mode)
2858 LookupIterator it(object, name);
2859 return SetProperty(&it, value, strict_mode, store_mode);
// Core property-store dispatcher. Walks the lookup chain via |it| and routes
// the store to the appropriate mechanism (access checks, proxies,
// interceptors, accessors, or plain data properties). If the walk finishes
// without storing, falls through to adding a new data property (or throwing
// a reference error for strict-mode stores on the global object).
2863 MaybeHandle<Object> Object::SetProperty(LookupIterator* it,
2864 Handle<Object> value,
2865 StrictMode strict_mode,
2866 StoreFromKeyed store_mode,
2867 StorePropertyMode data_store_mode) {
2868 // Make sure that the top context does not change when doing callbacks or
2869 // interceptor calls.
2870 AssertNoContextChange ncc(it->isolate());
2873 for (; it->IsFound(); it->Next()) {
2874 switch (it->state()) {
2875 case LookupIterator::NOT_FOUND:
2878 case LookupIterator::ACCESS_CHECK:
2879 // TODO(verwaest): Remove the distinction. This is mostly bogus since we
2880 // don't know whether we'll want to fetch attributes or call a setter
2881 // until we find the property.
2882 if (it->HasAccess(v8::ACCESS_SET)) break;
2883 return JSObject::SetPropertyWithFailedAccessCheck(it, value,
2886 case LookupIterator::JSPROXY:
2887 if (it->HolderIsReceiverOrHiddenPrototype()) {
2888 return JSProxy::SetPropertyWithHandler(it->GetHolder<JSProxy>(),
2889 it->GetReceiver(), it->name(),
2890 value, strict_mode);
2892 // TODO(verwaest): Use the MaybeHandle to indicate result.
2893 bool has_result = false;
2894 MaybeHandle<Object> maybe_result =
2895 JSProxy::SetPropertyViaPrototypesWithHandler(
2896 it->GetHolder<JSProxy>(), it->GetReceiver(), it->name(),
2897 value, strict_mode, &has_result);
2898 if (has_result) return maybe_result;
2903 case LookupIterator::INTERCEPTOR:
2904 if (it->HolderIsReceiverOrHiddenPrototype()) {
2905 MaybeHandle<Object> maybe_result =
2906 JSObject::SetPropertyWithInterceptor(it, value);
// Non-empty result: the interceptor handled the store.
2907 if (!maybe_result.is_null()) return maybe_result;
// Empty result with a pending exception: propagate the failure.
2908 if (it->isolate()->has_pending_exception()) return maybe_result;
2910 Maybe<PropertyAttributes> maybe_attributes =
2911 JSObject::GetPropertyAttributesWithInterceptor(
2912 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
2913 if (!maybe_attributes.has_value) return MaybeHandle<Object>();
2914 done = maybe_attributes.value != ABSENT;
2915 if (done && (maybe_attributes.value & READ_ONLY) != 0) {
2916 return WriteToReadOnlyProperty(it, value, strict_mode);
2921 case LookupIterator::ACCESSOR:
2922 if (it->property_details().IsReadOnly()) {
2923 return WriteToReadOnlyProperty(it, value, strict_mode);
2925 if (it->HolderIsReceiverOrHiddenPrototype() ||
2926 !it->GetAccessors()->IsDeclaredAccessorInfo()) {
2927 return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
2928 it->GetHolder<JSObject>(),
2929 it->GetAccessors(), strict_mode);
2934 case LookupIterator::DATA:
2935 if (it->property_details().IsReadOnly()) {
2936 return WriteToReadOnlyProperty(it, value, strict_mode);
// Only write directly when the holder is (or hides behind) the receiver;
// otherwise the store must add a new own property on the receiver.
2938 if (it->HolderIsReceiverOrHiddenPrototype()) {
2939 return SetDataProperty(it, value);
2944 case LookupIterator::TRANSITION:
2952 // If the receiver is the JSGlobalObject, the store was contextual. In case
2953 // the property did not exist yet on the global object itself, we have to
2954 // throw a reference error in strict mode.
2955 if (it->GetReceiver()->IsJSGlobalObject() && strict_mode == STRICT) {
2956 Handle<Object> args[1] = {it->name()};
2957 THROW_NEW_ERROR(it->isolate(),
2958 NewReferenceError("not_defined", HandleVector(args, 1)),
2962 if (data_store_mode == SUPER_PROPERTY) {
// super.x = v stores on the receiver itself, so redo an OWN lookup there.
2963 LookupIterator own_lookup(it->GetReceiver(), it->name(),
2964 LookupIterator::OWN);
2966 return JSObject::SetProperty(&own_lookup, value, strict_mode, store_mode,
2970 return AddDataProperty(it, value, NONE, strict_mode, store_mode);
// Handles a write to a read-only named property: silently returns |value|
// in sloppy mode, throws a TypeError in strict mode.
2974 MaybeHandle<Object> Object::WriteToReadOnlyProperty(LookupIterator* it,
2975 Handle<Object> value,
2976 StrictMode strict_mode) {
2977 if (strict_mode != STRICT) return value;
2979 Handle<Object> args[] = {it->name(), it->GetReceiver()};
2980 THROW_NEW_ERROR(it->isolate(),
2981 NewTypeError("strict_read_only_property",
2982 HandleVector(args, arraysize(args))),
// Indexed-property counterpart of WriteToReadOnlyProperty: sloppy mode
// silently returns |value|, strict mode throws a TypeError with the element
// index formatted as a number.
2987 MaybeHandle<Object> Object::WriteToReadOnlyElement(Isolate* isolate,
2988 Handle<Object> receiver,
2990 Handle<Object> value,
2991 StrictMode strict_mode) {
2992 if (strict_mode != STRICT) return value;
2994 Handle<Object> args[] = {isolate->factory()->NewNumberFromUint(index),
2996 THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
2997 HandleVector(args, arraysize(args))),
// Writes |value| into an already-existing data property located by |it|.
// May migrate the holder's map first (PrepareForDataProperty), and enqueues
// an Object.observe "update" change record when the object is observed and
// the value actually changed.
3002 MaybeHandle<Object> Object::SetDataProperty(LookupIterator* it,
3003 Handle<Object> value) {
3004 // Proxies are handled on the WithHandler path. Other non-JSObjects cannot
3005 // have own properties.
3006 Handle<JSObject> receiver = Handle<JSObject>::cast(it->GetReceiver());
3008 // Store on the holder which may be hidden behind the receiver.
3009 DCHECK(it->HolderIsReceiverOrHiddenPrototype());
3011 // Old value for the observation change record.
3012 // Fetch before transforming the object since the encoding may become
3013 // incompatible with what's cached in |it|.
// NOTE(review): the declaration line for |is_observed| (a bool combining the
// two conditions below) is elided in this gapped listing. Stores to the
// hidden-string property are never observed.
3015 receiver->map()->is_observed() &&
3016 !it->name().is_identical_to(it->factory()->hidden_string());
3017 MaybeHandle<Object> maybe_old;
3018 if (is_observed) maybe_old = it->GetDataValue();
3020 // Possibly migrate to the most up-to-date map that will be able to store
3021 // |value| under it->name().
3022 it->PrepareForDataProperty(value);
3024 // Write the property value.
3025 it->WriteDataValue(value);
3027 // Send the change record if there are observers.
3028 if (is_observed && !value->SameValue(*maybe_old.ToHandleChecked())) {
3029 RETURN_ON_EXCEPTION(it->isolate(), JSObject::EnqueueChangeRecord(
3030 receiver, "update", it->name(),
3031 maybe_old.ToHandleChecked()),
// Adds a brand-new data property with |attributes| to the receiver located by
// |it|. Handles non-extensible objects (silent in sloppy mode, TypeError in
// strict mode), global-proxy and typed-array special cases, dictionary-mode
// receivers, and enqueues an "add" change record for observed objects.
3039 MaybeHandle<Object> Object::AddDataProperty(LookupIterator* it,
3040 Handle<Object> value,
3041 PropertyAttributes attributes,
3042 StrictMode strict_mode,
3043 StoreFromKeyed store_mode) {
3044 DCHECK(!it->GetReceiver()->IsJSProxy());
3045 if (!it->GetReceiver()->IsJSObject()) {
3046 // TODO(verwaest): Throw a TypeError with a more specific message.
3047 return WriteToReadOnlyProperty(it, value, strict_mode);
3050 Handle<JSObject> receiver = it->GetStoreTarget();
3052 // If the receiver is a JSGlobalProxy, store on the prototype (JSGlobalObject)
3053 // instead. If the prototype is Null, the proxy is detached.
3054 if (receiver->IsJSGlobalProxy()) return value;
3056 // If the receiver is Indexed Exotic object (currently only typed arrays),
3057 // disallow adding properties with numeric names.
3058 if (it->IsSpecialNumericIndex()) return value;
3060 // Possibly migrate to the most up-to-date map that will be able to store
3061 // |value| under it->name() with |attributes|.
3062 it->PrepareTransitionToDataProperty(value, attributes, store_mode);
3063 if (it->state() != LookupIterator::TRANSITION) {
// No transition means the object is not extensible: fail silently in sloppy
// mode, throw "object_not_extensible" in strict mode.
3064 if (strict_mode == SLOPPY) return value;
3066 Handle<Object> args[1] = {it->name()};
3067 THROW_NEW_ERROR(it->isolate(),
3068 NewTypeError("object_not_extensible",
3069 HandleVector(args, arraysize(args))),
3072 it->ApplyTransitionToDataProperty();
3074 // TODO(verwaest): Encapsulate dictionary handling better.
3075 if (receiver->map()->is_dictionary_map()) {
3076 // TODO(verwaest): Probably should ensure this is done beforehand.
3077 it->InternalizeName();
3078 JSObject::AddSlowProperty(receiver, it->name(), value, attributes);
3080 // Write the property value.
3081 it->WriteDataValue(value);
3084 // Send the change record if there are observers.
3085 if (receiver->map()->is_observed() &&
3086 !it->name().is_identical_to(it->factory()->hidden_string())) {
3087 RETURN_ON_EXCEPTION(it->isolate(), JSObject::EnqueueChangeRecord(
3088 receiver, "add", it->name(),
3089 it->factory()->the_hole_value()),
// Walks the prototype chain of |object| looking for something that intercepts
// a store to element |index|: a JSProxy (delegated to the proxy machinery) or
// a dictionary-elements holder whose entry for |index| is a CALLBACKS
// accessor. Returns the-hole when nothing on the chain handled the store.
// NOTE(review): the |found| out-parameter and |index| parameter lines are
// elided in this gapped listing.
3097 MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
3098 Handle<JSObject> object,
3100 Handle<Object> value,
3102 StrictMode strict_mode) {
3103 Isolate *isolate = object->GetIsolate();
3104 for (PrototypeIterator iter(isolate, object); !iter.IsAtEnd();
3106 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
3107 return JSProxy::SetPropertyViaPrototypesWithHandler(
3108 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), object,
3109 isolate->factory()->Uint32ToString(index), // name
3110 value, strict_mode, found);
3112 Handle<JSObject> js_proto =
3113 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
// Only dictionary-mode elements can hold accessor (CALLBACKS) entries.
3114 if (!js_proto->HasDictionaryElements()) {
3117 Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
3118 int entry = dictionary->FindEntry(index);
3119 if (entry != SeededNumberDictionary::kNotFound) {
3120 PropertyDetails details = dictionary->DetailsAt(entry);
3121 if (details.type() == CALLBACKS) {
3123 Handle<Object> structure(dictionary->ValueAt(entry), isolate);
3124 return SetElementWithCallback(object, structure, index, value, js_proto,
3130 return isolate->factory()->the_hole_value();
// Guarantees that |map|'s (owned) descriptor array has at least |slack| free
// descriptor slots, copying the array if necessary and rewiring every map
// that shares the old array (found by walking back pointers) to the new one.
3134 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3135 // Only supports adding slack to owned descriptors.
3136 DCHECK(map->owns_descriptors());
3138 Handle<DescriptorArray> descriptors(map->instance_descriptors());
3139 int old_size = map->NumberOfOwnDescriptors();
// Fast out: the existing array already has enough slack.
3140 if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3142 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
3143 descriptors, old_size, slack);
3145 if (old_size == 0) {
3146 map->set_instance_descriptors(*new_descriptors);
3150 // If the source descriptors had an enum cache we copy it. This ensures
3151 // that the maps to which we push the new descriptor array back can rely
3152 // on a cache always being available once it is set. If the map has more
3153 // enumerated descriptors than available in the original cache, the cache
3154 // will be lazily replaced by the extended cache when needed.
3155 if (descriptors->HasEnumCache()) {
3156 new_descriptors->CopyEnumCacheFrom(*descriptors);
3159 // Replace descriptors by new_descriptors in all maps that share it.
// Record the old array for incremental marking before detaching it.
3160 map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);
// NOTE(review): the declaration line for |walk_map| (Map*) is elided in this
// gapped listing; the loop walks the back-pointer chain and stops at the
// first map that no longer shares |descriptors|.
3163 for (Object* current = map->GetBackPointer();
3164 !current->IsUndefined();
3165 current = walk_map->GetBackPointer()) {
3166 walk_map = Map::cast(current);
3167 if (walk_map->instance_descriptors() != *descriptors) break;
3168 walk_map->set_instance_descriptors(*new_descriptors);
3171 map->set_instance_descriptors(*new_descriptors);
// Appends the AccessorInfo callbacks from |callbacks| into |array| using the
// policy type T (DescriptorArrayAppender or FixedArrayAppender below),
// skipping names already present. Returns the updated number of valid
// descriptors. NOTE(review): the `template <class T>` header line is elided
// in this gapped listing.
3176 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3177 Handle<typename T::Array> array,
3178 int valid_descriptors) {
3179 int nof_callbacks = callbacks->length();
3181 Isolate* isolate = array->GetIsolate();
3182 // Ensure the keys are unique names before writing them into the
3183 // instance descriptor. Since it may cause a GC, it has to be done before we
3184 // temporarily put the heap in an invalid state while appending descriptors.
3185 for (int i = 0; i < nof_callbacks; ++i) {
3186 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3187 if (entry->name()->IsUniqueName()) continue;
3188 Handle<String> key =
3189 isolate->factory()->InternalizeString(
3190 Handle<String>(String::cast(entry->name())));
3191 entry->set_name(*key);
3194 // Fill in new callback descriptors. Process the callbacks from
3195 // back to front so that the last callback with a given name takes
3196 // precedence over previously added callbacks with that name.
3197 for (int i = nof_callbacks - 1; i >= 0; i--) {
3198 Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3199 Handle<Name> key(Name::cast(entry->name()));
3200 // Check if a descriptor with this name already exists before writing.
3201 if (!T::Contains(key, entry, valid_descriptors, array)) {
3202 T::Insert(key, entry, valid_descriptors, array);
3203 valid_descriptors++;
3207 return valid_descriptors;
// Policy for AppendUniqueCallbacks that targets a map's DescriptorArray:
// duplicate detection via descriptor search, insertion via Append of a
// CallbacksDescriptor carrying the accessor's property attributes.
3210 struct DescriptorArrayAppender {
3211 typedef DescriptorArray Array;
3212 static bool Contains(Handle<Name> key,
3213 Handle<AccessorInfo> entry,
3214 int valid_descriptors,
3215 Handle<DescriptorArray> array) {
3216 DisallowHeapAllocation no_gc;
3217 return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
3219 static void Insert(Handle<Name> key,
3220 Handle<AccessorInfo> entry,
3221 int valid_descriptors,
3222 Handle<DescriptorArray> array) {
3223 DisallowHeapAllocation no_gc;
3224 CallbacksDescriptor desc(key, entry, entry->property_attributes());
3225 array->Append(&desc);
// Policy for AppendUniqueCallbacks that targets a plain FixedArray of
// AccessorInfo objects: linear-scan duplicate detection, slot-store insert.
3230 struct FixedArrayAppender {
3231 typedef FixedArray Array;
3232 static bool Contains(Handle<Name> key,
3233 Handle<AccessorInfo> entry,
3234 int valid_descriptors,
3235 Handle<FixedArray> array) {
// O(valid_descriptors) scan; fine for the small callback lists this serves.
3236 for (int i = 0; i < valid_descriptors; i++) {
3237 if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
3241 static void Insert(Handle<Name> key,
3242 Handle<AccessorInfo> entry,
3243 int valid_descriptors,
3244 Handle<FixedArray> array) {
3245 DisallowHeapAllocation no_gc;
3246 array->set(valid_descriptors, *entry);
// Appends the callbacks in |descriptors| (a NeanderArray) to |map|'s own
// descriptor array. The caller must have reserved enough slack (see the
// DCHECK); the map's own-descriptor count is updated with the result.
3251 void Map::AppendCallbackDescriptors(Handle<Map> map,
3252 Handle<Object> descriptors) {
3253 int nof = map->NumberOfOwnDescriptors();
3254 Handle<DescriptorArray> array(map->instance_descriptors());
3255 NeanderArray callbacks(descriptors);
3256 DCHECK(array->NumberOfSlackDescriptors() >= callbacks.length());
3257 nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3258 map->SetNumberOfOwnDescriptors(nof);
// Appends the callbacks in |descriptors| to |array| (a FixedArray of
// AccessorInfo), skipping names already present among the first
// |valid_descriptors| entries; returns the new valid count.
3262 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3263 Handle<FixedArray> array,
3264 int valid_descriptors) {
3265 NeanderArray callbacks(descriptors);
3266 DCHECK(array->length() >= callbacks.length() + valid_descriptors);
3267 return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
// Returns true if |map| is identical to one of the (possibly null) entries
// in |maps|. The false return path is elided in this gapped listing.
3273 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3274 DCHECK(!map.is_null());
3275 for (int i = 0; i < maps->length(); ++i) {
3276 if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
// Wraps a raw pointer in a Handle, mapping NULL to the null handle.
// NOTE(review): the `template <class T>` header line is elided in this
// gapped listing.
3283 static Handle<T> MaybeNull(T* p) {
3284 if (p == NULL) return Handle<T>::null();
3285 return Handle<T>(p);
// Starting from this map's elements kind, follows the chain of
// progressively-more-general fast elements transitions and returns the most
// general transitioned map that also appears in |candidates| (null handle if
// none). Packed kinds are only accepted while the chain has stayed packed.
3289 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3290 ElementsKind kind = elements_kind();
3291 Handle<Map> transitioned_map = Handle<Map>::null();
3292 Handle<Map> current_map(this);
3293 bool packed = IsFastPackedElementsKind(kind);
3294 if (IsTransitionableFastElementsKind(kind)) {
3295 while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3296 kind = GetNextMoreGeneralFastElementsKind(kind, false);
3297 Handle<Map> maybe_transitioned_map =
3298 MaybeNull(current_map->LookupElementsTransitionMap(kind));
// Stop at the first missing link in the transition chain.
3299 if (maybe_transitioned_map.is_null()) break;
3300 if (ContainsMap(candidates, maybe_transitioned_map) &&
3301 (packed || !IsFastPackedElementsKind(kind))) {
3302 transitioned_map = maybe_transitioned_map;
3303 if (!IsFastPackedElementsKind(kind)) packed = false;
3305 current_map = maybe_transitioned_map;
3308 return transitioned_map;
// Walks |map|'s elements-kind transition chain toward |to_kind| and returns
// the closest map reached — the exact target if the full chain exists,
// otherwise the last map on the chain. Handles the legacy external-array
// transition and the trailing DICTIONARY_ELEMENTS transition specially.
3312 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3313 Map* current_map = map;
// NOTE(review): the declaration line for |target_kind| (selected by the
// ternary whose condition is below; the elided true-branch is presumably
// |to_kind|) is missing from this gapped listing.
3315 IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3317 : TERMINAL_FAST_ELEMENTS_KIND;
3319 // Support for legacy API: SetIndexedPropertiesTo{External,Pixel}Data
3320 // allows to change elements from arbitrary kind to any ExternalArray
3321 // elements kind. Satisfy its requirements, checking whether we already
3322 // have the cached transition.
3323 if (IsExternalArrayElementsKind(to_kind) &&
3324 !IsFixedTypedArrayElementsKind(map->elements_kind())) {
3325 if (map->HasElementsTransition()) {
3326 Map* next_map = map->elements_transition_map();
3327 if (next_map->elements_kind() == to_kind) return next_map;
3332 ElementsKind kind = map->elements_kind();
// Follow the chain one kind at a time until |target_kind| or a dead end.
3333 while (kind != target_kind) {
3334 kind = GetNextTransitionElementsKind(kind);
3335 if (!current_map->HasElementsTransition()) return current_map;
3336 current_map = current_map->elements_transition_map();
// A final transition past the fast kinds can only be to DICTIONARY_ELEMENTS.
3339 if (to_kind != kind && current_map->HasElementsTransition()) {
3340 DCHECK(to_kind == DICTIONARY_ELEMENTS);
3341 Map* next_map = current_map->elements_transition_map();
3342 if (next_map->elements_kind() == to_kind) return next_map;
3345 DCHECK(current_map->elements_kind() == target_kind);
// Returns the existing transition map for exactly |to_kind|, if the full
// transition chain is present. The not-found return (presumably NULL — see
// MaybeNull at the call site) is elided in this gapped listing.
3350 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3351 Map* to_map = FindClosestElementsTransition(this, to_kind);
3352 if (to_map->elements_kind() == to_kind) return to_map;
// Tests whether this map is the map of one of the isolate's canonical
// prototype objects on the Array prototype chain. NOTE(review): the return
// statements inside both branches (and the final return) are elided in this
// gapped listing.
3357 bool Map::IsMapInArrayPrototypeChain() {
3358 Isolate* isolate = GetIsolate();
3359 if (isolate->initial_array_prototype()->map() == this) {
3363 if (isolate->initial_object_prototype()->map() == this) {
// Creates (and inserts as transitions) the chain of maps needed to go from
// |map|'s elements kind to |to_kind|, returning the final map. Prototype
// maps skip the intermediate-kind chain; a kind outside the fast system
// (e.g. dictionary) gets a single direct transition at the end.
3371 static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
3372 ElementsKind to_kind) {
3373 DCHECK(IsTransitionElementsKind(map->elements_kind()));
3375 Handle<Map> current_map = map;
3377 ElementsKind kind = map->elements_kind();
3378 if (!map->is_prototype_map()) {
// Add one transition per intermediate kind up to the terminal fast kind.
3379 while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3380 kind = GetNextTransitionElementsKind(kind);
3382 Map::CopyAsElementsKind(current_map, kind, INSERT_TRANSITION);
3386 // In case we are exiting the fast elements kind system, just add the map in
3388 if (kind != to_kind) {
3389 current_map = Map::CopyAsElementsKind(
3390 current_map, to_kind, INSERT_TRANSITION);
3393 DCHECK(current_map->elements_kind() == to_kind);
// Returns a map like |map| but with elements kind |to_kind|. Fast path:
// if |map| is the native context's canonical JSArray map for |from_kind|,
// the pre-built transitioned array map is taken straight from the
// js_array_maps cache; otherwise falls through to the slow path.
3398 Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
3399 ElementsKind to_kind) {
3400 ElementsKind from_kind = map->elements_kind();
3401 if (from_kind == to_kind) return map;
3403 Isolate* isolate = map->GetIsolate();
3404 Context* native_context = isolate->context()->native_context();
3405 Object* maybe_array_maps = native_context->js_array_maps();
3406 if (maybe_array_maps->IsFixedArray()) {
3407 DisallowHeapAllocation no_gc;
3408 FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
3409 if (array_maps->get(from_kind) == *map) {
3410 Object* maybe_transitioned_map = array_maps->get(to_kind);
3411 if (maybe_transitioned_map->IsMap()) {
3412 return handle(Map::cast(maybe_transitioned_map));
3417 return TransitionElementsToSlow(map, to_kind);
// Slow path of TransitionElementsTo: decides whether the elements-kind
// change may be remembered as a map transition (only in ascending generality
// for fast kinds) or must be a one-off copied map (OMIT_TRANSITION).
3421 Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
3422 ElementsKind to_kind) {
3423 ElementsKind from_kind = map->elements_kind();
3425 if (from_kind == to_kind) {
3429 bool allow_store_transition =
3430 // Only remember the map transition if there is not an already existing
3431 // non-matching element transition.
3432 !map->IsUndefined() && !map->is_dictionary_map() &&
3433 IsTransitionElementsKind(from_kind);
3435 // Only store fast element maps in ascending generality.
3436 if (IsFastElementsKind(to_kind)) {
3437 allow_store_transition &=
3438 IsTransitionableFastElementsKind(from_kind) &&
3439 IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3442 if (!allow_store_transition) {
3443 return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
3446 return Map::AsElementsKind(map, to_kind);
// Returns the transitioned map for |kind|, reusing the existing transition
// chain where possible and creating only the missing links.
3451 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3452 Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));
3454 if (closest_map->elements_kind() == kind) {
3458 return AddMissingElementsTransitions(closest_map, kind);
// Convenience wrapper: the map |object| would have after transitioning its
// elements to |to_kind| (does not actually migrate the object).
3462 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3463 ElementsKind to_kind) {
3464 Handle<Map> map(object->map());
3465 return Map::TransitionElementsTo(map, to_kind);
// Implements the proxy "has" trap: invokes the handler's trap (falling back
// to the derived trap) and returns the boolean-converted result. Symbols are
// reported absent pending a symbols-vs-proxies story.
3469 Maybe<bool> JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy,
3470 Handle<Name> name) {
3471 Isolate* isolate = proxy->GetIsolate();
3473 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3474 if (name->IsSymbol()) return maybe(false);
3476 Handle<Object> args[] = { name };
3477 Handle<Object> result;
3478 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3479 isolate, result, CallTrap(proxy, "has", isolate->derived_has_trap(),
3480 arraysize(args), args),
3483 return maybe(result->BooleanValue());
// Implements the proxy "set" trap: calls the handler's trap (or the derived
// set trap) with (receiver, name, value). Symbol-named stores are silently
// successful (return |value|) for now. NOTE(review): the Handle<Name> name
// parameter line and the function's tail are elided in this gapped listing.
3487 MaybeHandle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
3488 Handle<Object> receiver,
3490 Handle<Object> value,
3491 StrictMode strict_mode) {
3492 Isolate* isolate = proxy->GetIsolate();
3494 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3495 if (name->IsSymbol()) return value;
3497 Handle<Object> args[] = { receiver, name, value };
3498 RETURN_ON_EXCEPTION(
3502 isolate->derived_set_trap(),
// Store to |name| where a JSProxy sits on the receiver's prototype chain.
// Asks the handler's "getPropertyDescriptor" trap for a descriptor,
// normalizes it via ToCompletePropertyDescriptor, then emulates [[Put]]:
//  - no descriptor: *done = false, caller continues the lookup;
//  - non-configurable: TypeError (proxies must report configurable props);
//  - data descriptor: writable -> not done; read-only -> sloppy no-op or
//    strict "strict_read_only_property" TypeError;
//  - accessor descriptor: call the setter, or sloppy no-op / strict
//    "no_setter_in_callback" TypeError when there is none.
3511 MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3512 Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name,
3513 Handle<Object> value, StrictMode strict_mode, bool* done) {
3514 Isolate* isolate = proxy->GetIsolate();
3515 Handle<Object> handler(proxy->handler(), isolate); // Trap might morph proxy.
3517 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3518 if (name->IsSymbol()) {
3520 return isolate->factory()->the_hole_value();
3523 *done = true; // except where redefined...
3524 Handle<Object> args[] = { name };
3525 Handle<Object> result;
3526 ASSIGN_RETURN_ON_EXCEPTION(
3529 "getPropertyDescriptor",
3535 if (result->IsUndefined()) {
// No descriptor: the proxy does not intercept; let the caller continue.
3537 return isolate->factory()->the_hole_value();
3540 // Emulate [[GetProperty]] semantics for proxies.
3541 Handle<Object> argv[] = { result };
3542 Handle<Object> desc;
3543 ASSIGN_RETURN_ON_EXCEPTION(
3545 Execution::Call(isolate,
3546 isolate->to_complete_property_descriptor(),
3552 // [[GetProperty]] requires to check that all properties are configurable.
3553 Handle<String> configurable_name =
3554 isolate->factory()->InternalizeOneByteString(
3555 STATIC_CHAR_VECTOR("configurable_"));
3556 Handle<Object> configurable =
3557 Object::GetProperty(desc, configurable_name).ToHandleChecked();
3558 DCHECK(configurable->IsBoolean());
3559 if (configurable->IsFalse()) {
3560 Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3561 STATIC_CHAR_VECTOR("getPropertyDescriptor"));
3562 Handle<Object> args[] = { handler, trap, name };
3563 THROW_NEW_ERROR(isolate, NewTypeError("proxy_prop_not_configurable",
3564 HandleVector(args, arraysize(args))),
3567 DCHECK(configurable->IsTrue());
3569 // Check for DataDescriptor.
3570 Handle<String> hasWritable_name =
3571 isolate->factory()->InternalizeOneByteString(
3572 STATIC_CHAR_VECTOR("hasWritable_"));
3573 Handle<Object> hasWritable =
3574 Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
3575 DCHECK(hasWritable->IsBoolean());
3576 if (hasWritable->IsTrue()) {
3577 Handle<String> writable_name = isolate->factory()->InternalizeOneByteString(
3578 STATIC_CHAR_VECTOR("writable_"));
3579 Handle<Object> writable =
3580 Object::GetProperty(desc, writable_name).ToHandleChecked();
3581 DCHECK(writable->IsBoolean());
// Writable data property: the actual store happens in the caller.
3582 *done = writable->IsFalse();
3583 if (!*done) return isolate->factory()->the_hole_value();
3584 if (strict_mode == SLOPPY) return value;
3585 Handle<Object> args[] = { name, receiver };
3586 THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
3587 HandleVector(args, arraysize(args))),
3591 // We have an AccessorDescriptor.
3592 Handle<String> set_name =
3593 isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("set_"));
3594 Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
3595 if (!setter->IsUndefined()) {
3596 // TODO(rossberg): nicer would be to cast to some JSCallable here...
3597 return SetPropertyWithDefinedSetter(
3598 receiver, Handle<JSReceiver>::cast(setter), value);
3601 if (strict_mode == SLOPPY) return value;
3602 Handle<Object> args2[] = { name, proxy };
3603 THROW_NEW_ERROR(isolate, NewTypeError("no_setter_in_callback",
3604 HandleVector(args2, arraysize(args2))),
// Implements the proxy "delete" trap. Symbol-named deletes report failure.
// A falsy trap result under STRICT_DELETION throws "handler_failed";
// otherwise the boolean trap result is returned.
3609 MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
3610 Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3611 Isolate* isolate = proxy->GetIsolate();
3613 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3614 if (name->IsSymbol()) return isolate->factory()->false_value();
3616 Handle<Object> args[] = { name };
3617 Handle<Object> result;
3618 ASSIGN_RETURN_ON_EXCEPTION(
3627 bool result_bool = result->BooleanValue();
3628 if (mode == STRICT_DELETION && !result_bool) {
3629 Handle<Object> handler(proxy->handler(), isolate);
3630 Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3631 STATIC_CHAR_VECTOR("delete"));
3632 Handle<Object> args[] = { handler, trap_name };
3633 THROW_NEW_ERROR(isolate, NewTypeError("handler_failed",
3634 HandleVector(args, arraysize(args))),
3637 return isolate->factory()->ToBoolean(result_bool);
// Element deletion on a proxy: converts the index to its string form and
// delegates to the named-property delete trap.
3641 MaybeHandle<Object> JSProxy::DeleteElementWithHandler(
3642 Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3643 Isolate* isolate = proxy->GetIsolate();
3644 Handle<String> name = isolate->factory()->Uint32ToString(index);
3645 return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
// Computes PropertyAttributes for a proxy property by calling the handler's
// "getPropertyDescriptor" trap, normalizing the descriptor, then folding the
// enumerable/configurable/writable flags (an accessor with a setter counts
// as writable) into DONT_ENUM / DONT_DELETE / READ_ONLY bits. Symbols are
// ABSENT; a non-configurable report throws "proxy_prop_not_configurable".
3649 Maybe<PropertyAttributes> JSProxy::GetPropertyAttributesWithHandler(
3650 Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name) {
3651 Isolate* isolate = proxy->GetIsolate();
3652 HandleScope scope(isolate);
3654 // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3655 if (name->IsSymbol()) return maybe(ABSENT);
3657 Handle<Object> args[] = { name };
3658 Handle<Object> result;
3659 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3661 proxy->CallTrap(proxy, "getPropertyDescriptor", Handle<Object>(),
3662 arraysize(args), args),
3663 Maybe<PropertyAttributes>());
3665 if (result->IsUndefined()) return maybe(ABSENT);
3667 Handle<Object> argv[] = { result };
3668 Handle<Object> desc;
3669 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
3671 Execution::Call(isolate, isolate->to_complete_property_descriptor(),
3672 result, arraysize(argv), argv),
3673 Maybe<PropertyAttributes>());
3675 // Convert result to PropertyAttributes.
3676 Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
3677 STATIC_CHAR_VECTOR("enumerable_"));
3678 Handle<Object> enumerable;
3679 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, enumerable,
3680 Object::GetProperty(desc, enum_n),
3681 Maybe<PropertyAttributes>());
3682 Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
3683 STATIC_CHAR_VECTOR("configurable_"));
3684 Handle<Object> configurable;
3685 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, configurable,
3686 Object::GetProperty(desc, conf_n),
3687 Maybe<PropertyAttributes>());
3688 Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
3689 STATIC_CHAR_VECTOR("writable_"));
3690 Handle<Object> writable;
3691 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, writable,
3692 Object::GetProperty(desc, writ_n),
3693 Maybe<PropertyAttributes>());
3694 if (!writable->BooleanValue()) {
// An accessor descriptor is effectively writable when a setter is present.
3695 Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
3696 STATIC_CHAR_VECTOR("set_"));
3697 Handle<Object> setter;
3698 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, setter,
3699 Object::GetProperty(desc, set_n),
3700 Maybe<PropertyAttributes>());
3701 writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
3704 if (configurable->IsFalse()) {
3705 Handle<Object> handler(proxy->handler(), isolate);
3706 Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3707 STATIC_CHAR_VECTOR("getPropertyDescriptor"));
3708 Handle<Object> args[] = { handler, trap, name };
3709 Handle<Object> error;
3710 MaybeHandle<Object> maybe_error = isolate->factory()->NewTypeError(
3711 "proxy_prop_not_configurable", HandleVector(args, arraysize(args)));
3712 if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
3716 int attributes = NONE;
3717 if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
3718 if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
3719 if (!writable->BooleanValue()) attributes |= READ_ONLY;
3720 return maybe(static_cast<PropertyAttributes>(attributes));
// Element variant: stringify the index and reuse the named-property
// attribute lookup.
3724 Maybe<PropertyAttributes> JSProxy::GetElementAttributeWithHandler(
3725 Handle<JSProxy> proxy, Handle<JSReceiver> receiver, uint32_t index) {
3726 Isolate* isolate = proxy->GetIsolate();
3727 Handle<String> name = isolate->factory()->Uint32ToString(index);
3728 return GetPropertyAttributesWithHandler(proxy, receiver, name);
// "Fixes" a proxy: converts it in place into a regular JSObject (or
// JSFunction for a function proxy), preserving any previously computed
// identity hash across the conversion.
3732 void JSProxy::Fix(Handle<JSProxy> proxy) {
3733 Isolate* isolate = proxy->GetIsolate();
3735 // Save identity hash.
3736 Handle<Object> hash(proxy->GetIdentityHash(), isolate);
3738 if (proxy->IsJSFunctionProxy()) {
3739 isolate->factory()->BecomeJSFunction(proxy);
3740 // Code will be set on the JavaScript side.
3742 isolate->factory()->BecomeJSObject(proxy);
3744 DCHECK(proxy->IsJSObject());
3746 // Inherit identity, if it was present.
3747 if (hash->IsSmi()) {
3748 JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
3749 Handle<Smi>::cast(hash));
// Looks up trap |name| on the proxy's handler and calls it with |argv|.
// If the trap is absent, falls back to |derived| (the default derived trap);
// with no fallback either, throws "handler_trap_missing". NOTE(review): the
// |name| (const char*) and |argc| parameter lines are elided in this gapped
// listing.
3754 MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
3756 Handle<Object> derived,
3758 Handle<Object> argv[]) {
3759 Isolate* isolate = proxy->GetIsolate();
3760 Handle<Object> handler(proxy->handler(), isolate);
3762 Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
3763 Handle<Object> trap;
3764 ASSIGN_RETURN_ON_EXCEPTION(
3766 Object::GetPropertyOrElement(handler, trap_name),
3769 if (trap->IsUndefined()) {
3770 if (derived.is_null()) {
3771 Handle<Object> args[] = { handler, trap_name };
3772 THROW_NEW_ERROR(isolate,
3773 NewTypeError("handler_trap_missing",
3774 HandleVector(args, arraysize(args))),
3777 trap = Handle<Object>(derived);
3780 return Execution::Call(isolate, trap, handler, argc, argv);
// Migrates |object| to |map|, first reconciling any elements-kind mismatch:
// either the object's elements are transitioned/normalized toward the map's
// kind, or the map is re-derived for the object's kind, so the final
// MigrateToMap sees agreeing kinds.
3784 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
3785 DCHECK(object->map()->inobject_properties() == map->inobject_properties());
3786 ElementsKind obj_kind = object->map()->elements_kind();
3787 ElementsKind map_kind = map->elements_kind();
3788 if (map_kind != obj_kind) {
3789 ElementsKind to_kind = map_kind;
// NOTE(review): the body of this condition (presumably `to_kind = obj_kind;`)
// is elided in this gapped listing.
3790 if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
3791 IsDictionaryElementsKind(obj_kind)) {
3794 if (IsDictionaryElementsKind(to_kind)) {
3795 NormalizeElements(object);
3797 TransitionElementsKind(object, to_kind);
3799 map = Map::AsElementsKind(map, to_kind);
3801 JSObject::MigrateToMap(object, map);
// Unconditionally migrates |object| to the up-to-date version of its map
// (Map::Update), marking the new map as a migration target; optionally
// traces the migration under --trace-migration.
3805 void JSObject::MigrateInstance(Handle<JSObject> object) {
3806 Handle<Map> original_map(object->map());
3807 Handle<Map> map = Map::Update(original_map);
3808 map->set_migration_target(true);
3809 MigrateToMap(object, map);
3810 if (FLAG_trace_migration) {
3811 object->PrintInstanceMigration(stdout, *original_map, *map);
// Best-effort variant of MigrateInstance: uses Map::TryUpdate, which can
// fail; deoptimization is disallowed for the duration. The boolean return
// statements are elided in this gapped listing (failure path after the
// TryUpdate miss, success at the end).
3817 bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
3818 Isolate* isolate = object->GetIsolate();
3819 DisallowDeoptimization no_deoptimization(isolate);
3820 Handle<Map> original_map(object->map(), isolate);
3821 Handle<Map> new_map;
3822 if (!Map::TryUpdate(original_map).ToHandle(&new_map)) {
3825 JSObject::MigrateToMap(object, new_map);
3826 if (FLAG_trace_migration) {
3827 object->PrintInstanceMigration(stdout, *original_map, object->map());
// Raw in-place write of |value| into the field behind descriptor number
// |descriptor|. Double-representation fields store into the existing mutable
// HeapNumber box (unless |value| is uninitialized); everything else is a
// plain tagged store. No allocation may occur.
3833 void JSObject::WriteToField(int descriptor, Object* value) {
3834 DisallowHeapAllocation no_gc;
3836 DescriptorArray* desc = map()->instance_descriptors();
3837 PropertyDetails details = desc->GetDetails(descriptor);
3839 DCHECK(details.type() == FIELD);
3841 FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
3842 if (details.representation().IsDouble()) {
3843 // Nothing more to be done.
3844 if (value->IsUninitialized()) return;
3845 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
3846 DCHECK(box->IsMutableHeapNumber());
3847 box->set_value(value->Number());
3849 FastPropertyAtPut(index, value);
// Debug-checked fast path for adding a property that is known not to exist
// yet: asserts no proxy/array-index/access-check/pre-existing property, then
// delegates to AddDataProperty (strict mode, must succeed). NOTE(review):
// the declaration line for |index| (uint32_t, used only by the DCHECK) is
// elided in this gapped listing.
3854 void JSObject::AddProperty(Handle<JSObject> object, Handle<Name> name,
3855 Handle<Object> value,
3856 PropertyAttributes attributes) {
3857 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
3858 CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
3861 DCHECK(!object->IsJSProxy());
3862 DCHECK(!name->AsArrayIndex(&index));
3863 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
3864 DCHECK(maybe.has_value);
3865 DCHECK(!it.IsFound());
3866 DCHECK(object->map()->is_extensible() ||
3867 name.is_identical_to(it.isolate()->factory()->hidden_string()));
3869 AddDataProperty(&it, value, attributes, STRICT,
3870 CERTAINLY_NOT_STORE_FROM_KEYED).Check();
3874 // Reconfigures a property to a data property with attributes, even if it is not
3876 MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes(
3877 Handle<JSObject> object,
3879 Handle<Object> value,
3880 PropertyAttributes attributes,
3881 ExecutableAccessorInfoHandling handling) {
// Walks the own-property lookup for |name| and either updates, reconfigures
// (attributes differ), or adds the data property, emitting Object.observe
// "update"/"reconfigure"/"add" records as appropriate. NOTE(review): the
// Handle<Name> name parameter line is elided in this gapped listing.
3882 DCHECK(!value->IsTheHole());
3883 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
3884 bool is_observed = object->map()->is_observed() &&
3885 *name != it.isolate()->heap()->hidden_string();
3886 for (; it.IsFound(); it.Next()) {
3887 switch (it.state()) {
3888 case LookupIterator::INTERCEPTOR:
3889 case LookupIterator::JSPROXY:
3890 case LookupIterator::NOT_FOUND:
3891 case LookupIterator::TRANSITION:
3894 case LookupIterator::ACCESS_CHECK:
3895 if (!it.isolate()->MayNamedAccess(object, name, v8::ACCESS_SET)) {
3896 return SetPropertyWithFailedAccessCheck(&it, value, SLOPPY);
3900 case LookupIterator::ACCESSOR: {
3901 PropertyDetails details = it.property_details();
3902 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
3903 // Ensure the context isn't changed after calling into accessors.
3904 AssertNoContextChange ncc(it.isolate());
3906 Handle<Object> accessors = it.GetAccessors();
// Fetch the old value through the accessor so the change record is accurate.
3908 if (is_observed && accessors->IsAccessorInfo()) {
3909 ASSIGN_RETURN_ON_EXCEPTION(
3910 it.isolate(), old_value,
3911 GetPropertyWithAccessor(it.GetReceiver(), it.name(),
3912 it.GetHolder<JSObject>(), accessors),
3916 // Special handling for ExecutableAccessorInfo, which behaves like a
3918 if (handling == DONT_FORCE_FIELD &&
3919 accessors->IsExecutableAccessorInfo()) {
3920 Handle<Object> result;
3921 ASSIGN_RETURN_ON_EXCEPTION(
3922 it.isolate(), result,
3923 JSObject::SetPropertyWithAccessor(it.GetReceiver(), it.name(),
3924 value, it.GetHolder<JSObject>(),
3927 DCHECK(result->SameValue(*value));
3929 if (details.attributes() == attributes) {
3930 // Regular property update if the attributes match.
3931 if (is_observed && !old_value->SameValue(*value)) {
3932 // If we are setting the prototype of a function and are
3933 // observed, don't send change records because the prototype
3934 // handles that itself.
3935 if (!object->IsJSFunction() ||
3936 !Name::Equals(it.isolate()->factory()->prototype_string(),
3938 !Handle<JSFunction>::cast(object)->should_have_prototype()) {
3939 RETURN_ON_EXCEPTION(
3941 EnqueueChangeRecord(object, "update", name, old_value),
3948 // Reconfigure the accessor if attributes mismatch.
3949 Handle<ExecutableAccessorInfo> new_data = Accessors::CloneAccessor(
3950 it.isolate(), Handle<ExecutableAccessorInfo>::cast(accessors));
3951 new_data->set_property_attributes(attributes);
3952 // By clearing the setter we don't have to introduce a lookup to
3953 // the setter, simply make it unavailable to reflect the
3955 if (attributes & READ_ONLY) new_data->clear_setter();
3956 SetPropertyCallback(object, name, new_data, attributes);
// Suppress the old value in the record when it did not actually change.
3958 if (old_value->SameValue(*value)) {
3959 old_value = it.isolate()->factory()->the_hole_value();
3961 RETURN_ON_EXCEPTION(
3963 EnqueueChangeRecord(object, "reconfigure", name, old_value),
// Non-ExecutableAccessorInfo accessor: replace it with a data property.
3969 it.ReconfigureDataProperty(value, attributes);
3970 it.PrepareForDataProperty(value);
3971 it.WriteDataValue(value);
3974 if (old_value->SameValue(*value)) {
3975 old_value = it.isolate()->factory()->the_hole_value();
3977 RETURN_ON_EXCEPTION(
3979 EnqueueChangeRecord(object, "reconfigure", name, old_value),
3986 case LookupIterator::DATA: {
3987 PropertyDetails details = it.property_details();
3988 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
3989 // Regular property update if the attributes match.
3990 if (details.attributes() == attributes) {
3991 return SetDataProperty(&it, value);
3993 // Reconfigure the data property if the attributes mismatch.
3994 if (is_observed) old_value = it.GetDataValue();
3996 it.ReconfigureDataProperty(value, attributes);
3997 it.PrepareForDataProperty(value);
3998 it.WriteDataValue(value);
4001 if (old_value->SameValue(*value)) {
4002 old_value = it.isolate()->factory()->the_hole_value();
4004 RETURN_ON_EXCEPTION(
4006 EnqueueChangeRecord(object, "reconfigure", name, old_value),
// Property not found anywhere on the own chain: add it fresh.
4015 return AddDataProperty(&it, value, attributes, STRICT,
4016 CERTAINLY_NOT_STORE_FROM_KEYED);
// Asks |holder|'s named interceptor for the attributes of |name| as seen
// from |receiver|. If the interceptor defines a query callback, its integer
// result is interpreted directly as PropertyAttributes; otherwise the getter
// callback is probed and a non-empty result is reported as DONT_ENUM.
// Returns maybe(ABSENT) when the interceptor does not claim the property.
// NOTE(review): this listing appears lossy — several lines (LOG calls,
// closing braces) are missing between the visible ones.
4020 Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor(
4021 Handle<JSObject> holder,
4022 Handle<Object> receiver,
4023 Handle<Name> name) {
4024 // TODO(rossberg): Support symbols in the API.
4025 if (name->IsSymbol()) return maybe(ABSENT);
4027 Isolate* isolate = holder->GetIsolate();
4028 HandleScope scope(isolate);
4030 // Make sure that the top context does not change when doing
4031 // callbacks or interceptor calls.
4032 AssertNoContextChange ncc(isolate);
4034 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
4035 PropertyCallbackArguments args(
4036 isolate, interceptor->data(), *receiver, *holder);
4037 if (!interceptor->query()->IsUndefined()) {
4038 v8::NamedPropertyQueryCallback query =
4039 v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
4041 ApiNamedPropertyAccess("interceptor-named-has", *holder, *name));
4042 v8::Handle<v8::Integer> result =
4043 args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
4044 if (!result.IsEmpty()) {
4045 DCHECK(result->IsInt32());
// The query callback returns the attributes encoded as an int32.
4046 return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
4048 } else if (!interceptor->getter()->IsUndefined()) {
4049 v8::NamedPropertyGetterCallback getter =
4050 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
4052 ApiNamedPropertyAccess("interceptor-named-get-has", *holder, *name));
4053 v8::Handle<v8::Value> result =
4054 args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
// Getter produced a value: the property exists but attributes are
// unknown, so report DONT_ENUM.
4055 if (!result.IsEmpty()) return maybe(DONT_ENUM);
// Propagate any exception a callback scheduled before reporting ABSENT.
4058 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
4059 return maybe(ABSENT);
// Returns the attributes of the own property |name| on |object|, routing
// array-index names to the element path and everything else through a
// LookupIterator restricted to the own (HIDDEN) chain.
4063 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
4064 Handle<JSReceiver> object, Handle<Name> name) {
4065 // Check whether the name is an array index.
// NOTE(review): the declaration of |index| (presumably uint32_t) is
// missing from this listing — confirm against the original file.
4067 if (object->IsJSObject() && name->AsArrayIndex(&index)) {
4068 return GetOwnElementAttribute(object, index);
4070 LookupIterator it(object, name, LookupIterator::HIDDEN);
4071 return GetPropertyAttributes(&it);
// Walks the lookup iterator and returns the attributes of the first
// property found, dispatching per iterator state: proxies and interceptors
// get their own handlers, access checks may fail over to the
// failed-access-check path, and plain data/accessor properties report
// their stored details. Returns maybe(ABSENT) if nothing is found.
4075 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
4076 LookupIterator* it) {
4077 for (; it->IsFound(); it->Next()) {
4078 switch (it->state()) {
// NOT_FOUND/TRANSITION are not expected here (UNREACHABLE in the
// original; that line is missing from this listing).
4079 case LookupIterator::NOT_FOUND:
4080 case LookupIterator::TRANSITION:
4082 case LookupIterator::JSPROXY:
4083 return JSProxy::GetPropertyAttributesWithHandler(
4084 it->GetHolder<JSProxy>(), it->GetReceiver(), it->name());
4085 case LookupIterator::INTERCEPTOR: {
4086 Maybe<PropertyAttributes> result =
4087 JSObject::GetPropertyAttributesWithInterceptor(
4088 it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
// An empty Maybe signals an exception; a non-ABSENT value is final.
// Otherwise fall through and keep walking the chain.
4089 if (!result.has_value) return result;
4090 if (result.value != ABSENT) return result;
4093 case LookupIterator::ACCESS_CHECK:
4094 if (it->HasAccess(v8::ACCESS_HAS)) break;
4095 return JSObject::GetPropertyAttributesWithFailedAccessCheck(it);
4096 case LookupIterator::ACCESSOR:
4097 case LookupIterator::DATA:
4098 return maybe(it->property_details().attributes());
4101 return maybe(ABSENT);
// Returns the attributes of element |index| on |object| as seen from
// |receiver|. Performs the indexed access check first, unwraps a global
// proxy to its global object, and routes through the indexed interceptor
// unless the bootstrapper is active.
4105 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithReceiver(
4106 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4107 bool check_prototype) {
4108 Isolate* isolate = object->GetIsolate();
4110 // Check access rights if needed.
4111 if (object->IsAccessCheckNeeded()) {
4112 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
4113 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
4114 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
4115 return maybe(ABSENT);
// A global proxy delegates to the global object behind it; a detached
// proxy (empty prototype chain) has no elements.
4119 if (object->IsJSGlobalProxy()) {
4120 PrototypeIterator iter(isolate, object);
4121 if (iter.IsAtEnd()) return maybe(ABSENT);
4122 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4123 return JSObject::GetElementAttributeWithReceiver(
4124 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4125 index, check_prototype);
4128 // Check for lookup interceptor except when bootstrapping.
4129 if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4130 return JSObject::GetElementAttributeWithInterceptor(
4131 object, receiver, index, check_prototype);
4134 return GetElementAttributeWithoutInterceptor(
4135 object, receiver, index, check_prototype);
// Asks the indexed interceptor for the element's attributes first; if the
// interceptor signals an exception (empty Maybe) that is propagated, if it
// reports a definite value that is returned, and only on ABSENT does the
// lookup continue without the interceptor.
4139 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithInterceptor(
4140 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4141 bool check_prototype) {
4142 Isolate* isolate = object->GetIsolate();
4143 HandleScope scope(isolate);
4145 // Make sure that the top context does not change when doing
4146 // callbacks or interceptor calls.
4147 AssertNoContextChange ncc(isolate);
4149 Maybe<PropertyAttributes> from_interceptor =
4150 GetElementAttributeFromInterceptor(object, receiver, index);
4151 if (!from_interceptor.has_value) return Maybe<PropertyAttributes>();
4152 if (from_interceptor.value != ABSENT) return maybe(from_interceptor.value);
// NOTE(review): the trailing check_prototype argument line is missing
// from this listing.
4154 return GetElementAttributeWithoutInterceptor(object, receiver, index,
// Invokes the indexed interceptor's query callback (attributes come back
// as an int32) or, failing that, probes its getter (a non-empty result is
// reported as NONE). Returns maybe(ABSENT) when the interceptor does not
// claim the element.
4159 Maybe<PropertyAttributes> JSObject::GetElementAttributeFromInterceptor(
4160 Handle<JSObject> object, Handle<Object> receiver, uint32_t index) {
4161 Isolate* isolate = object->GetIsolate();
4162 AssertNoContextChange ncc(isolate);
4164 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4165 PropertyCallbackArguments args(
4166 isolate, interceptor->data(), *receiver, *object);
4167 if (!interceptor->query()->IsUndefined()) {
4168 v8::IndexedPropertyQueryCallback query =
4169 v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
4171 ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4172 v8::Handle<v8::Integer> result = args.Call(query, index);
4173 if (!result.IsEmpty())
4174 return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
4175 } else if (!interceptor->getter()->IsUndefined()) {
4176 v8::IndexedPropertyGetterCallback getter =
4177 v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4179 ApiIndexedPropertyAccess(
4180 "interceptor-indexed-get-has", *object, index));
4181 v8::Handle<v8::Value> result = args.Call(getter, index);
// Getter produced a value: element exists, attributes unknown → NONE.
4182 if (!result.IsEmpty()) return maybe(NONE);
4184 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
4185 return maybe(ABSENT);
// Computes element attributes from the elements backing store itself,
// with a special case for indexed characters of String wrappers
// (READ_ONLY|DONT_DELETE). When |check_prototype| is set and the element
// is absent locally, the lookup continues up the prototype chain,
// delegating to the proxy handler path if a JSProxy is encountered.
4189 Maybe<PropertyAttributes> JSObject::GetElementAttributeWithoutInterceptor(
4190 Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
4191 bool check_prototype) {
4192 PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4193 receiver, object, index);
4194 if (attr != ABSENT) return maybe(attr);
4196 // Handle [] on String objects.
4197 if (object->IsStringObjectWithCharacterAt(index)) {
4198 return maybe(static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE));
4201 if (!check_prototype) return maybe(ABSENT);
4203 PrototypeIterator iter(object->GetIsolate(), object);
4204 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
4205 // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4206 return JSProxy::GetElementAttributeWithHandler(
4207 Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
4210 if (iter.IsAtEnd()) return maybe(ABSENT);
4211 return GetElementAttributeWithReceiver(
4212 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
// Allocates a fresh normalized-map cache as a tenured FixedArray of
// kEntries slots and reinterprets it as a NormalizedMapCache.
4217 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
4218 Handle<FixedArray> array(
4219 isolate->factory()->NewFixedArray(kEntries, TENURED));
4220 return Handle<NormalizedMapCache>::cast(array);
// Looks up a cached normalized map equivalent to |fast_map| under |mode|.
// Returns an empty MaybeHandle on a cache miss (slot empty or holding a
// non-equivalent map).
4224 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
4225 PropertyNormalizationMode mode) {
4226 DisallowHeapAllocation no_gc;
4227 Object* value = FixedArray::get(GetIndex(fast_map));
4228 if (!value->IsMap() ||
4229 !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
4230 return MaybeHandle<Map>();
4232 return handle(Map::cast(value));
// Stores |normalized_map| in the cache slot keyed by |fast_map|. The map
// must already be a dictionary map.
4236 void NormalizedMapCache::Set(Handle<Map> fast_map,
4237 Handle<Map> normalized_map) {
4238 DisallowHeapAllocation no_gc;
4239 DCHECK(normalized_map->is_dictionary_map());
4240 FixedArray::set(GetIndex(fast_map), *normalized_map);
// Empties every cache entry.
// NOTE(review): the loop body (presumably set_undefined(i) or similar) is
// missing from this listing — confirm against the original file.
4244 void NormalizedMapCache::Clear() {
4245 int entries = length();
4246 for (int i = 0; i != entries; i++) {
// Convenience wrapper: records |code| under |name| in the code cache of
// |object|'s map. NOTE(review): the |name| parameter line is missing from
// this listing.
4252 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4254 Handle<Code> code) {
4255 Handle<Map> map(object->map());
4256 Map::UpdateCodeCache(map, name, code);
// Converts |object| from fast (descriptor-based) properties to slow
// (dictionary) properties. No-op if the object is already normalized.
4260 void JSObject::NormalizeProperties(Handle<JSObject> object,
4261 PropertyNormalizationMode mode,
4262 int expected_additional_properties) {
4263 if (!object->HasFastProperties()) return;
4265 Handle<Map> map(object->map());
4266 Handle<Map> new_map = Map::Normalize(map, mode);
4268 MigrateFastToSlow(object, new_map, expected_additional_properties);
// Rewrites |object|'s fast properties into a NameDictionary and installs
// |new_map|. Copies every own descriptor (constants, fields — boxing
// mutable doubles into fresh HeapNumbers — and callbacks) into the
// dictionary, shrinks the object in place with a filler if the new map is
// smaller, then switches map and properties.
// NOTE(review): listing is lossy; case labels and some closing braces of
// the switch are missing from view.
4272 void JSObject::MigrateFastToSlow(Handle<JSObject> object,
4273 Handle<Map> new_map,
4274 int expected_additional_properties) {
4275 // The global object is always normalized.
4276 DCHECK(!object->IsGlobalObject());
4277 // JSGlobalProxy must never be normalized
4278 DCHECK(!object->IsJSGlobalProxy());
4280 Isolate* isolate = object->GetIsolate();
4281 HandleScope scope(isolate);
4282 Handle<Map> map(object->map());
4284 // Allocate new content.
4285 int real_size = map->NumberOfOwnDescriptors();
4286 int property_count = real_size;
4287 if (expected_additional_properties > 0) {
4288 property_count += expected_additional_properties;
4290 property_count += 2; // Make space for two more properties.
4292 Handle<NameDictionary> dictionary =
4293 NameDictionary::New(isolate, property_count);
4295 Handle<DescriptorArray> descs(map->instance_descriptors());
4296 for (int i = 0; i < real_size; i++) {
4297 PropertyDetails details = descs->GetDetails(i);
4298 switch (details.type()) {
// Constant descriptor: store its value with a NORMAL dictionary entry,
// preserving enumeration order via i + 1.
4300 Handle<Name> key(descs->GetKey(i));
4301 Handle<Object> value(descs->GetConstant(i), isolate);
4302 PropertyDetails d = PropertyDetails(
4303 details.attributes(), NORMAL, i + 1);
4304 dictionary = NameDictionary::Add(dictionary, key, value, d);
// Field descriptor: read the in-object/backing-store value.
4308 Handle<Name> key(descs->GetKey(i));
4309 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
4310 Handle<Object> value(
4311 object->RawFastPropertyAt(index), isolate);
4312 if (details.representation().IsDouble()) {
// Mutable HeapNumbers are owned by the fast object; clone so the
// dictionary gets an independent number.
4313 DCHECK(value->IsMutableHeapNumber());
4314 Handle<HeapNumber> old = Handle<HeapNumber>::cast(value);
4315 value = isolate->factory()->NewHeapNumber(old->value());
4318 PropertyDetails(details.attributes(), NORMAL, i + 1);
4319 dictionary = NameDictionary::Add(dictionary, key, value, d);
// Callbacks descriptor: keep the accessor object, typed CALLBACKS.
4323 Handle<Name> key(descs->GetKey(i));
4324 Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4325 PropertyDetails d = PropertyDetails(
4326 details.attributes(), CALLBACKS, i + 1);
4327 dictionary = NameDictionary::Add(dictionary, key, value, d);
4336 // Copy the next enumeration index from instance descriptor.
4337 dictionary->SetNextEnumerationIndex(real_size + 1);
4339 // From here on we cannot fail and we shouldn't GC anymore.
4340 DisallowHeapAllocation no_allocation;
4342 // Resize the object in the heap if necessary.
4343 int new_instance_size = new_map->instance_size();
4344 int instance_size_delta = map->instance_size() - new_instance_size;
4345 DCHECK(instance_size_delta >= 0);
4347 if (instance_size_delta > 0) {
// Fill the tail of the shrunk object so the heap stays iterable, and
// account for the freed bytes.
4348 Heap* heap = isolate->heap();
4349 heap->CreateFillerObjectAt(object->address() + new_instance_size,
4350 instance_size_delta);
4351 heap->AdjustLiveBytes(object->address(), -instance_size_delta,
4352 Heap::FROM_MUTATOR);
4355 // We are storing the new map using release store after creating a filler for
4356 // the left-over space to avoid races with the sweeper thread.
4357 object->synchronized_set_map(*new_map);
4359 object->set_properties(*dictionary);
4361 isolate->counters()->props_to_dictionary()->Increment();
4364 if (FLAG_trace_normalization) {
4365 OFStream os(stdout);
4366 os << "Object properties have been normalized:\n";
// Converts |object| from dictionary properties back to fast properties:
// computes a stable iteration order, builds a new descriptor array
// (constant descriptors for JSFunction values, fields for other NORMAL
// values, callbacks descriptors for accessors), allocates the out-of-object
// fields array, and installs the new map. Bails out if the dictionary has
// too many entries to fit in descriptors.
// NOTE(review): listing is lossy; some else-branch and brace lines are
// missing from view.
4373 void JSObject::MigrateSlowToFast(Handle<JSObject> object,
4374 int unused_property_fields) {
4375 if (object->HasFastProperties()) return;
4376 DCHECK(!object->IsGlobalObject());
4377 Isolate* isolate = object->GetIsolate();
4378 Factory* factory = isolate->factory();
4379 Handle<NameDictionary> dictionary(object->property_dictionary());
4381 // Make sure we preserve dictionary representation if there are too many
4383 int number_of_elements = dictionary->NumberOfElements();
4384 if (number_of_elements > kMaxNumberOfDescriptors) return;
4386 Handle<FixedArray> iteration_order;
4387 if (number_of_elements != dictionary->NextEnumerationIndex()) {
// Enumeration indices have gaps (deletions); renumber before building
// the iteration order.
4389 NameDictionary::DoGenerateNewEnumerationIndices(dictionary);
4391 iteration_order = NameDictionary::BuildIterationIndicesArray(dictionary);
4394 int instance_descriptor_length = iteration_order->length();
4395 int number_of_fields = 0;
4397 // Compute the length of the instance descriptor.
4398 for (int i = 0; i < instance_descriptor_length; i++) {
4399 int index = Smi::cast(iteration_order->get(i))->value();
4400 DCHECK(dictionary->IsKey(dictionary->KeyAt(index)));
4402 Object* value = dictionary->ValueAt(index);
4403 PropertyType type = dictionary->DetailsAt(index).type();
4404 DCHECK(type != FIELD);
// Only non-function NORMAL values become fields; functions become
// constant descriptors and callbacks need no field slot.
4405 if (type == NORMAL && !value->IsJSFunction()) {
4406 number_of_fields += 1;
4410 int inobject_props = object->map()->inobject_properties();
4412 // Allocate new map.
4413 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
4414 new_map->set_dictionary_map(false);
4416 if (instance_descriptor_length == 0) {
// No properties at all: install the empty fast shape directly.
4417 DisallowHeapAllocation no_gc;
4418 DCHECK_LE(unused_property_fields, inobject_props);
4419 // Transform the object.
4420 new_map->set_unused_property_fields(inobject_props);
4421 object->synchronized_set_map(*new_map);
4422 object->set_properties(isolate->heap()->empty_fixed_array());
4423 // Check that it really works.
4424 DCHECK(object->HasFastProperties());
4428 // Allocate the instance descriptor.
4429 Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
4430 isolate, instance_descriptor_length);
4432 int number_of_allocated_fields =
4433 number_of_fields + unused_property_fields - inobject_props;
4434 if (number_of_allocated_fields < 0) {
4435 // There is enough inobject space for all fields (including unused).
4436 number_of_allocated_fields = 0;
4437 unused_property_fields = inobject_props - number_of_fields;
4440 // Allocate the fixed array for the fields.
4441 Handle<FixedArray> fields = factory->NewFixedArray(
4442 number_of_allocated_fields);
4444 // Fill in the instance descriptor and the fields.
4445 int current_offset = 0;
4446 for (int i = 0; i < instance_descriptor_length; i++) {
4447 int index = Smi::cast(iteration_order->get(i))->value();
4448 Object* k = dictionary->KeyAt(index);
4449 DCHECK(dictionary->IsKey(k));
4451 Object* value = dictionary->ValueAt(index);
4453 if (k->IsSymbol()) {
4454 key = handle(Symbol::cast(k));
4456 // Ensure the key is a unique name before writing into the
4457 // instance descriptor.
4458 key = factory->InternalizeString(handle(String::cast(k)));
4461 PropertyDetails details = dictionary->DetailsAt(index);
4462 int enumeration_index = details.dictionary_index();
4463 PropertyType type = details.type();
4465 if (value->IsJSFunction()) {
4466 ConstantDescriptor d(key, handle(value, isolate), details.attributes());
4467 descriptors->Set(enumeration_index - 1, &d);
4468 } else if (type == NORMAL) {
// First inobject_props fields live inside the object; the rest go
// into the separate fields array.
4469 if (current_offset < inobject_props) {
4470 object->InObjectPropertyAtPut(current_offset, value,
4471 UPDATE_WRITE_BARRIER);
4473 int offset = current_offset - inobject_props;
4474 fields->set(offset, value);
4476 FieldDescriptor d(key, current_offset++, details.attributes(),
4477 // TODO(verwaest): value->OptimalRepresentation();
4478 Representation::Tagged());
4479 descriptors->Set(enumeration_index - 1, &d);
4480 } else if (type == CALLBACKS) {
4481 CallbacksDescriptor d(key, handle(value, isolate), details.attributes());
4482 descriptors->Set(enumeration_index - 1, &d);
4487 DCHECK(current_offset == number_of_fields);
4489 descriptors->Sort();
4491 DisallowHeapAllocation no_gc;
4492 new_map->InitializeDescriptors(*descriptors);
4493 new_map->set_unused_property_fields(unused_property_fields);
4495 // Transform the object.
4496 object->synchronized_set_map(*new_map);
4498 object->set_properties(*fields);
4499 DCHECK(object->IsJSObject());
4501 // Check that it really works.
4502 DCHECK(object->HasFastProperties());
// Replaces |object|'s elements with an empty backing store of the kind the
// current map expects: a fresh empty dictionary for dictionary-mode
// elements, otherwise the map's canonical initial elements. Sloppy
// arguments objects are explicitly rejected.
4506 void JSObject::ResetElements(Handle<JSObject> object) {
4507 Isolate* isolate = object->GetIsolate();
4508 CHECK(object->map() != isolate->heap()->sloppy_arguments_elements_map());
4509 if (object->map()->has_dictionary_elements()) {
4510 Handle<SeededNumberDictionary> new_elements =
4511 SeededNumberDictionary::New(isolate, 0);
4512 object->set_elements(*new_elements);
4514 object->set_elements(object->map()->GetInitialElements());
// Copies the first |length| elements of a fast backing store (plain or
// double) into |dictionary|, skipping holes. Double values are boxed into
// fresh HeapNumbers. Returns the (possibly reallocated) dictionary.
// NOTE(review): the |length| parameter line and the assignment of
// AddNumberEntry's result back to |dictionary| appear to be missing from
// this listing.
4519 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
4520 Handle<FixedArrayBase> array,
4522 Handle<SeededNumberDictionary> dictionary) {
4523 Isolate* isolate = array->GetIsolate();
4524 Factory* factory = isolate->factory();
4525 bool has_double_elements = array->IsFixedDoubleArray();
4526 for (int i = 0; i < length; i++) {
4527 Handle<Object> value;
4528 if (has_double_elements) {
4529 Handle<FixedDoubleArray> double_array =
4530 Handle<FixedDoubleArray>::cast(array);
4531 if (double_array->is_the_hole(i)) {
4532 value = factory->the_hole_value();
4534 value = factory->NewHeapNumber(double_array->get_scalar(i));
4537 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
4539 if (!value->IsTheHole()) {
4540 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4542 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
// Converts |object|'s fast elements into a SeededNumberDictionary and
// switches the object to DICTIONARY_ELEMENTS. For sloppy-arguments
// backing stores the dictionary is stored in slot 1 of the wrapper array
// instead of replacing the elements directly. Returns the dictionary;
// returns early if elements are already dictionary-backed.
// NOTE(review): listing is lossy; the is_arguments flag declaration and
// some branch/brace lines are missing from view.
4549 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4550 Handle<JSObject> object) {
4551 DCHECK(!object->HasExternalArrayElements() &&
4552 !object->HasFixedTypedArrayElements());
4553 Isolate* isolate = object->GetIsolate();
4555 // Find the backing store.
4556 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
4558 (array->map() == isolate->heap()->sloppy_arguments_elements_map());
// Sloppy-arguments wrapper: the real backing store sits in slot 1.
4560 array = handle(FixedArrayBase::cast(
4561 Handle<FixedArray>::cast(array)->get(1)));
4563 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
4565 DCHECK(object->HasFastSmiOrObjectElements() ||
4566 object->HasFastDoubleElements() ||
4567 object->HasFastArgumentsElements());
4568 // Compute the effective length and allocate a new backing store.
4569 int length = object->IsJSArray()
4570 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
4572 int old_capacity = 0;
4573 int used_elements = 0;
4574 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
4575 Handle<SeededNumberDictionary> dictionary =
4576 SeededNumberDictionary::New(isolate, used_elements);
4578 dictionary = CopyFastElementsToDictionary(array, length, dictionary);
4580 // Switch to using the dictionary as the backing storage for elements.
4582 FixedArray::cast(object->elements())->set(1, *dictionary);
4584 // Set the new map first to satify the elements type assert in
4586 Handle<Map> new_map =
4587 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
4589 JSObject::MigrateToMap(object, new_map);
4590 object->set_elements(*dictionary);
4593 isolate->counters()->elements_to_dictionary()->Increment();
4596 if (FLAG_trace_normalization) {
4597 OFStream os(stdout);
4598 os << "Object elements have been normalized:\n";
4603 DCHECK(object->HasDictionaryElements() ||
4604 object->HasDictionaryArgumentsElements());
// Produces a random non-zero identity hash that fits in a Smi. Retries up
// to 30 times to avoid 0, then forces 1 as a last resort.
// NOTE(review): the do/attempt-counter lines are missing from this
// listing.
4609 static Smi* GenerateIdentityHash(Isolate* isolate) {
4613 // Generate a random 32-bit hash value but limit range to fit
4615 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
4617 } while (hash_value == 0 && attempts < 30);
4618 hash_value = hash_value != 0 ? hash_value : 1; // never return 0
4620 return Smi::FromInt(hash_value);
// Stores |hash| as the object's identity hash under the reserved
// identity_hash_string hidden property. Not valid on global proxies,
// which keep their hash internally.
4624 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
4625 DCHECK(!object->IsJSGlobalProxy());
4626 Isolate* isolate = object->GetIsolate();
4627 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// Shared helper for proxy-like objects that store their identity hash in a
// dedicated hash() slot: returns the existing Smi hash, or generates and
// stores a new one. NOTE(review): the trailing `return hash;` line is
// missing from this listing.
4631 template<typename ProxyType>
4632 static Handle<Smi> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) {
4633 Isolate* isolate = proxy->GetIsolate();
4635 Handle<Object> maybe_hash(proxy->hash(), isolate);
4636 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4638 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4639 proxy->set_hash(*hash);
// Reads the identity hash without allocating: global proxies keep it in
// their hash slot, other objects under the identity_hash_string hidden
// property. Returns undefined when no hash has been set yet.
4644 Object* JSObject::GetIdentityHash() {
4645 DisallowHeapAllocation no_gc;
4646 Isolate* isolate = GetIsolate();
4647 if (IsJSGlobalProxy()) {
4648 return JSGlobalProxy::cast(this)->hash();
4650 Object* stored_value =
4651 GetHiddenProperty(isolate->factory()->identity_hash_string());
// NOTE(review): the `? stored_value` arm of this conditional is missing
// from the listing.
4652 return stored_value->IsSmi()
4654 : isolate->heap()->undefined_value();
// Returns the object's identity hash, creating and storing one if absent.
// Global proxies delegate to the internal-slot helper; other objects store
// the new hash as a hidden property. NOTE(review): the trailing
// `return hash;` line is missing from this listing.
4658 Handle<Smi> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
4659 if (object->IsJSGlobalProxy()) {
4660 return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object));
4663 Isolate* isolate = object->GetIsolate();
4665 Handle<Object> maybe_hash(object->GetIdentityHash(), isolate);
4666 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);
4668 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
4669 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
// Proxies store their identity hash directly in the hash slot.
4674 Object* JSProxy::GetIdentityHash() {
4675 return this->hash();
// Delegates to the shared hash-slot helper (create-on-first-use).
4679 Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
4680 return GetOrCreateIdentityHashHelper(proxy);
// Looks up |key| in the object's hidden-properties store. Global proxies
// forward to the global object behind them (the-hole when detached). An
// inline Smi store only answers the identity-hash key; otherwise the
// hidden ObjectHashTable is consulted. Returns the-hole when absent.
// NOTE(review): the trailing `return entry;` line appears to be missing
// from this listing.
4684 Object* JSObject::GetHiddenProperty(Handle<Name> key) {
4685 DisallowHeapAllocation no_gc;
4686 DCHECK(key->IsUniqueName());
4687 if (IsJSGlobalProxy()) {
4688 // JSGlobalProxies store their hash internally.
4689 DCHECK(*key != GetHeap()->identity_hash_string());
4690 // For a proxy, use the prototype as target object.
4691 PrototypeIterator iter(GetIsolate(), this);
4692 // If the proxy is detached, return undefined.
4693 if (iter.IsAtEnd()) return GetHeap()->the_hole_value();
4694 DCHECK(iter.GetCurrent()->IsJSGlobalObject());
4695 return JSObject::cast(iter.GetCurrent())->GetHiddenProperty(key);
4697 DCHECK(!IsJSGlobalProxy());
4698 Object* inline_value = GetHiddenPropertiesHashTable();
4700 if (inline_value->IsSmi()) {
4701 // Handle inline-stored identity hash.
4702 if (*key == GetHeap()->identity_hash_string()) {
4703 return inline_value;
4705 return GetHeap()->the_hole_value();
4709 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4711 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4712 Object* entry = hashtable->Lookup(key);
// Stores |key| → |value| in the object's hidden-properties store. Global
// proxies forward to the global object (undefined when detached). A Smi
// identity hash can be stored inline without a table; otherwise the hidden
// hash table is created/fetched and updated, writing the table back if it
// grew. NOTE(review): listing is lossy; the final `return object;` and the
// |key| parameter line are missing from view.
4717 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
4719 Handle<Object> value) {
4720 Isolate* isolate = object->GetIsolate();
4722 DCHECK(key->IsUniqueName());
4723 if (object->IsJSGlobalProxy()) {
4724 // JSGlobalProxies store their hash internally.
4725 DCHECK(*key != *isolate->factory()->identity_hash_string());
4726 // For a proxy, use the prototype as target object.
4727 PrototypeIterator iter(isolate, object);
4728 // If the proxy is detached, return undefined.
4729 if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
4730 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4731 return SetHiddenProperty(
4732 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key,
4735 DCHECK(!object->IsJSGlobalProxy());
4737 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4739 // If there is no backing store yet, store the identity hash inline.
4740 if (value->IsSmi() &&
4741 *key == *isolate->factory()->identity_hash_string() &&
4742 (inline_value->IsUndefined() || inline_value->IsSmi())) {
4743 return JSObject::SetHiddenPropertiesHashTable(object, value);
4746 Handle<ObjectHashTable> hashtable =
4747 GetOrCreateHiddenPropertiesHashtable(object);
4749 // If it was found, check if the key is already in the dictionary.
4750 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
4752 if (*new_table != *hashtable) {
4753 // If adding the key expanded the dictionary (i.e., Add returned a new
4754 // dictionary), store it back to the object.
4755 SetHiddenPropertiesHashTable(object, new_table);
4758 // Return this to mark success.
// Removes |key| from the object's hidden-properties table. Global proxies
// forward to the global object; a no-op when the store is absent or holds
// only an inline identity hash (identity hashes are never deleted).
4763 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
4764 Isolate* isolate = object->GetIsolate();
4765 DCHECK(key->IsUniqueName());
4767 if (object->IsJSGlobalProxy()) {
4768 PrototypeIterator iter(isolate, object);
4769 if (iter.IsAtEnd()) return;
4770 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4771 return DeleteHiddenProperty(
4772 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key);
4775 Object* inline_value = object->GetHiddenPropertiesHashTable();
4777 // We never delete (inline-stored) identity hashes.
4778 DCHECK(*key != *isolate->factory()->identity_hash_string());
4779 if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
4781 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
4782 bool was_present = false;
4783 ObjectHashTable::Remove(hashtable, key, &was_present);
// True iff the object carries the reserved hidden_string own property
// (the slot that backs hidden properties), checked without interceptors.
4787 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
4788 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
4789 LookupIterator it(object, hidden, LookupIterator::OWN_SKIP_INTERCEPTOR);
4790 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
4791 // Cannot get an exception since the hidden_string isn't accessible to JS.
4792 DCHECK(maybe.has_value);
4793 return maybe.value != ABSENT;
// Fetches the raw value stored under the hidden_string property: either an
// inline Smi identity hash, an ObjectHashTable, or undefined when absent.
// Fast-properties objects can read it straight from descriptor slot 0
// because the hidden string's hash code is zero, so it always sorts first.
4797 Object* JSObject::GetHiddenPropertiesHashTable() {
4798 DCHECK(!IsJSGlobalProxy());
4799 if (HasFastProperties()) {
4800 // If the object has fast properties, check whether the first slot
4801 // in the descriptor array matches the hidden string. Since the
4802 // hidden strings hash code is zero (and no other name has hash
4803 // code zero) it will always occupy the first entry if present.
4804 DescriptorArray* descriptors = this->map()->instance_descriptors();
4805 if (descriptors->number_of_descriptors() > 0) {
4806 int sorted_index = descriptors->GetSortedKeyIndex(0);
4807 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
4808 sorted_index < map()->NumberOfOwnDescriptors()) {
4809 DCHECK(descriptors->GetType(sorted_index) == FIELD);
4810 DCHECK(descriptors->GetDetails(sorted_index).representation().
4811 IsCompatibleForLoad(Representation::Tagged()));
// NOTE(review): the second argument line of ForDescriptor is missing
// from this listing.
4812 FieldIndex index = FieldIndex::ForDescriptor(this->map(),
4814 return this->RawFastPropertyAt(index);
4816 return GetHeap()->undefined_value();
4819 return GetHeap()->undefined_value();
// Slow path: dictionary-mode objects go through a lookup iterator.
4822 Isolate* isolate = GetIsolate();
4823 LookupIterator it(handle(this), isolate->factory()->hidden_string(),
4824 LookupIterator::OWN_SKIP_INTERCEPTOR);
4825 // Access check is always skipped for the hidden string anyways.
4826 return *GetDataProperty(&it);
// Returns the hidden-properties hash table, creating a small one (capacity
// 4) on first use. An inline-stored Smi identity hash is migrated into the
// new table under identity_hash_string. NOTE(review): the trailing
// `return hashtable;` line and the Put value argument are missing from
// this listing.
4830 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
4831 Handle<JSObject> object) {
4832 Isolate* isolate = object->GetIsolate();
4834 static const int kInitialCapacity = 4;
4835 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4836 if (inline_value->IsHashTable()) {
4837 return Handle<ObjectHashTable>::cast(inline_value);
4840 Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
4841 isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);
4843 if (inline_value->IsSmi()) {
4844 // We were storing the identity hash inline and now allocated an actual
4845 // dictionary. Put the identity hash into the new dictionary.
4846 hashtable = ObjectHashTable::Put(hashtable,
4847 isolate->factory()->identity_hash_string(),
4851 SetHiddenPropertiesHashTable(object, hashtable);
// Writes |value| (an inline Smi hash or an ObjectHashTable) into the
// reserved hidden_string own property, non-enumerable. NOTE(review): the
// trailing `return object;` line is missing from this listing.
4856 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
4857 Handle<Object> value) {
4858 DCHECK(!object->IsJSGlobalProxy());
4859 Isolate* isolate = object->GetIsolate();
4860 Handle<Name> name = isolate->factory()->hidden_string();
4861 SetOwnPropertyIgnoreAttributes(object, name, value, DONT_ENUM).Assert();
// Runs |holder|'s named-interceptor deleter for |name|. Returns an empty
// MaybeHandle when the interceptor declines (symbol key, no deleter, or
// empty callback result) so the caller falls back to ordinary deletion;
// otherwise returns the reboxed boolean the callback produced.
4866 MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
4867 Handle<JSObject> holder, Handle<JSObject> receiver, Handle<Name> name) {
4868 Isolate* isolate = holder->GetIsolate();
4870 // TODO(rossberg): Support symbols in the API.
4871 if (name->IsSymbol()) return MaybeHandle<Object>();
4873 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
4874 if (interceptor->deleter()->IsUndefined()) return MaybeHandle<Object>();
4876 v8::NamedPropertyDeleterCallback deleter =
4877 v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
4879 ApiNamedPropertyAccess("interceptor-named-delete", *holder, *name));
4880 PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
4882 v8::Handle<v8::Boolean> result =
4883 args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
4884 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4885 if (result.IsEmpty()) return MaybeHandle<Object>();
4887 DCHECK(result->IsBoolean());
4888 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
4889 result_internal->VerifyApiCallResultType();
4890 // Rebox CustomArguments::kReturnValueOffset before returning.
4891 return handle(*result_internal, isolate);
// Runs the indexed-interceptor deleter for element |index|. A non-empty
// callback result is reboxed and returned as the answer; otherwise the
// element is deleted through the elements accessor (NORMAL_DELETION).
// Returns false_value when there is no deleter at all.
// NOTE(review): the |index| parameter line is missing from this listing.
4895 MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
4896 Handle<JSObject> object,
4898 Isolate* isolate = object->GetIsolate();
4899 Factory* factory = isolate->factory();
4901 // Make sure that the top context does not change when doing
4902 // callbacks or interceptor calls.
4903 AssertNoContextChange ncc(isolate);
4905 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4906 if (interceptor->deleter()->IsUndefined()) return factory->false_value();
4907 v8::IndexedPropertyDeleterCallback deleter =
4908 v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
4910 ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
4911 PropertyCallbackArguments args(
4912 isolate, interceptor->data(), *object, *object);
4913 v8::Handle<v8::Boolean> result = args.Call(deleter, index);
4914 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4915 if (!result.IsEmpty()) {
4916 DCHECK(result->IsBoolean());
4917 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
4918 result_internal->VerifyApiCallResultType();
4919 // Rebox CustomArguments::kReturnValueOffset before returning.
4920 return handle(*result_internal, isolate);
// Interceptor declined: fall back to deleting through the elements
// accessor.
4922 MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
4923 object, index, NORMAL_DELETION);
4924 return delete_result;
// Deletes element |index| from |object|: performs the indexed access
// check, refuses (or throws in strict mode) for String-wrapper characters,
// unwraps global proxies, snapshots the old value for observers, routes
// through the indexed interceptor unless forcing deletion, and enqueues a
// "delete" change record when an observed element actually disappeared.
// NOTE(review): listing is lossy; the |index|/|mode| parameter lines and
// several brace/return lines are missing from view.
4928 MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
4931 Isolate* isolate = object->GetIsolate();
4932 Factory* factory = isolate->factory();
4934 // Check access rights if needed.
4935 if (object->IsAccessCheckNeeded() &&
4936 !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
4937 isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
4938 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
4939 return factory->false_value();
// Indexed characters of String wrappers are non-configurable.
4942 if (object->IsStringObjectWithCharacterAt(index)) {
4943 if (mode == STRICT_DELETION) {
4944 // Deleting a non-configurable property in strict mode.
4945 Handle<Object> name = factory->NewNumberFromUint(index);
4946 Handle<Object> args[2] = { name, object };
4947 THROW_NEW_ERROR(isolate, NewTypeError("strict_delete_property",
4948 HandleVector(args, 2)),
4951 return factory->false_value();
4954 if (object->IsJSGlobalProxy()) {
4955 PrototypeIterator iter(isolate, object);
4956 if (iter.IsAtEnd()) return factory->false_value();
4957 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
4958 return DeleteElement(
4959 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
4963 Handle<Object> old_value;
4964 bool should_enqueue_change_record = false;
4965 if (object->map()->is_observed()) {
4966 Maybe<bool> maybe = HasOwnElement(object, index);
4967 if (!maybe.has_value) return MaybeHandle<Object>();
4968 should_enqueue_change_record = maybe.value;
4969 if (should_enqueue_change_record) {
// Accessor elements report the-hole as the old value; data elements
// report the actual value.
4970 if (!GetOwnElementAccessorPair(object, index).is_null()) {
4971 old_value = Handle<Object>::cast(factory->the_hole_value());
4973 old_value = Object::GetElement(
4974 isolate, object, index).ToHandleChecked();
4979 // Skip interceptor if forcing deletion.
4980 MaybeHandle<Object> maybe_result;
4981 if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
4982 maybe_result = DeleteElementWithInterceptor(object, index);
4984 maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
4986 Handle<Object> result;
4987 ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);
4989 if (should_enqueue_change_record) {
// Only report the deletion if the element is in fact gone now.
4990 Maybe<bool> maybe = HasOwnElement(object, index);
4991 if (!maybe.has_value) return MaybeHandle<Object>();
4993 Handle<String> name = factory->Uint32ToString(index);
4994 RETURN_ON_EXCEPTION(
4995 isolate, EnqueueChangeRecord(object, "delete", name, old_value),
// Deletes a named property from |object| (ECMA-262 [[Delete]]).  Array-index
// names are routed to DeleteElement; otherwise a LookupIterator walks the
// hidden-prototype chain handling access checks, interceptors, data and
// accessor properties.  Returns true/false value handles; empty on exception.
// NOTE(review): excerpt is elided — some case labels, braces and argument
// lines are missing from this view; original tokens preserved as-is.
5004 MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
5006 DeleteMode delete_mode) {
5007 // ECMA-262, 3rd, 8.6.2.5
5008 DCHECK(name->IsName());
5011 if (name->AsArrayIndex(&index)) {
5012 return DeleteElement(object, index, delete_mode);
5015 // Skip interceptors on FORCE_DELETION.
5016 LookupIterator::Configuration config =
5017 delete_mode == FORCE_DELETION ? LookupIterator::HIDDEN_SKIP_INTERCEPTOR
5018 : LookupIterator::HIDDEN;
5020 LookupIterator it(object, name, config);
// Deleting the hidden-properties backing store must not be observable.
5022 bool is_observed = object->map()->is_observed() &&
5023 *name != it.isolate()->heap()->hidden_string();
5024 Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
5026 for (; it.IsFound(); it.Next()) {
5027 switch (it.state()) {
5028 case LookupIterator::JSPROXY:
5029 case LookupIterator::NOT_FOUND:
5030 case LookupIterator::TRANSITION:
5032 case LookupIterator::ACCESS_CHECK:
5033 if (it.HasAccess(v8::ACCESS_DELETE)) break;
5034 it.isolate()->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
5036 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it.isolate(), Object);
5037 return it.isolate()->factory()->false_value();
5038 case LookupIterator::INTERCEPTOR: {
5039 MaybeHandle<Object> maybe_result =
5040 JSObject::DeletePropertyWithInterceptor(it.GetHolder<JSObject>(),
5042 // Delete with interceptor succeeded. Return result.
5043 if (!maybe_result.is_null()) return maybe_result;
5044 // An exception was thrown in the interceptor. Propagate.
5045 if (it.isolate()->has_pending_exception()) return maybe_result;
5048 case LookupIterator::DATA:
5050 old_value = it.GetDataValue();
5053 case LookupIterator::ACCESSOR: {
5054 if (delete_mode != FORCE_DELETION && !it.IsConfigurable()) {
5055 // Fail if the property is not configurable.
5056 if (delete_mode == STRICT_DELETION) {
5057 Handle<Object> args[2] = {name, object};
5058 THROW_NEW_ERROR(it.isolate(),
5059 NewTypeError("strict_delete_property",
5060 HandleVector(args, arraysize(args))),
5063 return it.isolate()->factory()->false_value();
// Actual deletion path: normalize the holder to dictionary properties,
// delete there, then try to re-optimize prototype maps.
5066 PropertyNormalizationMode mode = object->map()->is_prototype_map()
5067 ? KEEP_INOBJECT_PROPERTIES
5068 : CLEAR_INOBJECT_PROPERTIES;
5069 Handle<JSObject> holder = it.GetHolder<JSObject>();
5070 // TODO(verwaest): Remove this temporary compatibility hack when blink
5071 // tests are updated.
5072 if (!holder.is_identical_to(object) &&
5073 !(object->IsJSGlobalProxy() && holder->IsJSGlobalObject())) {
5074 return it.isolate()->factory()->true_value();
5076 NormalizeProperties(holder, mode, 0);
5077 Handle<Object> result =
5078 DeleteNormalizedProperty(holder, name, delete_mode);
5079 ReoptimizeIfPrototype(holder);
5082 RETURN_ON_EXCEPTION(
5084 EnqueueChangeRecord(object, "delete", name, old_value), Object);
// Deleting a property that was never found succeeds vacuously.
5092 return it.isolate()->factory()->true_value();
// Dispatches element deletion: proxies go through the proxy trap handler,
// everything else through JSObject::DeleteElement.
// NOTE(review): the parameter lines and closing brace are elided in this view.
5096 MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5099 if (object->IsJSProxy()) {
5100 return JSProxy::DeleteElementWithHandler(
5101 Handle<JSProxy>::cast(object), index, mode);
5103 return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
// Dispatches named-property deletion: proxies go through the proxy trap
// handler, everything else through JSObject::DeleteProperty.
// NOTE(review): the parameter lines and closing brace are elided in this view.
5107 MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5110 if (object->IsJSProxy()) {
5111 return JSProxy::DeletePropertyWithHandler(
5112 Handle<JSProxy>::cast(object), name, mode);
5114 return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
// Returns true if |object| occurs in the given elements backing store.
// Fast object elements are scanned linearly (up to the JSArray length when
// applicable); dictionary elements use a slow reverse lookup.
// NOTE(review): parameter lines, an else branch and closing braces are
// elided in this view; original tokens preserved as-is.
5118 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5121 DCHECK(IsFastObjectElementsKind(kind) ||
5122 kind == DICTIONARY_ELEMENTS);
5123 if (IsFastObjectElementsKind(kind)) {
// For a JSArray only the first |length| slots are live elements.
5124 int length = IsJSArray()
5125 ? Smi::cast(JSArray::cast(this)->length())->value()
5126 : elements->length();
5127 for (int i = 0; i < length; ++i) {
5128 Object* element = elements->get(i);
5129 if (!element->IsTheHole() && element == object) return true;
5133 SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
5134 if (!key->IsUndefined()) return true;
5140 // Check whether this object references another object.
// Checks, without allocating, whether |obj| is reachable from this object
// via its constructor, prototype, named properties, elements, and — for
// functions — the context, mapped arguments objects and context extensions.
// NOTE(review): several return statements, braces and the switch header are
// elided in this view; original tokens preserved as-is.
5141 bool JSObject::ReferencesObject(Object* obj) {
5142 Map* map_of_this = map();
5143 Heap* heap = GetHeap();
5144 DisallowHeapAllocation no_allocation;
5146 // Is the object the constructor for this object?
5147 if (map_of_this->constructor() == obj) {
5151 // Is the object the prototype for this object?
5152 if (map_of_this->prototype() == obj) {
5156 // Check if the object is among the named properties.
5157 Object* key = SlowReverseLookup(obj);
5158 if (!key->IsUndefined()) {
5162 // Check if the object is among the indexed properties.
5163 ElementsKind kind = GetElementsKind();
5165 // Raw pixels and external arrays do not reference other
5167 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5168 case EXTERNAL_##TYPE##_ELEMENTS: \
5169 case TYPE##_ELEMENTS: \
5172 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5173 #undef TYPED_ARRAY_CASE
5175 case FAST_DOUBLE_ELEMENTS:
5176 case FAST_HOLEY_DOUBLE_ELEMENTS:
5178 case FAST_SMI_ELEMENTS:
5179 case FAST_HOLEY_SMI_ELEMENTS:
5182 case FAST_HOLEY_ELEMENTS:
5183 case DICTIONARY_ELEMENTS: {
5184 FixedArray* elements = FixedArray::cast(this->elements());
5185 if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5188 case SLOPPY_ARGUMENTS_ELEMENTS: {
5189 FixedArray* parameter_map = FixedArray::cast(elements());
5190 // Check the mapped parameters.
5191 int length = parameter_map->length();
// Slots 0 and 1 of a parameter map hold the context and the arguments
// backing store; real mapped entries start at index 2.
5192 for (int i = 2; i < length; ++i) {
5193 Object* value = parameter_map->get(i);
5194 if (!value->IsTheHole() && value == obj) return true;
5196 // Check the arguments.
5197 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5198 kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5199 FAST_HOLEY_ELEMENTS;
5200 if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5205 // For functions check the context.
5206 if (IsJSFunction()) {
5207 // Get the constructor function for arguments array.
5208 Map* arguments_map =
5209 heap->isolate()->context()->native_context()->sloppy_arguments_map();
5210 JSFunction* arguments_function =
5211 JSFunction::cast(arguments_map->constructor());
5213 // Get the context and don't check if it is the native context.
5214 JSFunction* f = JSFunction::cast(this);
5215 Context* context = f->context();
5216 if (context->IsNativeContext()) {
5220 // Check the non-special context slots.
5221 for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5222 // Only check JS objects.
5223 if (context->get(i)->IsJSObject()) {
5224 JSObject* ctxobj = JSObject::cast(context->get(i));
5225 // If it is an arguments array check the content.
5226 if (ctxobj->map()->constructor() == arguments_function) {
5227 if (ctxobj->ReferencesObject(obj)) {
5230 } else if (ctxobj == obj) {
5236 // Check the context extension (if any) if it can have references.
5237 if (context->has_extension() && !context->IsCatchContext()) {
5238 // With harmony scoping, a JSFunction may have a global context.
5239 // TODO(mvstanton): walk into the ScopeInfo.
5240 if (FLAG_harmony_scoping && context->IsGlobalContext()) {
5244 return JSObject::cast(context->extension())->ReferencesObject(obj);
5248 // No references to object.
// Implements Object.preventExtensions: normalizes elements to dictionary
// mode, marks them requires-slow-elements, and transitions to a copied map
// with is_extensible cleared.  Handles access checks, the global proxy,
// external/typed array elements (error) and Object.observe notification.
// NOTE(review): excerpt is elided — some braces and argument lines are
// missing from this view; original tokens preserved as-is.
5253 MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
5254 Isolate* isolate = object->GetIsolate();
// Already non-extensible: nothing to do.
5256 if (!object->map()->is_extensible()) return object;
5258 if (object->IsAccessCheckNeeded() &&
5259 !isolate->MayNamedAccess(
5260 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5261 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5262 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5263 return isolate->factory()->false_value();
// The global proxy forwards to the actual global object behind it.
5266 if (object->IsJSGlobalProxy()) {
5267 PrototypeIterator iter(isolate, object);
5268 if (iter.IsAtEnd()) return object;
5269 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
5270 return PreventExtensions(
5271 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
5274 // It's not possible to seal objects with external array elements
5275 if (object->HasExternalArrayElements() ||
5276 object->HasFixedTypedArrayElements()) {
5277 THROW_NEW_ERROR(isolate,
5278 NewTypeError("cant_prevent_ext_external_array_elements",
5279 HandleVector(&object, 1)),
5283 // If there are fast elements we normalize.
5284 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
5285 DCHECK(object->HasDictionaryElements() ||
5286 object->HasDictionaryArgumentsElements());
5288 // Make sure that we never go back to fast case.
5289 dictionary->set_requires_slow_elements();
5291 // Do a map transition, other objects with this map may still
5293 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5294 Handle<Map> new_map = Map::Copy(handle(object->map()));
5296 new_map->set_is_extensible(false);
5297 JSObject::MigrateToMap(object, new_map);
5298 DCHECK(!object->map()->is_extensible());
5300 if (object->map()->is_observed()) {
5301 RETURN_ON_EXCEPTION(
5303 EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
5304 isolate->factory()->the_hole_value()),
// Marks every (non-private-symbol) entry of a property or element dictionary
// as DONT_DELETE, and additionally READ_ONLY unless the value is an accessor
// pair — the dictionary-level half of Object.freeze.
// NOTE(review): closing braces are elided in this view.
5311 template<typename Dictionary>
5312 static void FreezeDictionary(Dictionary* dictionary) {
5313 int capacity = dictionary->Capacity();
5314 for (int i = 0; i < capacity; i++) {
5315 Object* k = dictionary->KeyAt(i);
// Private symbols stay writable/deletable: they are internal state, not
// script-visible properties.
5316 if (dictionary->IsKey(k) &&
5317 !(k->IsSymbol() && Symbol::cast(k)->is_private())) {
5318 PropertyDetails details = dictionary->DetailsAt(i);
5319 int attrs = DONT_DELETE;
5320 // READ_ONLY is an invalid attribute for JS setters/getters.
5321 if (details.type() == CALLBACKS) {
5322 Object* v = dictionary->ValueAt(i);
// Global-object properties are boxed in PropertyCells; unwrap first.
5323 if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
5324 if (!v->IsAccessorPair()) attrs |= READ_ONLY;
5328 details = details.CopyAddAttributes(
5329 static_cast<PropertyAttributes>(attrs));
5330 dictionary->DetailsAtPut(i, details);
// Implements Object.freeze: makes the object non-extensible, converts its
// elements to a slow dictionary, and marks all own properties and elements
// DONT_DELETE (+READ_ONLY for data properties).  Reuses an existing "frozen"
// map transition when available; otherwise copies or normalizes the map.
// NOTE(review): excerpt is elided — some braces, else branches and argument
// lines are missing from this view; original tokens preserved as-is.
5336 MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
5337 // Freezing sloppy arguments should be handled elsewhere.
5338 DCHECK(!object->HasSloppyArgumentsElements());
5339 DCHECK(!object->map()->is_observed());
// Already frozen: nothing to do.
5341 if (object->map()->is_frozen()) return object;
5343 Isolate* isolate = object->GetIsolate();
5344 if (object->IsAccessCheckNeeded() &&
5345 !isolate->MayNamedAccess(
5346 object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5347 isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
5348 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
5349 return isolate->factory()->false_value();
// The global proxy forwards to the actual global object behind it.
5352 if (object->IsJSGlobalProxy()) {
5353 PrototypeIterator iter(isolate, object);
5354 if (iter.IsAtEnd()) return object;
5355 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
5356 return Freeze(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
5359 // It's not possible to freeze objects with external array elements
5360 if (object->HasExternalArrayElements() ||
5361 object->HasFixedTypedArrayElements()) {
5362 THROW_NEW_ERROR(isolate,
5363 NewTypeError("cant_prevent_ext_external_array_elements",
5364 HandleVector(&object, 1)),
// Build the slow-elements backing store up front (without transitioning),
// so the later map change and element swap happen together.
5368 Handle<SeededNumberDictionary> new_element_dictionary;
5369 if (!object->elements()->IsDictionary()) {
5370 int length = object->IsJSArray()
5371 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5372 : object->elements()->length();
5376 object->GetElementsCapacityAndUsage(&capacity, &used);
5377 new_element_dictionary = SeededNumberDictionary::New(isolate, used);
5379 // Move elements to a dictionary; avoid calling NormalizeElements to avoid
5380 // unnecessary transitions.
5381 new_element_dictionary = CopyFastElementsToDictionary(
5382 handle(object->elements()), length, new_element_dictionary);
5384 // No existing elements, use a pre-allocated empty backing store
5385 new_element_dictionary =
5386 isolate->factory()->empty_slow_element_dictionary();
// Prefer an existing frozen-map transition; then a fresh frozen copy while
// transitions are still possible; otherwise fall back to dictionary mode.
5390 Handle<Map> old_map(object->map(), isolate);
5391 int transition_index = old_map->SearchTransition(
5392 isolate->heap()->frozen_symbol());
5393 if (transition_index != TransitionArray::kNotFound) {
5394 Handle<Map> transition_map(old_map->GetTransition(transition_index));
5395 DCHECK(transition_map->has_dictionary_elements());
5396 DCHECK(transition_map->is_frozen());
5397 DCHECK(!transition_map->is_extensible());
5398 JSObject::MigrateToMap(object, transition_map);
5399 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5400 // Create a new descriptor array with fully-frozen properties
5401 Handle<Map> new_map = Map::CopyForFreeze(old_map);
5402 JSObject::MigrateToMap(object, new_map);
5404 DCHECK(old_map->is_dictionary_map() || !old_map->is_prototype_map());
5405 // Slow path: need to normalize properties for safety
5406 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5408 // Create a new map, since other objects with this map may be extensible.
5409 // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5410 Handle<Map> new_map = Map::Copy(handle(object->map()));
5412 new_map->set_is_extensible(false);
5413 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5414 JSObject::MigrateToMap(object, new_map);
5416 // Freeze dictionary-mode properties
5417 FreezeDictionary(object->property_dictionary());
5420 DCHECK(object->map()->has_dictionary_elements());
5421 if (!new_element_dictionary.is_null()) {
5422 object->set_elements(*new_element_dictionary);
5425 if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
5426 SeededNumberDictionary* dictionary = object->element_dictionary();
5427 // Make sure we never go back to the fast case
5428 dictionary->set_requires_slow_elements();
5429 // Freeze all elements in the dictionary
5430 FreezeDictionary(dictionary);
// Marks |object| as observed (Object.observe) by migrating it to a map with
// the is_observed bit set, reusing an existing "observed" map transition
// when one exists, creating a transition when still possible, and otherwise
// copying the map.
// NOTE(review): some braces/else lines are elided in this view.
5437 void JSObject::SetObserved(Handle<JSObject> object) {
// Observation of the global object is handled via its proxy elsewhere.
5438 DCHECK(!object->IsJSGlobalProxy());
5439 DCHECK(!object->IsJSGlobalObject());
5440 Isolate* isolate = object->GetIsolate();
5441 Handle<Map> new_map;
5442 Handle<Map> old_map(object->map(), isolate);
5443 DCHECK(!old_map->is_observed());
5444 int transition_index = old_map->SearchTransition(
5445 isolate->heap()->observed_symbol());
5446 if (transition_index != TransitionArray::kNotFound) {
5447 new_map = handle(old_map->GetTransition(transition_index), isolate);
5448 DCHECK(new_map->is_observed());
5449 } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5450 new_map = Map::CopyForObserved(old_map);
5452 new_map = Map::Copy(old_map);
5453 new_map->set_is_observed();
5455 JSObject::MigrateToMap(object, new_map);
// Reads a fast (in-object or properties-array) field and boxes it according
// to |representation| (e.g. allocates a HeapNumber for a raw double field).
// NOTE(review): the FieldIndex parameter line and closing brace are elided
// in this view.
5459 Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
5460 Representation representation,
5462 Isolate* isolate = object->GetIsolate();
5463 Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
5464 return Object::WrapForRead(isolate, raw_value, representation);
// Recursive deep-walk/deep-copy helper over a JSObject graph.  The
// ContextObject parameter (an allocation-site creation or usage context)
// decides whether the walk records or consumes AllocationSites; |copying_|
// selects copy vs. walk-only mode, |hints_| tunes depth (e.g. shallow).
// NOTE(review): access-specifier and member-initializer lines are elided in
// this view; original tokens preserved as-is.
5468 template<class ContextObject>
5469 class JSObjectWalkVisitor {
5471 JSObjectWalkVisitor(ContextObject* site_context, bool copying,
5472 JSObject::DeepCopyHints hints)
5473 : site_context_(site_context),
5477 MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);
// Walks one nested object value inside a fresh allocation-site scope so
// nested literals get their own site bookkeeping.
5480 MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
5481 Handle<JSObject> object,
5482 Handle<JSObject> value) {
5483 Handle<AllocationSite> current_site = site_context()->EnterNewScope();
5484 MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
5485 site_context()->ExitScope(current_site, value);
5486 return copy_of_value;
5489 inline ContextObject* site_context() { return site_context_; }
5490 inline Isolate* isolate() { return site_context()->isolate(); }
5492 inline bool copying() const { return copying_; }
5495 ContextObject* site_context_;
5496 const bool copying_;
5497 const JSObject::DeepCopyHints hints_;
// Core of DeepWalk/DeepCopy: recursively visits (and in copying mode clones)
// an object's own fields and elements.  Guards against stack overflow,
// migrates deprecated maps, optionally attaches AllocationMementos, and
// dispatches on the elements kind for the element walk.
// NOTE(review): heavily elided — else branches, COW handling, several braces
// and the elements switch header are missing from this view; original tokens
// preserved as-is.
5501 template <class ContextObject>
5502 MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
5503 Handle<JSObject> object) {
5504 Isolate* isolate = this->isolate();
5505 bool copying = this->copying();
5506 bool shallow = hints_ == JSObject::kObjectIsShallow;
// Recursion depth is bounded only by the C++ stack; bail out cleanly with a
// StackOverflow exception instead of crashing.
5509 StackLimitCheck check(isolate);
5511 if (check.HasOverflowed()) {
5512 isolate->StackOverflow();
5513 return MaybeHandle<JSObject>();
5517 if (object->map()->is_deprecated()) {
5518 JSObject::MigrateInstance(object);
5521 Handle<JSObject> copy;
5523 Handle<AllocationSite> site_to_pass;
5524 if (site_context()->ShouldCreateMemento(object)) {
5525 site_to_pass = site_context()->current();
5527 copy = isolate->factory()->CopyJSObjectWithAllocationSite(
5528 object, site_to_pass);
// In walk-only mode |copy| aliases |object|; the loops below then visit the
// original in place.
5533 DCHECK(copying || copy.is_identical_to(object));
5535 ElementsKind kind = copy->GetElementsKind();
5536 if (copying && IsFastSmiOrObjectElementsKind(kind) &&
5537 FixedArray::cast(copy->elements())->map() ==
5538 isolate->heap()->fixed_cow_array_map()) {
5539 isolate->counters()->cow_arrays_created_runtime()->Increment();
5543 HandleScope scope(isolate);
5545 // Deep copy own properties.
5546 if (copy->HasFastProperties()) {
5547 Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
5548 int limit = copy->map()->NumberOfOwnDescriptors();
5549 for (int i = 0; i < limit; i++) {
5550 PropertyDetails details = descriptors->GetDetails(i);
// Only in-object/backing-store fields need walking; constants and
// callbacks hold no literal sub-objects.
5551 if (details.type() != FIELD) continue;
5552 FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
5553 Handle<Object> value(object->RawFastPropertyAt(index), isolate);
5554 if (value->IsJSObject()) {
5555 ASSIGN_RETURN_ON_EXCEPTION(
5557 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5560 Representation representation = details.representation();
5561 value = Object::NewStorageFor(isolate, value, representation);
5564 copy->FastPropertyAtPut(index, *value);
// Slow-properties path: enumerate own names and copy field values.
5568 Handle<FixedArray> names =
5569 isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties());
5570 copy->GetOwnPropertyNames(*names, 0);
5571 for (int i = 0; i < names->length(); i++) {
5572 DCHECK(names->get(i)->IsString());
5573 Handle<String> key_string(String::cast(names->get(i)));
5574 Maybe<PropertyAttributes> maybe =
5575 JSReceiver::GetOwnPropertyAttributes(copy, key_string);
5576 DCHECK(maybe.has_value);
5577 PropertyAttributes attributes = maybe.value;
5578 // Only deep copy fields from the object literal expression.
5579 // In particular, don't try to copy the length attribute of
5581 if (attributes != NONE) continue;
5582 Handle<Object> value =
5583 Object::GetProperty(copy, key_string).ToHandleChecked();
5584 if (value->IsJSObject()) {
5585 Handle<JSObject> result;
5586 ASSIGN_RETURN_ON_EXCEPTION(
5588 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5591 // Creating object copy for literals. No strict mode needed.
5592 JSObject::SetProperty(copy, key_string, result, SLOPPY).Assert();
5598 // Deep copy own elements.
5599 // Pixel elements cannot be created using an object literal.
5600 DCHECK(!copy->HasExternalArrayElements());
5602 case FAST_SMI_ELEMENTS:
5604 case FAST_HOLEY_SMI_ELEMENTS:
5605 case FAST_HOLEY_ELEMENTS: {
5606 Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
// COW arrays can only contain smis/strings from the parser, so there is
// nothing to recurse into — just assert that in debug builds.
5607 if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
5609 for (int i = 0; i < elements->length(); i++) {
5610 DCHECK(!elements->get(i)->IsJSObject());
5614 for (int i = 0; i < elements->length(); i++) {
5615 Handle<Object> value(elements->get(i), isolate);
5616 DCHECK(value->IsSmi() ||
5617 value->IsTheHole() ||
5618 (IsFastObjectElementsKind(copy->GetElementsKind())));
5619 if (value->IsJSObject()) {
5620 Handle<JSObject> result;
5621 ASSIGN_RETURN_ON_EXCEPTION(
5623 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5626 elements->set(i, *result);
5633 case DICTIONARY_ELEMENTS: {
5634 Handle<SeededNumberDictionary> element_dictionary(
5635 copy->element_dictionary());
5636 int capacity = element_dictionary->Capacity();
5637 for (int i = 0; i < capacity; i++) {
5638 Object* k = element_dictionary->KeyAt(i);
5639 if (element_dictionary->IsKey(k)) {
5640 Handle<Object> value(element_dictionary->ValueAt(i), isolate);
5641 if (value->IsJSObject()) {
5642 Handle<JSObject> result;
5643 ASSIGN_RETURN_ON_EXCEPTION(
5645 VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
5648 element_dictionary->ValueAtPut(i, *result);
// Kinds below cannot occur for literals; unreachable in this walk.
5655 case SLOPPY_ARGUMENTS_ELEMENTS:
5660 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5661 case EXTERNAL_##TYPE##_ELEMENTS: \
5662 case TYPE##_ELEMENTS: \
5664 TYPED_ARRAYS(TYPED_ARRAY_CASE)
5665 #undef TYPED_ARRAY_CASE
5667 case FAST_DOUBLE_ELEMENTS:
5668 case FAST_HOLEY_DOUBLE_ELEMENTS:
5669 // No contained objects, nothing to do.
// Walks |object| without copying (copying=false) to let |site_context|
// record AllocationSites for nested literals.  In a successful walk the
// returned handle is the input object itself (asserted below).
// NOTE(review): hint argument, return line and closing brace are elided in
// this view.
5678 MaybeHandle<JSObject> JSObject::DeepWalk(
5679 Handle<JSObject> object,
5680 AllocationSiteCreationContext* site_context) {
5681 JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
5683 MaybeHandle<JSObject> result = v.StructureWalk(object);
5684 Handle<JSObject> for_assert;
5685 DCHECK(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
// Deep-copies |object| (copying=true), consuming AllocationSites from
// |site_context|; |hints| can request a shallow copy.  On success the result
// is a fresh object distinct from the input (asserted below).
// NOTE(review): return line and closing brace are elided in this view.
5690 MaybeHandle<JSObject> JSObject::DeepCopy(
5691 Handle<JSObject> object,
5692 AllocationSiteUsageContext* site_context,
5693 DeepCopyHints hints) {
5694 JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
5695 MaybeHandle<JSObject> copy = v.StructureWalk(object);
5696 Handle<JSObject> for_assert;
5697 DCHECK(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
5702 // Tests for the fast common case for property enumeration:
5703 // - This object and all prototypes has an enum cache (which means that
5704 // it is no proxy, has no interceptors and needs no access checks).
5705 // - This object has no elements.
5706 // - No prototype has enumerable properties/elements.
// Returns false as soon as any object on the chain violates one of the
// conditions above.
// NOTE(review): the final return and closing brace are elided in this view.
5707 bool JSReceiver::IsSimpleEnum() {
5708 for (PrototypeIterator iter(GetIsolate(), this,
5709 PrototypeIterator::START_AT_RECEIVER);
5710 !iter.IsAtEnd(); iter.Advance()) {
5711 if (!iter.GetCurrent()->IsJSObject()) return false;
5712 JSObject* curr = JSObject::cast(iter.GetCurrent());
5713 int enum_length = curr->map()->EnumLength();
5714 if (enum_length == kInvalidEnumCacheSentinel) return false;
5715 if (curr->IsAccessCheckNeeded()) return false;
5716 DCHECK(!curr->HasNamedInterceptor());
5717 DCHECK(!curr->HasIndexedInterceptor());
5718 if (curr->NumberOfEnumElements() > 0) return false;
// Prototypes may contribute no enumerable properties at all; only the
// receiver itself is allowed a non-zero enum length.
5719 if (curr != this && enum_length != 0) return false;
// Returns whether |key| should be filtered out of a key enumeration given
// the attribute |filter| bits (SYMBOLIC, PRIVATE_SYMBOL, STRING).
// NOTE(review): the return statements inside each branch and the closing
// brace are elided in this view; original tokens preserved as-is.
5725 static bool FilterKey(Object* key, PropertyAttributes filter) {
5726 if ((filter & SYMBOLIC) && key->IsSymbol()) {
5730 if ((filter & PRIVATE_SYMBOL) &&
5731 key->IsSymbol() && Symbol::cast(key)->is_private()) {
5735 if ((filter & STRING) && !key->IsSymbol()) {
// Counts descriptors (all, or only own, per |which|) whose attributes pass
// |filter| and whose key is not filtered by FilterKey.
// NOTE(review): the counter declaration/increment and closing braces are
// elided in this view; original tokens preserved as-is.
5743 int Map::NumberOfDescribedProperties(DescriptorFlag which,
5744 PropertyAttributes filter) {
5746 DescriptorArray* descs = instance_descriptors();
5747 int limit = which == ALL_DESCRIPTORS
5748 ? descs->number_of_descriptors()
5749 : NumberOfOwnDescriptors();
5750 for (int i = 0; i < limit; i++) {
5751 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
5752 !FilterKey(descs->GetKey(i), filter)) {
// Returns the first unused fast-property field index: one past the maximum
// field index used by any own FIELD descriptor.
// NOTE(review): the max_index initializer line and braces are elided in this
// view; original tokens preserved as-is.
5760 int Map::NextFreePropertyIndex() {
5762 int number_of_own_descriptors = NumberOfOwnDescriptors();
5763 DescriptorArray* descs = instance_descriptors();
5764 for (int i = 0; i < number_of_own_descriptors; i++) {
// Only FIELD descriptors occupy a storage slot; constants/callbacks don't.
5765 if (descs->GetType(i) == FIELD) {
5766 int current_index = descs->GetFieldIndex(i);
5767 if (current_index > max_index) max_index = current_index;
5770 return max_index + 1;
// Debug helper: verifies every entry of a key array is a String or a Number
// (the only legal shapes for enumerated property keys).
// NOTE(review): the trailing "return true;" and closing brace are elided in
// this view.
5774 static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
5775 int len = array->length();
5776 for (int i = 0; i < len; i++) {
5777 Object* e = array->get(i);
5778 if (!(e->IsString() || e->IsNumber())) return false;
// Returns a prefix copy of |array| of exactly |length| entries; returns the
// array itself when it already has that length.
// NOTE(review): the final return of new_array and closing brace are elided
// in this view.
5784 static Handle<FixedArray> ReduceFixedArrayTo(
5785 Handle<FixedArray> array, int length) {
5786 DCHECK(array->length() >= length);
5787 if (array->length() == length) return array;
5789 Handle<FixedArray> new_array =
5790 array->GetIsolate()->factory()->NewFixedArray(length);
5791 for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
// Computes the enumerable own property names of |object|.  Fast-properties
// objects use (and, when |cache_result| is set, populate) the descriptor
// array's enum cache, also recording load-by-field indices; dictionary-mode
// objects copy enum keys out of the property dictionary.
// NOTE(review): heavily elided — loop index bookkeeping, else branches and
// several braces are missing from this view; original tokens preserved.
5796 static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
5797 bool cache_result) {
5798 Isolate* isolate = object->GetIsolate();
5799 if (object->HasFastProperties()) {
5800 int own_property_count = object->map()->EnumLength();
5801 // If the enum length of the given map is set to kInvalidEnumCache, this
5802 // means that the map itself has never used the present enum cache. The
5803 // first step to using the cache is to set the enum length of the map by
5804 // counting the number of own descriptors that are not DONT_ENUM or
5806 if (own_property_count == kInvalidEnumCacheSentinel) {
5807 own_property_count = object->map()->NumberOfDescribedProperties(
5808 OWN_DESCRIPTORS, DONT_SHOW);
5810 DCHECK(own_property_count == object->map()->NumberOfDescribedProperties(
5811 OWN_DESCRIPTORS, DONT_SHOW));
5814 if (object->map()->instance_descriptors()->HasEnumCache()) {
5815 DescriptorArray* desc = object->map()->instance_descriptors();
5816 Handle<FixedArray> keys(desc->GetEnumCache(), isolate);
5818 // In case the number of properties required in the enum are actually
5819 // present, we can reuse the enum cache. Otherwise, this means that the
5820 // enum cache was generated for a previous (smaller) version of the
5821 // Descriptor Array. In that case we regenerate the enum cache.
5822 if (own_property_count <= keys->length()) {
5823 if (cache_result) object->map()->SetEnumLength(own_property_count);
5824 isolate->counters()->enum_cache_hits()->Increment();
5825 return ReduceFixedArrayTo(keys, own_property_count);
5829 Handle<Map> map(object->map());
5831 if (map->instance_descriptors()->IsEmpty()) {
5832 isolate->counters()->enum_cache_hits()->Increment();
5833 if (cache_result) map->SetEnumLength(0);
5834 return isolate->factory()->empty_fixed_array();
5837 isolate->counters()->enum_cache_misses()->Increment();
// Cache miss: build the key array (and, when all keys are plain fields,
// a parallel array of load-by-field indices for fast for-in).
5839 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
5840 own_property_count);
5841 Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
5842 own_property_count);
5844 Handle<DescriptorArray> descs =
5845 Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
5847 int size = map->NumberOfOwnDescriptors();
5850 for (int i = 0; i < size; i++) {
5851 PropertyDetails details = descs->GetDetails(i);
5852 Object* key = descs->GetKey(i);
5853 if (!(details.IsDontEnum() || key->IsSymbol())) {
5854 storage->set(index, key);
// A single non-FIELD property disables the indices side-array entirely.
5855 if (!indices.is_null()) {
5856 if (details.type() != FIELD) {
5857 indices = Handle<FixedArray>();
5859 FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
5860 int load_by_field_index = field_index.GetLoadByFieldIndex();
5861 indices->set(index, Smi::FromInt(load_by_field_index));
5867 DCHECK(index == storage->length());
5869 Handle<FixedArray> bridge_storage =
5870 isolate->factory()->NewFixedArray(
5871 DescriptorArray::kEnumCacheBridgeLength);
5872 DescriptorArray* desc = object->map()->instance_descriptors();
5873 desc->SetEnumCache(*bridge_storage,
5875 indices.is_null() ? Object::cast(Smi::FromInt(0))
5876 : Object::cast(*indices));
5878 object->map()->SetEnumLength(own_property_count);
// Dictionary-properties path: copy enum keys straight out of the dictionary.
5882 Handle<NameDictionary> dictionary(object->property_dictionary());
5883 int length = dictionary->NumberOfEnumElements();
5885 return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
5887 Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
5888 dictionary->CopyEnumKeysTo(*storage);
// Collects the enumerable keys of |object| and (unless |type| is OWN_ONLY)
// of its prototype chain: proxies via the proxy-enumerate trap, then per
// JSObject its element keys, interceptor keys, and named property keys,
// unioned into one FixedArray in order.  Returns empty on exception.
// NOTE(review): heavily elided — continue/loop lines, argument tails and
// several braces are missing from this view; original tokens preserved.
5894 MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
5895 KeyCollectionType type) {
5896 USE(ContainsOnlyValidKeys);
5897 Isolate* isolate = object->GetIsolate();
5898 Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
// Used below to recognize sloppy arguments objects, whose keys must not be
// cached (their length/index properties are special).
5899 Handle<JSFunction> arguments_function(
5900 JSFunction::cast(isolate->sloppy_arguments_map()->constructor()));
5902 // Only collect keys if access is permitted.
5903 for (PrototypeIterator iter(isolate, object,
5904 PrototypeIterator::START_AT_RECEIVER);
5905 !iter.IsAtEnd(); iter.Advance()) {
// Proxies delegate key collection to a JS-level enumerate helper.
5906 if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
5907 Handle<JSProxy> proxy(JSProxy::cast(*PrototypeIterator::GetCurrent(iter)),
5909 Handle<Object> args[] = { proxy };
5910 Handle<Object> names;
5911 ASSIGN_RETURN_ON_EXCEPTION(
5913 Execution::Call(isolate,
5914 isolate->proxy_enumerate(),
5919 ASSIGN_RETURN_ON_EXCEPTION(
5921 FixedArray::AddKeysFromArrayLike(
5922 content, Handle<JSObject>::cast(names)),
5927 Handle<JSObject> current =
5928 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
5930 // Check access rights if required.
5931 if (current->IsAccessCheckNeeded() &&
5932 !isolate->MayNamedAccess(
5933 current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
5934 isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
5935 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
5939 // Compute the element keys.
5940 Handle<FixedArray> element_keys =
5941 isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
5942 current->GetEnumElementKeys(*element_keys);
5943 ASSIGN_RETURN_ON_EXCEPTION(
5945 FixedArray::UnionOfKeys(content, element_keys),
5947 DCHECK(ContainsOnlyValidKeys(content));
5949 // Add the element keys from the interceptor.
5950 if (current->HasIndexedInterceptor()) {
5951 Handle<JSObject> result;
5952 if (JSObject::GetKeysForIndexedInterceptor(
5953 current, object).ToHandle(&result)) {
5954 ASSIGN_RETURN_ON_EXCEPTION(
5956 FixedArray::AddKeysFromArrayLike(content, result),
5959 DCHECK(ContainsOnlyValidKeys(content));
5962 // We can cache the computed property keys if access checks are
5963 // not needed and no interceptors are involved.
5965 // We do not use the cache if the object has elements and
5966 // therefore it does not make sense to cache the property names
5967 // for arguments objects. Arguments objects will always have
5969 // Wrapped strings have elements, but don't have an elements
5970 // array or dictionary. So the fast inline test for whether to
5971 // use the cache says yes, so we should not create a cache.
5972 bool cache_enum_keys =
5973 ((current->map()->constructor() != *arguments_function) &&
5974 !current->IsJSValue() &&
5975 !current->IsAccessCheckNeeded() &&
5976 !current->HasNamedInterceptor() &&
5977 !current->HasIndexedInterceptor());
5978 // Compute the property keys and cache them if possible.
5979 ASSIGN_RETURN_ON_EXCEPTION(
5981 FixedArray::UnionOfKeys(
5982 content, GetEnumPropertyKeys(current, cache_enum_keys)),
5984 DCHECK(ContainsOnlyValidKeys(content));
5986 // Add the property keys from the interceptor.
5987 if (current->HasNamedInterceptor()) {
5988 Handle<JSObject> result;
5989 if (JSObject::GetKeysForNamedInterceptor(
5990 current, object).ToHandle(&result)) {
5991 ASSIGN_RETURN_ON_EXCEPTION(
5993 FixedArray::AddKeysFromArrayLike(content, result),
5996 DCHECK(ContainsOnlyValidKeys(content));
5999 // If we only want own properties we bail out after the first
6001 if (type == OWN_ONLY) break;
// Try to update an accessor in an elements dictionary. Return true if the
// update succeeded, and false otherwise.
static bool UpdateGetterSetterInDictionary(
    SeededNumberDictionary* dictionary,
    uint32_t index,
    Object* getter,
    Object* setter,
    PropertyAttributes attributes) {
  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    Object* result = dictionary->ValueAt(entry);
    PropertyDetails details = dictionary->DetailsAt(entry);
    if (details.type() == CALLBACKS && result->IsAccessorPair()) {
      // Callers only reach this path for configurable accessors; a
      // non-configurable CALLBACKS entry here would be a bug.
      DCHECK(details.IsConfigurable());
      if (details.attributes() != attributes) {
        // Keep the stored index, only swap the attributes.
        dictionary->DetailsAtPut(
            entry,
            PropertyDetails(attributes, CALLBACKS, index));
      }
      // Reuse the existing AccessorPair in place rather than allocating a
      // new one; only its getter/setter components change.
      AccessorPair::cast(result)->SetComponents(getter, setter);
      return true;
    }
  }
  return false;
}
// Installs a getter/setter pair for an indexed (element) property.  Fast
// paths update an existing AccessorPair in a dictionary backing store;
// otherwise a fresh AccessorPair is created and installed via
// SetElementCallback (which normalizes the elements).
void JSObject::DefineElementAccessor(Handle<JSObject> object,
                                     uint32_t index,
                                     Handle<Object> getter,
                                     Handle<Object> setter,
                                     PropertyAttributes attributes) {
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      // Fast elements cannot hold accessors; fall through to the slow
      // SetElementCallback path below.
      break;

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS:                                          \
    case TYPE##_ELEMENTS:                                                     \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
      // Ignore getters and setters on pixel and external array elements.
      return;

    case DICTIONARY_ELEMENTS:
      // If an accessor pair already exists for this index, update it in
      // place and we are done.
      if (UpdateGetterSetterInDictionary(object->element_dictionary(),
                                         index,
                                         *getter,
                                         *setter,
                                         attributes)) {
        return;
      }
      break;
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      // Ascertain whether we have read-only properties or an existing
      // getter/setter pair in an arguments elements dictionary backing
      // store.
      FixedArray* parameter_map = FixedArray::cast(object->elements());
      uint32_t length = parameter_map->length();
      // Slots 0 and 1 of the parameter map hold the context and the
      // arguments backing store; aliased parameters start at slot 2.
      Object* probe =
          index < (length - 2) ? parameter_map->get(index + 2) : NULL;
      if (probe == NULL || probe->IsTheHole()) {
        FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
        if (arguments->IsDictionary()) {
          SeededNumberDictionary* dictionary =
              SeededNumberDictionary::cast(arguments);
          if (UpdateGetterSetterInDictionary(dictionary,
                                             index,
                                             *getter,
                                             *setter,
                                             attributes)) {
            return;
          }
        }
      }
      break;
    }
  }

  Isolate* isolate = object->GetIsolate();
  Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
  accessors->SetComponents(*getter, *setter);

  SetElementCallback(object, index, accessors, attributes);
}
// Returns true if the receiver itself has fast elements but some object on
// its prototype chain (conservatively including proxies) has dictionary
// elements.  Used to decide whether element stores must check the chain.
bool Map::DictionaryElementsInPrototypeChainOnly() {
  if (IsDictionaryElementsKind(elements_kind())) {
    // The receiver itself is already slow, so "prototype chain only" does
    // not apply.
    return false;
  }

  for (PrototypeIterator iter(this); !iter.IsAtEnd(); iter.Advance()) {
    if (iter.GetCurrent()->IsJSProxy()) {
      // Be conservative, don't walk into proxies.
      return true;
    }

    if (IsDictionaryElementsKind(
            JSObject::cast(iter.GetCurrent())->map()->elements_kind())) {
      return true;
    }
  }

  return false;
}
// Installs |structure| (an AccessorPair or AccessorInfo) as a CALLBACKS
// entry for element |index|, normalizing the elements to dictionary mode
// first.
void JSObject::SetElementCallback(Handle<JSObject> object,
                                  uint32_t index,
                                  Handle<Object> structure,
                                  PropertyAttributes attributes) {
  Heap* heap = object->GetHeap();
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);

  // Normalize elements to make this operation simple.
  bool had_dictionary_elements = object->HasDictionaryElements();
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  // Update the dictionary with the new CALLBACKS property.
  dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
                                           details);
  dictionary->set_requires_slow_elements();

  // Update the dictionary backing store on the object.
  if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
    // Also delete any parameter alias.
    //
    // TODO(kmillikin): when deleting the last parameter alias we could
    // switch to a direct backing store without the parameter map.  This
    // would allow GC of the context.
    FixedArray* parameter_map = FixedArray::cast(object->elements());
    // Aliased parameters live at slot index + 2 (slots 0/1 hold the
    // context and the arguments store).
    if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
      parameter_map->set(index + 2, heap->the_hole_value());
    }
    parameter_map->set(1, *dictionary);
  } else {
    object->set_elements(*dictionary);

    if (!had_dictionary_elements) {
      // KeyedStoreICs (at least the non-generic ones) need a reset.
      heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
    }
  }
}
// Installs |structure| as a CALLBACKS entry for named property |name|,
// normalizing the object's properties to dictionary mode first.
void JSObject::SetPropertyCallback(Handle<JSObject> object,
                                   Handle<Name> name,
                                   Handle<Object> structure,
                                   PropertyAttributes attributes) {
  // Prototype maps keep their in-object properties so existing objects
  // sharing the map stay valid.
  PropertyNormalizationMode mode = object->map()->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;
  // Normalize object to make this operation simple.
  NormalizeProperties(object, mode, 0);

  // For the global object allocate a new map to invalidate the global inline
  // caches which have a global property cell reference directly in the code.
  if (object->IsGlobalObject()) {
    Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
    DCHECK(new_map->is_dictionary_map());
    JSObject::MigrateToMap(object, new_map);

    // When running crankshaft, changing the map is not enough. We
    // need to deoptimize all functions that rely on this global
    // object.
    Deoptimizer::DeoptimizeGlobalObject(*object);
  }

  // Update the dictionary with the new CALLBACKS property.
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
  SetNormalizedProperty(object, name, structure, details);

  ReoptimizeIfPrototype(object);
}
// Defines a getter/setter pair for |name| on |object| (the engine-internal
// core of Object.defineProperty for accessors).  Handles access checks,
// global proxy forwarding, element vs. named properties, and Object.observe
// change records.  Returns undefined on success or an exception.
MaybeHandle<Object> JSObject::DefineAccessor(Handle<JSObject> object,
                                             Handle<Name> name,
                                             Handle<Object> getter,
                                             Handle<Object> setter,
                                             PropertyAttributes attributes) {
  Isolate* isolate = object->GetIsolate();
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->undefined_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Forward the definition to the real global object behind the proxy.
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    DefineAccessor(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)),
                   name, getter, setter, attributes);
    return isolate->factory()->undefined_value();
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  // For observed objects, capture the pre-existing value so the change
  // record can report it.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  bool preexists = false;
  if (is_observed) {
    if (is_element) {
      Maybe<bool> maybe = HasOwnElement(object, index);
      // Workaround for a GCC 4.4.3 bug which leads to "‘preexists’ may be used
      // uninitialized in this function".
      if (!maybe.has_value) {
        DCHECK(false);
        return isolate->factory()->undefined_value();
      }
      preexists = maybe.value;
      if (preexists && GetOwnElementAccessorPair(object, index).is_null()) {
        old_value =
            Object::GetElement(isolate, object, index).ToHandleChecked();
      }
    } else {
      LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
      CHECK(GetPropertyAttributes(&it).has_value);
      preexists = it.IsFound();
      if (preexists && (it.state() == LookupIterator::DATA ||
                        it.GetAccessors()->IsAccessorInfo())) {
        old_value = GetProperty(&it).ToHandleChecked();
      }
    }
  }

  if (is_element) {
    DefineElementAccessor(object, index, getter, setter, attributes);
  } else {
    DCHECK(getter->IsSpecFunction() || getter->IsUndefined() ||
           getter->IsNull());
    DCHECK(setter->IsSpecFunction() || setter->IsUndefined() ||
           setter->IsNull());
    // At least one of the accessors needs to be a new value.
    DCHECK(!getter->IsNull() || !setter->IsNull());
    LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
    if (it.state() == LookupIterator::ACCESS_CHECK) {
      // We already did an access check before. We do have access.
      it.Next();
    }
    // Null means "leave the existing component alone".
    if (!getter->IsNull()) {
      it.TransitionToAccessorProperty(ACCESSOR_GETTER, getter, attributes);
    }
    if (!setter->IsNull()) {
      it.TransitionToAccessorProperty(ACCESSOR_SETTER, setter, attributes);
    }
  }

  if (is_observed) {
    const char* type = preexists ? "reconfigure" : "add";
    RETURN_ON_EXCEPTION(
        isolate, EnqueueChangeRecord(object, type, name, old_value), Object);
  }

  return isolate->factory()->undefined_value();
}
// Installs an API-level AccessorInfo callback on |object| under the name
// stored in |info|.  Returns |object| on success, undefined when the
// property could not be installed (access failure, non-configurable
// conflict, or unsupported elements kind).
MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
                                          Handle<AccessorInfo> info) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();
  Handle<Name> name(Name::cast(info->name()));

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->undefined_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Forward to the real global object behind the proxy.
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return object;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return SetAccessor(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), info);
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  if (is_element) {
    // Accessors on array indices of JSArrays are not supported here.
    if (object->IsJSArray()) return factory->undefined_value();

    // Accessors overwrite previous callbacks (cf. with getters/setters).
    switch (object->GetElementsKind()) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        break;

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
      case EXTERNAL_##TYPE##_ELEMENTS:                                        \
      case TYPE##_ELEMENTS:                                                   \

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
        // Ignore getters and setters on pixel and external array
        // elements.
        return factory->undefined_value();

      case DICTIONARY_ELEMENTS:
        break;
      case SLOPPY_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;
    }

    SetElementCallback(object, index, info, info->property_attributes());
  } else {
    // Lookup the name.
    LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
    CHECK(GetPropertyAttributes(&it).has_value);
    // ES5 forbids turning a property into an accessor if it's not
    // configurable. See 8.6.1 (Table 5).
    if (it.IsFound() && (it.IsReadOnly() || !it.IsConfigurable())) {
      return factory->undefined_value();
    }

    SetPropertyCallback(object, name, info, info->property_attributes());
  }

  return object;
}
// Looks up the getter or setter component for |name| on |object|, walking
// the prototype chain.  Returns undefined if no accessor pair is found,
// access is denied, or a proxy is encountered.
MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
                                          Handle<Name> name,
                                          AccessorComponent component) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Make the lookup and include prototypes.
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    // Element accessors can only live in dictionary-mode backing stores, so
    // walk the chain by hand looking at element dictionaries.
    for (PrototypeIterator iter(isolate, object,
                                PrototypeIterator::START_AT_RECEIVER);
         !iter.IsAtEnd(); iter.Advance()) {
      Handle<Object> current = PrototypeIterator::GetCurrent(iter);
      // Check access rights if needed.
      if (current->IsAccessCheckNeeded() &&
          !isolate->MayNamedAccess(Handle<JSObject>::cast(current), name,
                                   v8::ACCESS_HAS)) {
        isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(current),
                                         v8::ACCESS_HAS);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
      }

      if (current->IsJSObject() &&
          Handle<JSObject>::cast(current)->HasDictionaryElements()) {
        JSObject* js_object = JSObject::cast(*current);
        SeededNumberDictionary* dictionary = js_object->element_dictionary();
        int entry = dictionary->FindEntry(index);
        if (entry != SeededNumberDictionary::kNotFound) {
          Object* element = dictionary->ValueAt(entry);
          if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
              element->IsAccessorPair()) {
            return handle(AccessorPair::cast(element)->GetComponent(component),
                          isolate);
          }
        }
      }
    }
  } else {
    LookupIterator it(object, name,
                      LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
    for (; it.IsFound(); it.Next()) {
      switch (it.state()) {
        case LookupIterator::INTERCEPTOR:
        case LookupIterator::NOT_FOUND:
        case LookupIterator::TRANSITION:
          // These states are filtered out by the lookup configuration.
          UNREACHABLE();

        case LookupIterator::ACCESS_CHECK:
          if (it.HasAccess(v8::ACCESS_HAS)) continue;
          isolate->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
                                           v8::ACCESS_HAS);
          RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
          return isolate->factory()->undefined_value();

        case LookupIterator::JSPROXY:
          return isolate->factory()->undefined_value();

        case LookupIterator::DATA:
          // Data properties shadow nothing here; keep walking the chain.
          continue;
        case LookupIterator::ACCESSOR: {
          Handle<Object> maybe_pair = it.GetAccessors();
          if (maybe_pair->IsAccessorPair()) {
            return handle(
                AccessorPair::cast(*maybe_pair)->GetComponent(component),
                isolate);
          }
        }
      }
    }
  }
  return isolate->factory()->undefined_value();
}
// Finds the name of an own property whose stored value equals |value|.
// Used by debugging/mirror code.  Returns undefined if no match is found.
Object* JSObject::SlowReverseLookup(Object* value) {
  if (HasFastProperties()) {
    int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < number_of_own_descriptors; i++) {
      if (descs->GetType(i) == FIELD) {
        Object* property =
            RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i));
        if (descs->GetDetails(i).representation().IsDouble()) {
          // Double fields box their value in a mutable heap number, so
          // compare numerically rather than by identity.
          DCHECK(property->IsMutableHeapNumber());
          if (value->IsNumber() && property->Number() == value->Number()) {
            return descs->GetKey(i);
          }
        } else if (property == value) {
          return descs->GetKey(i);
        }
      } else if (descs->GetType(i) == CONSTANT) {
        if (descs->GetConstant(i) == value) {
          return descs->GetKey(i);
        }
      }
    }
    return GetHeap()->undefined_value();
  } else {
    return property_dictionary()->SlowReverseLookup(value);
  }
}
// Allocates a new map that copies |map|'s basic configuration but owns no
// descriptors and has its transient state (deprecation, enum cache,
// construction counter) reset.
Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
  Handle<Map> result = map->GetIsolate()->factory()->NewMap(
      map->instance_type(), instance_size);
  result->set_prototype(map->prototype());
  result->set_constructor(map->constructor());
  result->set_bit_field(map->bit_field());
  result->set_bit_field2(map->bit_field2());
  int new_bit_field3 = map->bit_field3();
  // The copy starts out owning its (empty) descriptor array.
  new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
  new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
  new_bit_field3 = EnumLengthBits::update(new_bit_field3,
                                          kInvalidEnumCacheSentinel);
  new_bit_field3 = Deprecated::update(new_bit_field3, false);
  if (!map->is_dictionary_map()) {
    new_bit_field3 = IsUnstable::update(new_bit_field3, false);
  }
  new_bit_field3 = ConstructionCount::update(new_bit_field3,
                                             JSFunction::kNoSlackTracking);
  result->set_bit_field3(new_bit_field3);
  return result;
}
// Returns a dictionary-mode (normalized) version of |fast_map|, using the
// per-context NormalizedMapCache when available so equivalent fast maps
// share one normalized map.
Handle<Map> Map::Normalize(Handle<Map> fast_map,
                           PropertyNormalizationMode mode) {
  DCHECK(!fast_map->is_dictionary_map());

  Isolate* isolate = fast_map->GetIsolate();
  Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
                             isolate);
  // The cache slot may be undefined during bootstrapping.
  bool use_cache = !maybe_cache->IsUndefined();
  Handle<NormalizedMapCache> cache;
  if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);

  Handle<Map> new_map;
  if (use_cache && cache->Get(fast_map, mode).ToHandle(&new_map)) {
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) new_map->DictionaryMapVerify();
#endif
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ics which can be
      // applied to the shared map.
      Handle<Map> fresh = Map::CopyNormalized(fast_map, mode);

      DCHECK(memcmp(fresh->address(),
                    new_map->address(),
                    Map::kCodeCacheOffset) == 0);
      STATIC_ASSERT(Map::kDependentCodeOffset ==
                    Map::kCodeCacheOffset + kPointerSize);
      int offset = Map::kDependentCodeOffset + kPointerSize;
      DCHECK(memcmp(fresh->address() + offset,
                    new_map->address() + offset,
                    Map::kSize - offset) == 0);
    }
#endif
  } else {
    new_map = Map::CopyNormalized(fast_map, mode);
    if (use_cache) {
      cache->Set(fast_map, new_map);
      isolate->counters()->normalized_maps()->Increment();
    }
    // The fast map may no longer be a leaf; dependent code must be notified.
    fast_map->NotifyLeafMapLayoutChange();
  }
  return new_map;
}
// Creates a fresh dictionary-mode copy of |map|.  CLEAR_INOBJECT_PROPERTIES
// shrinks the instance so in-object property slots are dropped.
Handle<Map> Map::CopyNormalized(Handle<Map> map,
                                PropertyNormalizationMode mode) {
  int new_instance_size = map->instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= map->inobject_properties() * kPointerSize;
  }

  Handle<Map> result = RawCopy(map, new_instance_size);

  if (mode != CLEAR_INOBJECT_PROPERTIES) {
    result->set_inobject_properties(map->inobject_properties());
  }

  result->set_dictionary_map(true);
  result->set_migration_target(false);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) result->DictionaryMapVerify();
#endif

  return result;
}
// Copies |map| (same instance size) without taking over its descriptors;
// the caller is responsible for installing descriptors on the result.
Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
  Handle<Map> result = RawCopy(map, map->instance_size());

  // Please note instance_type and instance_size are set when allocated.
  result->set_inobject_properties(map->inobject_properties());
  result->set_unused_property_fields(map->unused_property_fields());

  result->set_pre_allocated_property_fields(
      map->pre_allocated_property_fields());
  result->ClearCodeCache(map->GetHeap());
  map->NotifyLeafMapLayoutChange();
  return result;
}
// Appends |descriptor| to a descriptor array shared between |map| and the
// new child map, and connects the child with a simple transition.  Only
// valid when |map| owns its descriptor array.
Handle<Map> Map::ShareDescriptor(Handle<Map> map,
                                 Handle<DescriptorArray> descriptors,
                                 Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  DCHECK(map->NumberOfOwnDescriptors() ==
         map->instance_descriptors()->number_of_descriptors());

  Handle<Map> result = CopyDropDescriptors(map);
  Handle<Name> name = descriptor->GetKey();

  // Ensure there's space for the new descriptor in the shared descriptor array.
  if (descriptors->NumberOfSlackDescriptors() == 0) {
    int old_size = descriptors->number_of_descriptors();
    if (old_size == 0) {
      descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
    } else {
      // Growing may reallocate; re-fetch the (possibly new) array below.
      EnsureDescriptorSlack(
          map, SlackForArraySize(old_size, kMaxNumberOfDescriptors));
      descriptors = handle(map->instance_descriptors());
    }
  }

  {
    // Append and install atomically with respect to GC so the new map never
    // points at a descriptor array missing its own last descriptor.
    DisallowHeapAllocation no_gc;
    descriptors->Append(descriptor);
    result->InitializeDescriptors(*descriptors);
  }

  DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
  ConnectTransition(map, result, name, SIMPLE_TRANSITION);

  return result;
}
// Records a transition from |parent| to |child| under |name|.  Prototype
// maps never get transition arrays; for them only descriptor ownership is
// updated.
void Map::ConnectTransition(Handle<Map> parent, Handle<Map> child,
                            Handle<Name> name, SimpleTransitionFlag flag) {
  // The child now (co-)owns the descriptors; the parent must not extend them.
  parent->set_owns_descriptors(false);
  if (parent->is_prototype_map()) {
    DCHECK(child->is_prototype_map());
  } else {
    Handle<TransitionArray> transitions =
        TransitionArray::Insert(parent, name, child, flag);
    // Insert may have reallocated the array; only store if it changed.
    if (!parent->HasTransitionArray() ||
        *transitions != parent->transitions()) {
      parent->set_transitions(*transitions);
    }
    child->SetBackPointer(*parent);
  }
}
// Copies |map| and installs |descriptors| on the copy.  When a transition
// can be recorded the maps stay linked; otherwise the descriptors are
// generalized (tagged representation, Any field type) because the new map
// is free-floating and must not over-constrain shared descriptor state.
Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
                                        Handle<DescriptorArray> descriptors,
                                        TransitionFlag flag,
                                        MaybeHandle<Name> maybe_name,
                                        SimpleTransitionFlag simple_flag) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(map);
  result->InitializeDescriptors(*descriptors);

  if (!map->is_prototype_map()) {
    if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
      Handle<Name> name;
      CHECK(maybe_name.ToHandle(&name));
      ConnectTransition(map, result, name, simple_flag);
    } else {
      int length = descriptors->number_of_descriptors();
      for (int i = 0; i < length; i++) {
        descriptors->SetRepresentation(i, Representation::Tagged());
        if (descriptors->GetDetails(i).type() == FIELD) {
          descriptors->SetValue(i, HeapType::Any());
        }
      }
    }
  }

  return result;
}
// Since this method is used to rewrite an existing transition tree, it can
// always insert transitions without checking.
Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
                                        int new_descriptor,
                                        Handle<DescriptorArray> descriptors) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(map);

  result->InitializeDescriptors(*descriptors);
  result->SetNumberOfOwnDescriptors(new_descriptor + 1);

  // A new FIELD descriptor consumes one of the unused property slots; when
  // none remain, a backing-store extension of kFieldsAdded slots is implied.
  int unused_property_fields = map->unused_property_fields();
  if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
    unused_property_fields = map->unused_property_fields() - 1;
    if (unused_property_fields < 0) {
      unused_property_fields += JSObject::kFieldsAdded;
    }
  }

  result->set_unused_property_fields(unused_property_fields);

  Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
  ConnectTransition(map, result, name, SIMPLE_TRANSITION);

  return result;
}
// Copies |map| with a different elements kind, recording an elements
// transition when allowed so future objects can reuse the new map.
Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind,
                                    TransitionFlag flag) {
  if (flag == INSERT_TRANSITION) {
    // Only one elements transition per map is supported, and only between
    // compatible kinds; these DCHECKs document the allowed combinations.
    DCHECK(!map->HasElementsTransition() ||
           ((map->elements_transition_map()->elements_kind() ==
                 DICTIONARY_ELEMENTS ||
             IsExternalArrayElementsKind(
                 map->elements_transition_map()->elements_kind())) &&
            (kind == DICTIONARY_ELEMENTS ||
             IsExternalArrayElementsKind(kind))));
    DCHECK(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
    DCHECK(kind != map->elements_kind());
  }

  bool insert_transition = flag == INSERT_TRANSITION &&
                           map->CanHaveMoreTransitions() &&
                           !map->HasElementsTransition();

  if (insert_transition && map->owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    Handle<Map> new_map = CopyDropDescriptors(map);

    ConnectElementsTransition(map, new_map);

    new_map->set_elements_kind(kind);
    new_map->InitializeDescriptors(map->instance_descriptors());
    return new_map;
  }

  // In case the map did not own its own descriptors, a split is forced by
  // copying the map; creating a new descriptor array cell.
  // Create a new free-floating map only if we are not allowed to store it.
  Handle<Map> new_map = Copy(map);

  new_map->set_elements_kind(kind);

  if (insert_transition) {
    ConnectElementsTransition(map, new_map);
  }

  return new_map;
}
// Copies |map| with the is_observed bit set (Object.observe support),
// linking the maps through the private observed-symbol transition when
// possible.
Handle<Map> Map::CopyForObserved(Handle<Map> map) {
  DCHECK(!map->is_observed());

  Isolate* isolate = map->GetIsolate();

  // In case the map owned its own descriptors, share the descriptors and
  // transfer ownership to the new map.
  Handle<Map> new_map;
  if (map->owns_descriptors()) {
    new_map = CopyDropDescriptors(map);
  } else {
    DCHECK(!map->is_prototype_map());
    new_map = Copy(map);
  }

  new_map->set_is_observed();
  if (map->owns_descriptors()) {
    new_map->InitializeDescriptors(map->instance_descriptors());
  }

  if (map->CanHaveMoreTransitions()) {
    Handle<Name> name = isolate->factory()->observed_symbol();
    ConnectTransition(map, new_map, name, FULL_TRANSITION);
  }
  return new_map;
}
6768 Handle<Map> Map::Copy(Handle<Map> map) {
6769 Handle<DescriptorArray> descriptors(map->instance_descriptors());
6770 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
6771 Handle<DescriptorArray> new_descriptors =
6772 DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors);
6773 return CopyReplaceDescriptors(
6774 map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>());
// Creates a fresh object map with room for |inobject_properties| in-object
// property slots, clamped so the instance size stays within limits.
Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
  Handle<Map> copy = Copy(handle(isolate->object_function()->initial_map()));

  // Check that we do not overflow the instance size when adding the extra
  // inobject properties. If the instance size overflows, we allocate as many
  // properties as we can as inobject properties.
  int max_extra_properties =
      (JSObject::kMaxInstanceSize - JSObject::kHeaderSize) >> kPointerSizeLog2;

  if (inobject_properties > max_extra_properties) {
    inobject_properties = max_extra_properties;
  }

  int new_instance_size =
      JSObject::kHeaderSize + kPointerSize * inobject_properties;

  // Adjust the map with the extra inobject properties.
  copy->set_inobject_properties(inobject_properties);
  copy->set_unused_property_fields(inobject_properties);
  copy->set_instance_size(new_instance_size);
  // The visitor id depends on the instance size, so recompute it.
  copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
  return copy;
}
// Copies |map| for Object.freeze: all own descriptors become FROZEN
// (read-only, non-configurable), the map becomes non-extensible, and
// elements go to dictionary mode so element stores are checked too.
Handle<Map> Map::CopyForFreeze(Handle<Map> map) {
  int num_descriptors = map->NumberOfOwnDescriptors();
  Isolate* isolate = map->GetIsolate();
  Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
      handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN);
  Handle<Map> new_map = CopyReplaceDescriptors(
      map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol());

  new_map->set_is_extensible(false);
  new_map->set_elements_kind(DICTIONARY_ELEMENTS);
  return new_map;
}
// Returns whether |value| can be stored under descriptor |descriptor|
// without generalizing the descriptor's representation or field type.
bool DescriptorArray::CanHoldValue(int descriptor, Object* value) {
  PropertyDetails details = GetDetails(descriptor);
  switch (details.type()) {
    case FIELD:
      return value->FitsRepresentation(details.representation()) &&
             GetFieldType(descriptor)->NowContains(value);

    case CONSTANT:
      DCHECK(GetConstant(descriptor) != value ||
             value->FitsRepresentation(details.representation()));
      // A CONSTANT descriptor only ever holds that exact object.
      return GetConstant(descriptor) == value;

    case CALLBACKS:
      // Accessors never hold a plain data value.
      return false;

    case NORMAL:
      // NORMAL properties live in dictionaries, never in descriptor arrays.
      UNREACHABLE();
      break;
  }

  UNREACHABLE();
  return false;
}
// Returns a map that can store |value| under existing descriptor
// |descriptor|, generalizing the representation/field type if necessary.
Handle<Map> Map::PrepareForDataProperty(Handle<Map> map, int descriptor,
                                        Handle<Object> value) {
  // Dictionaries can store any property value.
  if (map->is_dictionary_map()) return map;

  // Migrate to the newest map before storing the property.
  map = Update(map);

  Handle<DescriptorArray> descriptors(map->instance_descriptors());

  if (descriptors->CanHoldValue(descriptor, *value)) return map;

  Isolate* isolate = map->GetIsolate();
  Representation representation = value->OptimalRepresentation();
  Handle<HeapType> type = value->OptimalType(isolate, representation);

  return GeneralizeRepresentation(map, descriptor, representation, type,
                                  FORCE_FIELD);
}
// Returns the map for |map| after adding data property |name| with |value|.
// Reuses an existing transition when one matches; otherwise creates a new
// descriptor (constant for functions, field otherwise), falling back to a
// normalized map when fast storage is not possible.
Handle<Map> Map::TransitionToDataProperty(Handle<Map> map, Handle<Name> name,
                                          Handle<Object> value,
                                          PropertyAttributes attributes,
                                          StoreFromKeyed store_mode) {
  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) return map;

  // Migrate to the newest map before storing the property.
  map = Update(map);

  int index = map->SearchTransition(*name);
  if (index != TransitionArray::kNotFound) {
    Handle<Map> transition(map->GetTransition(index));
    int descriptor = transition->LastAdded();

    // TODO(verwaest): Handle attributes better.
    DescriptorArray* descriptors = transition->instance_descriptors();
    if (descriptors->GetDetails(descriptor).attributes() != attributes) {
      return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
    }

    return Map::PrepareForDataProperty(transition, descriptor, value);
  }

  TransitionFlag flag = INSERT_TRANSITION;
  MaybeHandle<Map> maybe_map;
  if (value->IsJSFunction()) {
    // Functions are stored as CONSTANT descriptors so calls can be inlined.
    maybe_map = Map::CopyWithConstant(map, name, value, attributes, flag);
  } else if (!map->TooManyFastProperties(store_mode)) {
    Isolate* isolate = name->GetIsolate();
    Representation representation = value->OptimalRepresentation();
    Handle<HeapType> type = value->OptimalType(isolate, representation);
    maybe_map =
        Map::CopyWithField(map, name, type, attributes, representation, flag);
  }

  Handle<Map> result;
  if (!maybe_map.ToHandle(&result)) {
    // Too many fast properties (or copy failed): go to dictionary mode.
    return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
  }

  return result;
}
// Returns a map in which descriptor |descriptor| has the given attributes.
// Currently always produces a unique (untracked) map.
Handle<Map> Map::ReconfigureDataProperty(Handle<Map> map, int descriptor,
                                         PropertyAttributes attributes) {
  // Dictionaries have to be reconfigured in-place.
  DCHECK(!map->is_dictionary_map());

  // For now, give up on transitioning and just create a unique map.
  // TODO(verwaest/ishell): Cache transitions with different attributes.
  return CopyGeneralizeAllRepresentations(map, descriptor, FORCE_FIELD,
                                          attributes, "attributes mismatch");
}
// Returns the map for |map| after installing |accessor| as the given
// component of an accessor property |name|.  Reuses a matching transition
// or extends an existing AccessorPair where possible; any mismatch in
// type, attributes, or existing components forces map normalization.
Handle<Map> Map::TransitionToAccessorProperty(Handle<Map> map,
                                              Handle<Name> name,
                                              AccessorComponent component,
                                              Handle<Object> accessor,
                                              PropertyAttributes attributes) {
  Isolate* isolate = name->GetIsolate();

  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) {
    // For global objects, property cells are inlined. We need to change the
    // map.
    if (map->IsGlobalObjectMap()) return Copy(map);
    return map;
  }

  // Migrate to the newest map before transitioning to the new property.
  map = Update(map);

  PropertyNormalizationMode mode = map->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;

  int index = map->SearchTransition(*name);
  if (index != TransitionArray::kNotFound) {
    Handle<Map> transition(map->GetTransition(index));
    DescriptorArray* descriptors = transition->instance_descriptors();
    // Fast path, assume that we're modifying the last added descriptor.
    int descriptor = transition->LastAdded();
    if (descriptors->GetKey(descriptor) != *name) {
      // If not, search for the descriptor.
      descriptor = descriptors->SearchWithCache(*name, *transition);
    }

    if (descriptors->GetDetails(descriptor).type() != CALLBACKS) {
      return Map::Normalize(map, mode);
    }

    // TODO(verwaest): Handle attributes better.
    if (descriptors->GetDetails(descriptor).attributes() != attributes) {
      return Map::Normalize(map, mode);
    }

    Handle<Object> maybe_pair(descriptors->GetValue(descriptor), isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(map, mode);
    }

    Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
    if (pair->get(component) != *accessor) {
      return Map::Normalize(map, mode);
    }

    // The transition already stores exactly this accessor: reuse it.
    return transition;
  }

  Handle<AccessorPair> pair;
  DescriptorArray* old_descriptors = map->instance_descriptors();
  int descriptor = old_descriptors->SearchWithCache(*name, *map);
  if (descriptor != DescriptorArray::kNotFound) {
    PropertyDetails old_details = old_descriptors->GetDetails(descriptor);
    if (old_details.type() != CALLBACKS) {
      return Map::Normalize(map, mode);
    }

    if (old_details.attributes() != attributes) {
      return Map::Normalize(map, mode);
    }

    Handle<Object> maybe_pair(old_descriptors->GetValue(descriptor), isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(map, mode);
    }

    Object* current = Handle<AccessorPair>::cast(maybe_pair)->get(component);
    if (current == *accessor) return map;

    // The slot already holds a different accessor; give up on fast mode.
    if (!current->IsTheHole()) {
      return Map::Normalize(map, mode);
    }

    // Copy the pair so the old map's descriptor stays unchanged.
    pair = AccessorPair::Copy(Handle<AccessorPair>::cast(maybe_pair));
  } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
             map->TooManyFastProperties(CERTAINLY_NOT_STORE_FROM_KEYED)) {
    return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
  } else {
    pair = isolate->factory()->NewAccessorPair();
  }

  pair->set(component, *accessor);
  TransitionFlag flag = INSERT_TRANSITION;
  CallbacksDescriptor new_desc(name, pair, attributes);
  return Map::CopyInsertDescriptor(map, &new_desc, flag);
}
// Returns a map whose descriptor array contains |map|'s descriptors plus
// |descriptor| appended at the end.
7015 Handle<Map> Map::CopyAddDescriptor(Handle<Map> map,
7016 Descriptor* descriptor,
7017 TransitionFlag flag) {
7018 Handle<DescriptorArray> descriptors(map->instance_descriptors());
7020 // Ensure the key is unique.
7021 descriptor->KeyToUniqueName();
// Fast path: when a transition is requested and |map| still owns its
// descriptor array (and may take more transitions), the array can be
// shared with the new map instead of copied.
7023 if (flag == INSERT_TRANSITION &&
7024 map->owns_descriptors() &&
7025 map->CanHaveMoreTransitions()) {
7026 return ShareDescriptor(map, descriptors, descriptor);
// Slow path: copy the owned descriptors with one slot of slack, append the
// new descriptor, and build a new map around the copy.
7029 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7030 descriptors, map->NumberOfOwnDescriptors(), 1);
7031 new_descriptors->Append(descriptor);
7033 return CopyReplaceDescriptors(
7034 map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION);
// Inserts |descriptor| into a copy of |map|: if a descriptor with the same
// key already exists it is replaced in place, otherwise the descriptor is
// appended via CopyAddDescriptor.
7038 Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map,
7039 Descriptor* descriptor,
7040 TransitionFlag flag) {
7041 Handle<DescriptorArray> old_descriptors(map->instance_descriptors());
7043 // Ensure the key is unique.
7044 descriptor->KeyToUniqueName();
7046 // We replace the key if it is already present.
7047 int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
7048 if (index != DescriptorArray::kNotFound) {
7049 return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
7051 return CopyAddDescriptor(map, descriptor, flag);
// Convenience wrapper: copies the first |enumeration_index| descriptors of
// |desc| without changing any attributes (delegates with NONE).
7055 Handle<DescriptorArray> DescriptorArray::CopyUpTo(
7056 Handle<DescriptorArray> desc,
7057 int enumeration_index,
7059 return DescriptorArray::CopyUpToAddAttributes(
7060 desc, enumeration_index, NONE, slack);
// Copies the first |enumeration_index| descriptors of |desc| into a freshly
// allocated descriptor array (with |slack| extra capacity), optionally
// OR-ing |attributes| into each copied descriptor's property details.
7064 Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
7065 Handle<DescriptorArray> desc,
7066 int enumeration_index,
7067 PropertyAttributes attributes,
// Nothing to copy and no slack requested: reuse the canonical empty array.
7069 if (enumeration_index + slack == 0) {
7070 return desc->GetIsolate()->factory()->empty_descriptor_array();
7073 int size = enumeration_index;
7075 Handle<DescriptorArray> descriptors =
7076 DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
7077 DescriptorArray::WhitenessWitness witness(*descriptors);
// Slow path: rebuild each descriptor so the new attributes can be merged in.
7079 if (attributes != NONE) {
7080 for (int i = 0; i < size; ++i) {
7081 Object* value = desc->GetValue(i);
7082 Name* key = desc->GetKey(i);
7083 PropertyDetails details = desc->GetDetails(i);
7084 // Bulk attribute changes never affect private properties.
7085 if (!key->IsSymbol() || !Symbol::cast(key)->is_private()) {
7086 int mask = DONT_DELETE | DONT_ENUM;
7087 // READ_ONLY is an invalid attribute for JS setters/getters.
7088 if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
7091 details = details.CopyAddAttributes(
7092 static_cast<PropertyAttributes>(attributes & mask));
7094 Descriptor inner_desc(
7095 handle(key), handle(value, desc->GetIsolate()), details);
7096 descriptors->Set(i, &inner_desc, witness);
// Fast path (attributes == NONE): plain element-wise copy.
7099 for (int i = 0; i < size; ++i) {
7100 descriptors->CopyFrom(i, *desc, witness);
// A partial copy may have disturbed the sorted-key order; re-sort it.
7104 if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();
// Returns a copy of |map| in which the descriptor at |insertion_index| is
// replaced by |descriptor|. The replacement must keep the same key.
7110 Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
7111 Handle<DescriptorArray> descriptors,
7112 Descriptor* descriptor,
7113 int insertion_index,
7114 TransitionFlag flag) {
7115 // Ensure the key is unique.
7116 descriptor->KeyToUniqueName();
7118 Handle<Name> key = descriptor->GetKey();
7119 DCHECK(*key == descriptors->GetKey(insertion_index));
7121 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
7122 descriptors, map->NumberOfOwnDescriptors());
7124 new_descriptors->Replace(insertion_index, descriptor);
// Replacing the last descriptor keeps the transition "simple".
7126 SimpleTransitionFlag simple_flag =
7127 (insertion_index == descriptors->number_of_descriptors() - 1)
7130 return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
// Inserts (name, code) into |map|'s code cache, lazily allocating the cache
// first. An unallocated cache is represented by a plain FixedArray.
7134 void Map::UpdateCodeCache(Handle<Map> map,
7136 Handle<Code> code) {
7137 Isolate* isolate = map->GetIsolate();
7138 HandleScope scope(isolate);
7139 // Allocate the code cache if not present.
7140 if (map->code_cache()->IsFixedArray()) {
7141 Handle<Object> result = isolate->factory()->NewCodeCache();
7142 map->set_code_cache(*result);
7145 // Update the code cache.
7146 Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
7147 CodeCache::Update(code_cache, name, code);
// Looks up cached code by name and flags. A code_cache() that is still a
// plain FixedArray means no cache has been allocated yet (see
// UpdateCodeCache), so the lookup is only done otherwise.
7151 Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
7152 // Do a lookup if a code cache exists.
7153 if (!code_cache()->IsFixedArray()) {
7154 return CodeCache::cast(code_cache())->Lookup(name, flags);
7156 return GetHeap()->undefined_value();
// Returns the internal index of (name, code) inside the code cache, used to
// pair with RemoveFromCodeCache. Only consults an allocated cache.
7161 int Map::IndexInCodeCache(Object* name, Code* code) {
7162 // Get the internal index if a code cache exists.
7163 if (!code_cache()->IsFixedArray()) {
7164 return CodeCache::cast(code_cache())->GetIndex(name, code);
// Removes a cache entry previously located by IndexInCodeCache. The cache
// must exist (DCHECKed) since no GC may run between the two calls.
7170 void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
7171 // No GC is supposed to happen between a call to IndexInCodeCache and
7172 // RemoveFromCodeCache so the code cache must be there.
7173 DCHECK(!code_cache()->IsFixedArray());
7174 CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
7178 // An iterator over all map transitions in a descriptor array, reusing the
7179 // constructor field of the map while it is running. Negative values in
7180 // the constructor field indicate an active map transition iteration. The
7181 // original constructor is restored after iterating over all entries.
7182 class IntrusiveMapTransitionIterator {
7184 IntrusiveMapTransitionIterator(
7185 Map* map, TransitionArray* transition_array, Object* constructor)
7187 transition_array_(transition_array),
7188 constructor_(constructor) { }
// Begins iteration by stashing a negative Smi in the map's constructor
// slot; a no-op if iteration is already in progress.
7190 void StartIfNotStarted() {
7191 DCHECK(!(*IteratorField())->IsSmi() || IsIterating());
7192 if (!(*IteratorField())->IsSmi()) {
7193 DCHECK(*IteratorField() == constructor_);
7194 *IteratorField() = Smi::FromInt(-1);
// Iterating iff the constructor slot holds a negative Smi.
7198 bool IsIterating() {
7199 return (*IteratorField())->IsSmi() &&
7200 Smi::cast(*IteratorField())->value() < 0;
// Transition index i is encoded in the slot as the Smi -(i + 1), so the
// stored value is decremented to advance. When the transitions are
// exhausted, the original constructor is restored.
7204 DCHECK(IsIterating());
7205 int value = Smi::cast(*IteratorField())->value();
7206 int index = -value - 1;
7207 int number_of_transitions = transition_array_->number_of_transitions();
7208 if (index < number_of_transitions) {
7209 *IteratorField() = Smi::FromInt(value - 1);
7210 return transition_array_->GetTarget(index);
7213 *IteratorField() = constructor_;
// The constructor field of map_ doubles as the iteration cursor.
7218 Object** IteratorField() {
7219 return HeapObject::RawField(map_, Map::kConstructorOffset);
7223 TransitionArray* transition_array_;
7224 Object* constructor_;
7228 // An iterator over all prototype transitions, reusing the constructor field
7229 // of the map while it is running. Positive values in the constructor field
7230 // indicate an active prototype transition iteration. The original constructor
7231 // is restored after iterating over all entries.
7232 class IntrusivePrototypeTransitionIterator {
7234 IntrusivePrototypeTransitionIterator(
7235 Map* map, HeapObject* proto_trans, Object* constructor)
7236 : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }
// Begins iteration by stashing Smi 0 (the first transition index) in the
// map's constructor slot; a no-op if iteration already started.
7238 void StartIfNotStarted() {
7239 if (!(*IteratorField())->IsSmi()) {
7240 DCHECK(*IteratorField() == constructor_);
7241 *IteratorField() = Smi::FromInt(0);
// Iterating iff the constructor slot holds a non-negative Smi (contrast
// with the negative encoding used by IntrusiveMapTransitionIterator).
7245 bool IsIterating() {
7246 return (*IteratorField())->IsSmi() &&
7247 Smi::cast(*IteratorField())->value() >= 0;
// Returns the next prototype-transition target map and advances the stored
// index; restores the original constructor once all entries are visited.
7251 DCHECK(IsIterating());
7252 int transitionNumber = Smi::cast(*IteratorField())->value();
7253 if (transitionNumber < NumberOfTransitions()) {
7254 *IteratorField() = Smi::FromInt(transitionNumber + 1);
7255 return GetTransition(transitionNumber);
7257 *IteratorField() = constructor_;
// The constructor field of map_ doubles as the iteration cursor.
7262 Object** IteratorField() {
7263 return HeapObject::RawField(map_, Map::kConstructorOffset);
// Entry count is stored as a Smi inside the prototype-transitions array.
7266 int NumberOfTransitions() {
7267 FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7268 Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
7269 return Smi::cast(num)->value();
7272 Map* GetTransition(int transitionNumber) {
7273 FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7274 return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
// Maps a transition ordinal to its slot in the flat transitions array:
// header, then fixed-size entries.
7277 int IndexFor(int transitionNumber) {
7278 return Map::kProtoTransitionHeaderSize +
7279 Map::kProtoTransitionMapOffset +
7280 transitionNumber * Map::kProtoTransitionElementsPerEntry;
7284 HeapObject* proto_trans_;
7285 Object* constructor_;
7289 // To traverse the transition tree iteratively, we have to store two kinds of
7290 // information in a map: The parent map in the traversal and which children of a
7291 // node have already been visited. To do this without additional memory, we
7292 // temporarily reuse two fields with known values:
7294 // (1) The map of the map temporarily holds the parent, and is restored to the
7295 // meta map afterwards.
7297 // (2) The info which children have already been visited depends on which part
7298 // of the map we currently iterate. We use the constructor field of the
7299 // map to store the current index. We can do that because the constructor
7300 // is the same for all involved maps.
7302 // (a) If we currently follow normal map transitions, we temporarily store
7303 // the current index in the constructor field, and restore it to the
7304 // original constructor afterwards. Note that a single descriptor can
7305 // have 0, 1, or 2 transitions.
7307 // (b) If we currently follow prototype transitions, we temporarily store
7308 // the current index in the constructor field, and restore it to the
7309 // original constructor afterwards.
7311 // Note that the child iterator is just a concatenation of two iterators: One
7312 // iterating over map transitions and one iterating over prototype transitions.
7313 class TraversableMap : public Map {
7315 // Record the parent in the traversal within this map. Note that this destroys
7317 void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }
7319 // Reset the current map's map, returning the parent previously stored in it.
7320 TraversableMap* GetAndResetParent() {
7321 TraversableMap* old_parent = static_cast<TraversableMap*>(map());
7322 set_map_no_write_barrier(GetHeap()->meta_map());
7326 // If we have an unvisited child map, return that one and advance. If we have
7327 // none, return NULL and restore the overwritten constructor field.
7328 TraversableMap* ChildIteratorNext(Object* constructor) {
7329 if (!HasTransitionArray()) return NULL;
// Prototype transitions are exhausted first, then normal map transitions;
// each intrusive iterator keeps its own cursor in the constructor slot.
7331 TransitionArray* transition_array = transitions();
7332 if (transition_array->HasPrototypeTransitions()) {
7333 HeapObject* proto_transitions =
7334 transition_array->GetPrototypeTransitions();
7335 IntrusivePrototypeTransitionIterator proto_iterator(this,
7338 proto_iterator.StartIfNotStarted();
7339 if (proto_iterator.IsIterating()) {
7340 Map* next = proto_iterator.Next();
7341 if (next != NULL) return static_cast<TraversableMap*>(next);
7345 IntrusiveMapTransitionIterator transition_iterator(this,
7348 transition_iterator.StartIfNotStarted();
7349 if (transition_iterator.IsIterating()) {
7350 Map* next = transition_iterator.Next();
7351 if (next != NULL) return static_cast<TraversableMap*>(next);
7359 // Traverse the transition tree in postorder without using the C++ stack by
7360 // doing pointer reversal.
// Visits every map in this map's transition tree, invoking |callback| on
// each in postorder. Uses pointer reversal (parent stored in the child's
// map slot) instead of the C++ stack, so no allocation may occur.
7361 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
7362 // Make sure that we do not allocate in the callback.
7363 DisallowHeapAllocation no_allocation;
7365 TraversableMap* current = static_cast<TraversableMap*>(this);
7366 // Get the root constructor here to restore it later when finished iterating
7368 Object* root_constructor = constructor();
7370 TraversableMap* child = current->ChildIteratorNext(root_constructor);
7371 if (child != NULL) {
// Descend: remember the parent inside the child, then continue below it.
7372 child->SetParent(current);
// No unvisited children left: visit |current| and climb back up; the loop
// terminates once the root (|this|) itself has been visited.
7375 TraversableMap* parent = current->GetAndResetParent();
7376 callback(current, data);
7377 if (current == this) break;
// Adds (name, code) to this code cache. NORMAL code goes into a lazily
// allocated hash table; everything else goes into the default linear cache.
7384 void CodeCache::Update(
7385 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7386 // The number of monomorphic stubs for normal load/store/call IC's can grow to
7387 // a large number and therefore they need to go into a hash table. They are
7388 // used to load global properties from cells.
7389 if (code->type() == Code::NORMAL) {
7390 // Make sure that a hash table is allocated for the normal load code cache.
7391 if (code_cache->normal_type_cache()->IsUndefined()) {
7392 Handle<Object> result =
7393 CodeCacheHashTable::New(code_cache->GetIsolate(),
7394 CodeCacheHashTable::kInitialSize);
7395 code_cache->set_normal_type_cache(*result);
7397 UpdateNormalTypeCache(code_cache, name, code);
7399 DCHECK(code_cache->default_cache()->IsFixedArray());
7400 UpdateDefaultCache(code_cache, name, code);
// Inserts (name, code) into the flat default cache: overwrite a matching
// entry, else fill the first free/deleted slot, else grow the array.
7405 void CodeCache::UpdateDefaultCache(
7406 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7407 // When updating the default code cache we disregard the type encoded in the
7408 // flags. This allows call constant stubs to overwrite call field
7410 Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());
7412 // First check whether we can update existing code cache without
7414 Handle<FixedArray> cache = handle(code_cache->default_cache());
7415 int length = cache->length();
7417 DisallowHeapAllocation no_alloc;
7418 int deleted_index = -1;
// Scan entries (name, code pairs laid out kCodeCacheEntrySize apart).
// Null key == deleted slot, undefined key == end of used region.
7419 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7420 Object* key = cache->get(i);
7421 if (key->IsNull()) {
7422 if (deleted_index < 0) deleted_index = i;
7425 if (key->IsUndefined()) {
// Prefer reusing an earlier deleted slot over the fresh one.
7426 if (deleted_index >= 0) i = deleted_index;
7427 cache->set(i + kCodeCacheEntryNameOffset, *name);
7428 cache->set(i + kCodeCacheEntryCodeOffset, *code);
// Same name and same type-stripped flags: replace the code in place.
7431 if (name->Equals(Name::cast(key))) {
7433 Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
7434 if (Code::RemoveTypeFromFlags(found) == flags) {
7435 cache->set(i + kCodeCacheEntryCodeOffset, *code);
7441 // Reached the end of the code cache. If there were deleted
7442 // elements, reuse the space for the first of them.
7443 if (deleted_index >= 0) {
7444 cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
7445 cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);
7450 // Extend the code cache with some new entries (at least one). Must be a
7451 // multiple of the entry size.
7452 int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
7453 new_length = new_length - new_length % kCodeCacheEntrySize;
7454 DCHECK((new_length % kCodeCacheEntrySize) == 0);
7455 cache = FixedArray::CopySize(cache, new_length);
7457 // Add the (name, code) pair to the new cache.
7458 cache->set(length + kCodeCacheEntryNameOffset, *name);
7459 cache->set(length + kCodeCacheEntryCodeOffset, *code);
7460 code_cache->set_default_cache(*cache);
// Inserts (name, code) into the hash-table cache for NORMAL code; Put may
// reallocate the table, so the (possibly new) table is stored back.
7464 void CodeCache::UpdateNormalTypeCache(
7465 Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
7466 // Adding a new entry can cause a new cache to be allocated.
7467 Handle<CodeCacheHashTable> cache(
7468 CodeCacheHashTable::cast(code_cache->normal_type_cache()));
7469 Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
7470 code_cache->set_normal_type_cache(*new_cache);
// Looks up code by name/flags: checks the default cache first (which
// ignores the code type), then the NORMAL-type hash table. A default-cache
// hit with mismatched full flags is treated as a miss, not a fallthrough.
7474 Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
7475 Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
7476 if (result->IsCode()) {
7477 if (Code::cast(result)->flags() == flags) return result;
7478 return GetHeap()->undefined_value();
7480 return LookupNormalTypeCache(name, flags);
// Linear scan of the default cache for an entry whose name matches and
// whose type-stripped flags equal |flags|. Returns undefined on a miss.
7484 Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
7485 FixedArray* cache = default_cache();
7486 int length = cache->length();
7487 for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7488 Object* key = cache->get(i + kCodeCacheEntryNameOffset);
7489 // Skip deleted elements.
7490 if (key->IsNull()) continue;
// Undefined marks the end of the used region (undefined == miss).
7491 if (key->IsUndefined()) return key;
7492 if (name->Equals(Name::cast(key))) {
7493 Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
7494 if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
7499 return GetHeap()->undefined_value();
// Looks up (name, flags) in the NORMAL-type hash table if it has been
// allocated; undefined otherwise.
7503 Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
7504 if (!normal_type_cache()->IsUndefined()) {
7505 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7506 return cache->Lookup(name, flags);
7508 return GetHeap()->undefined_value();
// Returns an internal index for (name, code) suitable for RemoveByIndex.
// NORMAL code: hash-table entry index (-1 if no table). Other code: the
// default-cache slot index of the code plus one.
7513 int CodeCache::GetIndex(Object* name, Code* code) {
7514 if (code->type() == Code::NORMAL) {
7515 if (normal_type_cache()->IsUndefined()) return -1;
7516 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7517 return cache->GetIndex(Name::cast(name), code->flags());
7520 FixedArray* array = default_cache();
7521 int len = array->length();
7522 for (int i = 0; i < len; i += kCodeCacheEntrySize) {
7523 if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
// Removes the entry identified by a GetIndex() result. NORMAL code is
// removed from the hash table; other code has its default-cache name and
// code slots nulled out (null == deleted, distinct from undefined == free).
7529 void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
7530 if (code->type() == Code::NORMAL) {
7531 DCHECK(!normal_type_cache()->IsUndefined());
7532 CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7533 DCHECK(cache->GetIndex(Name::cast(name), code->flags()) == index);
7534 cache->RemoveByIndex(index);
7536 FixedArray* array = default_cache();
// |index| is the code slot + 1 (see GetIndex); name sits one slot before.
7537 DCHECK(array->length() >= index && array->get(index)->IsCode());
7538 // Use null instead of undefined for deleted elements to distinguish
7539 // deleted elements from unused elements. This distinction is used
7540 // when looking up in the cache and when updating the cache.
7541 DCHECK_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
7542 array->set_null(index - 1); // Name.
7543 array->set_null(index); // Code.
7548 // The key in the code cache hash table consists of the property name and the
7549 // code object. The actual match is on the name and the code flags. If a key
7550 // is created using the flags and not a code object it can only be used for
7551 // lookup not to create a new entry.
7552 class CodeCacheHashTableKey : public HashTableKey {
// Lookup-only key: no code object, so AsHandle must not be called on it.
7554 CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
7555 : name_(name), flags_(flags), code_() { }
// Insertion key: flags are derived from the code object itself.
7557 CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
7558 : name_(name), flags_(code->flags()), code_(code) { }
// Stored entries are (name, code) FixedArray pairs; match on name and the
// code's flags.
7560 bool IsMatch(Object* other) OVERRIDE {
7561 if (!other->IsFixedArray()) return false;
7562 FixedArray* pair = FixedArray::cast(other);
7563 Name* name = Name::cast(pair->get(0));
7564 Code::Flags flags = Code::cast(pair->get(1))->flags();
7565 if (flags != flags_) {
7568 return name_->Equals(name);
7571 static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
7572 return name->Hash() ^ flags;
7575 uint32_t Hash() OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }
7577 uint32_t HashForObject(Object* obj) OVERRIDE {
7578 FixedArray* pair = FixedArray::cast(obj);
7579 Name* name = Name::cast(pair->get(0));
7580 Code* code = Code::cast(pair->get(1));
7581 return NameFlagsHashHelper(name, code->flags());
// Materializes the stored form of the key: a 2-element (name, code) array.
// Requires the insertion constructor (code_ must be present).
7584 MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
7585 Handle<Code> code = code_.ToHandleChecked();
7586 Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
7587 pair->set(0, *name_);
7588 pair->set(1, *code);
7595 // TODO(jkummerow): We should be able to get by without this.
7596 MaybeHandle<Code> code_;
// Finds the code cached under (name, flags); the value slot is stored one
// index after the key slot. Returns undefined on a miss.
7600 Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
7601 DisallowHeapAllocation no_alloc;
7602 CodeCacheHashTableKey key(handle(name), flags);
7603 int entry = FindEntry(&key);
7604 if (entry == kNotFound) return GetHeap()->undefined_value();
7605 return get(EntryToIndex(entry) + 1);
// Inserts (name, code), growing the table if needed; callers must use the
// returned table handle since EnsureCapacity may allocate a new one.
7609 Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
7610 Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
7611 CodeCacheHashTableKey key(name, code);
7613 Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);
7615 int entry = new_cache->FindInsertionEntry(key.Hash());
7616 Handle<Object> k = key.AsHandle(cache->GetIsolate());
// Key at EntryToIndex(entry), code value in the following slot.
7618 new_cache->set(EntryToIndex(entry), *k);
7619 new_cache->set(EntryToIndex(entry) + 1, *code);
7620 new_cache->ElementAdded();
// Returns the hash-table entry index of (name, flags), or -1 if absent;
// paired with RemoveByIndex.
7625 int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
7626 DisallowHeapAllocation no_alloc;
7627 CodeCacheHashTableKey key(handle(name), flags);
7628 int entry = FindEntry(&key);
7629 return (entry == kNotFound) ? -1 : entry;
// Deletes the entry at |index| by overwriting both its key and value slots
// with the hole sentinel.
7633 void CodeCacheHashTable::RemoveByIndex(int index) {
7635 Heap* heap = GetHeap();
7636 set(EntryToIndex(index), heap->the_hole_value());
7637 set(EntryToIndex(index) + 1, heap->the_hole_value());
// Caches |code| under the (maps, flags) pair, lazily allocating the hash
// table on first use and storing back the (possibly reallocated) table.
7642 void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
7643 MapHandleList* maps,
7645 Handle<Code> code) {
7646 Isolate* isolate = code_cache->GetIsolate();
7647 if (code_cache->cache()->IsUndefined()) {
7648 Handle<PolymorphicCodeCacheHashTable> result =
7649 PolymorphicCodeCacheHashTable::New(
7651 PolymorphicCodeCacheHashTable::kInitialSize);
7652 code_cache->set_cache(*result);
7654 // This entry shouldn't be contained in the cache yet.
7655 DCHECK(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
7656 ->Lookup(maps, flags)->IsUndefined());
7658 Handle<PolymorphicCodeCacheHashTable> hash_table =
7659 handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
7660 Handle<PolymorphicCodeCacheHashTable> new_cache =
7661 PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
7662 code_cache->set_cache(*new_cache);
// Returns the cached code for (maps, flags), or undefined if the cache has
// never been allocated or the entry is absent.
7666 Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
7667 Code::Flags flags) {
7668 if (!cache()->IsUndefined()) {
7669 PolymorphicCodeCacheHashTable* hash_table =
7670 PolymorphicCodeCacheHashTable::cast(cache());
7671 return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
7673 return GetIsolate()->factory()->undefined_value();
7678 // Despite their name, objects of this class are not stored in the actual
7679 // hash table; instead they're temporarily used for lookups. It is therefore
7680 // safe to have a weak (non-owning) pointer to a MapList as a member field.
7681 class PolymorphicCodeCacheHashTableKey : public HashTableKey {
7683 // Callers must ensure that |maps| outlives the newly constructed object.
7684 PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
7686 code_flags_(code_flags) {}
// Matches against a stored entry (a FixedArray of [flags, map...]): flags,
// list length, then the hashes, then a full order-insensitive map compare.
7688 bool IsMatch(Object* other) OVERRIDE {
7689 MapHandleList other_maps(kDefaultListAllocationSize);
7691 FromObject(other, &other_flags, &other_maps);
7692 if (code_flags_ != other_flags) return false;
7693 if (maps_->length() != other_maps.length()) return false;
7694 // Compare just the hashes first because it's faster.
7695 int this_hash = MapsHashHelper(maps_, code_flags_);
7696 int other_hash = MapsHashHelper(&other_maps, other_flags);
7697 if (this_hash != other_hash) return false;
7699 // Full comparison: for each map in maps_, look for an equivalent map in
7700 // other_maps. This implementation is slow, but probably good enough for
7701 // now because the lists are short (<= 4 elements currently).
7702 for (int i = 0; i < maps_->length(); ++i) {
7703 bool match_found = false;
7704 for (int j = 0; j < other_maps.length(); ++j) {
7705 if (*(maps_->at(i)) == *(other_maps.at(j))) {
7710 if (!match_found) return false;
// XOR of the map hashes keeps the hash independent of map order, matching
// the order-insensitive comparison in IsMatch.
7715 static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
7716 uint32_t hash = code_flags;
7717 for (int i = 0; i < maps->length(); ++i) {
7718 hash ^= maps->at(i)->Hash();
7723 uint32_t Hash() OVERRIDE {
7724 return MapsHashHelper(maps_, code_flags_);
7727 uint32_t HashForObject(Object* obj) OVERRIDE {
7728 MapHandleList other_maps(kDefaultListAllocationSize);
7730 FromObject(obj, &other_flags, &other_maps);
7731 return MapsHashHelper(&other_maps, other_flags);
7734 MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
7735 // The maps in |maps_| must be copied to a newly allocated FixedArray,
7736 // both because the referenced MapList is short-lived, and because C++
7737 // objects can't be stored in the heap anyway.
7738 Handle<FixedArray> list =
7739 isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
7740 list->set(0, Smi::FromInt(code_flags_));
7741 for (int i = 0; i < maps_->length(); ++i) {
7742 list->set(i + 1, *maps_->at(i));
// Decodes a stored entry: slot 0 holds the flags Smi, the rest are maps.
7748 static MapHandleList* FromObject(Object* obj,
7750 MapHandleList* maps) {
7751 FixedArray* list = FixedArray::cast(obj);
7753 *code_flags = Smi::cast(list->get(0))->value();
7754 for (int i = 1; i < list->length(); ++i) {
7755 maps->Add(Handle<Map>(Map::cast(list->get(i))));
7760 MapHandleList* maps_; // weak.
7762 static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
// Finds the code cached for (maps, code_kind); value is stored one slot
// after the key. Returns undefined on a miss.
7766 Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
7768 DisallowHeapAllocation no_alloc;
7769 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
7770 int entry = FindEntry(&key);
7771 if (entry == kNotFound) return GetHeap()->undefined_value();
7772 return get(EntryToIndex(entry) + 1);
// Inserts (maps, code_kind) -> code, growing the table if needed; callers
// must use the returned handle since EnsureCapacity may reallocate.
7776 Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
7777 Handle<PolymorphicCodeCacheHashTable> hash_table,
7778 MapHandleList* maps,
7780 Handle<Code> code) {
7781 PolymorphicCodeCacheHashTableKey key(maps, code_kind);
7782 Handle<PolymorphicCodeCacheHashTable> cache =
7783 EnsureCapacity(hash_table, 1, &key);
7784 int entry = cache->FindInsertionEntry(key.Hash());
7786 Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());
7787 cache->set(EntryToIndex(entry), *obj);
7788 cache->set(EntryToIndex(entry) + 1, *code);
7789 cache->ElementAdded();
// Shrinks this array in place to |new_length| by right-trimming the heap
// object; a no-op when the length is unchanged.
7794 void FixedArray::Shrink(int new_length) {
7795 DCHECK(0 <= new_length && new_length <= length());
7796 if (new_length < length()) {
7797 GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(
7798 this, length() - new_length);
// Returns |content| extended with the element keys of |array|, delegating
// the merge to the array's elements accessor. Propagates exceptions.
7803 MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
7804 Handle<FixedArray> content,
7805 Handle<JSObject> array) {
7806 DCHECK(array->IsJSArray() || array->HasSloppyArgumentsElements());
7807 ElementsAccessor* accessor = array->GetElementsAccessor();
7808 Handle<FixedArray> result;
7809 ASSIGN_RETURN_ON_EXCEPTION(
7810 array->GetIsolate(), result,
7811 accessor->AddElementsToFixedArray(array, array, content),
// Debug builds verify every produced key is a number or a name.
7814 #ifdef ENABLE_SLOW_DCHECKS
7815 if (FLAG_enable_slow_asserts) {
7816 DisallowHeapAllocation no_allocation;
7817 for (int i = 0; i < result->length(); i++) {
7818 Object* current = result->get(i);
7819 DCHECK(current->IsNumber() || current->IsName());
// Returns the union of the keys in |first| and |second|, using |second|'s
// elements accessor with no receiver/holder. Propagates exceptions.
7827 MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
7828 Handle<FixedArray> second) {
7829 ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
7830 Handle<FixedArray> result;
7831 ASSIGN_RETURN_ON_EXCEPTION(
7832 first->GetIsolate(), result,
7833 accessor->AddElementsToFixedArray(
7834 Handle<Object>::null(), // receiver
7835 Handle<JSObject>::null(), // holder
7837 Handle<FixedArrayBase>::cast(second)),
// Debug builds verify every produced key is a number or a name.
7840 #ifdef ENABLE_SLOW_DCHECKS
7841 if (FLAG_enable_slow_asserts) {
7842 DisallowHeapAllocation no_allocation;
7843 for (int i = 0; i < result->length(); i++) {
7844 Object* current = result->get(i);
7845 DCHECK(current->IsNumber() || current->IsName());
// Returns a new fixed array of |new_length| containing a prefix copy of
// |array| (truncating or leaving a tail of fresh slots as needed).
7853 Handle<FixedArray> FixedArray::CopySize(
7854 Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
7855 Isolate* isolate = array->GetIsolate();
7856 if (new_length == 0) return isolate->factory()->empty_fixed_array();
7857 Handle<FixedArray> result =
7858 isolate->factory()->NewFixedArray(new_length, pretenure);
7860 DisallowHeapAllocation no_gc;
7861 int len = array->length();
7862 if (new_length < len) len = new_length;
7863 // We are taking the map from the old fixed array so the map is sure to
7864 // be an immortal immutable object.
7865 result->set_map_no_write_barrier(array->map());
7866 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
7867 for (int i = 0; i < len; i++) {
7868 result->set(i, array->get(i), mode);
// Copies |len| elements starting at |pos| in this array into |dest|
// starting at |dest_pos|, using dest's write-barrier mode.
7874 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
7875 DisallowHeapAllocation no_gc;
7876 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
7877 for (int index = 0; index < len; index++) {
7878 dest->set(dest_pos+index, get(pos+index), mode);
7884 bool FixedArray::IsEqualTo(FixedArray* other) {
7885 if (length() != other->length()) return false;
7886 for (int i = 0 ; i < length(); ++i) {
7887 if (get(i) != other->get(i)) return false;
// Allocates a descriptor array with room for |number_of_descriptors| plus
// |slack| extra slots; an all-zero request yields the shared empty array.
7894 Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
7895 int number_of_descriptors,
7897 DCHECK(0 <= number_of_descriptors);
7898 Factory* factory = isolate->factory();
7899 // Do not use DescriptorArray::cast on incomplete object.
7900 int size = number_of_descriptors + slack;
7901 if (size == 0) return factory->empty_descriptor_array();
7902 // Allocate the array of keys.
7903 Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));
// Header: logical descriptor count plus an initially empty enum cache.
7905 result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
7906 result->set(kEnumCacheIndex, Smi::FromInt(0));
7907 return Handle<DescriptorArray>::cast(result);
// Drops the enum cache by resetting its slot to the Smi 0 sentinel (the
// same value Allocate() initializes it to).
7911 void DescriptorArray::ClearEnumCache() {
7912 set(kEnumCacheIndex, Smi::FromInt(0));
// Overwrites the descriptor at |index| in place, preserving the existing
// sorted-key position so the array's ordering invariant is kept.
7916 void DescriptorArray::Replace(int index, Descriptor* descriptor) {
7917 descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
7918 Set(index, descriptor);
// Installs an enum cache: the caller-provided |bridge_storage| array is
// filled with the cache and index cache, then stored in the enum slot.
7922 void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
7923 FixedArray* new_cache,
7924 Object* new_index_cache) {
7925 DCHECK(bridge_storage->length() >= kEnumCacheBridgeLength);
7926 DCHECK(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
7928 DCHECK(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
7929 FixedArray::cast(bridge_storage)->
7930 set(kEnumCacheBridgeCacheIndex, new_cache);
7931 FixedArray::cast(bridge_storage)->
7932 set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
7933 set(kEnumCacheIndex, bridge_storage);
// Copies the descriptor at |index| from |src| into the same slot of this
// array; the witness certifies this array is still white for the GC.
7937 void DescriptorArray::CopyFrom(int index,
7938 DescriptorArray* src,
7939 const WhitenessWitness& witness) {
7940 Object* value = src->GetValue(index);
7941 PropertyDetails details = src->GetDetails(index);
7942 Descriptor desc(handle(src->GetKey(index)),
7943 handle(value, src->GetIsolate()),
7945 Set(index, &desc, witness);
7949 // We need the whiteness witness since sort will reshuffle the entries in the
7950 // descriptor array. If the descriptor array were to be black, the shuffling
7951 // would move a slot that was already recorded as pointing into an evacuation
7952 // candidate. This would result in missing updates upon evacuation.
// Sorts the sorted-key index by key hash using in-place heap sort. Order
// within the loops is significant; code is kept byte-identical.
7953 void DescriptorArray::Sort() {
7954 // In-place heap sort.
7955 int len = number_of_descriptors();
7956 // Reset sorting since the descriptor array might contain invalid pointers.
7957 for (int i = 0; i < len; ++i) SetSortedKey(i, i);
7958 // Bottom-up max-heap construction.
7959 // Index of the last node with children
7960 const int max_parent_index = (len / 2) - 1;
7961 for (int i = max_parent_index; i >= 0; --i) {
7962 int parent_index = i;
7963 const uint32_t parent_hash = GetSortedKey(i)->Hash();
// Sift the node at i down until both children hash lower.
7964 while (parent_index <= max_parent_index) {
7965 int child_index = 2 * parent_index + 1;
7966 uint32_t child_hash = GetSortedKey(child_index)->Hash();
7967 if (child_index + 1 < len) {
7968 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
7969 if (right_child_hash > child_hash) {
7971 child_hash = right_child_hash;
7974 if (child_hash <= parent_hash) break;
7975 SwapSortedKeys(parent_index, child_index);
7976 // Now element at child_index could be < its children.
7977 parent_index = child_index; // parent_hash remains correct.
7981 // Extract elements and create sorted array.
7982 for (int i = len - 1; i > 0; --i) {
7983 // Put max element at the back of the array.
7984 SwapSortedKeys(0, i);
7985 // Shift down the new top element.
7986 int parent_index = 0;
7987 const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
// The heap now spans [0, i); sift the swapped-in root back down.
7988 const int max_parent_index = (i / 2) - 1;
7989 while (parent_index <= max_parent_index) {
7990 int child_index = parent_index * 2 + 1;
7991 uint32_t child_hash = GetSortedKey(child_index)->Hash();
7992 if (child_index + 1 < i) {
7993 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
7994 if (right_child_hash > child_hash) {
7996 child_hash = right_child_hash;
7999 if (child_hash <= parent_hash) break;
8000 SwapSortedKeys(parent_index, child_index);
8001 parent_index = child_index;
8004 DCHECK(IsSortedNoDuplicates());
// Allocates a fresh AccessorPair and copies this pair's getter and setter
// slots into it.
8008 Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
8009 Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
8010 copy->set_getter(pair->getter());
8011 copy->set_setter(pair->setter());
// Returns the requested accessor (getter or setter), mapping the internal
// the-hole marker (meaning "absent") to undefined for callers.
8016 Object* AccessorPair::GetComponent(AccessorComponent component) {
8017 Object* accessor = get(component);
8018 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
// Allocates a DeoptimizationInputData table sized for deopt_entry_count
// entries, backed by a plain FixedArray.
8022 Handle<DeoptimizationInputData> DeoptimizationInputData::New(
8023 Isolate* isolate, int deopt_entry_count, PretenureFlag pretenure) {
8024 DCHECK(deopt_entry_count > 0);
8025 return Handle<DeoptimizationInputData>::cast(
8026 isolate->factory()->NewFixedArray(LengthFor(deopt_entry_count),
// Allocates a DeoptimizationOutputData table for the given number of deopt
// points; zero points reuses the shared empty fixed array (no allocation).
8031 Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
8033 int number_of_deopt_points,
8034 PretenureFlag pretenure) {
8035 Handle<FixedArray> result;
8036 if (number_of_deopt_points == 0) {
8037 result = isolate->factory()->empty_fixed_array();
8039 result = isolate->factory()->NewFixedArray(
8040 LengthOfFixedArray(number_of_deopt_points), pretenure);
8042 return Handle<DeoptimizationOutputData>::cast(result);
// Element-wise equality of two descriptor arrays: equal iff both are empty,
// or they have the same length and identical slot contents.
8047 bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
8048 if (IsEmpty()) return other->IsEmpty();
8049 if (other->IsEmpty()) return false;
8050 if (length() != other->length()) return false;
8051 for (int i = 0; i < length(); ++i) {
8052 if (get(i) != other->get(i)) return false;
// Cheap sanity check used by robust traversal: the string must at least
// live inside this isolate's heap.
8059 bool String::LooksValid() {
8060 if (!GetIsolate()->heap()->Contains(this)) return false;
// Returns a raw-pointer view of this string's characters if it is flat
// (directly addressable). Unwraps a flattened cons (empty second half) and
// one level of slice, then dispatches on encoding/representation. A
// non-flat string yields an empty FlatContent. Must be called with heap
// allocation disallowed, since the returned pointers are raw.
8065 String::FlatContent String::GetFlatContent() {
8066 DCHECK(!AllowHeapAllocation::IsAllowed());
8067 int length = this->length();
8068 StringShape shape(this);
8069 String* string = this;
8071 if (shape.representation_tag() == kConsStringTag) {
8072 ConsString* cons = ConsString::cast(string);
// A cons with a non-empty right side is not flat; bail out.
8073 if (cons->second()->length() != 0) {
8074 return FlatContent();
8076 string = cons->first();
8077 shape = StringShape(string);
8079 if (shape.representation_tag() == kSlicedStringTag) {
// Slices point into a parent string at an offset; read through it.
8080 SlicedString* slice = SlicedString::cast(string);
8081 offset = slice->offset();
8082 string = slice->parent();
8083 shape = StringShape(string);
8084 DCHECK(shape.representation_tag() != kConsStringTag &&
8085 shape.representation_tag() != kSlicedStringTag);
8087 if (shape.encoding_tag() == kOneByteStringTag) {
8088 const uint8_t* start;
8089 if (shape.representation_tag() == kSeqStringTag) {
8090 start = SeqOneByteString::cast(string)->GetChars();
8092 start = ExternalOneByteString::cast(string)->GetChars();
8094 return FlatContent(start + offset, length);
8096 DCHECK(shape.encoding_tag() == kTwoByteStringTag);
8098 if (shape.representation_tag() == kSeqStringTag) {
8099 start = SeqTwoByteString::cast(string)->GetChars();
8101 start = ExternalTwoByteString::cast(string)->GetChars();
8103 return FlatContent(start + offset, length);
// Converts a [offset, offset+length) range of this UTF-16 string to a
// heap-allocated, NUL-terminated UTF-8 buffer. Two passes over the string:
// first to size the buffer, second to encode. A negative length means
// "to the end of the string". Returns NULL under robust traversal if the
// string does not look valid. If length_return is non-NULL it receives the
// UTF-8 byte count (excluding the terminator).
8108 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8109 RobustnessFlag robust_flag,
8112 int* length_return) {
8113 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8114 return SmartArrayPointer<char>(NULL);
8116 // Negative length means the to the end of the string.
8117 if (length < 0) length = kMaxInt - offset;
8119 // Compute the size of the UTF-8 string. Start at the specified offset.
8120 StringCharacterStream stream(this, offset);
8121 int character_position = offset;
8123 int last = unibrow::Utf16::kNoPreviousCharacter;
8124 while (stream.HasMore() && character_position++ < offset + length) {
8125 uint16_t character = stream.GetNext();
// Utf8::Length needs the previous code unit to size surrogate pairs.
8126 utf8_bytes += unibrow::Utf8::Length(character, last);
8130 if (length_return) {
8131 *length_return = utf8_bytes;
8134 char* result = NewArray<char>(utf8_bytes + 1);
8136 // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
8137 stream.Reset(this, offset);
8138 character_position = offset;
8139 int utf8_byte_position = 0;
8140 last = unibrow::Utf16::kNoPreviousCharacter;
8141 while (stream.HasMore() && character_position++ < offset + length) {
8142 uint16_t character = stream.GetNext();
// Optionally map embedded NULs (behavior when DISALLOW_NULLS; the
// replacement itself is on an elided line — confirm against full source).
8143 if (allow_nulls == DISALLOW_NULLS && character == 0) {
8146 utf8_byte_position +=
8147 unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
8150 result[utf8_byte_position] = 0;
8151 return SmartArrayPointer<char>(result);
// Convenience overload: convert the whole string (offset 0, full length)
// to UTF-8 by delegating to the range-based ToCString above.
8155 SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
8156 RobustnessFlag robust_flag,
8157 int* length_return) {
8158 return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
// Returns a raw pointer to the two-byte character data starting at `start`.
// Only valid for strings that are actually two-byte underneath; recurses
// through slices, and cons strings are not directly addressable.
8162 const uc16* String::GetTwoByteData(unsigned start) {
8163 DCHECK(!IsOneByteRepresentationUnderneath());
8164 switch (StringShape(this).representation_tag()) {
8166 return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8167 case kExternalStringTag:
8168 return ExternalTwoByteString::cast(this)->
8169 ExternalTwoByteStringGetData(start);
8170 case kSlicedStringTag: {
// Slices delegate to the parent, shifted by the slice offset.
8171 SlicedString* slice = SlicedString::cast(this);
8172 return slice->parent()->GetTwoByteData(start + slice->offset());
8174 case kConsStringTag:
// Copies the whole string into a freshly allocated uc16 buffer (length()+1
// elements). Returns an empty pointer under robust traversal if the string
// does not look valid.
8183 SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
8184 if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8185 return SmartArrayPointer<uc16>();
8187 StringCharacterStream stream(this);
8189 uc16* result = NewArray<uc16>(length() + 1);
8192 while (stream.HasMore()) {
8193 uint16_t character = stream.GetNext();
8194 result[i++] = character;
8197 return SmartArrayPointer<uc16>(result);
// Computes the address of character `start` inside this sequential two-byte
// string: object address minus the heap tag, past the header, plus start.
8201 const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
8202 return reinterpret_cast<uc16*>(
8203 reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
// After a GC, walks the isolate's linked list of live Relocatable objects
// and lets each one refresh any pointers the GC may have moved.
8207 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8208 Relocatable* current = isolate->relocatable_top();
8209 while (current != NULL) {
8210 current->PostGarbageCollection();
8211 current = current->prev_;
8216 // Reserve space for statics needing saving and restoring.
// Per-thread archive size: just the one list-head pointer.
8217 int Relocatable::ArchiveSpacePerThread() {
8218 return sizeof(Relocatable*); // NOLINT
8222 // Archive statics that are thread-local.
// Saves the relocatable list head into the archive buffer and clears it,
// so the outgoing thread's list is detached. Returns the advanced cursor.
8223 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8224 *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8225 isolate->set_relocatable_top(NULL);
8226 return to + ArchiveSpacePerThread();
8230 // Restore statics that are thread-local.
// Inverse of ArchiveState: reinstalls the saved list head for the incoming
// thread and returns the advanced cursor.
8231 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8232 isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8233 return from + ArchiveSpacePerThread();
// GC root iteration over a thread's archived relocatable list: reads the
// saved list head out of the archive buffer and visits from there.
8237 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8238 Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8240 return thread_storage + ArchiveSpacePerThread();
// GC root iteration for the currently-entered thread's relocatable list.
8244 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8245 Iterate(v, isolate->relocatable_top());
// Core list walk: visits each Relocatable from `top` down through the
// prev_ chain, letting each instance report its pointers to the visitor.
8249 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8250 Relocatable* current = top;
8251 while (current != NULL) {
8252 current->IterateInstance(v);
8253 current = current->prev_;
// Constructs a reader over a heap string; immediately caches the flat
// character pointer via PostGarbageCollection (re-run after every GC).
8258 FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
8259 : Relocatable(isolate),
8260 str_(str.location()),
8261 length_(str->length()) {
8262 PostGarbageCollection();
// Constructs a reader over an off-heap char vector; nothing can move, so
// no post-GC refresh is needed.
8266 FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
8267 : Relocatable(isolate),
8270 length_(input.length()),
8271 start_(input.start()) {}
// Re-derives the cached raw character pointer after a GC may have moved
// the underlying (flat) string. No-op for the off-heap-vector flavor
// (str_ == NULL).
8274 void FlatStringReader::PostGarbageCollection() {
8275 if (str_ == NULL) return;
8276 Handle<String> str(str_);
8277 DCHECK(str->IsFlat());
8278 DisallowHeapAllocation no_gc;
8279 // This does not actually prevent the vector from being relocated later.
8280 String::FlatContent content = str->GetFlatContent();
8281 DCHECK(content.IsFlat());
8282 is_one_byte_ = content.IsOneByte();
8284 start_ = content.ToOneByteVector().start();
8286 start_ = content.ToUC16Vector().start();
// Points the iterator at a new cons-string root. Deliberately sets depth so
// that StackBlown() holds, forcing the first Continue() to restart via
// Search() from the root.
8291 void ConsStringIterator::Initialize(ConsString* cons_string, int offset) {
8292 DCHECK(cons_string != NULL);
8293 root_ = cons_string;
8295 // Force stack blown condition to trigger restart.
8297 maximum_depth_ = kStackSize + depth_;
8298 DCHECK(StackBlown());
// Produces the next non-cons leaf of the traversal (or NULL when done).
// If the bounded traversal stack overflowed, falls back to Search(), which
// restarts from the root using the consumed-character count; *offset_out
// is only set on that restart path.
8302 String* ConsStringIterator::Continue(int* offset_out) {
8303 DCHECK(depth_ != 0);
8304 DCHECK_EQ(0, *offset_out);
8305 bool blew_stack = StackBlown();
8306 String* string = NULL;
8307 // Get the next leaf if there is one.
8308 if (!blew_stack) string = NextLeaf(&blew_stack);
8309 // Restart search from root.
8311 DCHECK(string == NULL);
8312 string = Search(offset_out);
8314 // Ensure future calls return null immediately.
8315 if (string == NULL) Reset(NULL);
// Restarts traversal from root_ and descends the cons tree to the leaf that
// contains character index consumed_, rebuilding the frame stack along the
// way. On success sets *offset_out to the position inside the returned leaf
// and advances consumed_ past it. (Several control-flow lines are elided in
// this view; see the full source for the loop structure.)
8320 String* ConsStringIterator::Search(int* offset_out) {
8321 ConsString* cons_string = root_;
8322 // Reset the stack, pushing the root string.
8325 frames_[0] = cons_string;
8326 const int consumed = consumed_;
8329 // Loop until the string is found which contains the target offset.
8330 String* string = cons_string->first();
8331 int length = string->length();
8333 if (consumed < offset + length) {
8334 // Target offset is in the left branch.
8335 // Keep going if we're still in a ConString.
8336 type = string->map()->instance_type();
8337 if ((type & kStringRepresentationMask) == kConsStringTag) {
8338 cons_string = ConsString::cast(string);
8339 PushLeft(cons_string);
8342 // Tell the stack we're done descending.
8343 AdjustMaximumDepth();
8346 // Update progress through the string.
8348 // Keep going if we're still in a ConString.
8349 string = cons_string->second();
8350 type = string->map()->instance_type();
8351 if ((type & kStringRepresentationMask) == kConsStringTag) {
8352 cons_string = ConsString::cast(string);
8353 PushRight(cons_string);
8356 // Need this to be updated for the current string.
8357 length = string->length();
8358 // Account for the possibility of an empty right leaf.
8359 // This happens only if we have asked for an offset outside the string.
8361 // Reset so future operations will return null immediately.
8365 // Tell the stack we're done descending.
8366 AdjustMaximumDepth();
8367 // Pop stack so next iteration is in correct place.
8370 DCHECK(length != 0);
8371 // Adjust return values and exit.
8372 consumed_ = offset + length;
8373 *offset_out = consumed - offset;
// Advances to the next leaf using the frame stack: pops to the deepest
// unvisited right branch, then walks all the way down its left spine.
// Empty flattened-cons leaves are skipped. Sets *blew_stack when the
// bounded stack can no longer track the position (caller then uses
// Search()). (Some loop/return lines are elided in this view.)
8381 String* ConsStringIterator::NextLeaf(bool* blew_stack) {
8383 // Tree traversal complete.
8385 *blew_stack = false;
8388 // We've lost track of higher nodes.
8394 ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
8395 String* string = cons_string->second();
8396 int32_t type = string->map()->instance_type();
8397 if ((type & kStringRepresentationMask) != kConsStringTag) {
8398 // Pop stack so next iteration is in correct place.
8400 int length = string->length();
8401 // Could be a flattened ConsString.
8402 if (length == 0) continue;
8403 consumed_ += length;
8406 cons_string = ConsString::cast(string);
8407 PushRight(cons_string);
8408 // Need to traverse all the way left.
8411 string = cons_string->first();
8412 type = string->map()->instance_type();
8413 if ((type & kStringRepresentationMask) != kConsStringTag) {
8414 AdjustMaximumDepth();
8415 int length = string->length();
8416 DCHECK(length != 0);
8417 consumed_ += length;
8420 cons_string = ConsString::cast(string);
8421 PushLeft(cons_string);
// Character access on a cons string: fast path for a flattened cons (empty
// right half), otherwise iteratively descends left/right children adjusting
// the index until a non-cons node answers.
8429 uint16_t ConsString::ConsStringGet(int index) {
8430 DCHECK(index >= 0 && index < this->length());
8432 // Check for a flattened cons string
8433 if (second()->length() == 0) {
8434 String* left = first();
8435 return left->Get(index);
8438 String* string = String::cast(this);
8441 if (StringShape(string).IsCons()) {
8442 ConsString* cons_string = ConsString::cast(string);
8443 String* left = cons_string->first();
8444 if (left->length() > index) {
// Index lies in the right child; shift it relative to that child.
8447 index -= left->length();
8448 string = cons_string->second();
8451 return string->Get(index);
// Character access on a slice: delegate to the parent at offset + index.
8460 uint16_t SlicedString::SlicedStringGet(int index) {
8461 return parent()->Get(offset() + index);
// Copies characters [from, to) of src into the flat `sink` buffer,
// widening/narrowing to sinkchar as needed. Sequential and external
// strings copy directly; cons strings recurse over the shorter side and
// loop on the longer side (to bound recursion depth); slices recurse into
// the parent with the offset applied.
8465 template <typename sinkchar>
8466 void String::WriteToFlat(String* src,
8470 String* source = src;
8474 DCHECK(0 <= from && from <= to && to <= source->length());
8475 switch (StringShape(source).full_representation_tag()) {
8476 case kOneByteStringTag | kExternalStringTag: {
8477 CopyChars(sink, ExternalOneByteString::cast(source)->GetChars() + from,
8481 case kTwoByteStringTag | kExternalStringTag: {
8483 ExternalTwoByteString::cast(source)->GetChars();
8489 case kOneByteStringTag | kSeqStringTag: {
8491 SeqOneByteString::cast(source)->GetChars() + from,
8495 case kTwoByteStringTag | kSeqStringTag: {
8497 SeqTwoByteString::cast(source)->GetChars() + from,
8501 case kOneByteStringTag | kConsStringTag:
8502 case kTwoByteStringTag | kConsStringTag: {
8503 ConsString* cons_string = ConsString::cast(source);
8504 String* first = cons_string->first();
8505 int boundary = first->length();
8506 if (to - boundary >= boundary - from) {
8507 // Right hand side is longer. Recurse over left.
8508 if (from < boundary) {
8509 WriteToFlat(first, sink, from, boundary);
8510 sink += boundary - from;
// Continue the outer loop on the (longer) right child.
8516 source = cons_string->second();
8518 // Left hand side is longer. Recurse over right.
8519 if (to > boundary) {
8520 String* second = cons_string->second();
8521 // When repeatedly appending to a string, we get a cons string that
8522 // is unbalanced to the left, a list, essentially. We inline the
8523 // common case of sequential one-byte right child.
8524 if (to - boundary == 1) {
8525 sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
8526 } else if (second->IsSeqOneByteString()) {
8527 CopyChars(sink + boundary - from,
8528 SeqOneByteString::cast(second)->GetChars(),
8532 sink + boundary - from,
8542 case kOneByteStringTag | kSlicedStringTag:
8543 case kTwoByteStringTag | kSlicedStringTag: {
8544 SlicedString* slice = SlicedString::cast(source);
8545 unsigned offset = slice->offset();
8546 WriteToFlat(slice->parent(), sink, from + offset, to + offset);
// Scans `src` for '\n' using StringSearch and appends each position to
// line_ends; optionally counts a final line that lacks a trailing newline.
8555 template <typename SourceChar>
8556 static void CalculateLineEndsImpl(Isolate* isolate,
8557 List<int>* line_ends,
8558 Vector<const SourceChar> src,
8559 bool include_ending_line) {
8560 const int src_len = src.length();
8561 StringSearch<uint8_t, SourceChar> search(isolate, STATIC_CHAR_VECTOR("\n"));
8563 // Find and record line ends.
8565 while (position != -1 && position < src_len) {
8566 position = search.Search(src, position);
8567 if (position != -1) {
8568 line_ends->Add(position);
8570 } else if (include_ending_line) {
8571 // Even if the last line misses a line end, it is counted.
8572 line_ends->Add(src_len);
// Builds a FixedArray of Smi line-end positions for a (flat) source string,
// dispatching to the one-byte or two-byte scanner while allocation is
// disallowed so the raw character vectors stay valid.
8579 Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
8580 bool include_ending_line) {
8582 // Rough estimate of line count based on a roughly estimated average
8583 // length of (unpacked) code.
8584 int line_count_estimate = src->length() >> 4;
8585 List<int> line_ends(line_count_estimate);
8586 Isolate* isolate = src->GetIsolate();
8587 { DisallowHeapAllocation no_allocation; // ensure vectors stay valid.
8588 // Dispatch on type of strings.
8589 String::FlatContent content = src->GetFlatContent();
8590 DCHECK(content.IsFlat());
8591 if (content.IsOneByte()) {
8592 CalculateLineEndsImpl(isolate,
8594 content.ToOneByteVector(),
8595 include_ending_line);
8597 CalculateLineEndsImpl(isolate,
8599 content.ToUC16Vector(),
8600 include_ending_line);
// Copy the collected positions into a heap array of Smis.
8603 int line_count = line_ends.length();
8604 Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
8605 for (int i = 0; i < line_count; i++) {
8606 array->set(i, Smi::FromInt(line_ends[i]));
8612 // Compares the contents of two strings by reading and comparing
8613 // int-sized blocks of characters.
// Thin same-width wrapper over CompareChars; returns true on equality.
8614 template <typename Char>
8615 static inline bool CompareRawStringContents(const Char* const a,
8616 const Char* const b,
8618 return CompareChars(a, b, length) == 0;
// Generic mixed-width comparator (e.g. uint8_t vs uint16_t): falls back to
// an element-by-element loop since a raw memcmp-style compare cannot be
// used across differing character widths.
8622 template<typename Chars1, typename Chars2>
8623 class RawStringComparator : public AllStatic {
8625 static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8626 DCHECK(sizeof(Chars1) != sizeof(Chars2));
8627 for (int i = 0; i < len; i++) {
// Same-width two-byte specialization: delegates to the fast block compare.
8638 class RawStringComparator<uint16_t, uint16_t> {
8640 static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
8641 return CompareRawStringContents(a, b, len);
// Same-width one-byte specialization: delegates to the fast block compare.
8647 class RawStringComparator<uint8_t, uint8_t> {
8649 static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
8650 return CompareRawStringContents(a, b, len);
// Compares two arbitrarily-shaped (possibly cons) strings for equality by
// walking both in lockstep. Each State holds the current flat segment of
// one string (pointer, remaining length, encoding); Equals() repeatedly
// compares the overlap of the two current segments and advances.
8655 class StringComparator {
8658 State() : is_one_byte_(true), length_(0), buffer8_(NULL) {}
// Positions the state at the first flat segment of `string`. VisitFlat
// fills in buffer/length via the Visit* callbacks below; a non-NULL
// return means the string is a cons that must be iterated.
8660 void Init(String* string) {
8661 ConsString* cons_string = String::VisitFlat(this, string);
8662 iter_.Reset(cons_string);
8663 if (cons_string != NULL) {
8665 string = iter_.Next(&offset);
8666 String::VisitFlat(this, string, offset);
8670 inline void VisitOneByteString(const uint8_t* chars, int length) {
8671 is_one_byte_ = true;
8676 inline void VisitTwoByteString(const uint16_t* chars, int length) {
8677 is_one_byte_ = false;
// Consumes `consumed` characters; when the current segment is exhausted,
// pulls the next leaf from the cons iterator.
8682 void Advance(int consumed) {
8683 DCHECK(consumed <= length_);
8685 if (length_ != consumed) {
8687 buffer8_ += consumed;
8689 buffer16_ += consumed;
8691 length_ -= consumed;
8696 String* next = iter_.Next(&offset);
8697 DCHECK_EQ(0, offset);
8698 DCHECK(next != NULL);
8699 String::VisitFlat(this, next);
8702 ConsStringIterator iter_;
// buffer8_/buffer16_ view the same segment; is_one_byte_ selects which.
8706 const uint8_t* buffer8_;
8707 const uint16_t* buffer16_;
8711 DISALLOW_COPY_AND_ASSIGN(State);
8715 inline StringComparator() {}
// Compares `to_check` raw characters of the two states, dispatching on
// the (width1, width2) combination via RawStringComparator.
8717 template<typename Chars1, typename Chars2>
8718 static inline bool Equals(State* state_1, State* state_2, int to_check) {
8719 const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
8720 const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
8721 return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
// Main loop: compare the overlap of the two current segments, advance
// both, repeat until the full length has been checked.
8724 bool Equals(String* string_1, String* string_2) {
8725 int length = string_1->length();
8726 state_1_.Init(string_1);
8727 state_2_.Init(string_2);
8729 int to_check = Min(state_1_.length_, state_2_.length_);
8730 DCHECK(to_check > 0 && to_check <= length);
8732 if (state_1_.is_one_byte_) {
8733 if (state_2_.is_one_byte_) {
8734 is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
8736 is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
8739 if (state_2_.is_one_byte_) {
8740 is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
8742 is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
8746 if (!is_equal) return false;
8748 // Exit condition. Strings are equal.
8749 if (length == 0) return true;
8750 state_1_.Advance(to_check);
8751 state_2_.Advance(to_check);
8759 DISALLOW_COPY_AND_ASSIGN(StringComparator);
// Content equality for two raw String*s. Fast rejects: differing length,
// differing (already computed) hash, differing first character. Fast
// accept path for two sequential one-byte strings; everything else goes
// through the segment-wise StringComparator.
8763 bool String::SlowEquals(String* other) {
8764 DisallowHeapAllocation no_gc;
8765 // Fast check: negative check with lengths.
8767 if (len != other->length()) return false;
8768 if (len == 0) return true;
8770 // Fast check: if hash code is computed for both strings
8771 // a fast negative check can be performed.
8772 if (HasHashCode() && other->HasHashCode()) {
8773 #ifdef ENABLE_SLOW_DCHECKS
// Slow-assert that unequal hashes really imply unequal contents.
8774 if (FLAG_enable_slow_asserts) {
8775 if (Hash() != other->Hash()) {
8776 bool found_difference = false;
8777 for (int i = 0; i < len; i++) {
8778 if (Get(i) != other->Get(i)) {
8779 found_difference = true;
8783 DCHECK(found_difference);
8787 if (Hash() != other->Hash()) return false;
8790 // We know the strings are both non-empty. Compare the first chars
8791 // before we try to flatten the strings.
8792 if (this->Get(0) != other->Get(0)) return false;
8794 if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
8795 const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
8796 const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
8797 return CompareRawStringContents(str1, str2, len);
8800 StringComparator comparator;
8801 return comparator.Equals(this, other);
// Handle-based content equality. Same fast rejects as the raw-pointer
// overload, but since handles survive GC it can afford to flatten both
// strings first and then compare flat contents directly.
8805 bool String::SlowEquals(Handle<String> one, Handle<String> two) {
8806 // Fast check: negative check with lengths.
8807 int one_length = one->length();
8808 if (one_length != two->length()) return false;
8809 if (one_length == 0) return true;
8811 // Fast check: if hash code is computed for both strings
8812 // a fast negative check can be performed.
8813 if (one->HasHashCode() && two->HasHashCode()) {
8814 #ifdef ENABLE_SLOW_DCHECKS
// Slow-assert that unequal hashes really imply unequal contents.
8815 if (FLAG_enable_slow_asserts) {
8816 if (one->Hash() != two->Hash()) {
8817 bool found_difference = false;
8818 for (int i = 0; i < one_length; i++) {
8819 if (one->Get(i) != two->Get(i)) {
8820 found_difference = true;
8824 DCHECK(found_difference);
8828 if (one->Hash() != two->Hash()) return false;
8831 // We know the strings are both non-empty. Compare the first chars
8832 // before we try to flatten the strings.
8833 if (one->Get(0) != two->Get(0)) return false;
// Flattening may allocate, which is why this path needs handles.
8835 one = String::Flatten(one);
8836 two = String::Flatten(two);
8838 DisallowHeapAllocation no_gc;
8839 String::FlatContent flat1 = one->GetFlatContent();
8840 String::FlatContent flat2 = two->GetFlatContent();
8842 if (flat1.IsOneByte() && flat2.IsOneByte()) {
8843 return CompareRawStringContents(flat1.ToOneByteVector().start(),
8844 flat2.ToOneByteVector().start(),
// Mixed or two-byte encodings: compare character by character.
8847 for (int i = 0; i < one_length; i++) {
8848 if (flat1.Get(i) != flat2.Get(i)) return false;
// Switches this string's map to the corresponding undetectable variant
// (used for document.all-style objects). Only plain sequential one/two
// byte string maps can be switched; internalized strings and other shapes
// are refused.
8855 bool String::MarkAsUndetectable() {
8856 if (StringShape(this).IsInternalized()) return false;
8858 Map* map = this->map();
8859 Heap* heap = GetHeap();
8860 if (map == heap->string_map()) {
8861 this->set_map(heap->undetectable_string_map());
8863 } else if (map == heap->one_byte_string_map()) {
8864 this->set_map(heap->undetectable_one_byte_string_map());
8867 // Rest cannot be marked as undetectable
// Compares this (UTF-16) string against a UTF-8 byte vector by decoding
// the UTF-8 stream one code point at a time; supplementary code points are
// matched against a surrogate pair. With allow_prefix_match, `str` may
// match just a prefix of this string.
8872 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
8873 int slen = length();
8874 // Can't check exact length equality, but we can check bounds.
8875 int str_len = str.length();
8876 if (!allow_prefix_match &&
8878 str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
8882 unsigned remaining_in_str = static_cast<unsigned>(str_len);
8883 const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
8884 for (i = 0; i < slen && remaining_in_str > 0; i++) {
8885 unsigned cursor = 0;
8886 uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
8887 DCHECK(cursor > 0 && cursor <= remaining_in_str);
8888 if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
// Supplementary-plane code point: must match lead+trail surrogates.
8889 if (i > slen - 1) return false;
8890 if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
8891 if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
8893 if (Get(i) != r) return false;
8895 utf8_data += cursor;
8896 remaining_in_str -= cursor;
// Equal iff both inputs were fully consumed (or a prefix was allowed).
8898 return (allow_prefix_match || i == slen) && remaining_in_str == 0;
// Compares this string against a one-byte character vector: direct memory
// compare when this string is flat one-byte, otherwise per-character.
8902 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
8903 int slen = length();
8904 if (str.length() != slen) return false;
8905 DisallowHeapAllocation no_gc;
8906 FlatContent content = GetFlatContent();
8907 if (content.IsOneByte()) {
8908 return CompareChars(content.ToOneByteVector().start(),
8909 str.start(), slen) == 0;
8911 for (int i = 0; i < slen; i++) {
8912 if (Get(i) != static_cast<uint16_t>(str[i])) return false;
// Compares this string against a two-byte character vector: direct memory
// compare when this string is flat two-byte, otherwise per-character.
8918 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
8919 int slen = length();
8920 if (str.length() != slen) return false;
8921 DisallowHeapAllocation no_gc;
8922 FlatContent content = GetFlatContent();
8923 if (content.IsTwoByte()) {
8924 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
8926 for (int i = 0; i < slen; i++) {
8927 if (Get(i) != str[i]) return false;
// Computes this string's hash field (seeded iterating hash), caches it in
// the object, and returns the hash value with the field flags shifted out.
// Must only run once per string (asserted).
8933 uint32_t String::ComputeAndSetHash() {
8934 // Should only be called if hash code has not yet been computed.
8935 DCHECK(!HasHashCode());
8937 // Store the hash code in the object.
8938 uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
8939 set_hash_field(field);
8941 // Check the hash code is there.
8942 DCHECK(HasHashCode());
8943 uint32_t result = field >> kHashShift;
8944 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
// Attempts to parse this string as an array index; false if the length
// makes an index impossible, otherwise delegates to StringToArrayIndex.
8949 bool String::ComputeArrayIndex(uint32_t* index) {
8950 int length = this->length();
8951 if (length == 0 || length > kMaxArrayIndexSize) return false;
8952 StringCharacterStream stream(this);
8953 return StringToArrayIndex(&stream, index);
// Slow path of AsArrayIndex: for short strings the array index is cached
// inside the hash field, so force hash computation and decode it from
// there; longer strings are parsed explicitly.
8957 bool String::SlowAsArrayIndex(uint32_t* index) {
8958 if (length() <= kMaxCachedArrayIndexLength) {
8959 Hash(); // force computation of hash code
8960 uint32_t field = hash_field();
8961 if ((field & kIsNotArrayIndexMask) != 0) return false;
8962 // Isolate the array index form the full hash field.
8963 *index = ArrayIndexValueBits::decode(field);
8966 return ComputeArrayIndex(index);
// Shrinks a sequential string in place to new_length. If the string is the
// most recent new-space allocation the allocation top is simply lowered;
// otherwise the trailing space is turned into a filler object so the heap
// stays iterable. Returns the (possibly canonical empty) string.
8971 Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
8972 int new_size, old_size;
8973 int old_length = string->length();
8974 if (old_length <= new_length) return string;
8976 if (string->IsSeqOneByteString()) {
8977 old_size = SeqOneByteString::SizeFor(old_length);
8978 new_size = SeqOneByteString::SizeFor(new_length);
8980 DCHECK(string->IsSeqTwoByteString());
8981 old_size = SeqTwoByteString::SizeFor(old_length);
8982 new_size = SeqTwoByteString::SizeFor(new_length);
8985 int delta = old_size - new_size;
8987 Address start_of_string = string->address();
8988 DCHECK_OBJECT_ALIGNED(start_of_string);
8989 DCHECK_OBJECT_ALIGNED(start_of_string + new_size);
8991 Heap* heap = string->GetHeap();
8992 NewSpace* newspace = heap->new_space();
8993 if (newspace->Contains(start_of_string) &&
8994 newspace->top() == start_of_string + old_size) {
8995 // Last allocated object in new space. Simply lower allocation top.
8996 newspace->set_top(start_of_string + new_size);
8998 // Sizes are pointer size aligned, so that we can use filler objects
8999 // that are a multiple of pointer size.
9000 heap->CreateFillerObjectAt(start_of_string + new_size, delta);
9002 heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
9004 // We are storing the new length using release store after creating a filler
9005 // for the left-over space to avoid races with the sweeper thread.
9006 string->synchronized_set_length(new_length);
9008 if (new_length == 0) return heap->isolate()->factory()->empty_string();
// Packs an array-index value and its digit length into the string hash
// field layout (value and length bit ranges); the asserts guarantee the
// result is tagged as an array index.
9013 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
9014 // For array indexes mix the length into the hash as an array index could
9017 DCHECK(length <= String::kMaxArrayIndexSize);
9018 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
9019 (1 << String::kArrayIndexValueBits));
9021 value <<= String::ArrayIndexValueBits::kShift;
9022 value |= length << String::ArrayIndexLengthBits::kShift;
9024 DCHECK((value & String::kIsNotArrayIndexMask) == 0);
9025 DCHECK((length > String::kMaxCachedArrayIndexLength) ||
9026 (value & String::kContainsCachedArrayIndexMask) == 0);
// Finalizes the hash field: an array-index hash when applicable, the
// running hash for normal strings, or (for strings too long to hash) the
// length itself, always tagged with the not-an-array-index bit.
9031 uint32_t StringHasher::GetHashField() {
9032 if (length_ <= String::kMaxHashCalcLength) {
9033 if (is_array_index_) {
9034 return MakeArrayIndexHash(array_index_, length_);
9036 return (GetHashCore(raw_running_hash_) << String::kHashShift) |
9037 String::kIsNotArrayIndexMask;
9039 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
// Hashes a UTF-8 byte sequence as if it were the equivalent UTF-16 string,
// simultaneously computing the UTF-16 length (returned via
// utf16_length_out). Supplementary code points contribute both surrogate
// halves to the hash and to array-index tracking.
9044 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
9046 int* utf16_length_out) {
9047 int vector_length = chars.length();
9048 // Handle some edge cases
9049 if (vector_length <= 1) {
// Zero or one byte: necessarily ASCII, so hash it directly.
9050 DCHECK(vector_length == 0 ||
9051 static_cast<uint8_t>(chars.start()[0]) <=
9052 unibrow::Utf8::kMaxOneByteChar);
9053 *utf16_length_out = vector_length;
9054 return HashSequentialString(chars.start(), vector_length, seed);
9056 // Start with a fake length which won't affect computation.
9057 // It will be updated later.
9058 StringHasher hasher(String::kMaxArrayIndexSize, seed);
9059 unsigned remaining = static_cast<unsigned>(vector_length);
9060 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
9061 int utf16_length = 0;
9062 bool is_index = true;
9063 DCHECK(hasher.is_array_index_);
9064 while (remaining > 0) {
9065 unsigned consumed = 0;
9066 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
9067 DCHECK(consumed > 0 && consumed <= remaining);
9069 remaining -= consumed;
9070 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
9071 utf16_length += is_two_characters ? 2 : 1;
9072 // No need to keep hashing. But we do need to calculate utf16_length.
9073 if (utf16_length > String::kMaxHashCalcLength) continue;
9074 if (is_two_characters) {
9075 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
9076 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
9077 hasher.AddCharacter(c1);
9078 hasher.AddCharacter(c2);
9079 if (is_index) is_index = hasher.UpdateIndex(c1);
9080 if (is_index) is_index = hasher.UpdateIndex(c2);
9082 hasher.AddCharacter(c);
9083 if (is_index) is_index = hasher.UpdateIndex(c);
9086 *utf16_length_out = static_cast<int>(utf16_length);
9087 // Must set length here so that hash computation is correct.
9088 hasher.length_ = utf16_length;
9089 return hasher.GetHashField();
// Debug helper: prints each character of the string to `file` (characters
// are narrowed through the "%c" format).
9093 void String::PrintOn(FILE* file) {
9094 int length = this->length();
9095 for (int i = 0; i < length; i++) {
9096 PrintF(file, "%c", Get(i));
// Reduces an object pointer to its offset within its page (masked by the
// chunk alignment), giving a hash input that is stable across heap layout.
9101 inline static uint32_t ObjectAddressForHashing(Object* object) {
9102 uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
9103 return value & MemoryChunk::kAlignmentMask;
9108 // For performance reasons we only hash the 3 most variable fields of a map:
9109 // constructor, prototype and bit_field2. For predictability reasons we
9110 // use objects' offsets in respective pages for hashing instead of raw
// (This is the body of Map::Hash(); its signature line is elided in this
// view.) Combines constructor and prototype page offsets with bit_field2.
9113 // Shift away the tag.
9114 int hash = ObjectAddressForHashing(constructor()) >> 2;
9116 // XOR-ing the prototype and constructor directly yields too many zero bits
9117 // when the two pointers are close (which is fairly common).
9118 // To avoid this we shift the prototype bits relatively to the constructor.
9119 hash ^= ObjectAddressForHashing(prototype()) << (32 - kPageSizeBits);
9121 return hash ^ (hash >> 16) ^ bit_field2();
// Two maps are considered equivalent when all of the listed identity
// fields agree: constructor, prototype, instance type, both bit fields,
// frozen state, and instance-call-handler flag.
9125 static bool CheckEquivalent(Map* first, Map* second) {
9127 first->constructor() == second->constructor() &&
9128 first->prototype() == second->prototype() &&
9129 first->instance_type() == second->instance_type() &&
9130 first->bit_field() == second->bit_field() &&
9131 first->bit_field2() == second->bit_field2() &&
9132 first->is_frozen() == second->is_frozen() &&
9133 first->has_instance_call_handler() == second->has_instance_call_handler();
// Transition-compatibility check: plain field-wise map equivalence.
9137 bool Map::EquivalentToForTransition(Map* other) {
9138 return CheckEquivalent(this, other);
// Normalization-compatibility check: field-wise equivalence plus matching
// in-object property count (which must be zero when normalization clears
// in-object properties).
9142 bool Map::EquivalentToForNormalization(Map* other,
9143 PropertyNormalizationMode mode) {
9144 int properties = mode == CLEAR_INOBJECT_PROPERTIES
9145 ? 0 : other->inobject_properties();
9146 return CheckEquivalent(this, other) && inobject_properties() == properties;
// GC body visitor for a constant pool: per layout section, first visits all
// code-pointer entries, then all heap-pointer entries, preserving the
// in-order visitation the serializer depends on.
9150 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9151 // Unfortunately the serializer relies on pointers within an object being
9152 // visited in-order, so we have to iterate both the code and heap pointers in
9153 // the small section before doing so in the extended section.
9154 for (int s = 0; s <= final_section(); ++s) {
9155 LayoutSection section = static_cast<LayoutSection>(s);
9156 ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR,
9158 while (!code_iter.is_finished()) {
9159 v->VisitCodeEntry(reinterpret_cast<Address>(
9160 RawFieldOfElementAt(code_iter.next_index())));
9163 ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR,
9165 while (!heap_iter.is_finished()) {
9166 v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
// Resets every CODE_PTR entry to the kIllegal builtin's entry address and
// every HEAP_PTR entry to undefined, across all layout sections.
9172 void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
9173 Type type[] = { CODE_PTR, HEAP_PTR };
// default_value[i] is the clear-value for type[i].
9174 Address default_value[] = {
9175 isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
9176 reinterpret_cast<Address>(isolate->heap()->undefined_value()) };
9178 for (int i = 0; i < 2; ++i) {
9179 for (int s = 0; s <= final_section(); ++s) {
9180 LayoutSection section = static_cast<LayoutSection>(s);
9181 if (number_of_entries(type[i], section) > 0) {
9182 int offset = OffsetOfElementAt(first_index(type[i], section));
9184 reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
9186 number_of_entries(type[i], section));
// Iterates the JSFunction's fields for the GC/visitor.  The code-entry slot
// holds a raw instruction-start address rather than a tagged pointer, so it
// is reported separately via VisitCodeEntry.
9193 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9194 // Iterate over all fields in the body but take care in dealing with
9196 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9197 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9198 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
// Marks this function for (non-concurrent) optimization by installing the
// kCompileOptimized builtin as its code.  Preconditions: not already
// optimized, lazily compilable (or code still optimizable), not a generator.
9202 void JSFunction::MarkForOptimization() {
9203 DCHECK(!IsOptimized());
9204 DCHECK(shared()->allows_lazy_compilation() ||
9205 code()->optimizable());
9206 DCHECK(!shared()->is_generator());
9207 set_code_no_write_barrier(
9208 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9209 // No write barrier required, since the builtin is part of the root set.
// Marks this function for optimization on the concurrent recompilation
// thread by installing the kCompileOptimizedConcurrent builtin.  Requires
// concurrent recompilation to be enabled on the isolate.
9213 void JSFunction::MarkForConcurrentOptimization() {
9214 DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9215 DCHECK(!IsOptimized());
9216 DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
9217 DCHECK(!shared()->is_generator());
9218 DCHECK(GetIsolate()->concurrent_recompilation_enabled());
// Tracing only; the function name is printed between these two lines in the
// full source (omitted from this listing).
9219 if (FLAG_trace_concurrent_recompilation) {
9220 PrintF(" ** Marking ");
9222 PrintF(" for concurrent recompilation.\n");
9224 set_code_no_write_barrier(
9225 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9226 // No write barrier required, since the builtin is part of the root set.
// Records that this function has been handed to the concurrent optimization
// queue by installing the kInOptimizationQueue builtin.  Only reachable from
// the concurrent-recompilation builtin (never with break points set).
9230 void JSFunction::MarkInOptimizationQueue() {
9231 // We can only arrive here via the concurrent-recompilation builtin. If
9232 // break points were set, the code would point to the lazy-compile builtin.
9233 DCHECK(!GetIsolate()->DebuggerHasBreakPoints());
9234 DCHECK(IsMarkedForConcurrentOptimization() && !IsOptimized());
9235 DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
9236 DCHECK(GetIsolate()->concurrent_recompilation_enabled());
9237 if (FLAG_trace_concurrent_recompilation) {
9238 PrintF(" ** Queueing ");
9240 PrintF(" for concurrent recompilation.\n");
9242 set_code_no_write_barrier(
9243 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9244 // No write barrier required, since the builtin is part of the root set.
// Creates a new closure sharing this function's SharedFunctionInfo and
// context; also copies bound-function bindings and, in the rare case the
// original's __proto__ differs from the clone's, fixes up the prototype.
9248 Handle<JSFunction> JSFunction::CloneClosure(Handle<JSFunction> function) {
9249 Isolate* isolate = function->GetIsolate();
9250 Handle<Map> map(function->map());
9251 Handle<SharedFunctionInfo> shared(function->shared());
9252 Handle<Context> context(function->context());
9253 Handle<JSFunction> clone =
9254 isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);
// Bound functions carry their bindings array; copy it to the clone.
9256 if (shared->bound()) {
9257 clone->set_function_bindings(function->function_bindings());
9260 // In typical case, __proto__ of ``function`` is the default Function
9261 // prototype, which means that SetPrototype below is a no-op.
9262 // In rare cases when that is not true, we mutate the clone's __proto__.
9263 Handle<Object> original_prototype(map->prototype(), isolate);
9264 if (*original_prototype != clone->map()->prototype()) {
9265 JSObject::SetPrototype(clone, original_prototype, false).Assert();
// Appends a {native_context, code, literals, osr_ast_id} entry to the shared
// function's optimized code map, creating the map (a FixedArray) on first
// use.  The map is keyed by (native context, OSR ast id); duplicates are
// forbidden (DCHECKed below).
9272 void SharedFunctionInfo::AddToOptimizedCodeMap(
9273 Handle<SharedFunctionInfo> shared,
9274 Handle<Context> native_context,
9276 Handle<FixedArray> literals,
9277 BailoutId osr_ast_id) {
9278 Isolate* isolate = shared->GetIsolate();
9279 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
9280 DCHECK(native_context->IsNativeContext());
9281 STATIC_ASSERT(kEntryLength == 4);
9282 Handle<FixedArray> new_code_map;
9283 Handle<Object> value(shared->optimized_code_map(), isolate);
// A Smi value (always 0) means "no code map yet".
9285 if (value->IsSmi()) {
9286 // No optimized code map.
9287 DCHECK_EQ(0, Smi::cast(*value)->value());
9288 // Create 3 entries per context {context, code, literals}.
9289 new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
9290 old_length = kEntriesStart;
9292 // Copy old map and append one new entry.
9293 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
9294 DCHECK_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
9295 old_length = old_code_map->length();
9296 new_code_map = FixedArray::CopySize(
9297 old_code_map, old_length + kEntryLength);
9298 // Zap the old map for the sake of the heap verifier.
9299 if (Heap::ShouldZapGarbage()) {
9300 Object** data = old_code_map->data_start();
9301 MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
// Fill in the new entry at the end of the (possibly fresh) map.
9304 new_code_map->set(old_length + kContextOffset, *native_context);
9305 new_code_map->set(old_length + kCachedCodeOffset, *code);
9306 new_code_map->set(old_length + kLiteralsOffset, *literals);
9307 new_code_map->set(old_length + kOsrAstIdOffset,
9308 Smi::FromInt(osr_ast_id.ToInt()));
// Debug-mode sanity check over the whole map.
9311 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9312 DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
9313 DCHECK(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9314 DCHECK(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9315 Code::OPTIMIZED_FUNCTION);
9316 DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9317 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9320 shared->set_optimized_code_map(*new_code_map);
// Returns the literals array stored one slot after the cached-code slot at
// |index| in the optimized code map.  |index| must point into a valid entry
// (i.e. come from SearchOptimizedCodeMap).
9324 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9325 DCHECK(index > kEntriesStart);
9326 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9328 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9329 DCHECK_NE(NULL, cached_literals);
9330 return cached_literals;
// Returns the cached optimized Code object at |index| in the optimized code
// map.  |index| must come from SearchOptimizedCodeMap.
9336 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9337 DCHECK(index > kEntriesStart);
9338 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9339 Code* code = Code::cast(code_map->get(index));
9340 DCHECK_NE(NULL, code);
// Drops the optimized code map entirely (resets the field to Smi 0).  If the
// map is still linked into the code flusher's list, it is evicted first so
// the flusher does not hold a stale reference.
9345 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9346 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9348 // If the next map link slot is already used then the function was
9349 // enqueued with code flushing and we remove it now.
9350 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9351 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9352 flusher->EvictOptimizedCodeMap(this);
9355 DCHECK(code_map->get(kNextMapIndex)->IsUndefined());
9356 set_optimized_code_map(Smi::FromInt(0));
// Removes every entry whose cached code is |optimized_code| from the
// optimized code map, compacting surviving entries to the front and
// right-trimming the array; clears the map entirely if it becomes empty.
// |reason| is only used for --trace-opt output.
9360 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
9361 const char* reason) {
9362 DisallowHeapAllocation no_gc;
// A Smi value means there is no code map; nothing to evict.
9363 if (optimized_code_map()->IsSmi()) return;
9365 FixedArray* code_map = FixedArray::cast(optimized_code_map());
// Two-finger compaction: |src| scans all entries, |dst| receives keepers.
9366 int dst = kEntriesStart;
9367 int length = code_map->length();
9368 for (int src = kEntriesStart; src < length; src += kEntryLength) {
9369 DCHECK(code_map->get(src)->IsNativeContext());
9370 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
9371 // Evict the src entry by not copying it to the dst entry.
9372 if (FLAG_trace_opt) {
9373 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9375 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
9379 PrintF(" (osr ast id %d)]\n", osr.ToInt());
9383 // Keep the src entry by copying it to the dst entry.
9385 code_map->set(dst + kContextOffset,
9386 code_map->get(src + kContextOffset));
9387 code_map->set(dst + kCachedCodeOffset,
9388 code_map->get(src + kCachedCodeOffset));
9389 code_map->set(dst + kLiteralsOffset,
9390 code_map->get(src + kLiteralsOffset));
9391 code_map->set(dst + kOsrAstIdOffset,
9392 code_map->get(src + kOsrAstIdOffset));
9394 dst += kEntryLength;
9397 if (dst != length) {
9398 // Always trim even when array is cleared because of heap verifier.
9399 GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(code_map, length - dst);
9400 if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
// Shrinks the optimized code map by |shrink_by| slots (a whole number of
// entries) from the end; clears the map if only the header remains.  Called
// during GC, hence the FROM_GC trimming mode.
9405 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9406 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9407 DCHECK(shrink_by % kEntryLength == 0);
9408 DCHECK(shrink_by <= code_map->length() - kEntriesStart);
9409 // Always trim even when array is cleared because of heap verifier.
9410 GetHeap()->RightTrimFixedArray<Heap::FROM_GC>(code_map, shrink_by);
9411 if (code_map->length() == kEntriesStart) {
9412 ClearOptimizedCodeMap();
// Prepares |object| for use as a prototype.  In FAST_PROTOTYPE mode the
// object is normalized (so JSFunction properties become CONSTANT), migrated
// back to fast properties, and given a dedicated map marked as a prototype
// map.  Global objects and global proxies are left untouched.
9417 void JSObject::OptimizeAsPrototype(Handle<JSObject> object,
9418 PrototypeOptimizationMode mode) {
9419 if (object->IsGlobalObject()) return;
9420 if (object->IsJSGlobalProxy()) return;
9421 if (mode == FAST_PROTOTYPE && !object->map()->is_prototype_map()) {
9422 // First normalize to ensure all JSFunctions are CONSTANT.
9423 JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, 0);
9425 if (!object->HasFastProperties()) {
9426 JSObject::MigrateSlowToFast(object, 0);
// Give the object its own map so marking it as a prototype map does not
// affect other objects sharing the old map.
9428 if (mode == FAST_PROTOTYPE && object->HasFastProperties() &&
9429 !object->map()->is_prototype_map()) {
9430 Handle<Map> new_map = Map::Copy(handle(object->map()));
9431 JSObject::MigrateToMap(object, new_map);
9432 object->map()->set_is_prototype_map(true);
// Re-runs prototype optimization, but only for objects already marked as
// prototypes; a no-op for everything else.
9437 void JSObject::ReoptimizeIfPrototype(Handle<JSObject> object) {
9438 if (!object->map()->is_prototype_map()) return;
9439 OptimizeAsPrototype(object, FAST_PROTOTYPE);
// Builds the per-elements-kind chain of initial JSArray maps starting from
// |initial_map| (which must have the initial fast elements kind), stores the
// chain in a fixed array indexed by ElementsKind, and installs that array on
// the native context.
9443 Handle<Object> CacheInitialJSArrayMaps(
9444 Handle<Context> native_context, Handle<Map> initial_map) {
9445 // Replace all of the cached initial array maps in the native context with
9446 // the appropriate transitioned elements kind maps.
9447 Factory* factory = native_context->GetIsolate()->factory();
9448 Handle<FixedArray> maps = factory->NewFixedArrayWithHoles(
9449 kElementsKindCount, TENURED);
9451 Handle<Map> current_map = initial_map;
9452 ElementsKind kind = current_map->elements_kind();
9453 DCHECK(kind == GetInitialFastElementsKind());
9454 maps->set(kind, *current_map);
// Walk the fast elements-kind sequence, reusing an existing elements
// transition when present and otherwise creating a new transitioned map.
9455 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
9456 i < kFastElementsKindCount; ++i) {
9457 Handle<Map> new_map;
9458 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
9459 if (current_map->HasElementsTransition()) {
9460 new_map = handle(current_map->elements_transition_map());
9461 DCHECK(new_map->elements_kind() == next_kind);
9463 new_map = Map::CopyAsElementsKind(
9464 current_map, next_kind, INSERT_TRANSITION);
9466 maps->set(next_kind, *new_map);
9467 current_map = new_map;
9469 native_context->set_js_array_maps(*maps);
// Installs |value| as the prototype that instances constructed from
// |function| will receive.  If an initial map already exists it is replaced
// by a copy with the new prototype (deoptimizing dependent code); otherwise
// the value is parked in the prototype_or_initial_map field until an initial
// map is actually needed.
9474 void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
9475 Handle<Object> value) {
9476 Isolate* isolate = function->GetIsolate();
9478 DCHECK(value->IsJSReceiver());
9480 // Now some logic for the maps of the objects that are created by using this
9481 // function as a constructor.
9482 if (function->has_initial_map()) {
9483 // If the function has allocated the initial map replace it with a
9484 // copy containing the new prototype. Also complete any in-object
9485 // slack tracking that is in progress at this point because it is
9486 // still tracking the old copy.
9487 if (function->IsInobjectSlackTrackingInProgress()) {
9488 function->CompleteInobjectSlackTracking();
9491 Handle<Map> initial_map(function->initial_map(), isolate);
// During bootstrapping (or for non-plain-object instance types) we keep
// the map; otherwise we can defer by stashing the value directly.
9493 if (!initial_map->GetIsolate()->bootstrapper()->IsActive() &&
9494 initial_map->instance_type() == JS_OBJECT_TYPE) {
9495 // Put the value in the initial map field until an initial map is needed.
9496 // At that point, a new initial map is created and the prototype is put
9497 // into the initial map where it belongs.
9498 function->set_prototype_or_initial_map(*value);
9500 Handle<Map> new_map = Map::Copy(initial_map);
9501 JSFunction::SetInitialMap(function, new_map, value);
9503 // If the function is used as the global Array function, cache the
9504 // initial map (and transitioned versions) in the native context.
9505 Context* native_context = function->context()->native_context();
9506 Object* array_function =
9507 native_context->get(Context::ARRAY_FUNCTION_INDEX);
9508 if (array_function->IsJSFunction() &&
9509 *function == JSFunction::cast(array_function)) {
9510 CacheInitialJSArrayMaps(handle(native_context, isolate), new_map);
9514 // Deoptimize all code that embeds the previous initial map.
9515 initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
9516 isolate, DependentCode::kInitialMapChangedGroup);
9518 // Put the value in the initial map field until an initial map is
9519 // needed. At that point, a new initial map is created and the
9520 // prototype is put into the initial map where it belongs.
9521 function->set_prototype_or_initial_map(*value);
// Cached instanceof results may now be stale.
9523 isolate->heap()->ClearInstanceofCache();
// Sets the function's "prototype" property.  Per ES5 13.2.2, a non-object
// value is stored on a fresh map (as the constructor field, with the
// non_instance_prototype flag set) and instances are constructed with the
// initial Object prototype instead.
9527 void JSFunction::SetPrototype(Handle<JSFunction> function,
9528 Handle<Object> value) {
9529 DCHECK(function->should_have_prototype());
9530 Handle<Object> construct_prototype = value;
9532 // If the value is not a JSReceiver, store the value in the map's
9533 // constructor field so it can be accessed. Also, set the prototype
9534 // used for constructing objects to the original object prototype.
9535 // See ECMA-262 13.2.2.
9536 if (!value->IsJSReceiver()) {
9537 // Copy the map so this does not affect unrelated functions.
9538 // Remove map transitions because they point to maps with a
9539 // different prototype.
9540 Handle<Map> new_map = Map::Copy(handle(function->map()));
9542 JSObject::MigrateToMap(function, new_map);
9543 new_map->set_constructor(*value);
9544 new_map->set_non_instance_prototype(true);
9545 Isolate* isolate = new_map->GetIsolate();
9546 construct_prototype = handle(
9547 isolate->context()->native_context()->initial_object_prototype(),
9550 function->map()->set_non_instance_prototype(false);
9553 return SetInstancePrototype(function, construct_prototype);
// Switches this function to the strict-mode-appropriate "function without
// prototype" map and clears the prototype slot (sets it to the hole).  Only
// legal when the function still has the default function map; already
// prototype-less functions succeed trivially.
9557 bool JSFunction::RemovePrototype() {
9558 Context* native_context = context()->native_context();
9559 Map* no_prototype_map = shared()->strict_mode() == SLOPPY
9560 ? native_context->sloppy_function_without_prototype_map()
9561 : native_context->strict_function_without_prototype_map();
9563 if (map() == no_prototype_map) return true;
9566 if (map() != (shared()->strict_mode() == SLOPPY
9567 ? native_context->sloppy_function_map()
9568 : native_context->strict_function_map())) {
9573 set_map(no_prototype_map);
9574 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
// Links |map| as |function|'s initial map with the given |prototype|:
// optimizes a JSObject prototype for prototype use, then wires
// map->prototype, function->initial_map and map->constructor together.
9579 void JSFunction::SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
9580 Handle<Object> prototype) {
9581 if (prototype->IsJSObject()) {
9582 Handle<JSObject> js_proto = Handle<JSObject>::cast(prototype);
9583 JSObject::OptimizeAsPrototype(js_proto, FAST_PROTOTYPE);
9585 map->set_prototype(*prototype);
9586 function->set_prototype_or_initial_map(*map);
9587 map->set_constructor(*function);
// Lazily creates the function's initial map: sizes it from the shared info
// (generators get a fixed JSGeneratorObject layout), fetches or allocates
// the prototype, links everything via SetInitialMap, and kicks off in-object
// slack tracking for non-generators.  No-op if an initial map exists.
9591 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
9592 if (function->has_initial_map()) return;
9593 Isolate* isolate = function->GetIsolate();
9595 // First create a new map with the size and number of in-object properties
9596 // suggested by the function.
9597 InstanceType instance_type;
9599 int in_object_properties;
9600 if (function->shared()->is_generator()) {
9601 instance_type = JS_GENERATOR_OBJECT_TYPE;
9602 instance_size = JSGeneratorObject::kSize;
9603 in_object_properties = 0;
9605 instance_type = JS_OBJECT_TYPE;
9606 instance_size = function->shared()->CalculateInstanceSize();
9607 in_object_properties = function->shared()->CalculateInObjectProperties();
9609 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
9611 // Fetch or allocate prototype.
9612 Handle<Object> prototype;
9613 if (function->has_instance_prototype()) {
9614 prototype = handle(function->instance_prototype(), isolate);
9616 prototype = isolate->factory()->NewFunctionPrototype(function);
// All in-object slots start out unused; slack tracking may shrink them.
9618 map->set_inobject_properties(in_object_properties);
9619 map->set_unused_property_fields(in_object_properties);
9620 DCHECK(map->has_fast_object_elements());
9622 // Finally link initial map and constructor function.
9623 JSFunction::SetInitialMap(function, map, Handle<JSReceiver>::cast(prototype));
9625 if (!function->shared()->is_generator()) {
9626 function->StartInobjectSlackTracking();
// Forwards the instance class name to the shared function info.
9631 void JSFunction::SetInstanceClassName(String* name) {
9632 shared()->set_instance_class_name(name);
// Prints the function's debug name (see SharedFunctionInfo::DebugName) to
// |out| as a C string.
9636 void JSFunction::PrintName(FILE* out) {
9637 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9638 PrintF(out, "%s", name.get());
// Extracts the native context stored at a fixed slot of a literals array.
9642 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
9643 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
9647 // The filter is a pattern that matches function names in this way:
9648 //   "*"      all; the default
9649 //   "-"      all but the top-level function
9650 //   "-name"  all but the function "name"
9651 //   ""       only the top-level function
9652 //   "name"   only the function "name"
9653 //   "name*"  only functions starting with "name"
9654 //   "~"      none; the tilde is not an identifier
// Returns true if this function's debug name matches |raw_filter| per the
// pattern grammar above.  A leading '-' negates, a trailing '*' makes the
// comparison a prefix match.
9655 bool JSFunction::PassesFilter(const char* raw_filter) {
9656 if (*raw_filter == '*') return true;
9657 String* name = shared()->DebugName();
9658 Vector<const char> filter = CStrVector(raw_filter);
// Empty filter selects only the (nameless) top-level function.
9659 if (filter.length() == 0) return name->length() == 0;
9660 if (filter[0] == '-') {
9662 if (filter.length() == 1) {
9663 return (name->length() != 0);
9664 } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
// Negated prefix match: "-name*".
9667 if (filter[filter.length() - 1] == '*' &&
9668 name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
9673 } else if (name->IsUtf8EqualTo(filter)) {
// Positive prefix match: "name*".
9676 if (filter[filter.length() - 1] == '*' &&
9677 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
// Populates an Oddball (undefined, null, true, ...) with its internalized
// string representation, its numeric conversion value, and its kind tag.
9684 void Oddball::Initialize(Isolate* isolate,
9685 Handle<Oddball> oddball,
9686 const char* to_string,
9687 Handle<Object> to_number,
9689 Handle<String> internalized_to_string =
9690 isolate->factory()->InternalizeUtf8String(to_string);
9691 oddball->set_to_string(*internalized_to_string);
9692 oddball->set_to_number(*to_number);
9693 oddball->set_kind(kind);
// Lazily computes and caches the array of line-end positions for the
// script's source.  Scripts without a string source get an empty array.
// Idempotent: returns immediately if line_ends is already set.
9697 void Script::InitLineEnds(Handle<Script> script) {
9698 if (!script->line_ends()->IsUndefined()) return;
9700 Isolate* isolate = script->GetIsolate();
9702 if (!script->source()->IsString()) {
9703 DCHECK(script->source()->IsUndefined());
9704 Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
9705 script->set_line_ends(*empty);
9706 DCHECK(script->line_ends()->IsFixedArray());
9710 Handle<String> src(String::cast(script->source()), isolate);
9712 Handle<FixedArray> array = String::CalculateLineEnds(src, true);
// Mark the cached array copy-on-write (the shared empty array already is).
9714 if (*array != isolate->heap()->empty_fixed_array()) {
9715 array->set_map(isolate->heap()->fixed_cow_array_map());
9718 script->set_line_ends(*array);
9719 DCHECK(script->line_ends()->IsFixedArray());
// Returns the 0-based column of |code_pos| within its line, or -1 if the
// line cannot be determined.  The first line is offset by the script's
// column_offset (for scripts embedded mid-line, e.g. inline <script>).
9723 int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
9724 int line_number = GetLineNumber(script, code_pos);
9725 if (line_number == -1) return -1;
9727 DisallowHeapAllocation no_allocation;
9728 FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
9729 line_number = line_number - script->line_offset()->value();
9730 if (line_number == 0) return code_pos + script->column_offset()->value();
// Column = distance past the previous line's terminating newline.
9731 int prev_line_end_pos =
9732 Smi::cast(line_ends_array->get(line_number - 1))->value();
9733 return code_pos - (prev_line_end_pos + 1);
// Binary-searches the cached line_ends array for the line containing
// |code_pos|; returns -1 for an empty array.  The result includes the
// script's line_offset.
9737 int Script::GetLineNumberWithArray(int code_pos) {
9738 DisallowHeapAllocation no_allocation;
9739 DCHECK(line_ends()->IsFixedArray());
9740 FixedArray* line_ends_array = FixedArray::cast(line_ends());
9741 int line_ends_len = line_ends_array->length();
9742 if (line_ends_len == 0) return -1;
// Fast path: position on (or before) the first line.
9744 if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
9745 return line_offset()->value();
// Binary search; lines between |left| and |right| have been omitted from
// this listing but the invariant is the usual lower-bound search.
9749 int right = line_ends_len;
9750 while (int half = (right - left) / 2) {
9751 if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
9757 return right + line_offset()->value();
// Handle-based variant: ensures the line-ends cache exists, then delegates
// to the array-based lookup.
9761 int Script::GetLineNumber(Handle<Script> script, int code_pos) {
9762 InitLineEnds(script);
9763 return script->GetLineNumberWithArray(code_pos);
// Raw-pointer variant usable without allocation: uses the cached line-ends
// array when present, otherwise counts newlines in the source up to
// |code_pos| (O(n)); returns -1 if there is no string source.
9767 int Script::GetLineNumber(int code_pos) {
9768 DisallowHeapAllocation no_allocation;
9769 if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);
9771 // Slow mode: we do not have line_ends. We have to iterate through source.
9772 if (!source()->IsString()) return -1;
9774 String* source_string = String::cast(source());
9776 int len = source_string->length();
9777 for (int pos = 0; pos < len; pos++) {
9778 if (pos == code_pos) break;
9779 if (source_string->Get(pos) == '\n') line++;
// Calls the JS-side "nameOrSourceURL" method on the script's wrapper object
// and returns its result; returns undefined if the call throws (the pending
// exception is deliberately not inspected here).
9785 Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
9786 Isolate* isolate = script->GetIsolate();
9787 Handle<String> name_or_source_url_key =
9788 isolate->factory()->InternalizeOneByteString(
9789 STATIC_CHAR_VECTOR("nameOrSourceURL"));
9790 Handle<JSObject> script_wrapper = Script::GetWrapper(script);
9791 Handle<Object> property = Object::GetProperty(
9792 script_wrapper, name_or_source_url_key).ToHandleChecked();
9793 DCHECK(property->IsJSFunction());
9794 Handle<JSFunction> method = Handle<JSFunction>::cast(property);
9795 Handle<Object> result;
9796 // Do not check against pending exception, since this function may be called
9797 // when an exception has already been pending.
9798 if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
9799 return isolate->factory()->undefined_value();
// Returns the JS wrapper object for |script|, creating one if the cached
// WeakCell is absent or cleared.  The wrapper is a JSValue whose value slot
// holds the script; it is cached weakly so GC can reclaim unused wrappers.
9805 Handle<JSObject> Script::GetWrapper(Handle<Script> script) {
9806 Isolate* isolate = script->GetIsolate();
9807 if (!script->wrapper()->IsUndefined()) {
9808 Handle<WeakCell> cell(WeakCell::cast(script->wrapper()));
9809 if (!cell->cleared()) {
9810 // Return a handle for the existing script wrapper from the cache.
9811 return handle(JSObject::cast(cell->value()));
9813 // If we found an empty WeakCell, that means the script wrapper was
9814 // GCed. We are not notified directly of that, so we decrement here
9815 // so that we at least don't count double for any given script.
9816 isolate->counters()->script_wrappers()->Decrement();
9818 // Construct a new script wrapper.
9819 isolate->counters()->script_wrappers()->Increment();
9820 Handle<JSFunction> constructor = isolate->script_function();
9821 Handle<JSValue> result =
9822 Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));
9823 result->set_value(*script);
// Cache the new wrapper weakly on the script.
9824 Handle<WeakCell> cell = isolate->factory()->NewWeakCell(result);
9825 script->set_wrapper(*cell);
// Returns the function's name for debugging: the explicit name if it is a
// non-empty string, otherwise the inferred name.
9830 String* SharedFunctionInfo::DebugName() {
9832 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
9833 return String::cast(n);
// True iff this function has a script whose source is available.
// reinterpret_cast avoids Script::cast's checks in this hot predicate.
9837 bool SharedFunctionInfo::HasSourceCode() const {
9838 return !script()->IsUndefined() &&
9839 !reinterpret_cast<Script*>(script())->source()->IsUndefined();
// Returns this function's source as a substring of the script source
// (start_position..end_position), or undefined if no source is available.
9843 Handle<Object> SharedFunctionInfo::GetSourceCode() {
9844 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
9845 Handle<String> source(String::cast(Script::cast(script())->source()));
9846 return GetIsolate()->factory()->NewSubString(
9847 source, start_position(), end_position());
// Heuristic inlining gate: requires an associated script, optimization not
// disabled, and — if unoptimized code exists — that it is still optimizable.
9851 bool SharedFunctionInfo::IsInlineable() {
9852 // Check that the function has a script associated with it.
9853 if (!script()->IsScript()) return false;
9854 if (optimization_disabled()) return false;
9855 // If we never ran this (unlikely) then lets try to optimize it.
9856 if (code()->kind() != Code::FUNCTION) return true;
9857 return code()->optimizable();
// Length of this function's source span in characters.
9861 int SharedFunctionInfo::SourceSize() {
9862 return end_position() - start_position();
// Suggested instance size for objects created from this function: the
// JSObject header plus one pointer per expected property, capped at
// JSObject::kMaxInstanceSize.
9866 int SharedFunctionInfo::CalculateInstanceSize() {
9868 JSObject::kHeaderSize +
9869 expected_nof_properties() * kPointerSize;
9870 if (instance_size > JSObject::kMaxInstanceSize) {
9871 instance_size = JSObject::kMaxInstanceSize;
9873 return instance_size;
// Number of in-object property slots implied by the calculated instance size.
9877 int SharedFunctionInfo::CalculateInObjectProperties() {
9878 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
9882 // Output the source code without any allocation in the heap.
// Streams the function source (optionally truncated to v.max_length chars,
// with a trailing "...") to |os|; designed to be safe during stack dumps,
// hence the unchecked casts and validity probing.
9883 std::ostream& operator<<(std::ostream& os, const SourceCodeOf& v) {
9884 const SharedFunctionInfo* s = v.value;
9885 // For some native functions there is no source.
9886 if (!s->HasSourceCode()) return os << "<No Source>";
9888 // Get the source for the script which this function came from.
9889 // Don't use String::cast because we don't want more assertion errors while
9890 // we are already creating a stack dump.
9891 String* script_source =
9892 reinterpret_cast<String*>(Script::cast(s->script())->source());
9894 if (!script_source->LooksValid()) return os << "<Invalid Source>";
// For nested functions, prefix the output with the function's name if any.
9896 if (!s->is_toplevel()) {
9898 Object* name = s->name();
9899 if (name->IsString() && String::cast(name)->length() > 0) {
9900 String::cast(name)->PrintUC16(os);
9904 int len = s->end_position() - s->start_position();
9905 if (len <= v.max_length || v.max_length < 0) {
9906 script_source->PrintUC16(os, s->start_position(), s->end_position());
9909 script_source->PrintUC16(os, s->start_position(),
9910 s->start_position() + v.max_length);
9911 return os << "...\n";
// True iff |code| and |recompiled| have identical instruction sizes and
// byte-identical relocation info — i.e. the recompiled code is the same
// program and deoptimization data can be transplanted between them.
9916 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
9917 if (code->instruction_size() != recompiled->instruction_size()) return false;
9918 ByteArray* code_relocation = code->relocation_info();
9919 ByteArray* recompiled_relocation = recompiled->relocation_info();
9920 int length = code_relocation->length();
9921 if (length != recompiled_relocation->length()) return false;
9922 int compare = memcmp(code_relocation->GetDataStartAddress(),
9923 recompiled_relocation->GetDataStartAddress(),
9925 return compare == 0;
// Gives the current unoptimized code deoptimization support.  If
// |recompiled| is byte-equivalent, only its deopt data is copied onto the
// existing code (preserving IC state); otherwise the code is replaced
// wholesale, which resets type feedback.
9929 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
9930 DCHECK(!has_deoptimization_support());
9931 DisallowHeapAllocation no_allocation;
9932 Code* code = this->code();
9933 if (IsCodeEquivalent(code, recompiled)) {
9934 // Copy the deoptimization data from the recompiled code.
9935 code->set_deoptimization_data(recompiled->deoptimization_data());
9936 code->set_has_deoptimization_support(true);
9938 // TODO(3025757): In case the recompiled isn't equivalent to the
9939 // old code, we have to replace it. We should try to avoid this
9940 // altogether because it flushes valuable type feedback by
9941 // effectively resetting all IC state.
9942 ReplaceCode(recompiled);
9944 DCHECK(has_deoptimization_support());
// Permanently disables optimization for this function, recording |reason|.
// The flag lives on the shared info (which survives code flushing) and is
// mirrored onto the unoptimized code's optimizable bit when present.
9948 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
9949 // Disable optimization for the shared function info and mark the
9950 // code as non-optimizable. The marker on the shared function info
9951 // is there because we flush non-optimized code thereby loosing the
9952 // non-optimizable information for the code. When the code is
9953 // regenerated and set on the shared function info it is marked as
9954 // non-optimizable if optimization is disabled for the shared
9956 set_optimization_disabled(true);
9957 set_bailout_reason(reason);
9958 // Code should be the lazy compilation stub or else unoptimized. If the
9959 // latter, disable optimization for the code too.
9960 DCHECK(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
9961 if (code()->kind() == Code::FUNCTION) {
9962 code()->set_optimizable(false);
// Tell the profiler, and optionally trace the reason.
9964 PROFILE(GetIsolate(), CodeDisableOptEvent(code(), this));
9965 if (FLAG_trace_opt) {
9966 PrintF("[disabled optimization for ");
9968 PrintF(", reason: %s]\n", GetBailoutReason(reason));
// Debug helper: asserts (inside GetOutputInfo) that |id| has a deopt output
// entry in the unoptimized code's deoptimization data.  Always returns true
// so it can be used inside a DCHECK.
9973 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
9974 DCHECK(!id.IsNone());
9975 Code* unoptimized = code();
9976 DeoptimizationOutputData* data =
9977 DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
9978 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
9980 return true; // Return true if there was no DCHECK.
// Begins in-object slack tracking on the initial map: after a generous
// number of constructions the unused in-object slots will be reclaimed.
// Skipped once per map, when serializing, or when there is no slack.
9984 void JSFunction::StartInobjectSlackTracking() {
9985 DCHECK(has_initial_map() && !IsInobjectSlackTrackingInProgress());
9987 if (!FLAG_clever_optimizations) return;
9988 Map* map = initial_map();
9990 // Only initiate the tracking the first time.
9991 if (map->done_inobject_slack_tracking()) return;
9992 map->set_done_inobject_slack_tracking(true);
9994 // No tracking during the snapshot construction phase.
9995 Isolate* isolate = GetIsolate();
9996 if (isolate->serializer_enabled()) return;
9998 if (map->unused_property_fields() == 0) return;
10000 map->set_construction_count(kGenerousAllocationCount);
// Resets IC/type-feedback state when the function is reused in a new
// context: clears inline caches and the feedback vector, bumps the IC age,
// and re-enables optimization if it was only disabled by the opt-count cap.
10004 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
10005 code()->ClearInlineCaches();
10006 // If we clear ICs, we need to clear the type feedback vector too, since
10007 // CallICs are synced with a feedback vector slot.
10008 ClearTypeFeedbackInfo();
10009 set_ic_age(new_ic_age);
10010 if (code()->kind() == Code::FUNCTION) {
10011 code()->set_profiler_ticks(0);
10012 if (optimization_disabled() &&
10013 opt_count() >= FLAG_max_opt_count) {
10014 // Re-enable optimizations if they were disabled due to opt_count limit.
10015 set_optimization_disabled(false);
10016 code()->set_optimizable(true);
10019 set_deopt_count(0);
// Transition-tree visitor: folds the minimum unused-property-field count of
// all visited maps into the int pointed to by |data|.
10024 static void GetMinInobjectSlack(Map* map, void* data) {
10025 int slack = map->unused_property_fields();
10026 if (*reinterpret_cast<int*>(data) > slack) {
10027 *reinterpret_cast<int*>(data) = slack;
// Transition-tree visitor: shrinks each map by |*data| property slots
// (instance size, in-object count, unused count) and refreshes the visitor
// id, which is derived from the instance size.
10032 static void ShrinkInstanceSize(Map* map, void* data) {
10033 int slack = *reinterpret_cast<int*>(data);
10034 map->set_inobject_properties(map->inobject_properties() - slack);
10035 map->set_unused_property_fields(map->unused_property_fields() - slack);
10036 map->set_instance_size(map->instance_size() - slack * kPointerSize);
10038 // Visitor id might depend on the instance size, recalculate it.
10039 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
// Finishes in-object slack tracking: computes the minimum slack across the
// initial map's whole transition tree, then shrinks every map in the tree by
// that amount so no instance wastes the slots.
10043 void JSFunction::CompleteInobjectSlackTracking() {
10044 DCHECK(has_initial_map());
10045 Map* map = initial_map();
10047 DCHECK(map->done_inobject_slack_tracking());
10048 map->set_construction_count(kNoSlackTracking);
10050 int slack = map->unused_property_fields();
10051 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
10053 // Resize the initial map and all maps in its transition tree.
10054 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
// Linear search of the optimized code map for an entry matching
// (native_context, osr_ast_id).  Returns the index of the cached-code slot,
// or -1 when caching is off, the map is absent, or no entry matches.
10059 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10060 BailoutId osr_ast_id) {
10061 DisallowHeapAllocation no_gc;
10062 DCHECK(native_context->IsNativeContext());
10063 if (!FLAG_cache_optimized_code) return -1;
10064 Object* value = optimized_code_map();
10065 if (!value->IsSmi()) {
10066 FixedArray* optimized_code_map = FixedArray::cast(value);
10067 int length = optimized_code_map->length();
10068 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
10069 for (int i = kEntriesStart; i < length; i += kEntryLength) {
10070 if (optimized_code_map->get(i + kContextOffset) == native_context &&
10071 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10072 return i + kCachedCodeOffset;
10075 if (FLAG_trace_opt) {
10076 PrintF("[didn't find optimized code in optimized code map for ");
// Table of visitor-synchronization tag values, expanded from the central
// tag list macro (second macro argument selects the tag).
10085 #define DECLARE_TAG(ignore1, name, ignore2) name,
10086 const char* const VisitorSynchronization::kTags[
10087 VisitorSynchronization::kNumberOfSyncTags] = {
10088 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Parallel table of human-readable tag names, expanded from the same tag
// list macro (third macro argument selects the name).
10093 #define DECLARE_TAG(ignore1, ignore2, name) name,
10094 const char* const VisitorSynchronization::kTagNames[
10095 VisitorSynchronization::kNumberOfSyncTags] = {
10096 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
// Visits the Code object referenced by a code-target relocation. The target
// is handed to VisitPointer but must not be moved by it (CHECK below).
10101 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10102 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
10103 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10104 Object* old_target = target;
10105 VisitPointer(&target);
10106 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits the code-age stub referenced by a code-age-sequence relocation.
10110 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10111 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10112 Object* stub = rinfo->code_age_stub();
10114 VisitPointer(&stub);
// Visits a code-entry slot. If the visitor relocated the Code object, the
// raw entry address stored at |entry_address| is rewritten to match.
10119 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10120 Object* code = Code::GetObjectFromEntryAddress(entry_address);
10121 Object* old_code = code;
10122 VisitPointer(&code);
10123 if (code != old_code) {
10124 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
// Visits the Cell referenced by a CELL relocation; if the visitor moved the
// cell, the relocation target is updated to the new location.
10129 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10130 DCHECK(rinfo->rmode() == RelocInfo::CELL);
10131 Object* cell = rinfo->target_cell();
10132 Object* old_cell = cell;
10133 VisitPointer(&cell);
10134 if (cell != old_cell) {
10135 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
// Visits the Code object targeted by a patched debug break (either a patched
// JS return sequence or a patched debug break slot). Like VisitCodeTarget,
// the visitor must not move the target.
10140 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10141 DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10142 rinfo->IsPatchedReturnSequence()) ||
10143 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10144 rinfo->IsPatchedDebugBreakSlotSequence()));
10145 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10146 Object* old_target = target;
10147 VisitPointer(&target);
10148 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
// Visits the heap object embedded by an EMBEDDED_OBJECT relocation.
10152 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10153 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10154 Object* p = rinfo->target_object();
// Visits an external (non-heap) reference stored in the relocation info.
10159 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10160 Address p = rinfo->target_reference();
10161 VisitExternalReference(&p);
// Drops this Code object's relocation info entirely: embedded objects are
// cleared first, then the relocation byte array is replaced by the empty one.
10165 void Code::InvalidateRelocation() {
10166 InvalidateEmbeddedObjects();
10167 set_relocation_info(GetHeap()->empty_byte_array());
// Overwrites every embedded-object and cell reference in this code with
// undefined / the undefined cell, so the code no longer keeps those objects
// alive. Write barriers are skipped since only immortal values are stored.
10171 void Code::InvalidateEmbeddedObjects() {
10172 Object* undefined = GetHeap()->undefined_value();
10173 Cell* undefined_cell = GetHeap()->undefined_cell();
10174 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10175 RelocInfo::ModeMask(RelocInfo::CELL);
10176 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10177 RelocInfo::Mode mode = it.rinfo()->rmode();
10178 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10179 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
10180 } else if (mode == RelocInfo::CELL) {
10181 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
// Applies |delta| to every relocatable entry in this code object, then
// flushes the instruction cache once for the whole instruction range.
10187 void Code::Relocate(intptr_t delta) {
10188 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10189 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10191 CpuFeatures::FlushICache(instruction_start(), instruction_size());
// Copies freshly assembled code from |desc| into this (white, i.e. not yet
// marked) Code object: instructions and relocation info are copied, then
// every relocation entry is fixed up — handles are unboxed to raw heap
// pointers and pc-relative entries are shifted by the buffer-to-object delta.
// The icache is flushed once at the end instead of per entry.
10195 void Code::CopyFrom(const CodeDesc& desc) {
10196 DCHECK(Marking::Color(this) == Marking::WHITE_OBJECT);
10199 CopyBytes(instruction_start(), desc.buffer,
10200 static_cast<size_t>(desc.instr_size));
// Relocation info was emitted backwards from the end of the buffer.
10203 CopyBytes(relocation_start(),
10204 desc.buffer + desc.buffer_size - desc.reloc_size,
10205 static_cast<size_t>(desc.reloc_size));
10207 // unbox handles and relocate
10208 intptr_t delta = instruction_start() - desc.buffer;
10209 int mode_mask = RelocInfo::kCodeTargetMask |
10210 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10211 RelocInfo::ModeMask(RelocInfo::CELL) |
10212 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
10213 RelocInfo::kApplyMask;
10214 // Needed to find target_object and runtime_entry on X64
10215 Assembler* origin = desc.origin;
10216 AllowDeferredHandleDereference embedding_raw_address;
10217 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10218 RelocInfo::Mode mode = it.rinfo()->rmode();
10219 if (mode == RelocInfo::EMBEDDED_OBJECT) {
10220 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10221 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10222 } else if (mode == RelocInfo::CELL) {
10223 Handle<Cell> cell = it.rinfo()->target_cell_handle();
10224 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
10225 } else if (RelocInfo::IsCodeTarget(mode)) {
10226 // rewrite code handles in inline cache targets to direct
10227 // pointers to the first instruction in the code object
10228 Handle<Object> p = it.rinfo()->target_object_handle(origin);
10229 Code* code = Code::cast(*p);
10230 it.rinfo()->set_target_address(code->instruction_start(),
10231 SKIP_WRITE_BARRIER,
10232 SKIP_ICACHE_FLUSH);
10233 } else if (RelocInfo::IsRuntimeEntry(mode)) {
10234 Address p = it.rinfo()->target_runtime_entry(origin);
10235 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER,
10236 SKIP_ICACHE_FLUSH);
10237 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
10238 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
10239 Code* code = Code::cast(*p);
10240 it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH);
// Remaining (pc-relative) modes are adjusted by the relocation delta.
10242 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
10245 CpuFeatures::FlushICache(instruction_start(), instruction_size());
// Locate the source position which is closest to the address in the code. This
// is using the source position information embedded in the relocation info.
// The position returned is relative to the beginning of the script where the
// source for this function is found.
10253 int Code::SourcePosition(Address pc) {
10254 int distance = kMaxInt;
10255 int position = RelocInfo::kNoPosition; // Initially no position found.
10256 // Run through all the relocation info to find the best matching source
10257 // position. All the code needs to be considered as the sequence of the
10258 // instructions in the code does not necessarily follow the same order as the
// source positions. (NOTE(review): continuation of the original comment is
// not visible here.)
10260 RelocIterator it(this, RelocInfo::kPositionMask);
10261 while (!it.done()) {
10262 // Only look at positions after the current pc.
10263 if (it.rinfo()->pc() < pc) {
10264 // Get position and distance.
10266 int dist = static_cast<int>(pc - it.rinfo()->pc());
10267 int pos = static_cast<int>(it.rinfo()->data());
10268 // If this position is closer than the current candidate or if it has the
10269 // same distance as the current candidate and the position is higher then
10270 // this position is the new candidate.
10271 if ((dist < distance) ||
10272 (dist == distance && pos > position)) {
// Same as Code::SourcePosition above except it only looks for statement
// positions: it first resolves |pc| to the nearest source position, then
// returns the largest statement position that does not exceed it.
10285 int Code::SourceStatementPosition(Address pc) {
10286 // First find the position as close as possible using all position
// information in the relocation info.
10288 int position = SourcePosition(pc);
10289 // Now find the closest statement position before the position.
10290 int statement_position = 0;
10291 RelocIterator it(this, RelocInfo::kPositionMask);
10292 while (!it.done()) {
10293 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10294 int p = static_cast<int>(it.rinfo()->data());
10295 if (statement_position < p && p <= position) {
10296 statement_position = p;
10301 return statement_position;
// Returns the safepoint table entry covering |pc| in this code object.
10305 SafepointEntry Code::GetSafepointEntry(Address pc) {
10306 SafepointTable table(this);
10307 return table.FindEntry(pc);
// Returns the n-th embedded heap object whose map is |match_map|, scanning
// this IC stub's embedded-object relocations in order.
10311 Object* Code::FindNthObject(int n, Map* match_map) {
10312 DCHECK(is_inline_cache_stub());
10313 DisallowHeapAllocation no_allocation;
10314 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10315 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10316 RelocInfo* info = it.rinfo();
10317 Object* object = info->target_object();
10318 if (object->IsHeapObject()) {
10319 if (HeapObject::cast(object)->map() == match_map) {
10320 if (--n == 0) return object;
// Returns the first AllocationSite embedded in this code, or NULL.
10328 AllocationSite* Code::FindFirstAllocationSite() {
10329 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10330 return (result != NULL) ? AllocationSite::cast(result) : NULL;
// Returns the first Map embedded in this code (objects whose map is the
// meta map are Maps), or NULL.
10334 Map* Code::FindFirstMap() {
10335 Object* result = FindNthObject(1, GetHeap()->meta_map());
10336 return (result != NULL) ? Map::cast(result) : NULL;
// Replaces embedded objects in sequence according to |pattern|: the
// current find-target must be matched (by map identity) before advancing to
// the next pattern slot; returns once all |count_| replacements are done.
10340 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
10341 DCHECK(is_inline_cache_stub() || is_handler());
10342 DisallowHeapAllocation no_allocation;
10343 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10344 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
10345 int current_pattern = 0;
10346 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10347 RelocInfo* info = it.rinfo();
10348 Object* object = info->target_object();
10349 if (object->IsHeapObject()) {
10350 Map* map = HeapObject::cast(object)->map();
10351 if (map == *pattern.find_[current_pattern]) {
10352 info->set_target_object(*pattern.replace_[current_pattern]);
10353 if (++current_pattern == pattern.count_) return;
// Collects every Map embedded in this IC stub into |maps|.
10361 void Code::FindAllMaps(MapHandleList* maps) {
10362 DCHECK(is_inline_cache_stub());
10363 DisallowHeapAllocation no_allocation;
10364 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10365 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10366 RelocInfo* info = it.rinfo();
10367 Object* object = info->target_object();
10368 if (object->IsMap()) maps->Add(handle(Map::cast(object)));
// Returns the first HANDLER-kind code target referenced by this IC stub.
10373 Code* Code::FindFirstHandler() {
10374 DCHECK(is_inline_cache_stub());
10375 DisallowHeapAllocation no_allocation;
10376 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10377 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10378 RelocInfo* info = it.rinfo();
10379 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10380 if (code->kind() == Code::HANDLER) return code;
// Collects up to |length| handler code targets from this IC stub into
// |code_list|. Returns true iff exactly |length| handlers were found.
10386 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
10387 DCHECK(is_inline_cache_stub());
10388 DisallowHeapAllocation no_allocation;
10389 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10391 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10392 if (i == length) return true;
10393 RelocInfo* info = it.rinfo();
10394 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10395 // IC stubs with handlers never contain non-handler code objects before
10396 // handler targets.
10397 if (code->kind() != Code::HANDLER) break;
10398 code_list->Add(Handle<Code>(code));
10401 return i == length;
// Finds the handler associated with |map| in this IC stub. The stub's
// relocation stream interleaves embedded maps with their handler code
// targets, so the handler returned is the first code target encountered
// after the matching map. Returns an empty handle if |map| is not present.
10405 MaybeHandle<Code> Code::FindHandlerForMap(Map* map) {
10406 DCHECK(is_inline_cache_stub());
10407 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10408 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10409 bool return_next = false;
10410 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10411 RelocInfo* info = it.rinfo();
10412 if (info->rmode() == RelocInfo::EMBEDDED_OBJECT) {
10413 Object* object = info->target_object();
10414 if (object == map) return_next = true;
10415 } else if (return_next) {
10416 Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10417 DCHECK(code->kind() == Code::HANDLER);
10418 return handle(code);
10421 return MaybeHandle<Code>();
// Returns the first Name object embedded in this IC stub.
10425 Name* Code::FindFirstName() {
10426 DCHECK(is_inline_cache_stub());
10427 DisallowHeapAllocation no_allocation;
10428 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10429 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10430 RelocInfo* info = it.rinfo();
10431 Object* object = info->target_object();
10432 if (object->IsName()) return Name::cast(object);
// Clears all inline caches in this code, regardless of IC kind.
10438 void Code::ClearInlineCaches() {
10439 ClearInlineCaches(NULL);
// Clears only the inline caches of the given |kind|.
10443 void Code::ClearInlineCaches(Code::Kind kind) {
10444 ClearInlineCaches(&kind);
// Shared implementation: walks all call-like relocations and clears each IC
// stub target; a NULL |kind| means "clear every kind".
10448 void Code::ClearInlineCaches(Code::Kind* kind) {
10449 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10450 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10451 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
10452 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10453 RelocInfo* info = it.rinfo();
10454 Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10455 if (target->is_inline_cache_stub()) {
10456 if (kind == NULL || *kind == target->kind()) {
10457 IC::Clear(this->GetIsolate(), info->pc(),
10458 info->host()->constant_pool());
// Resets all type feedback slots recorded for this function.
10465 void SharedFunctionInfo::ClearTypeFeedbackInfo() {
10466 feedback_vector()->ClearSlots(this);
// Maps a back-edge pc offset to its AST id via the back edge table; returns
// BailoutId::None() when the offset is not a recorded back edge.
10470 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
10471 DisallowHeapAllocation no_gc;
10472 DCHECK(kind() == FUNCTION);
10473 BackEdgeTable back_edges(this, &no_gc);
10474 for (uint32_t i = 0; i < back_edges.length(); i++) {
10475 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10477 return BailoutId::None();
// Inverse of TranslatePcOffsetToAstId; the AST id must exist in the table.
10481 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10482 DisallowHeapAllocation no_gc;
10483 DCHECK(kind() == FUNCTION);
10484 BackEdgeTable back_edges(this, &no_gc);
10485 for (uint32_t i = 0; i < back_edges.length(); i++) {
10486 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10488 UNREACHABLE(); // We expect to find the back edge.
// Patches a code-age sequence back to the "young" (no-age) state.
10493 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10494 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
// Patches a code-age sequence to record that the code has been executed once.
10498 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
10499 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
10500 NO_MARKING_PARITY);
// Normalizes the special not-executed / executed-once pseudo-ages into the
// ordinary age scale used by the aging heuristics.
10504 static Code::Age EffectiveAge(Code::Age age) {
10505 if (age == Code::kNotExecutedCodeAge) {
10506 // Treat code that's never been executed as old immediately.
10507 age = Code::kIsOldCodeAge;
10508 } else if (age == Code::kExecutedOnceCodeAge) {
10509 // Pre-age code that has only been executed once.
10510 age = Code::kPreAgedCodeAge;
// Resets this code's age sequence to young, if it has one.
10516 void Code::MakeYoung() {
10517 byte* sequence = FindCodeAgeSequence();
10518 if (sequence != NULL) MakeCodeAgeSequenceYoung(sequence, GetIsolate());
// Advances this code's age by one step, but only once per GC cycle: the
// stored marking parity must differ from |current_parity| for the patch to
// happen, and code already at the last age is left alone.
10522 void Code::MakeOlder(MarkingParity current_parity) {
10523 byte* sequence = FindCodeAgeSequence();
10524 if (sequence != NULL) {
10526 MarkingParity code_parity;
10527 Isolate* isolate = GetIsolate();
10528 GetCodeAgeAndParity(isolate, sequence, &age, &code_parity);
10529 age = EffectiveAge(age);
10530 if (age != kLastCodeAge && code_parity != current_parity) {
10531 PatchPlatformCodeAge(isolate,
10533 static_cast<Age>(age + 1),
// True when this code's (effective) age has reached the "old" threshold.
10540 bool Code::IsOld() {
10541 return GetAge() >= kIsOldCodeAge;
// Returns the address of the code-age sequence (at the prologue), or NULL
// when aging does not apply: flag disabled, no recorded prologue offset, or
// a kind that is not aged (only optimized code and full code without debug
// break slots are aged).
10545 byte* Code::FindCodeAgeSequence() {
10546 return FLAG_age_code &&
10547 prologue_offset() != Code::kPrologueOffsetNotSet &&
10548 (kind() == OPTIMIZED_FUNCTION ||
10549 (kind() == FUNCTION && !has_debug_break_slots()))
10550 ? instruction_start() + prologue_offset()
// Raw age with the pseudo-ages folded in (see EffectiveAge).
10555 Code::Age Code::GetAge() {
10556 return EffectiveAge(GetRawAge());
// Decodes the age stored in this code's age sequence; codes without an age
// sequence report kNoAgeCodeAge.
10560 Code::Age Code::GetRawAge() {
10561 byte* sequence = FindCodeAgeSequence();
10562 if (sequence == NULL) {
10563 return kNoAgeCodeAge;
10566 MarkingParity parity;
10567 GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity);
// Decodes (age, marking parity) from |code| by identity-comparing it against
// every age builtin stub: one even- and one odd-parity stub per age, plus
// the two special executed-once / executed-twice markers.
10572 void Code::GetCodeAgeAndParity(Code* code, Age* age,
10573 MarkingParity* parity) {
10574 Isolate* isolate = code->GetIsolate();
10575 Builtins* builtins = isolate->builtins();
10577 #define HANDLE_CODE_AGE(AGE) \
10578 stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
10579 if (code == stub) { \
10580 *age = k##AGE##CodeAge; \
10581 *parity = EVEN_MARKING_PARITY; \
10584 stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10585 if (code == stub) { \
10586 *age = k##AGE##CodeAge; \
10587 *parity = ODD_MARKING_PARITY; \
10590 CODE_AGE_LIST(HANDLE_CODE_AGE)
10591 #undef HANDLE_CODE_AGE
10592 stub = *builtins->MarkCodeAsExecutedOnce();
10593 if (code == stub) {
10594 *age = kNotExecutedCodeAge;
10595 *parity = NO_MARKING_PARITY;
10598 stub = *builtins->MarkCodeAsExecutedTwice();
10599 if (code == stub) {
10600 *age = kExecutedOnceCodeAge;
10601 *parity = NO_MARKING_PARITY;
// Inverse of GetCodeAgeAndParity: returns the builtin stub encoding the
// given (age, parity) pair.
10608 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
10609 Builtins* builtins = isolate->builtins();
10611 #define HANDLE_CODE_AGE(AGE) \
10612 case k##AGE##CodeAge: { \
10613 Code* stub = parity == EVEN_MARKING_PARITY \
10614 ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
10615 : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10618 CODE_AGE_LIST(HANDLE_CODE_AGE)
10619 #undef HANDLE_CODE_AGE
10620 case kNotExecutedCodeAge: {
10621 DCHECK(parity == NO_MARKING_PARITY);
10622 return *builtins->MarkCodeAsExecutedOnce();
10624 case kExecutedOnceCodeAge: {
10625 DCHECK(parity == NO_MARKING_PARITY);
10626 return *builtins->MarkCodeAsExecutedTwice();
// Prints the source comment preceding the deopt runtime entry matching
// |bailout_id| (checked against all three deopt flavors: eager, soft, lazy).
// Relies on COMMENT relocs immediately preceding their RUNTIME_ENTRY relocs.
10636 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
10637 const char* last_comment = NULL;
10638 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
10639 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
10640 for (RelocIterator it(this, mask); !it.done(); it.next()) {
10641 RelocInfo* info = it.rinfo();
10642 if (info->rmode() == RelocInfo::COMMENT) {
10643 last_comment = reinterpret_cast<const char*>(info->data());
10644 } else if (last_comment != NULL) {
10645 if ((bailout_id == Deoptimizer::GetDeoptimizationId(
10646 GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
10647 (bailout_id == Deoptimizer::GetDeoptimizationId(
10648 GetIsolate(), info->target_address(), Deoptimizer::SOFT)) ||
10649 (bailout_id == Deoptimizer::GetDeoptimizationId(
10650 GetIsolate(), info->target_address(), Deoptimizer::LAZY))) {
10651 CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
10652 PrintF(out, " %s\n", last_comment);
// True iff |pc| is one of the recorded deopt points of this code. Entries
// with pc == -1 are placeholders and are skipped.
10660 bool Code::CanDeoptAt(Address pc) {
10661 DeoptimizationInputData* deopt_data =
10662 DeoptimizationInputData::cast(deoptimization_data());
10663 Address code_start_address = instruction_start();
10664 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
10665 if (deopt_data->Pc(i)->value() == -1) continue;
10666 Address address = code_start_address + deopt_data->Pc(i)->value();
10667 if (address == pc) return true;
// Identify kind of code.
// Returns the stringified enumerator name for |kind|.
10674 const char* Code::Kind2String(Kind kind) {
10676 #define CASE(name) case name: return #name;
10677 CODE_KIND_LIST(CASE)
10679 case NUMBER_OF_KINDS: break;
10686 #ifdef ENABLE_DISASSEMBLER
// Debug printer for optimized-code deopt input data: one line per deopt
// point (index, ast id, argc, pc) and, with --print-code-verbose, the full
// frame translation decoded opcode-by-opcode.
10688 void DeoptimizationInputData::DeoptimizationInputDataPrint(
10689 std::ostream& os) { // NOLINT
10690 disasm::NameConverter converter;
10691 int deopt_count = DeoptCount();
10692 os << "Deoptimization Input Data (deopt points = " << deopt_count << ")\n";
10693 if (0 != deopt_count) {
10694 os << " index ast id argc pc";
10695 if (FLAG_print_code_verbose) os << " commands";
10698 for (int i = 0; i < deopt_count; i++) {
10699 // TODO(svenpanne) Add some basic formatting to our streams.
10700 Vector<char> buf1 = Vector<char>::New(128);
10701 SNPrintF(buf1, "%6d %6d %6d %6d", i, AstId(i).ToInt(),
10702 ArgumentsStackHeight(i)->value(), Pc(i)->value());
10703 os << buf1.start();
10705 if (!FLAG_print_code_verbose) {
10709 // Print details of the frame translation.
10710 int translation_index = TranslationIndex(i)->value();
10711 TranslationIterator iterator(TranslationByteArray(), translation_index);
10712 Translation::Opcode opcode =
10713 static_cast<Translation::Opcode>(iterator.Next());
10714 DCHECK(Translation::BEGIN == opcode);
10715 int frame_count = iterator.Next();
10716 int jsframe_count = iterator.Next();
10717 os << " " << Translation::StringFor(opcode)
10718 << " {frame count=" << frame_count
10719 << ", js frame count=" << jsframe_count << "}\n";
// Decode each translation command until the next BEGIN (or end of stream).
10721 while (iterator.HasNext() &&
10722 Translation::BEGIN !=
10723 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
10724 Vector<char> buf2 = Vector<char>::New(128);
10725 SNPrintF(buf2, "%27s %s ", "", Translation::StringFor(opcode));
10726 os << buf2.start();
10729 case Translation::BEGIN:
10733 case Translation::JS_FRAME: {
10734 int ast_id = iterator.Next();
10735 int function_id = iterator.Next();
10736 unsigned height = iterator.Next();
10737 os << "{ast_id=" << ast_id << ", function=";
10738 if (function_id != Translation::kSelfLiteralId) {
10739 Object* function = LiteralArray()->get(function_id);
10740 os << Brief(JSFunction::cast(function)->shared()->DebugName());
10744 os << ", height=" << height << "}";
10748 case Translation::COMPILED_STUB_FRAME: {
10749 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
10750 os << "{kind=" << stub_kind << "}";
10754 case Translation::ARGUMENTS_ADAPTOR_FRAME:
10755 case Translation::CONSTRUCT_STUB_FRAME: {
10756 int function_id = iterator.Next();
10757 JSFunction* function =
10758 JSFunction::cast(LiteralArray()->get(function_id));
10759 unsigned height = iterator.Next();
10760 os << "{function=" << Brief(function->shared()->DebugName())
10761 << ", height=" << height << "}";
10765 case Translation::GETTER_STUB_FRAME:
10766 case Translation::SETTER_STUB_FRAME: {
10767 int function_id = iterator.Next();
10768 JSFunction* function =
10769 JSFunction::cast(LiteralArray()->get(function_id));
10770 os << "{function=" << Brief(function->shared()->DebugName()) << "}";
10774 case Translation::REGISTER: {
10775 int reg_code = iterator.Next();
10776 os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
10780 case Translation::INT32_REGISTER: {
10781 int reg_code = iterator.Next();
10782 os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
10786 case Translation::UINT32_REGISTER: {
10787 int reg_code = iterator.Next();
10788 os << "{input=" << converter.NameOfCPURegister(reg_code)
10793 case Translation::DOUBLE_REGISTER: {
10794 int reg_code = iterator.Next();
10795 os << "{input=" << DoubleRegister::AllocationIndexToString(reg_code)
10800 case Translation::STACK_SLOT: {
10801 int input_slot_index = iterator.Next();
10802 os << "{input=" << input_slot_index << "}";
10806 case Translation::INT32_STACK_SLOT: {
10807 int input_slot_index = iterator.Next();
10808 os << "{input=" << input_slot_index << "}";
10812 case Translation::UINT32_STACK_SLOT: {
10813 int input_slot_index = iterator.Next();
10814 os << "{input=" << input_slot_index << " (unsigned)}";
10818 case Translation::DOUBLE_STACK_SLOT: {
10819 int input_slot_index = iterator.Next();
10820 os << "{input=" << input_slot_index << "}";
10824 case Translation::LITERAL: {
10825 unsigned literal_index = iterator.Next();
10826 os << "{literal_id=" << literal_index << "}";
10830 case Translation::DUPLICATED_OBJECT: {
10831 int object_index = iterator.Next();
10832 os << "{object_index=" << object_index << "}";
10836 case Translation::ARGUMENTS_OBJECT:
10837 case Translation::CAPTURED_OBJECT: {
10838 int args_length = iterator.Next();
10839 os << "{length=" << args_length << "}";
// Debug printer for full-code deopt output data: one line per deopt point
// with ast id plus the pc and state decoded from the packed pc_and_state.
10849 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(
10850 std::ostream& os) { // NOLINT
10851 os << "Deoptimization Output Data (deopt points = " << this->DeoptPoints()
10853 if (this->DeoptPoints() == 0) return;
10855 os << "ast id pc state\n";
10856 for (int i = 0; i < this->DeoptPoints(); i++) {
10857 int pc_and_state = this->PcAndState(i)->value();
10858 // TODO(svenpanne) Add some basic formatting to our streams.
10859 Vector<char> buf = Vector<char>::New(100);
10860 SNPrintF(buf, "%6d %8d %s\n", this->AstId(i).ToInt(),
10861 FullCodeGenerator::PcField::decode(pc_and_state),
10862 FullCodeGenerator::State2String(
10863 FullCodeGenerator::StateField::decode(pc_and_state)));
// Returns the printable name of an inline-cache state.
10869 const char* Code::ICState2String(InlineCacheState state) {
10871 case UNINITIALIZED: return "UNINITIALIZED";
10872 case PREMONOMORPHIC: return "PREMONOMORPHIC";
10873 case MONOMORPHIC: return "MONOMORPHIC";
10874 case PROTOTYPE_FAILURE:
10875 return "PROTOTYPE_FAILURE";
10876 case POLYMORPHIC: return "POLYMORPHIC";
10877 case MEGAMORPHIC: return "MEGAMORPHIC";
10878 case GENERIC: return "GENERIC";
10879 case DEBUG_STUB: return "DEBUG_STUB";
// Returns the printable name of a stub type.
10888 const char* Code::StubType2String(StubType type) {
10890 case NORMAL: return "NORMAL";
10891 case FAST: return "FAST";
10893 UNREACHABLE(); // keep the compiler happy
// Prints the extra IC state; store ICs with STRICT state get special
// handling (the branch body is elided here), others print the raw value.
10898 void Code::PrintExtraICState(std::ostream& os, // NOLINT
10899 Kind kind, ExtraICState extra) {
10900 os << "extra_ic_state = ";
10901 if ((kind == STORE_IC || kind == KEYED_STORE_IC) && (extra == STRICT)) {
10904 os << extra << "\n";
// Full human-readable dump of this Code object: kind/IC metadata header,
// disassembled instructions, deopt data, safepoints or back edges depending
// on kind, and the raw relocation info. Only built with ENABLE_DISASSEMBLER.
10909 void Code::Disassemble(const char* name, std::ostream& os) { // NOLINT
10910 os << "kind = " << Kind2String(kind()) << "\n";
10911 if (IsCodeStubOrIC()) {
10912 const char* n = CodeStub::MajorName(CodeStub::GetMajorKey(this), true);
10913 os << "major_key = " << (n == NULL ? "null" : n) << "\n";
10915 if (is_inline_cache_stub()) {
10916 os << "ic_state = " << ICState2String(ic_state()) << "\n";
10917 PrintExtraICState(os, kind(), extra_ic_state());
10918 if (ic_state() == MONOMORPHIC) {
10919 os << "type = " << StubType2String(type()) << "\n";
10921 if (is_compare_ic_stub()) {
10922 DCHECK(CodeStub::GetMajorKey(this) == CodeStub::CompareIC);
10923 CompareICStub stub(stub_key(), GetIsolate());
10924 os << "compare_state = " << CompareICState::GetStateName(stub.left())
10925 << "*" << CompareICState::GetStateName(stub.right()) << " -> "
10926 << CompareICState::GetStateName(stub.state()) << "\n";
10927 os << "compare_operation = " << Token::Name(stub.op()) << "\n";
10930 if ((name != NULL) && (name[0] != '\0')) {
10931 os << "name = " << name << "\n";
10933 if (kind() == OPTIMIZED_FUNCTION) {
10934 os << "stack_slots = " << stack_slots() << "\n";
10937 os << "Instructions (size = " << instruction_size() << ")\n";
10939 Isolate* isolate = GetIsolate();
// Crankshafted code: only decode up to the safepoint table.
10940 int decode_size = is_crankshafted()
10941 ? static_cast<int>(safepoint_table_offset())
10942 : instruction_size();
10943 // If there might be a back edge table, stop before reaching it.
10944 if (kind() == Code::FUNCTION) {
10946 Min(decode_size, static_cast<int>(back_edge_table_offset()));
10948 byte* begin = instruction_start();
10949 byte* end = begin + decode_size;
10950 Disassembler::Decode(isolate, &os, begin, end, this);
10954 if (kind() == FUNCTION) {
10955 DeoptimizationOutputData* data =
10956 DeoptimizationOutputData::cast(this->deoptimization_data());
10957 data->DeoptimizationOutputDataPrint(os);
10958 } else if (kind() == OPTIMIZED_FUNCTION) {
10959 DeoptimizationInputData* data =
10960 DeoptimizationInputData::cast(this->deoptimization_data());
10961 data->DeoptimizationInputDataPrint(os);
10965 if (is_crankshafted()) {
10966 SafepointTable table(this);
10967 os << "Safepoints (size = " << table.size() << ")\n";
10968 for (unsigned i = 0; i < table.length(); i++) {
10969 unsigned pc_offset = table.GetPcOffset(i);
10970 os << static_cast<const void*>(instruction_start() + pc_offset) << " ";
10971 // TODO(svenpanne) Add some basic formatting to our streams.
10972 Vector<char> buf1 = Vector<char>::New(30);
10973 SNPrintF(buf1, "%4d", pc_offset);
10974 os << buf1.start() << " ";
10975 table.PrintEntry(i, os);
10976 os << " (sp -> fp) ";
10977 SafepointEntry entry = table.GetEntry(i);
10978 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
10979 Vector<char> buf2 = Vector<char>::New(30);
10980 SNPrintF(buf2, "%6d", entry.deoptimization_index());
10981 os << buf2.start();
10985 if (entry.argument_count() > 0) {
10986 os << " argc: " << entry.argument_count();
10991 } else if (kind() == FUNCTION) {
10992 unsigned offset = back_edge_table_offset();
10993 // If there is no back edge table, the "table start" will be at or after
10994 // (due to alignment) the end of the instruction stream.
10995 if (static_cast<int>(offset) < instruction_size()) {
10996 DisallowHeapAllocation no_gc;
10997 BackEdgeTable back_edges(this, &no_gc);
10999 os << "Back edges (size = " << back_edges.length() << ")\n";
11000 os << "ast_id pc_offset loop_depth\n";
11002 for (uint32_t i = 0; i < back_edges.length(); i++) {
11003 Vector<char> buf = Vector<char>::New(100);
11004 SNPrintF(buf, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
11005 back_edges.pc_offset(i), back_edges.loop_depth(i));
11011 #ifdef OBJECT_PRINT
11012 if (!type_feedback_info()->IsUndefined()) {
11013 OFStream os(stdout);
11014 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(os);
11020 os << "RelocInfo (size = " << relocation_size() << ")\n";
11021 for (RelocIterator it(this); !it.done(); it.next()) {
11022 it.rinfo()->Print(GetIsolate(), os);
11026 #ifdef OBJECT_PRINT
11027 if (FLAG_enable_ool_constant_pool) {
11028 ConstantPoolArray* pool = constant_pool();
11029 if (pool->length()) {
11030 os << "Constant Pool\n";
11037 #endif // ENABLE_DISASSEMBLER
// Grows (or shrinks) |object|'s fast elements backing store to |capacity|,
// copying the old elements over and transitioning the elements kind as
// needed (SMI kinds per |smi_mode|, holey-ness preserved). For sloppy
// arguments objects only the parameter map's backing store is swapped.
// Returns the new backing store.
11040 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
11041 Handle<JSObject> object,
11044 SetFastElementsCapacitySmiMode smi_mode) {
11045 // We should never end in here with a pixel or external array.
11046 DCHECK(!object->HasExternalArrayElements());
11048 // Allocate a new fast elements backing store.
11049 Handle<FixedArray> new_elements =
11050 object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
11052 ElementsKind elements_kind = object->GetElementsKind();
11053 ElementsKind new_elements_kind;
11054 // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
11055 // or if it's allowed and the old elements array contained only SMIs.
11056 bool has_fast_smi_elements =
11057 (smi_mode == kForceSmiElements) ||
11058 ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
11059 if (has_fast_smi_elements) {
11060 if (IsHoleyElementsKind(elements_kind)) {
11061 new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
11063 new_elements_kind = FAST_SMI_ELEMENTS;
11066 if (IsHoleyElementsKind(elements_kind)) {
11067 new_elements_kind = FAST_HOLEY_ELEMENTS;
11069 new_elements_kind = FAST_ELEMENTS;
11072 Handle<FixedArrayBase> old_elements(object->elements());
11073 ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
11074 accessor->CopyElements(object, new_elements, elements_kind);
11076 if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
11077 Handle<Map> new_map = (new_elements_kind != elements_kind)
11078 ? GetElementsTransitionMap(object, new_elements_kind)
11079 : handle(object->map());
11080 JSObject::ValidateElements(object);
11081 JSObject::SetMapAndElements(object, new_map, new_elements);
11083 // Transition through the allocation site as well if present.
11084 JSObject::UpdateAllocationSite(object, new_elements_kind);
// Sloppy arguments: slot 1 of the parameter map holds the backing store.
11086 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11087 parameter_map->set(1, *new_elements);
11090 if (FLAG_trace_elements_transitions) {
11091 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11092 object->GetElementsKind(), new_elements);
11095 if (object->IsJSArray()) {
11096 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11098 return new_elements;
// Double-elements counterpart of SetFastElementsCapacityAndLength: allocates
// a FixedDoubleArray of |capacity|, copies the old elements, and transitions
// to the (holey) fast-double elements kind. Sloppy arguments not supported.
11102 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11105 // We should never end in here with a pixel or external array.
11106 DCHECK(!object->HasExternalArrayElements());
11108 Handle<FixedArrayBase> elems =
11109 object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11111 ElementsKind elements_kind = object->GetElementsKind();
11112 CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11113 ElementsKind new_elements_kind = elements_kind;
11114 if (IsHoleyElementsKind(elements_kind)) {
11115 new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11117 new_elements_kind = FAST_DOUBLE_ELEMENTS;
11120 Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11122 Handle<FixedArrayBase> old_elements(object->elements());
11123 ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11124 accessor->CopyElements(object, elems, elements_kind);
11126 JSObject::ValidateElements(object);
11127 JSObject::SetMapAndElements(object, new_map, elems);
11129 if (FLAG_trace_elements_transitions) {
11130 PrintElementsTransition(stdout, object, elements_kind, old_elements,
11131 object->GetElementsKind(), elems);
11134 if (object->IsJSArray()) {
11135 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
// Allocates the initial backing store for |array| with the given capacity and
// length, filling unused slots with the hole sentinel.
11141 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
11142 DCHECK(capacity >= 0);
11143 array->GetIsolate()->factory()->NewJSArrayStorage(
11144 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
// Grows |array| so that both its capacity and its length equal
// |required_size|, delegating to the kind-specific elements accessor.
11148 void JSArray::Expand(Handle<JSArray> array, int required_size) {
11149 ElementsAccessor* accessor = array->GetElementsAccessor();
11150 accessor->SetCapacityAndLength(array, required_size, required_size);
11154 // Returns false if the passed-in index is marked non-configurable,
11155 // which will cause the ES5 truncation operation to halt, and thus
11156 // no further old values need be collected.
// Records the element's pre-truncation value and index into |old_values| /
// |indices| for Object.observe change records. Accessor-backed elements are
// recorded as the hole so callers can elide "oldValue".
// NOTE(review): the uint32_t index parameter line and the else/closing-brace
// lines are elided in this listing.
11157 static bool GetOldValue(Isolate* isolate,
11158 Handle<JSObject> object,
11160 List<Handle<Object> >* old_values,
11161 List<uint32_t>* indices) {
11162 Maybe<PropertyAttributes> maybe =
11163 JSReceiver::GetOwnElementAttribute(object, index);
11164 DCHECK(maybe.has_value);
11165 DCHECK(maybe.value != ABSENT);
// Non-configurable element: truncation must stop here.
11166 if (maybe.value == DONT_DELETE) return false;
11167 Handle<Object> value;
11168 if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) {
// Accessor property: use the hole as a marker instead of invoking the getter.
11169 value = Handle<Object>::cast(isolate->factory()->the_hole_value());
11171 value = Object::GetElement(isolate, object, index).ToHandleChecked();
11173 old_values->Add(value);
11174 indices->Add(index);
// Calls the JS observer helper to enqueue a "splice" change record for
// Object.observe: |index| is the splice start, |deleted| the removed elements,
// |add_count| the number of elements added.
11178 MUST_USE_RESULT static MaybeHandle<Object> EnqueueSpliceRecord(
11179 Handle<JSArray> object, uint32_t index, Handle<JSArray> deleted,
11180 uint32_t add_count) {
11181 Isolate* isolate = object->GetIsolate();
11182 HandleScope scope(isolate);
11183 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
11184 Handle<Object> add_count_object =
11185 isolate->factory()->NewNumberFromUint(add_count);
11187 Handle<Object> args[] =
11188 { object, index_object, deleted, add_count_object };
// Invoke the native observers_enqueue_splice function with undefined receiver.
11190 return Execution::Call(
11191 isolate, Handle<JSFunction>(isolate->observers_enqueue_splice()),
11192 isolate->factory()->undefined_value(), arraysize(args), args);
// Notifies the observation machinery that a compound splice operation on
// |object| is starting (change records are batched until EndPerformSplice).
11196 MUST_USE_RESULT static MaybeHandle<Object> BeginPerformSplice(
11197 Handle<JSArray> object) {
11198 Isolate* isolate = object->GetIsolate();
11199 HandleScope scope(isolate);
11200 Handle<Object> args[] = { object };
11202 return Execution::Call(
11203 isolate, Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11204 isolate->factory()->undefined_value(), arraysize(args), args);
// Counterpart of BeginPerformSplice: marks the end of the compound splice
// operation on |object|.
11208 MUST_USE_RESULT static MaybeHandle<Object> EndPerformSplice(
11209 Handle<JSArray> object) {
11210 Isolate* isolate = object->GetIsolate();
11211 HandleScope scope(isolate);
11212 Handle<Object> args[] = { object };
11214 return Execution::Call(
11215 isolate, Handle<JSFunction>(isolate->observers_end_perform_splice()),
11216 isolate->factory()->undefined_value(), arraysize(args), args);
// Sets the "length" property of a JSArray. Unobserved arrays delegate
// directly to the elements accessor; observed arrays additionally collect the
// values removed by truncation and emit Object.observe "delete"/"update"
// change records plus a splice record.
// NOTE(review): several lines (else branches, closing braces, macro argument
// continuations) are elided in this listing — the code is not contiguous.
11220 MaybeHandle<Object> JSArray::SetElementsLength(
11221 Handle<JSArray> array,
11222 Handle<Object> new_length_handle) {
11223 if (array->HasFastElements()) {
11224 // If the new array won't fit in a some non-trivial fraction of the max old
11225 // space size, then force it to go dictionary mode.
11226 int max_fast_array_size = static_cast<int>(
11227 (array->GetHeap()->MaxOldGenerationSize() / kDoubleSize) / 4);
11228 if (new_length_handle->IsNumber() &&
11229 NumberToInt32(*new_length_handle) >= max_fast_array_size) {
11230 NormalizeElements(array);
11234 // We should never end in here with a pixel or external array.
11235 DCHECK(array->AllowsSetElementsLength());
// Fast path: no observers attached, set the length directly.
11236 if (!array->map()->is_observed()) {
11237 return array->GetElementsAccessor()->SetLength(array, new_length_handle);
11240 Isolate* isolate = array->GetIsolate();
11241 List<uint32_t> indices;
11242 List<Handle<Object> > old_values;
11243 Handle<Object> old_length_handle(array->length(), isolate);
11244 uint32_t old_length = 0;
11245 CHECK(old_length_handle->ToArrayIndex(&old_length));
11246 uint32_t new_length = 0;
11247 CHECK(new_length_handle->ToArrayIndex(&new_length));
11249 static const PropertyAttributes kNoAttrFilter = NONE;
11250 int num_elements = array->NumberOfOwnElements(kNoAttrFilter);
11251 if (num_elements > 0) {
11252 if (old_length == static_cast<uint32_t>(num_elements)) {
11253 // Simple case for arrays without holes.
// Walk backwards from the old end; stop at the first non-configurable index.
11254 for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
11255 if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
11258 // For sparse arrays, only iterate over existing elements.
11259 // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
11260 // the to-be-removed indices twice.
11261 Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
11262 array->GetOwnElementKeys(*keys, kNoAttrFilter);
11263 while (num_elements-- > 0) {
11264 uint32_t index = NumberToUint32(keys->get(num_elements));
11265 if (index < new_length) break;
11266 if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
// Actually perform the length change, then re-read the resulting length
// (truncation may have been stopped early by a non-configurable element).
11271 Handle<Object> hresult;
11272 ASSIGN_RETURN_ON_EXCEPTION(
11274 array->GetElementsAccessor()->SetLength(array, new_length_handle),
11277 CHECK(array->length()->ToArrayIndex(&new_length));
11278 if (old_length == new_length) return hresult;
11280 RETURN_ON_EXCEPTION(isolate, BeginPerformSplice(array), Object);
11282 for (int i = 0; i < indices.length(); ++i) {
11283 // For deletions where the property was an accessor, old_values[i]
11284 // will be the hole, which instructs EnqueueChangeRecord to elide
11285 // the "oldValue" property.
11286 RETURN_ON_EXCEPTION(
11288 JSObject::EnqueueChangeRecord(
11289 array, "delete", isolate->factory()->Uint32ToString(indices[i]),
11293 RETURN_ON_EXCEPTION(isolate,
11294 JSObject::EnqueueChangeRecord(
11295 array, "update", isolate->factory()->length_string(),
11296 old_length_handle),
11299 RETURN_ON_EXCEPTION(isolate, EndPerformSplice(array), Object);
// Build the |deleted| array passed to the splice record: the removed values
// re-indexed relative to the splice start, skipping accessor holes.
11301 uint32_t index = Min(old_length, new_length);
11302 uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
11303 uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
11304 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
11305 if (delete_count > 0) {
11306 for (int i = indices.length() - 1; i >= 0; i--) {
11307 // Skip deletions where the property was an accessor, leaving holes
11308 // in the array of old values.
11309 if (old_values[i]->IsTheHole()) continue;
11310 JSObject::SetElement(
11311 deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert();
11314 SetProperty(deleted, isolate->factory()->length_string(),
11315 isolate->factory()->NewNumberFromUint(delete_count),
11319 RETURN_ON_EXCEPTION(
11320 isolate, EnqueueSpliceRecord(array, index, deleted, add_count), Object);
// Looks up a cached map transition for setting |prototype| on objects with
// |map|. Returns a null handle when no cached transition exists.
11326 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11327 Handle<Object> prototype) {
11328 FixedArray* cache = map->GetPrototypeTransitions();
11329 int number_of_transitions = map->NumberOfProtoTransitions();
// Entries live after a fixed header; each entry stores (prototype, map).
11330 const int proto_offset =
11331 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11332 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11333 const int step = kProtoTransitionElementsPerEntry;
// Linear scan over the cached entries for an identical prototype object.
11334 for (int i = 0; i < number_of_transitions; i++) {
11335 if (cache->get(proto_offset + i * step) == *prototype) {
11336 Object* result = cache->get(map_offset + i * step);
11337 return Handle<Map>(Map::cast(result));
11340 return Handle<Map>();
// Caches |target_map| as the transition reached by setting |prototype| on
// objects with |map|. The cache is skipped for prototype maps, dictionary
// maps, or when caching is disabled by flag; it is grown (2x over demand)
// when full, up to kMaxCachedPrototypeTransitions.
11344 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
11345 Handle<Object> prototype,
11346 Handle<Map> target_map) {
11347 DCHECK(target_map->IsMap());
11348 DCHECK(HeapObject::cast(*prototype)->map()->IsMap());
11349 // Don't cache prototype transition if this map is either shared, or a map of
11351 if (map->is_prototype_map()) return map;
11352 if (map->is_dictionary_map() || !FLAG_cache_prototype_transitions) return map;
11354 const int step = kProtoTransitionElementsPerEntry;
11355 const int header = kProtoTransitionHeaderSize;
11357 Handle<FixedArray> cache(map->GetPrototypeTransitions());
11358 int capacity = (cache->length() - header) / step;
11359 int transitions = map->NumberOfProtoTransitions() + 1;
11361 if (transitions > capacity) {
// Refuse to grow beyond the hard cap; the transition simply isn't cached.
11362 if (capacity > kMaxCachedPrototypeTransitions) return map;
11364 // Grow array by factor 2 over and above what we need.
11365 cache = FixedArray::CopySize(cache, transitions * 2 * step + header);
11367 SetPrototypeTransitions(map, cache);
11370 // Reload number of transitions as GC might shrink them.
11371 int last = map->NumberOfProtoTransitions();
11372 int entry = header + last * step;
11374 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
11375 cache->set(entry + kProtoTransitionMapOffset, *target_map);
11376 map->SetNumberOfProtoTransitions(last + 1);
// Overwrites the entire transition array with the hole value so stale
// pointers cannot be followed (used when the map's transitions are dropped).
11382 void Map::ZapTransitions() {
11383 TransitionArray* transition_array = transitions();
11384 // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11385 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11386 Object** data = transition_array->data_start();
11387 Object* the_hole = GetHeap()->the_hole_value();
11388 int length = transition_array->length();
11389 for (int i = 0; i < length; i++) {
11390 data[i] = the_hole;
// Overwrites the prototype-transition cache with the hole value (the fast
// MemsetPointer variant of the zapping done in ZapTransitions above).
11395 void Map::ZapPrototypeTransitions() {
11396 FixedArray* proto_transitions = GetPrototypeTransitions();
11397 MemsetPointer(proto_transitions->data_start(),
11398 GetHeap()->the_hole_value(),
11399 proto_transitions->length());
// Registers |info| (an in-flight compilation) as dependent on |map| for the
// given dependency group, and records the dependency in the compilation's own
// bookkeeping so it can be removed on abort/finish.
11404 void Map::AddDependentCompilationInfo(Handle<Map> map,
11405 DependentCode::DependencyGroup group,
11406 CompilationInfo* info) {
11407 Handle<DependentCode> codes =
11408 DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
11409 group, info->object_wrapper());
// Insert may return a new (grown) array; only store it if it changed.
11410 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
11411 info->dependencies(group)->Add(map, info->zone());
// Registers finished |code| as dependent on |map| for the given group so the
// code can be deoptimized when the map changes.
11416 void Map::AddDependentCode(Handle<Map> map,
11417 DependentCode::DependencyGroup group,
11418 Handle<Code> code) {
11419 Handle<DependentCode> codes = DependentCode::Insert(
11420 Handle<DependentCode>(map->dependent_code()), group, code);
// Insert may return a new (grown) array; only store it if it changed.
11421 if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
// Registers an IC |stub| as weakly dependent on |map|. The weak-IC group
// stores a linked list of stubs (via next_code_link); the first stub requires
// inserting a list head (possible allocation), later stubs just link in.
// NOTE(review): the branch lines around the fast/slow paths are elided in
// this listing.
11426 void Map::AddDependentIC(Handle<Map> map,
11427 Handle<Code> stub) {
11428 DCHECK(stub->next_code_link()->IsUndefined());
11429 int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
11431 // Slow path: insert the head of the list with possible heap allocation.
11432 Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
11434 // Fast path: link the stub to the existing head of the list without any
11435 // heap allocation.
11437 map->dependent_code()->AddToDependentICList(stub);
// Computes the per-group start indexes for |entries| (see Recompute below).
11442 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
11443 Recompute(entries);
// Rebuilds start_indexes_ as a prefix sum over the per-group entry counts,
// so start_indexes_[g]..start_indexes_[g+1] spans group g's entries.
11447 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11448 start_indexes_[0] = 0;
11449 for (int g = 1; g <= kGroupCount; g++) {
11450 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11451 start_indexes_[g] = start_indexes_[g - 1] + count;
// Returns the DependentCode array that tracks dependencies of the given kind
// for |object|: PropertyCells and AllocationSites carry their own arrays; all
// other groups live on the Map.
11456 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11457 DependencyGroup group) {
11458 AllowDeferredHandleDereference dependencies_are_safe;
11459 if (group == DependentCode::kPropertyCellChangedGroup) {
11460 return Handle<PropertyCell>::cast(object)->dependent_code();
11461 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
11462 group == DependentCode::kAllocationSiteTransitionChangedGroup) {
11463 return Handle<AllocationSite>::cast(object)->dependent_code();
11465 return Handle<Map>::cast(object)->dependent_code();
// Inserts |object| (a Code or a CompilationInfo wrapper) into |entries| under
// |group|, deduplicating and growing the backing array (with TENURED copy)
// when full. Returns the (possibly new) array.
11469 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
11470 DependencyGroup group,
11471 Handle<Object> object) {
11472 GroupStartIndexes starts(*entries);
11473 int start = starts.at(group);
11474 int end = starts.at(group + 1);
11475 int number_of_entries = starts.number_of_entries();
11476 // Check for existing entry to avoid duplicates.
11477 for (int i = start; i < end; i++) {
11478 if (entries->object_at(i) == *object) return entries;
11480 if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
// Grow by 25% beyond the minimum once past a small threshold.
11481 int capacity = kCodesStartIndex + number_of_entries + 1;
11482 if (capacity > 5) capacity = capacity * 5 / 4;
11483 Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
11484 FixedArray::CopySize(entries, capacity, TENURED));
11485 // The number of codes can change after GC.
11486 starts.Recompute(*entries);
11487 start = starts.at(group);
11488 end = starts.at(group + 1);
11489 number_of_entries = starts.number_of_entries();
// Clear the old array so it no longer keeps the entries alive.
11490 for (int i = 0; i < number_of_entries; i++) {
11491 entries->clear_at(i);
11493 // If the old fixed array was empty, we need to reset counters of the
11495 if (number_of_entries == 0) {
11496 for (int g = 0; g < kGroupCount; g++) {
11497 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
11500 entries = new_entries;
// Shift later groups right by one, then store the object at the group's end.
11502 entries->ExtendGroup(group);
11503 entries->set_object_at(end, *object);
11504 entries->set_number_of_entries(group, end + 1 - start);
// When a dependent compilation finishes, replaces the CompilationInfo wrapper
// entry in |group| with the generated code object.
// NOTE(review): the code parameter line, break/closing braces and the #ifdef
// around the verification loop are elided in this listing.
11509 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11510 CompilationInfo* info,
11512 DisallowHeapAllocation no_gc;
11513 AllowDeferredHandleDereference get_object_wrapper;
11514 Foreign* info_wrapper = *info->object_wrapper();
11515 GroupStartIndexes starts(this);
11516 int start = starts.at(group);
11517 int end = starts.at(group + 1);
11518 for (int i = start; i < end; i++) {
11519 if (object_at(i) == info_wrapper) {
11520 set_object_at(i, code);
// Verify there is at most one entry for this compilation.
11526 for (int i = start; i < end; i++) {
11527 DCHECK(is_code_at(i) || compilation_info_at(i) != info);
// Removes the entry for an aborted compilation |info| from |group|, then
// compacts the array: the gap is filled by cascading the last element of each
// subsequent group forward, preserving group contiguity.
// NOTE(review): some lines (info_pos initialization, break, closing braces)
// are elided in this listing.
11533 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
11534 CompilationInfo* info) {
11535 DisallowHeapAllocation no_allocation;
11536 AllowDeferredHandleDereference get_object_wrapper;
11537 Foreign* info_wrapper = *info->object_wrapper();
11538 GroupStartIndexes starts(this);
11539 int start = starts.at(group);
11540 int end = starts.at(group + 1);
11541 // Find compilation info wrapper.
11543 for (int i = start; i < end; i++) {
11544 if (object_at(i) == info_wrapper) {
11549 if (info_pos == -1) return; // Not found.
11550 int gap = info_pos;
11551 // Use the last of each group to fill the gap in the previous group.
11552 for (int i = group; i < kGroupCount; i++) {
11553 int last_of_group = starts.at(i + 1) - 1;
11554 DCHECK(last_of_group >= gap);
11555 if (last_of_group == gap) continue;
11556 copy(last_of_group, gap);
11557 gap = last_of_group;
11559 DCHECK(gap == starts.number_of_entries() - 1);
11560 clear_at(gap); // Clear last gap.
11561 set_number_of_entries(group, end - start - 1);
// Verify no stale entry for |info| remains in the group.
11564 for (int i = start; i < end - 1; i++) {
11565 DCHECK(is_code_at(i) || compilation_info_at(i) != info);
// Walks a next_code_link-chained list starting at |head| and returns true if
// |code| is on it. (The final `return false;` line is elided in this listing.)
11571 static bool CodeListContains(Object* head, Code* code) {
11572 while (!head->IsUndefined()) {
11573 if (head == code) return true;
11574 head = Code::cast(head)->next_code_link();
// Returns true if |code| is registered under |group|. The weak-IC group is a
// linked list hanging off its single slot; other groups are scanned linearly.
11580 bool DependentCode::Contains(DependencyGroup group, Code* code) {
11581 GroupStartIndexes starts(this);
11582 int start = starts.at(group);
11583 int end = starts.at(group + 1);
11584 if (group == kWeakICGroup) {
11585 return CodeListContains(object_at(start), code);
11587 for (int i = start; i < end; i++) {
11588 if (object_at(i) == code) return true;
// Marks all code in |group| for deoptimization (aborting any still-compiling
// entries), then removes the group's entries and compacts the array. Returns
// whether any code was actually marked.
// NOTE(review): the isolate parameter, else branches, copy/clear statements
// and closing braces are elided in this listing.
11595 bool DependentCode::MarkCodeForDeoptimization(
11596 DependentCode::DependencyGroup group) {
11597 DisallowHeapAllocation no_allocation_scope;
11598 DependentCode::GroupStartIndexes starts(this);
11599 int start = starts.at(group);
11600 int end = starts.at(group + 1);
11601 int code_entries = starts.number_of_entries();
11602 if (start == end) return false;
11604 // Mark all the code that needs to be deoptimized.
11605 bool marked = false;
11606 for (int i = start; i < end; i++) {
11607 if (is_code_at(i)) {
11608 Code* code = code_at(i);
11609 if (!code->marked_for_deoptimization()) {
11610 SetMarkedForDeoptimization(code, group);
// Entry is a still-running compilation: abort it instead of marking.
11614 CompilationInfo* info = compilation_info_at(i);
11615 info->AbortDueToDependencyChange();
11618 // Compact the array by moving all subsequent groups to fill in the new holes.
11619 for (int src = end, dst = start; src < code_entries; src++, dst++) {
11622 // Now the holes are at the end of the array, zap them for heap-verifier.
11623 int removed = end - start;
11624 for (int i = code_entries - removed; i < code_entries; i++) {
11627 set_number_of_entries(group, 0);
// Marks the group's dependent code for deoptimization and, if anything was
// marked, triggers deoptimization of all marked code in the isolate.
11632 void DependentCode::DeoptimizeDependentCodeGroup(
11634 DependentCode::DependencyGroup group) {
11635 DCHECK(AllowCodeDependencyChange::IsAllowed());
11636 DisallowHeapAllocation no_allocation_scope;
11637 bool marked = MarkCodeForDeoptimization(isolate, group);
11639 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
// Links |stub| into the weak-IC linked list stored in this array's single
// weak-IC slot, without any heap allocation.
11643 void DependentCode::AddToDependentICList(Handle<Code> stub) {
11644 DisallowHeapAllocation no_heap_allocation;
11645 GroupStartIndexes starts(this);
11646 int i = starts.at(kWeakICGroup);
11647 Object* head = object_at(i);
11648 // Try to insert the stub after the head of the list to minimize number of
11649 // writes to the DependentCode array, since a write to the array can make it
11650 // strong if it was alread marked by incremental marker.
11651 if (head->IsCode()) {
11652 stub->set_next_code_link(Code::cast(head)->next_code_link());
11653 Code::cast(head)->set_next_code_link(*stub);
// Empty list: the stub becomes the new head stored in the array slot.
11655 stub->set_next_code_link(head);
11656 set_object_at(i, *stub);
// Flags |code| as marked-for-deoptimization and, under --trace-deopt, prints
// the code address, optimization id and dependency-group reason.
11661 void DependentCode::SetMarkedForDeoptimization(Code* code,
11662 DependencyGroup group) {
11663 code->set_marked_for_deoptimization(true);
// Only trace real optimized code (it carries non-empty deopt data).
11664 if (FLAG_trace_deopt &&
11665 (code->deoptimization_data() != code->GetHeap()->empty_fixed_array())) {
11666 DeoptimizationInputData* deopt_data =
11667 DeoptimizationInputData::cast(code->deoptimization_data());
11668 CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
11669 PrintF(scope.file(), "[marking dependent code 0x%08" V8PRIxPTR
11670 " (opt #%d) for deoptimization, reason: %s]\n",
11671 reinterpret_cast<intptr_t>(code),
11672 deopt_data->OptimizationId()->value(), DependencyGroupName(group));
// Maps a DependencyGroup enum value to its human-readable tracing name.
// NOTE(review): the `switch (group) {` header, the weak-IC case and the
// default/UNREACHABLE tail are elided in this listing.
11677 const char* DependentCode::DependencyGroupName(DependencyGroup group) {
11681 case kWeakCodeGroup:
11682 return "weak-code";
11683 case kTransitionGroup:
11684 return "transition";
11685 case kPrototypeCheckGroup:
11686 return "prototype-check";
11687 case kElementsCantBeAddedGroup:
11688 return "elements-cant-be-added";
11689 case kPropertyCellChangedGroup:
11690 return "property-cell-changed";
11691 case kFieldTypeGroup:
11692 return "field-type";
11693 case kInitialMapChangedGroup:
11694 return "initial-map-changed";
11695 case kAllocationSiteTenuringChangedGroup:
11696 return "allocation-site-tenuring-changed";
11697 case kAllocationSiteTransitionChangedGroup:
11698 return "allocation-site-transition-changed";
// Returns a map identical to |map| except for the prototype, reusing a
// cached prototype transition when one exists and creating (and caching) a
// fresh map copy otherwise.
11705 Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
11706 Handle<Object> prototype) {
11707 Handle<Map> new_map = GetPrototypeTransition(map, prototype);
11708 if (new_map.is_null()) {
11709 new_map = Copy(map);
11710 PutPrototypeTransition(map, prototype, new_map);
11711 new_map->set_prototype(*prototype);
// Sets |value| as the [[Prototype]] of |object|. Enforces the ES invariants:
// non-receiver/non-null values are silently ignored, non-extensible objects
// throw, and prototype cycles are rejected. For JS-initiated changes the new
// prototype is installed on the first non-hidden object in the chain. Clears
// relevant IC/instanceof caches afterwards.
// NOTE(review): several lines (#ifdef DEBUG guards, closing braces, the
// hidden-prototype loop body) are elided in this listing.
11717 MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
11718 Handle<Object> value,
11719 bool from_javascript) {
// Remember the size to assert the object is not resized by this operation.
11721 int size = object->Size();
11724 Isolate* isolate = object->GetIsolate();
11725 Heap* heap = isolate->heap();
11726 // Silently ignore the change if value is not a JSObject or null.
11727 // SpiderMonkey behaves this way.
11728 if (!value->IsJSReceiver() && !value->IsNull()) return value;
11730 // From 8.6.2 Object Internal Methods
11732 // In addition, if [[Extensible]] is false the value of the [[Class]] and
11733 // [[Prototype]] internal properties of the object may not be modified.
11735 // Implementation specific extensions that modify [[Class]], [[Prototype]]
11736 // or [[Extensible]] must not violate the invariants defined in the preceding
11738 if (!object->map()->is_extensible()) {
11739 Handle<Object> args[] = { object };
11740 THROW_NEW_ERROR(isolate, NewTypeError("non_extensible_proto",
11741 HandleVector(args, arraysize(args))),
11745 // Before we can set the prototype we need to be sure
11746 // prototype cycles are prevented.
11747 // It is sufficient to validate that the receiver is not in the new prototype
11749 for (PrototypeIterator iter(isolate, *value,
11750 PrototypeIterator::START_AT_RECEIVER);
11751 !iter.IsAtEnd(); iter.Advance()) {
11752 if (JSReceiver::cast(iter.GetCurrent()) == *object) {
// Cycle detected: the receiver already occurs in the new prototype chain.
11754 THROW_NEW_ERROR(isolate,
11755 NewError("cyclic_proto", HandleVector<Object>(NULL, 0)),
// Remember whether the chain already had dictionary-mode elements so we can
// detect a transition that requires clearing KeyedStoreICs below.
11760 bool dictionary_elements_in_chain =
11761 object->map()->DictionaryElementsInPrototypeChainOnly();
11762 Handle<JSObject> real_receiver = object;
11764 if (from_javascript) {
11765 // Find the first object in the chain whose prototype object is not
11766 // hidden and set the new prototype on that object.
11767 PrototypeIterator iter(isolate, real_receiver);
11768 while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
11770 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
11775 // Set the new prototype of the object.
11776 Handle<Map> map(real_receiver->map());
11778 // Nothing to do if prototype is already set.
11779 if (map->prototype() == *value) return value;
11781 if (value->IsJSObject()) {
11782 PrototypeOptimizationMode mode =
11783 from_javascript ? REGULAR_PROTOTYPE : FAST_PROTOTYPE;
11784 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value), mode);
11787 Handle<Map> new_map = Map::TransitionToPrototype(map, value);
11788 DCHECK(new_map->prototype() == *value);
11789 JSObject::MigrateToMap(real_receiver, new_map);
11791 if (from_javascript && !dictionary_elements_in_chain &&
11792 new_map->DictionaryElementsInPrototypeChainOnly()) {
11793 // If the prototype chain didn't previously have element callbacks, then
11794 // KeyedStoreICs need to be cleared to ensure any that involve this
11796 object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
// Changing a prototype invalidates cached instanceof results.
11799 heap->ClearInstanceofCache();
11800 DCHECK(size == object->Size());
// Arguments-object overload: adjusts for the backwards (stack) ordering of
// |Arguments| before delegating to the forward-iterating overload.
11805 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
11807 uint32_t first_arg,
11808 uint32_t arg_count,
11809 EnsureElementsMode mode) {
11810 // Elements in |Arguments| are ordered backwards (because they're on the
11811 // stack), but the method that's called here iterates over them in forward
11813 return EnsureCanContainElements(
11814 object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
// Returns the AccessorPair for element |index| if the object defines one
// itself; global proxies forward to their global object, and indexed
// interceptors make the result unknowable (empty handle).
11818 MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair(
11819 Handle<JSObject> object,
11821 if (object->IsJSGlobalProxy()) {
11822 PrototypeIterator iter(object->GetIsolate(), object);
11823 if (iter.IsAtEnd()) return MaybeHandle<AccessorPair>();
11824 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
11825 return GetOwnElementAccessorPair(
11826 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
11829 // Check for lookup interceptor.
11830 if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();
11832 return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
// Stores element |index| through the object's indexed interceptor. If the
// interceptor's setter callback handles the store (non-empty result), the
// value is returned; otherwise the store falls through to
// SetElementWithoutInterceptor.
11836 MaybeHandle<Object> JSObject::SetElementWithInterceptor(
11837 Handle<JSObject> object,
11839 Handle<Object> value,
11840 PropertyAttributes attributes,
11841 StrictMode strict_mode,
11842 bool check_prototype,
11843 SetPropertyMode set_mode) {
11844 Isolate* isolate = object->GetIsolate();
11846 // Make sure that the top context does not change when doing
11847 // callbacks or interceptor calls.
11848 AssertNoContextChange ncc(isolate);
11850 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
11851 if (!interceptor->setter()->IsUndefined()) {
11852 v8::IndexedPropertySetterCallback setter =
11853 v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
11855 ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
11856 PropertyCallbackArguments args(isolate, interceptor->data(), *object,
11858 v8::Handle<v8::Value> result =
11859 args.Call(setter, index, v8::Utils::ToLocal(value));
// The callback may have scheduled an exception; propagate it.
11860 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
// Non-empty result means the interceptor intercepted the store.
11861 if (!result.IsEmpty()) return value;
11864 return SetElementWithoutInterceptor(object, index, value, attributes,
// Loads element |index| through an accessor |structure|: an
// ExecutableAccessorInfo (API callback), an AccessorPair (JS getter), or a
// DeclaredAccessorInfo. Returns undefined when no getter is available.
11871 MaybeHandle<Object> JSObject::GetElementWithCallback(
11872 Handle<JSObject> object,
11873 Handle<Object> receiver,
11874 Handle<Object> structure,
11876 Handle<Object> holder) {
11877 Isolate* isolate = object->GetIsolate();
11878 DCHECK(!structure->IsForeign());
11879 // api style callbacks.
11880 if (structure->IsExecutableAccessorInfo()) {
11881 Handle<ExecutableAccessorInfo> data =
11882 Handle<ExecutableAccessorInfo>::cast(structure);
11883 Object* fun_obj = data->getter();
11884 v8::AccessorNameGetterCallback call_fun =
11885 v8::ToCData<v8::AccessorNameGetterCallback>(fun_obj);
// No getter callback installed: behave as if the element were undefined.
11886 if (call_fun == NULL) return isolate->factory()->undefined_value();
11887 Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
// API callbacks take a name, so convert the element index to a string key.
11888 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11889 Handle<String> key = isolate->factory()->NumberToString(number);
11890 LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
11891 PropertyCallbackArguments
11892 args(isolate, data->data(), *receiver, *holder_handle);
11893 v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
11894 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
11895 if (result.IsEmpty()) return isolate->factory()->undefined_value();
11896 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
11897 result_internal->VerifyApiCallResultType();
11898 // Rebox handle before return.
11899 return handle(*result_internal, isolate);
11902 // __defineGetter__ callback
11903 if (structure->IsAccessorPair()) {
11904 Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
11906 if (getter->IsSpecFunction()) {
11907 // TODO(rossberg): nicer would be to cast to some JSCallable here...
11908 return GetPropertyWithDefinedGetter(
11909 receiver, Handle<JSReceiver>::cast(getter));
11911 // Getter is not a function.
11912 return isolate->factory()->undefined_value();
11915 if (structure->IsDeclaredAccessorInfo()) {
11916 return GetDeclaredAccessorProperty(
11917 receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
// Unknown accessor kind (should be unreachable per the DCHECKs above).
11921 return MaybeHandle<Object>();
// Stores |value| to element |index| through an accessor |structure|:
// ExecutableAccessorInfo (API setter callback), AccessorPair (JS setter —
// throws in strict mode when absent), or DeclaredAccessorInfo (currently a
// no-op, see TODO).
11925 MaybeHandle<Object> JSObject::SetElementWithCallback(
11926 Handle<Object> object, Handle<Object> structure, uint32_t index,
11927 Handle<Object> value, Handle<JSObject> holder, StrictMode strict_mode) {
11928 Isolate* isolate = holder->GetIsolate();
11930 // We should never get here to initialize a const with the hole
11931 // value since a const declaration would conflict with the setter.
11932 DCHECK(!value->IsTheHole());
11933 DCHECK(!structure->IsForeign());
11934 if (structure->IsExecutableAccessorInfo()) {
11935 // api style callbacks
11936 Handle<ExecutableAccessorInfo> data =
11937 Handle<ExecutableAccessorInfo>::cast(structure);
11938 Object* call_obj = data->setter();
11939 v8::AccessorNameSetterCallback call_fun =
11940 v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
// No setter callback installed: the store silently succeeds.
11941 if (call_fun == NULL) return value;
// API callbacks take a name, so convert the element index to a string key.
11942 Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11943 Handle<String> key(isolate->factory()->NumberToString(number));
11944 LOG(isolate, ApiNamedPropertyAccess("store", *holder, *key));
11945 PropertyCallbackArguments
11946 args(isolate, data->data(), *object, *holder);
11947 args.Call(call_fun,
11948 v8::Utils::ToLocal(key),
11949 v8::Utils::ToLocal(value));
11950 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
11954 if (structure->IsAccessorPair()) {
11955 Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
11956 if (setter->IsSpecFunction()) {
11957 // TODO(rossberg): nicer would be to cast to some JSCallable here...
11958 return SetPropertyWithDefinedSetter(
11959 object, Handle<JSReceiver>::cast(setter), value);
// No setter: sloppy mode ignores the store, strict mode throws a TypeError.
11961 if (strict_mode == SLOPPY) return value;
11962 Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
11963 Handle<Object> args[2] = { key, holder };
11965 isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
11970 // TODO(dcarney): Handle correctly.
11971 if (structure->IsDeclaredAccessorInfo()) return value;
// Unknown accessor kind (should be unreachable per the DCHECKs above).
11974 return MaybeHandle<Object>();
// Returns true if this object is a sloppy-arguments object whose backing
// arguments store (slot 1 of the parameter map) is NOT a dictionary.
11978 bool JSObject::HasFastArgumentsElements() {
11979 Heap* heap = GetHeap();
11980 if (!elements()->IsFixedArray()) return false;
11981 FixedArray* elements = FixedArray::cast(this->elements());
11982 if (elements->map() != heap->sloppy_arguments_elements_map()) {
11985 FixedArray* arguments = FixedArray::cast(elements->get(1));
11986 return !arguments->IsDictionary();
// Returns true if this object is a sloppy-arguments object whose backing
// arguments store (slot 1 of the parameter map) IS a dictionary — the
// inverse of HasFastArgumentsElements for arguments objects.
11990 bool JSObject::HasDictionaryArgumentsElements() {
11991 Heap* heap = GetHeap();
11992 if (!elements()->IsFixedArray()) return false;
11993 FixedArray* elements = FixedArray::cast(this->elements());
11994 if (elements->map() != heap->sloppy_arguments_elements_map()) {
11997 FixedArray* arguments = FixedArray::cast(elements->get(1));
11998 return arguments->IsDictionary();
12002 // Adding n elements in fast case is O(n*n).
12003 // Note: revisit design to have dual undefined values to capture absent
12005 MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
12007 Handle<Object> value,
12008 StrictMode strict_mode,
12009 bool check_prototype) {
12010 DCHECK(object->HasFastSmiOrObjectElements() ||
12011 object->HasFastArgumentsElements());
12013 Isolate* isolate = object->GetIsolate();
12015 // Array optimizations rely on the prototype lookups of Array objects always
12016 // returning undefined. If there is a store to the initial prototype object,
12017 // make sure all of these optimizations are invalidated.
12018 if (isolate->is_initial_object_prototype(*object) ||
12019 isolate->is_initial_array_prototype(*object)) {
12020 object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
12021 DependentCode::kElementsCantBeAddedGroup);
12024 Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
12025 if (backing_store->map() ==
12026 isolate->heap()->sloppy_arguments_elements_map()) {
12027 backing_store = handle(FixedArray::cast(backing_store->get(1)));
12029 backing_store = EnsureWritableFastElements(object);
12031 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
12033 if (check_prototype &&
12034 (index >= capacity || backing_store->get(index)->IsTheHole())) {
12036 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
12037 object, index, value, &found, strict_mode);
12038 if (found) return result;
12041 uint32_t new_capacity = capacity;
12042 // Check if the length property of this object needs to be updated.
12043 uint32_t array_length = 0;
12044 bool must_update_array_length = false;
12045 bool introduces_holes = true;
12046 if (object->IsJSArray()) {
12047 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12048 introduces_holes = index > array_length;
12049 if (index >= array_length) {
12050 must_update_array_length = true;
12051 array_length = index + 1;
12054 introduces_holes = index >= capacity;
12057 // If the array is growing, and it's not growth by a single element at the
12058 // end, make sure that the ElementsKind is HOLEY.
12059 ElementsKind elements_kind = object->GetElementsKind();
12060 if (introduces_holes &&
12061 IsFastElementsKind(elements_kind) &&
12062 !IsFastHoleyElementsKind(elements_kind)) {
12063 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12064 TransitionElementsKind(object, transitioned_kind);
12067 // Check if the capacity of the backing store needs to be increased, or if
12068 // a transition to slow elements is necessary.
12069 if (index >= capacity) {
12070 bool convert_to_slow = true;
12071 if ((index - capacity) < kMaxGap) {
12072 new_capacity = NewElementsCapacity(index + 1);
12073 DCHECK(new_capacity > index);
12074 if (!object->ShouldConvertToSlowElements(new_capacity)) {
12075 convert_to_slow = false;
12078 if (convert_to_slow) {
12079 NormalizeElements(object);
12080 return SetDictionaryElement(object, index, value, NONE, strict_mode,
12084 // Convert to fast double elements if appropriate.
12085 if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12086 // Consider fixing the boilerplate as well if we have one.
12087 ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12088 ? FAST_HOLEY_DOUBLE_ELEMENTS
12089 : FAST_DOUBLE_ELEMENTS;
12091 UpdateAllocationSite(object, to_kind);
12093 SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12094 FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12095 JSObject::ValidateElements(object);
12098 // Change elements kind from Smi-only to generic FAST if necessary.
12099 if (object->HasFastSmiElements() && !value->IsSmi()) {
12100 ElementsKind kind = object->HasFastHoleyElements()
12101 ? FAST_HOLEY_ELEMENTS
12104 UpdateAllocationSite(object, kind);
12105 Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12106 JSObject::MigrateToMap(object, new_map);
12107 DCHECK(IsFastObjectElementsKind(object->GetElementsKind()));
12109 // Increase backing store capacity if that's been decided previously.
12110 if (new_capacity != capacity) {
12111 SetFastElementsCapacitySmiMode smi_mode =
12112 value->IsSmi() && object->HasFastSmiElements()
12113 ? kAllowSmiElements
12114 : kDontAllowSmiElements;
12115 Handle<FixedArray> new_elements =
12116 SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12118 new_elements->set(index, *value);
12119 JSObject::ValidateElements(object);
12123 // Finally, set the new element and length.
12124 DCHECK(object->elements()->IsFixedArray());
12125 backing_store->set(index, *value);
12126 if (must_update_array_length) {
12127 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
// Stores |value| at |index| on an object whose elements are kept in a
// SeededNumberDictionary (slow mode), including the dictionary-mode
// sloppy-arguments case.  Covers: updating an existing entry (with callback
// dispatch, read-only checks and aliased-arguments handling), inserting a new
// entry (with prototype-setter lookup and extensibility checks), updating a
// JSArray's length, and an opportunistic conversion back to fast elements.
// NOTE(review): several continuation and closing-brace lines of this function
// appear to have been lost in extraction; the code below is kept exactly as
// found.  The |index| parameter declaration is among the elided lines — the
// body reads |index| throughout.
MaybeHandle<Object> JSObject::SetDictionaryElement(
    Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  Isolate* isolate = object->GetIsolate();
  // Insert element in the dictionary.
  Handle<FixedArray> elements(FixedArray::cast(object->elements()));
  // For a sloppy-arguments backing store the dictionary lives in slot 1 of
  // the parameter map; otherwise the elements array itself is the dictionary.
  bool is_arguments =
      (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
  Handle<SeededNumberDictionary> dictionary(is_arguments
      ? SeededNumberDictionary::cast(elements->get(1))
      : SeededNumberDictionary::cast(*elements));
  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    // The index already has an entry: update it in place.
    Handle<Object> element(dictionary->ValueAt(entry), isolate);
    PropertyDetails details = dictionary->DetailsAt(entry);
    // Accessor entries are routed through the setter unless the caller is
    // (re)defining the property rather than assigning it.
    if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
      return SetElementWithCallback(object, element, index, value, object,
    dictionary->UpdateMaxNumberKey(index);
    // If a value has not been initialized we allow writing to it even if it
    // is read-only (a declared const that has not been initialized). If a
    // value is being defined we skip attribute checks completely.
    if (set_mode == DEFINE_PROPERTY) {
      details = PropertyDetails(
          attributes, NORMAL, details.dictionary_index());
      dictionary->DetailsAtPut(entry, details);
    } else if (details.IsReadOnly() && !element->IsTheHole()) {
      // Read-only, initialized entry: sloppy mode silently ignores the
      // store; strict mode throws a TypeError.
      if (strict_mode == SLOPPY) {
        return isolate->factory()->undefined_value();
      Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
      Handle<Object> args[2] = { number, object };
      THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
                                            HandleVector(args, 2)),
    // Elements of the arguments object in slow mode might be slow aliases.
    if (is_arguments && element->IsAliasedArgumentsEntry()) {
      // Write through to the aliased context slot instead of the dictionary.
      Handle<AliasedArgumentsEntry> entry =
          Handle<AliasedArgumentsEntry>::cast(element);
      Handle<Context> context(Context::cast(elements->get(0)));
      int context_index = entry->aliased_context_slot();
      DCHECK(!context->get(context_index)->IsTheHole());
      context->set(context_index, *value);
      // For elements that are still writable we keep slow aliasing.
      if (!details.IsReadOnly()) value = element;
    dictionary->ValueAtPut(entry, *value);
  // Index not already used. Look for an accessor in the prototype chain.
  if (check_prototype) {
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  // When we set the is_extensible flag to false we always force the
  // element into dictionary mode (and force them to stay there).
  if (!object->map()->is_extensible()) {
    // Adding to a non-extensible object: silent no-op in sloppy mode,
    // TypeError in strict mode.
    if (strict_mode == SLOPPY) {
      return isolate->factory()->undefined_value();
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> name = isolate->factory()->NumberToString(number);
    Handle<Object> args[1] = { name };
    THROW_NEW_ERROR(isolate, NewTypeError("object_not_extensible",
                                          HandleVector(args, 1)),
  // Insert the new entry; AddNumberEntry may reallocate the dictionary.
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Handle<SeededNumberDictionary> new_dictionary =
      SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
  if (*dictionary != *new_dictionary) {
    // Store the (possibly new) dictionary back where it belongs.
    if (is_arguments) {
      elements->set(1, *new_dictionary);
      object->set_elements(*new_dictionary);
    dictionary = new_dictionary;
  // Update the array length if this JSObject is an array.
  if (object->IsJSArray()) {
    JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
  // Attempt to put this object back in fast case.
  if (object->ShouldConvertToFastElements()) {
    uint32_t new_length = 0;
    if (object->IsJSArray()) {
      CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
      new_length = dictionary->max_number_key() + 1;
    bool has_smi_only_elements = false;
    bool should_convert_to_fast_double_elements =
        object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
    SetFastElementsCapacitySmiMode smi_mode =
        has_smi_only_elements ? kForceSmiElements : kAllowSmiElements;
    if (should_convert_to_fast_double_elements) {
      SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
      SetFastElementsCapacityAndLength(object, new_length, new_length,
    JSObject::ValidateElements(object);
    if (FLAG_trace_normalization) {
      OFStream os(stdout);
      os << "Object elements are fast case again:\n";
// Stores |value| at |index| on an object whose elements are an unboxed
// FixedDoubleArray.  Non-number values force a transition back to object
// elements; out-of-bounds stores may grow the backing store (within kMaxGap)
// or fall back to dictionary elements.
// NOTE(review): continuation/closing lines (and the |index| parameter
// declaration) appear to have been lost in extraction; code kept as found.
MaybeHandle<Object> JSObject::SetFastDoubleElement(
    Handle<JSObject> object,
    Handle<Object> value,
    StrictMode strict_mode,
    bool check_prototype) {
  DCHECK(object->HasFastDoubleElements());
  Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
  uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
  // If storing to an element that isn't in the array, pass the store request
  // up the prototype chain before storing in the receiver's elements.
  if (check_prototype &&
      (index >= elms_length ||
       Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  // If the value object is not a heap number, switch to fast elements and try
  bool value_is_smi = value->IsSmi();
  bool introduces_holes = true;
  uint32_t length = elms_length;
  // For JSArrays holes are measured against the array length; otherwise
  // against the backing-store capacity.
  if (object->IsJSArray()) {
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
    introduces_holes = index > length;
    introduces_holes = index >= elms_length;
  if (!value->IsNumber()) {
    // Non-number store into a double array: transition to object elements
    // and retry through the generic fast-element path.
    SetFastElementsCapacityAndLength(object, elms_length, length,
                                     kDontAllowSmiElements);
    Handle<Object> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        object->GetIsolate(), result,
        SetFastElement(object, index, value, strict_mode, check_prototype),
    JSObject::ValidateElements(object);
  // Unbox the number; Smis and HeapNumbers are the only IsNumber() cases.
  double double_value = value_is_smi
      ? static_cast<double>(Handle<Smi>::cast(value)->value())
      : Handle<HeapNumber>::cast(value)->value();
  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);
  // Check whether there is extra space in the fixed array.
  if (index < elms_length) {
    // In-bounds store: write the raw double directly.
    Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
    elms->set(index, double_value);
    if (object->IsJSArray()) {
      // Update the length of the array if needed.
      uint32_t array_length = 0;
          Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
      if (index >= array_length) {
        Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
  // Allow gap in fast case.
  if ((index - elms_length) < kMaxGap) {
    // Try allocating extra space.
    int new_capacity = NewElementsCapacity(index+1);
    if (!object->ShouldConvertToSlowElements(new_capacity)) {
      DCHECK(static_cast<uint32_t>(new_capacity) > index);
      SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
      FixedDoubleArray::cast(object->elements())->set(index, double_value);
      JSObject::ValidateElements(object);
  // Otherwise default to slow case.
  DCHECK(object->HasFastDoubleElements());
  DCHECK(object->map()->has_fast_double_elements());
  DCHECK(object->elements()->IsFixedDoubleArray() ||
         object->elements()->length() == 0);
  // Normalize and retry through the generic SetElement entry point.
  NormalizeElements(object);
  DCHECK(object->HasDictionaryElements());
  return SetElement(object, index, value, NONE, strict_mode, check_prototype);
12369 MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
12371 Handle<Object> value,
12372 PropertyAttributes attributes,
12373 StrictMode strict_mode) {
12374 if (object->IsJSProxy()) {
12375 return JSProxy::SetElementWithHandler(
12376 Handle<JSProxy>::cast(object), object, index, value, strict_mode);
12378 return JSObject::SetElement(
12379 Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
12383 MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
12385 Handle<Object> value,
12386 StrictMode strict_mode) {
12387 DCHECK(!object->HasExternalArrayElements());
12388 return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
// The general JSObject element store.  Handles typed-array value coercion,
// access checks, the global-proxy indirection, attribute normalization, and
// — for observed objects — Object.observe change/splice record generation
// around the actual store.
// NOTE(review): continuation/closing lines (including the |index| parameter
// declaration) appear to have been lost in extraction; code kept as found.
MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  Isolate* isolate = object->GetIsolate();
  // Typed-array stores coerce the value with ToNumber first (may run JS and
  // throw).
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    if (!value->IsNumber() && !value->IsUndefined()) {
      ASSIGN_RETURN_ON_EXCEPTION(
          Execution::ToNumber(isolate, value), Object);
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
      RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  // A global proxy forwards the store to the real global object behind it.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return value;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
        value, attributes, strict_mode, check_prototype, set_mode);
  // Don't allow element properties to be redefined for external arrays.
  if ((object->HasExternalArrayElements() ||
       object->HasFixedTypedArrayElements()) &&
      set_mode == DEFINE_PROPERTY) {
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<Object> args[] = { object, number };
    THROW_NEW_ERROR(isolate, NewTypeError("redef_external_array_element",
                                          HandleVector(args, arraysize(args))),
  // Normalize the elements to enable attributes on the property.
  if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
    Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
    // Make sure that we never go back to fast case.
    dictionary->set_requires_slow_elements();
  // Unobserved objects: do the store directly, via the indexed interceptor
  // when one is installed.
  if (!object->map()->is_observed()) {
    return object->HasIndexedInterceptor()
      ? SetElementWithInterceptor(object, index, value, attributes,
                                  strict_mode, check_prototype, set_mode)
      : SetElementWithoutInterceptor(object, index, value, attributes,
                                     strict_mode, check_prototype, set_mode);
  // Observed path: capture the element's old state so change records can be
  // emitted after the store.
  Maybe<PropertyAttributes> maybe =
      JSReceiver::GetOwnElementAttribute(object, index);
  if (!maybe.has_value) return MaybeHandle<Object>();
  PropertyAttributes old_attributes = maybe.value;
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  Handle<Object> old_length_handle;
  Handle<Object> new_length_handle;
  if (old_attributes != ABSENT) {
    // Only data elements have a capturable old value; accessors do not.
    if (GetOwnElementAccessorPair(object, index).is_null()) {
      old_value = Object::GetElement(isolate, object, index).ToHandleChecked();
  } else if (object->IsJSArray()) {
    // Store old array length in case adding an element grows the array.
    old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
  // Check for lookup interceptor
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      object->HasIndexedInterceptor()
          ? SetElementWithInterceptor(
                object, index, value, attributes,
                strict_mode, check_prototype, set_mode)
          : SetElementWithoutInterceptor(
                object, index, value, attributes,
                strict_mode, check_prototype, set_mode),
  // Compare post-store state against the captured pre-store state to decide
  // which Object.observe records to enqueue.
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  maybe = GetOwnElementAttribute(object, index);
  if (!maybe.has_value) return MaybeHandle<Object>();
  PropertyAttributes new_attributes = maybe.value;
  if (old_attributes == ABSENT) {
    if (object->IsJSArray() &&
        !old_length_handle->SameValue(
            Handle<JSArray>::cast(object)->length())) {
      // The store grew the array: emit "add" + length "update" wrapped in a
      // splice, plus the splice record itself.
      new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
      uint32_t old_length = 0;
      uint32_t new_length = 0;
      CHECK(old_length_handle->ToArrayIndex(&old_length));
      CHECK(new_length_handle->ToArrayIndex(&new_length));
      RETURN_ON_EXCEPTION(
          isolate, BeginPerformSplice(Handle<JSArray>::cast(object)), Object);
      RETURN_ON_EXCEPTION(
          isolate, EnqueueChangeRecord(object, "add", name, old_value), Object);
      RETURN_ON_EXCEPTION(
          isolate, EnqueueChangeRecord(object, "update",
                                       isolate->factory()->length_string(),
                                       old_length_handle),
      RETURN_ON_EXCEPTION(
          isolate, EndPerformSplice(Handle<JSArray>::cast(object)), Object);
      Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
      RETURN_ON_EXCEPTION(
          EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length,
                              deleted, new_length - old_length),
      RETURN_ON_EXCEPTION(
          isolate, EnqueueChangeRecord(object, "add", name, old_value), Object);
  } else if (old_value->IsTheHole()) {
    // Element existed but had no value (accessor): report a reconfigure.
    RETURN_ON_EXCEPTION(
        isolate, EnqueueChangeRecord(object, "reconfigure", name, old_value),
    Handle<Object> new_value =
        Object::GetElement(isolate, object, index).ToHandleChecked();
    bool value_changed = !old_value->SameValue(*new_value);
    if (old_attributes != new_attributes) {
      if (!value_changed) old_value = isolate->factory()->the_hole_value();
      RETURN_ON_EXCEPTION(
          isolate, EnqueueChangeRecord(object, "reconfigure", name, old_value),
    } else if (value_changed) {
      RETURN_ON_EXCEPTION(
          isolate, EnqueueChangeRecord(object, "update", name, old_value),
// Performs the actual element store after interceptors/observation have been
// dealt with: dispatches on the receiver's ElementsKind to the fast, double,
// typed-array, dictionary or sloppy-arguments store path.
// NOTE(review): continuation/closing lines (including the |index| parameter
// declaration) appear to have been lost in extraction; code kept as found.
MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
    Handle<JSObject> object,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  // Non-default attributes are only representable in dictionary mode.
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements() ||
         (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
  Isolate* isolate = object->GetIsolate();
  // Optional tracing of suspicious element writes.
  if (FLAG_trace_external_array_abuse &&
      IsExternalArrayElementsKind(object->GetElementsKind())) {
    CheckArrayAbuse(object, "external elements write", index);
  if (FLAG_trace_js_array_abuse &&
      !IsExternalArrayElementsKind(object->GetElementsKind())) {
    if (object->IsJSArray()) {
      CheckArrayAbuse(object, "elements write", index, true);
  // A store that would implicitly grow a read-only array length is a no-op
  // in sloppy mode and a TypeError in strict mode.
  if (object->IsJSArray() && JSArray::WouldChangeReadOnlyLength(
          Handle<JSArray>::cast(object), index)) {
    if (strict_mode == SLOPPY) {
    return JSArray::ReadOnlyLengthError(Handle<JSArray>::cast(object));
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      return SetFastElement(object, index, value, strict_mode, check_prototype);
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      return SetFastDoubleElement(object, index, value, strict_mode,
// Expands to a pair of cases (external and fixed) per typed-array type,
// delegating to the corresponding array class's SetValue.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS: {                                        \
      Handle<External##Type##Array> array(                                    \
          External##Type##Array::cast(object->elements()));                   \
      return External##Type##Array::SetValue(array, index, value);            \
    case TYPE##_ELEMENTS: {                                                   \
      Handle<Fixed##Type##Array> array(                                       \
          Fixed##Type##Array::cast(object->elements()));                      \
      return Fixed##Type##Array::SetValue(array, index, value);               \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    case DICTIONARY_ELEMENTS:
      return SetDictionaryElement(object, index, value, attributes, strict_mode,
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      // The parameter map stores the context at slot 0, the backing store at
      // slot 1, and per-index context slots from slot 2 on.
      Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
      uint32_t length = parameter_map->length();
      Handle<Object> probe = index < length - 2 ?
          Handle<Object>(parameter_map->get(index + 2), isolate) :
      if (!probe.is_null() && !probe->IsTheHole()) {
        // Aliased parameter: write through to the context slot.
        Handle<Context> context(Context::cast(parameter_map->get(0)));
        int context_index = Handle<Smi>::cast(probe)->value();
        DCHECK(!context->get(context_index)->IsTheHole());
        context->set(context_index, *value);
        // Redefining attributes of an aliased element destroys fast aliasing.
        if (set_mode == SET_PROPERTY || attributes == NONE) return value;
        parameter_map->set_the_hole(index + 2);
        // For elements that are still writable we re-establish slow aliasing.
        if ((attributes & READ_ONLY) == 0) {
          value = Handle<Object>::cast(
              isolate->factory()->NewAliasedArgumentsEntry(context_index));
        // Unaliased index: store into the arguments backing store, which may
        // itself be fast or dictionary mode.
        Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
        if (arguments->IsDictionary()) {
          return SetDictionaryElement(object, index, value, attributes,
          return SetFastElement(object, index, value, strict_mode,
  // All possible cases have been handled above. Add a return to avoid the
  // complaints from the compiler.
  return isolate->factory()->null_value();
// Threshold ratio used by the pretenuring heuristics of AllocationSite.
// NOTE(review): the consumer is not visible in this chunk — presumably the
// memento found/create ratio compared in the pretenure decision; confirm
// against AllocationSite's decision logic.
const double AllocationSite::kPretenureRatio = 0.85;
// Resets the pretenuring state machine for this site: decision back to
// kUndecided and both memento counters cleared.
// NOTE(review): the closing brace line appears to have been lost in
// extraction; code kept as found.
void AllocationSite::ResetPretenureDecision() {
  set_pretenure_decision(kUndecided);
  set_memento_found_count(0);
  set_memento_create_count(0);
12657 PretenureFlag AllocationSite::GetPretenureMode() {
12658 PretenureDecision mode = pretenure_decision();
12659 // Zombie objects "decide" to be untenured.
12660 return mode == kTenure ? TENURED : NOT_TENURED;
// Returns whether this site appears as the nested_site() of some other site
// on the heap's allocation-sites list.  Only reachable when allocation-site
// tracing is enabled (see the DCHECK).
// NOTE(review): the return statements of this function appear to have been
// lost in extraction; code kept as found.
bool AllocationSite::IsNestedSite() {
  DCHECK(FLAG_trace_track_allocation_sites);
  // Walk the weak linked list of all allocation sites.
  Object* current = GetHeap()->allocation_sites_list();
  while (current->IsAllocationSite()) {
    AllocationSite* current_site = AllocationSite::cast(current);
    if (current_site->nested_site() == this) {
    current = current_site->weak_next();
// Records an observed elements-kind transition in the allocation site.  When
// the site points at a boilerplate JSArray literal the boilerplate itself is
// transitioned (only for reasonably small arrays); otherwise just the site's
// recorded ElementsKind is updated.  Either way, optimized code that depends
// on the old kind is deoptimized.
// NOTE(review): several closing-brace/continuation lines appear to have been
// lost in extraction; code kept as found.
void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
                                              ElementsKind to_kind) {
  Isolate* isolate = site->GetIsolate();
  if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
    // Boilerplate case: transition the literal's backing array directly.
    Handle<JSArray> transition_info =
        handle(JSArray::cast(site->transition_info()));
    ElementsKind kind = transition_info->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      // If the array is huge, it's not likely to be defined in a local
      // function, so we shouldn't make new instances of it very often.
      uint32_t length = 0;
      CHECK(transition_info->length()->ToArrayIndex(&length));
      if (length <= kMaximumArrayBytesToPretransition) {
        if (FLAG_trace_track_allocation_sites) {
          bool is_nested = site->IsNestedSite();
              "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
              reinterpret_cast<void*>(*site),
              is_nested ? "(nested)" : "",
              ElementsKindToString(kind),
              ElementsKindToString(to_kind));
        JSObject::TransitionElementsKind(transition_info, to_kind);
        // Invalidate optimized code specialized on the old kind.
        site->dependent_code()->DeoptimizeDependentCodeGroup(
            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
    // Non-literal case: only the site's recorded kind changes.
    ElementsKind kind = site->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
               reinterpret_cast<void*>(*site),
               ElementsKindToString(kind),
               ElementsKindToString(to_kind));
      site->SetElementsKind(to_kind);
      site->dependent_code()->DeoptimizeDependentCodeGroup(
          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
// Registers |info|'s code as dependent on this allocation site under the
// dependency group derived from |reason|, and records the site in the
// compilation info's dependency list.
// NOTE(review): the |reason| parameter declaration appears to have been lost
// in extraction — the body reads |reason|; code kept as found.
void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
                                                 CompilationInfo* info) {
  DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
  Handle<DependentCode> dep(site->dependent_code());
  Handle<DependentCode> codes =
      DependentCode::Insert(dep, group, info->object_wrapper());
  // Insert() may have allocated a new DependentCode array; only write back
  // if it did.
  if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
  info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
// Returns a human-readable name for a pretenure decision, for tracing output.
// NOTE(review): the trailing return/closing lines after the default case
// appear to have been lost in extraction; code kept as found.
const char* AllocationSite::PretenureDecisionName(PretenureDecision decision) {
  switch (decision) {
    case kUndecided: return "undecided";
    case kDontTenure: return "don't tenure";
    case kMaybeTenure: return "maybe tenure";
    case kTenure: return "tenure";
    case kZombie: return "zombie";
    default: UNREACHABLE();
// Feeds an elements-kind transition back into the AllocationSite that created
// |object|, if one can be found.  Only applies to JSArrays still in new space
// (older objects have no trailing allocation memento to find).
// NOTE(review): the opening brace of the no-allocation scope and some closing
// lines appear to have been lost in extraction; code kept as found.
void JSObject::UpdateAllocationSite(Handle<JSObject> object,
                                    ElementsKind to_kind) {
  if (!object->IsJSArray()) return;
  Heap* heap = object->GetHeap();
  if (!heap->InNewSpace(*object)) return;
  Handle<AllocationSite> site;
    // Raw-pointer scan for the memento must not be interrupted by GC.
    DisallowHeapAllocation no_allocation;
    AllocationMemento* memento = heap->FindAllocationMemento(*object);
    if (memento == NULL) return;
    // Walk through to the Allocation Site
    site = handle(memento->GetAllocationSite());
  AllocationSite::DigestTransitionFeedback(site, to_kind);
// Transitions |object|'s elements to |to_kind|.  Map-only transitions (same
// backing-store representation) just migrate the map; Smi->double and
// double->object transitions rebuild the backing store.  Fast target kinds
// are also reported to the object's allocation site.
// NOTE(review): several closing-brace/return lines appear to have been lost
// in extraction; code kept as found.
void JSObject::TransitionElementsKind(Handle<JSObject> object,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = object->map()->elements_kind();
  // Once holey, always holey: never transition back to a packed kind.
  if (IsFastHoleyElementsKind(from_kind)) {
    to_kind = GetHoleyElementsKind(to_kind);
  if (from_kind == to_kind) return;
  // Don't update the site if to_kind isn't fast
  if (IsFastElementsKind(to_kind)) {
    UpdateAllocationSite(object, to_kind);
  Isolate* isolate = object->GetIsolate();
  // These transitions keep the same backing-store representation, so only
  // the map needs to change.
  if (object->elements() == isolate->heap()->empty_fixed_array() ||
      (IsFastSmiOrObjectElementsKind(from_kind) &&
       IsFastSmiOrObjectElementsKind(to_kind)) ||
      (from_kind == FAST_DOUBLE_ELEMENTS &&
       to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
    DCHECK(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
    // No change is needed to the elements() buffer, the transition
    // only requires a map change.
    Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
    MigrateToMap(object, new_map);
    if (FLAG_trace_elements_transitions) {
      Handle<FixedArrayBase> elms(object->elements());
      PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
  Handle<FixedArrayBase> elms(object->elements());
  uint32_t capacity = static_cast<uint32_t>(elms->length());
  uint32_t length = capacity;
  if (object->IsJSArray()) {
    Object* raw_length = Handle<JSArray>::cast(object)->length();
    if (raw_length->IsUndefined()) {
      // If length is undefined, then JSArray is being initialized and has no
      // elements, assume a length of zero.
      CHECK(raw_length->ToArrayIndex(&length));
  // Smi -> double: unbox into a FixedDoubleArray.
  if (IsFastSmiElementsKind(from_kind) &&
      IsFastDoubleElementsKind(to_kind)) {
    SetFastDoubleElementsCapacityAndLength(object, capacity, length);
    JSObject::ValidateElements(object);
  // Double -> object: box back into a FixedArray.
  if (IsFastDoubleElementsKind(from_kind) &&
      IsFastObjectElementsKind(to_kind)) {
    SetFastElementsCapacityAndLength(object, capacity, length,
                                     kDontAllowSmiElements);
    JSObject::ValidateElements(object);
  // This method should never be called for any other case than the ones
12847 bool Map::IsValidElementsTransition(ElementsKind from_kind,
12848 ElementsKind to_kind) {
12849 // Transitions can't go backwards.
12850 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
12854 // Transitions from HOLEY -> PACKED are not allowed.
12855 return !IsFastHoleyElementsKind(from_kind) ||
12856 IsFastHoleyElementsKind(to_kind);
12860 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
12862 Handle<Object> value) {
12863 uint32_t old_len = 0;
12864 CHECK(array->length()->ToArrayIndex(&old_len));
12865 // Check to see if we need to update the length. For now, we make
12866 // sure that the length stays within 32-bits (unsigned).
12867 if (index >= old_len && index != 0xffffffff) {
12868 Handle<Object> len = array->GetIsolate()->factory()->NewNumber(
12869 static_cast<double>(index) + 1);
12870 array->set_length(*len);
12875 bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) {
12876 Isolate* isolate = jsarray_map->GetIsolate();
12877 DCHECK(!jsarray_map->is_dictionary_map());
12878 LookupResult lookup(isolate);
12879 Handle<Name> length_string = isolate->factory()->length_string();
12880 jsarray_map->LookupDescriptor(NULL, *length_string, &lookup);
12881 return lookup.IsReadOnly();
// Returns whether storing at |index| would implicitly grow the array's
// length while that length is read-only.  Only stores at or beyond the
// current length can change it, so the property is only inspected then.
// NOTE(review): the |index| parameter declaration and the trailing return
// line appear to have been lost in extraction; code kept as found.
bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array,
  uint32_t length = 0;
  CHECK(array->length()->ToArrayIndex(&length));
  if (length <= index) {
    // Look up the own "length" property, skipping interceptors; it must
    // exist as an accessor on every JSArray.
    LookupIterator it(array, array->GetIsolate()->factory()->length_string(),
                      LookupIterator::OWN_SKIP_INTERCEPTOR);
    CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
    CHECK(it.IsFound());
    CHECK_EQ(LookupIterator::ACCESSOR, it.state());
    return it.IsReadOnly();
// Throws the strict-mode TypeError raised when a store would change a
// read-only array length.
// NOTE(review): the final argument/closing lines of the THROW_NEW_ERROR
// macro invocation appear to have been lost in extraction; code kept as
// found.
MaybeHandle<Object> JSArray::ReadOnlyLengthError(Handle<JSArray> array) {
  Isolate* isolate = array->GetIsolate();
  Handle<Name> length = isolate->factory()->length_string();
  Handle<Object> args[2] = { length, array };
  THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
                                        HandleVector(args, arraysize(args))),
// Loads element |index| from an object with an indexed interceptor: the
// interceptor's getter is consulted first; if it yields nothing, the
// object's own elements are read, and finally the lookup continues up the
// prototype chain.
// NOTE(review): continuation/closing lines (including the |index| parameter
// declaration) appear to have been lost in extraction; code kept as found.
MaybeHandle<Object> JSObject::GetElementWithInterceptor(
    Handle<JSObject> object,
    Handle<Object> receiver,
  Isolate* isolate = object->GetIsolate();
  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);
  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
  if (!interceptor->getter()->IsUndefined()) {
    // Invoke the embedder-supplied indexed getter callback.
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
        ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), *receiver, *object);
    v8::Handle<v8::Value> result = args.Call(getter, index);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!result.IsEmpty()) {
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox handle before return.
      return handle(*result_internal, isolate);
  // Interceptor declined: read from the object's own elements.
  ElementsAccessor* handler = object->GetElementsAccessor();
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result, handler->Get(receiver, object, index),
  if (!result->IsTheHole()) return result;
  // Not found locally: continue the lookup on the prototype chain.
  PrototypeIterator iter(isolate, object);
  if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
  return Object::GetElementWithReceiver(
      isolate, PrototypeIterator::GetCurrent(iter), receiver, index);
// Returns true when the elements are "dense": zero capacity, or more than
// half of the capacity in use.
// NOTE(review): the declarations of the local capacity/used counters appear
// to have been lost in extraction; code kept as found.
bool JSObject::HasDenseElements() {
  GetElementsCapacityAndUsage(&capacity, &used);
  return (capacity == 0) || (used > (capacity / 2));
// Computes the elements backing store's total capacity and the number of
// slots actually in use, per ElementsKind.  Packed kinds on JSArrays can use
// the array length directly; holey kinds must count non-hole slots;
// dictionaries report their own bookkeeping; typed/external arrays are
// always fully used.
// NOTE(review): several break/closing-brace lines appear to have been lost
// in extraction; code kept as found.
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
  FixedArray* backing_store = NULL;
  switch (GetElementsKind()) {
    case SLOPPY_ARGUMENTS_ELEMENTS:
      // The real backing store sits in slot 1 of the parameter map and may
      // itself be a dictionary.
      backing_store_base =
          FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
      backing_store = FixedArray::cast(backing_store_base);
      if (backing_store->IsDictionary()) {
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(backing_store);
        *capacity = dictionary->Capacity();
        *used = dictionary->NumberOfElements();
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
        // Packed array: every slot up to length is in use.
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      // Holey array: count the non-hole slots explicitly.
      backing_store = FixedArray::cast(backing_store_base);
      *capacity = backing_store->length();
      for (int i = 0; i < *capacity; ++i) {
        if (!backing_store->get(i)->IsTheHole()) ++(*used);
    case DICTIONARY_ELEMENTS: {
      SeededNumberDictionary* dictionary = element_dictionary();
      *capacity = dictionary->Capacity();
      *used = dictionary->NumberOfElements();
    case FAST_DOUBLE_ELEMENTS:
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      *capacity = elements()->length();
      if (*capacity == 0) break;
      FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
      for (int i = 0; i < *capacity; i++) {
        if (!elms->is_the_hole(i)) ++(*used);
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                      \
    case EXTERNAL_##TYPE##_ELEMENTS:                                         \
    case TYPE##_ELEMENTS:                                                    \
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
      // External arrays are considered 100% used.
      FixedArrayBase* external_array = FixedArrayBase::cast(elements());
      *capacity = external_array->length();
      *used = external_array->length();
// Predicts whether storing at |key| would force the fast element backing
// store to be converted to a (slow) dictionary: either the index is more
// than kMaxGap beyond the current capacity, or growing to hold it would
// trip the slow-elements heuristic.
13036 bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
13038 if (HasFastElements() && key->ToArrayIndex(&index)) {
13039 Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
13040 uint32_t capacity = static_cast<uint32_t>(backing_store->length());
13041 if (index >= capacity) {
// Writing far past the end would create a huge sparse gap.
13042 if ((index - capacity) >= kMaxGap) return true;
13043 uint32_t new_capacity = NewElementsCapacity(index + 1);
13044 return ShouldConvertToSlowElements(new_capacity);
// Decides whether growing the fast backing store to |new_capacity| is
// worse than switching to dictionary elements.  Small capacities (and
// somewhat larger ones for new-space objects) always stay fast.
13051 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
13052 STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
13053 kMaxUncheckedFastElementsLength);
13054 if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
13055 (new_capacity <= kMaxUncheckedFastElementsLength &&
13056 GetHeap()->InNewSpace(this))) {
13059 // If the fast-case backing storage takes up roughly three times as
13060 // much space (in machine words) as a dictionary backing storage
13061 // would, the object should have slow elements.
13062 int old_capacity = 0;
13063 int used_elements = 0;
13064 GetElementsCapacityAndUsage(&old_capacity, &used_elements);
13065 int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
13066 SeededNumberDictionary::kEntrySize;
13067 return 3 * dictionary_size <= new_capacity;
// Decides whether dictionary (or dictionary-backed sloppy-arguments)
// elements should be converted back to a fast backing store.  Requires
// dense elements and no features that depend on staying slow (access
// checks, observation, high-index / requires_slow_elements dictionaries).
13071 bool JSObject::ShouldConvertToFastElements() {
13072 DCHECK(HasDictionaryElements() || HasDictionaryArgumentsElements());
13073 // If the elements are sparse, we should not go back to fast case.
13074 if (!HasDenseElements()) return false;
13075 // An object requiring access checks is never allowed to have fast
13076 // elements. If it had fast elements we would skip security checks.
13077 if (IsAccessCheckNeeded()) return false;
13078 // Observed objects may not go to fast mode because they rely on map checks,
13079 // and for fast element accesses we sometimes check element kinds only.
13080 if (map()->is_observed()) return false;
// Locate the number dictionary: for sloppy arguments it is stored at
// index 1 of the parameter map, otherwise elements() is the dictionary.
13082 FixedArray* elements = FixedArray::cast(this->elements());
13083 SeededNumberDictionary* dictionary = NULL;
13084 if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
13085 dictionary = SeededNumberDictionary::cast(elements->get(1));
13087 dictionary = SeededNumberDictionary::cast(elements);
13089 // If an element has been added at a very high index in the elements
13090 // dictionary, we cannot go back to fast case.
13091 if (dictionary->requires_slow_elements()) return false;
13092 // If the dictionary backing storage takes up roughly half as much
13093 // space (in machine words) as a fast-case backing storage would,
13094 // the object should have fast elements.
13095 uint32_t array_size = 0;
13097 CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
13099 array_size = dictionary->max_number_key();
13101 uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
13102 SeededNumberDictionary::kEntrySize;
13103 return 2 * dictionary_size >= array_size;
// Returns true if this object's dictionary elements hold only numbers and
// at least one of them is a non-Smi double, i.e. a FAST_DOUBLE backing
// store would be appropriate.  Sets |*has_smi_only_elements| when every
// numeric value is a Smi.  Any non-number value disqualifies conversion.
13107 bool JSObject::ShouldConvertToFastDoubleElements(
13108 bool* has_smi_only_elements) {
13109 *has_smi_only_elements = false;
13110 if (HasSloppyArgumentsElements()) return false;
13111 if (FLAG_unbox_double_arrays) {
13112 DCHECK(HasDictionaryElements());
13113 SeededNumberDictionary* dictionary = element_dictionary();
13114 bool found_double = false;
13115 for (int i = 0; i < dictionary->Capacity(); i++) {
13116 Object* key = dictionary->KeyAt(i);
13117 if (key->IsNumber()) {
13118 Object* value = dictionary->ValueAt(i);
// A single non-number value rules out a double backing store.
13119 if (!value->IsNumber()) return false;
13120 if (!value->IsSmi()) {
13121 found_double = true;
13125 *has_smi_only_elements = !found_double;
13126 return found_double;
13133 // Certain compilers request function template instantiation when they
13134 // see the definition of the other template functions in the
13135 // class. This requires us to have the template functions put
13136 // together, so even though this function belongs in objects-debug.cc,
13137 // we keep it here instead to satisfy certain compilers.
13138 #ifdef OBJECT_PRINT
// Debug printing: dumps every live key/value/details triple in the
// dictionary to |os|.  String keys are printed as strings.
13139 template <typename Derived, typename Shape, typename Key>
13140 void Dictionary<Derived, Shape, Key>::Print(std::ostream& os) { // NOLINT
13141 int capacity = DerivedHashTable::Capacity();
13142 for (int i = 0; i < capacity; i++) {
13143 Object* k = DerivedHashTable::KeyAt(i);
// Skip empty/deleted slots; IsKey filters them out.
13144 if (DerivedHashTable::IsKey(k)) {
13146 if (k->IsString()) {
13147 String::cast(k)->StringPrint(os);
13151 os << ": " << Brief(ValueAt(i)) << " " << DetailsAt(i) << "\n";
// Copies the values of all live dictionary entries into |elements|,
// packed from index 0; asserts the destination is filled exactly.
13158 template<typename Derived, typename Shape, typename Key>
13159 void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
13161 int capacity = DerivedHashTable::Capacity();
13162 DisallowHeapAllocation no_gc;
13163 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13164 for (int i = 0; i < capacity; i++) {
13165 Object* k = Dictionary::KeyAt(i);
13166 if (Dictionary::IsKey(k)) {
13167 elements->set(pos++, ValueAt(i), mode);
13170 DCHECK(pos == elements->length());
// Returns the named-property interceptor installed via the API on this
// object's constructor.  Only valid when the map declares one.
13174 InterceptorInfo* JSObject::GetNamedInterceptor() {
13175 DCHECK(map()->has_named_interceptor());
13176 JSFunction* constructor = JSFunction::cast(map()->constructor());
13177 DCHECK(constructor->shared()->IsApiFunction());
13179 constructor->shared()->get_api_func_data()->named_property_handler();
13180 return InterceptorInfo::cast(result);
// Returns the indexed-property interceptor installed via the API on this
// object's constructor.  Only valid when the map declares one.
13184 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13185 DCHECK(map()->has_indexed_interceptor());
13186 JSFunction* constructor = JSFunction::cast(map()->constructor());
13187 DCHECK(constructor->shared()->IsApiFunction());
13189 constructor->shared()->get_api_func_data()->indexed_property_handler();
13190 return InterceptorInfo::cast(result);
// Invokes the named-property getter interceptor on |holder| for |name|.
// Returns an empty MaybeHandle when the interceptor declines (no getter,
// symbol name, or empty result), letting the caller continue the lookup.
13194 MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
13195 Handle<JSObject> holder,
13196 Handle<Object> receiver,
13197 Handle<Name> name) {
13198 Isolate* isolate = holder->GetIsolate();
13200 // TODO(rossberg): Support symbols in the API.
13201 if (name->IsSymbol()) return isolate->factory()->undefined_value();
13203 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor(), isolate);
13204 Handle<String> name_string = Handle<String>::cast(name);
13206 if (interceptor->getter()->IsUndefined()) return MaybeHandle<Object>();
13208 v8::NamedPropertyGetterCallback getter =
13209 v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
13211 ApiNamedPropertyAccess("interceptor-named-get", *holder, *name));
13212 PropertyCallbackArguments
13213 args(isolate, interceptor->data(), *receiver, *holder);
// Call out to the embedder; the callback may schedule an exception.
13214 v8::Handle<v8::Value> result =
13215 args.Call(getter, v8::Utils::ToLocal(name_string));
13216 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
13217 if (result.IsEmpty()) return MaybeHandle<Object>();
13219 Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13220 result_internal->VerifyApiCallResultType();
13221 // Rebox handle before return.
13222 return handle(*result_internal, isolate);
13226 // Compute the property keys from the interceptor.
13227 // TODO(rossberg): support symbols in API, and filter here if needed.
// Calls the named-property enumerator callback (if any) and returns the
// embedder-provided key collection; empty MaybeHandle when there is none.
13228 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor(
13229 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13230 Isolate* isolate = receiver->GetIsolate();
13231 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
13232 PropertyCallbackArguments
13233 args(isolate, interceptor->data(), *receiver, *object);
13234 v8::Handle<v8::Object> result;
13235 if (!interceptor->enumerator()->IsUndefined()) {
13236 v8::NamedPropertyEnumeratorCallback enum_fun =
13237 v8::ToCData<v8::NamedPropertyEnumeratorCallback>(
13238 interceptor->enumerator());
13239 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
13240 result = args.Call(enum_fun);
13242 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13243 #if ENABLE_EXTRA_CHECKS
// The embedder must return an array-like object.
13244 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13245 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13247 // Rebox before returning.
13248 return handle(*v8::Utils::OpenHandle(*result), isolate);
13252 // Compute the element keys from the interceptor.
// Indexed-property twin of GetKeysForNamedInterceptor: invokes the
// indexed enumerator callback and reboxes its array-like result.
13253 MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor(
13254 Handle<JSObject> object, Handle<JSReceiver> receiver) {
13255 Isolate* isolate = receiver->GetIsolate();
13256 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
13257 PropertyCallbackArguments
13258 args(isolate, interceptor->data(), *receiver, *object);
13259 v8::Handle<v8::Object> result;
13260 if (!interceptor->enumerator()->IsUndefined()) {
13261 v8::IndexedPropertyEnumeratorCallback enum_fun =
13262 v8::ToCData<v8::IndexedPropertyEnumeratorCallback>(
13263 interceptor->enumerator());
13264 LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
13265 result = args.Call(enum_fun);
13267 if (result.IsEmpty()) return MaybeHandle<JSObject>();
13268 #if ENABLE_EXTRA_CHECKS
13269 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
13270 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
13272 // Rebox before returning.
13273 return handle(*v8::Utils::OpenHandle(*result), isolate);
// Checks for an own named property, skipping interceptors.  Returns an
// empty Maybe when the attribute lookup itself failed (e.g. exception).
13277 Maybe<bool> JSObject::HasRealNamedProperty(Handle<JSObject> object,
13278 Handle<Name> key) {
13279 LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
13280 Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
13281 if (!maybe_result.has_value) return Maybe<bool>();
13282 return maybe(it.IsFound());
// Checks for a real (non-interceptor) element at |index|.  Performs the
// indexed access check first, and forwards through a global proxy to its
// hidden global object.  Empty Maybe signals a failed/thrown lookup.
13286 Maybe<bool> JSObject::HasRealElementProperty(Handle<JSObject> object,
13288 Isolate* isolate = object->GetIsolate();
13289 HandleScope scope(isolate);
13290 // Check access rights if needed.
13291 if (object->IsAccessCheckNeeded()) {
13292 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
13293 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
13294 RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<bool>());
// Access denied without a scheduled exception: treat as absent.
13295 return maybe(false);
13299 if (object->IsJSGlobalProxy()) {
13300 HandleScope scope(isolate);
13301 PrototypeIterator iter(isolate, object);
13302 if (iter.IsAtEnd()) return maybe(false);
13303 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
// Recurse on the proxy's hidden prototype (the real global object).
13304 return HasRealElementProperty(
13305 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
13308 Maybe<PropertyAttributes> result =
13309 GetElementAttributeWithoutInterceptor(object, object, index, false);
13310 if (!result.has_value) return Maybe<bool>();
13311 return maybe(result.value != ABSENT);
// Like HasRealNamedProperty, but reports true only when the own lookup
// lands on an accessor (callback) property.
13315 Maybe<bool> JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
13316 Handle<Name> key) {
13317 LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
13318 Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
13319 if (!maybe_result.has_value) return Maybe<bool>();
13320 return maybe(it.state() == LookupIterator::ACCESSOR);
// Counts own properties matching |filter|.  Fast-properties objects can
// often answer from the map (descriptor count or cached enum length);
// otherwise the property dictionary is consulted.
13324 int JSObject::NumberOfOwnProperties(PropertyAttributes filter) {
13325 if (HasFastProperties()) {
13326 Map* map = this->map();
13327 if (filter == NONE) return map->NumberOfOwnDescriptors();
13328 if (filter & DONT_ENUM) {
// Use the cached enum length when it is valid.
13329 int result = map->EnumLength();
13330 if (result != kInvalidEnumCacheSentinel) return result;
13332 return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
13334 return property_dictionary()->NumberOfElementsFilterAttributes(filter);
// Swaps entries i and j in this array and, when |numbers| is a distinct
// array, the corresponding Smi entries in |numbers| as well (keeping the
// content/number pairs aligned).
13338 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
13339 Object* temp = get(i);
13342 if (this != numbers) {
13343 temp = numbers->get(i);
13344 numbers->set(i, Smi::cast(numbers->get(j)));
13345 numbers->set(j, Smi::cast(temp));
// Insertion sort of (content, number) pairs ordered by the uint32 values
// in |numbers|.  Used for small arrays where insertion sort is cheapest.
13350 static void InsertionSortPairs(FixedArray* content,
13351 FixedArray* numbers,
13353 for (int i = 1; i < len; i++) {
13356 (NumberToUint32(numbers->get(j - 1)) >
13357 NumberToUint32(numbers->get(j)))) {
13358 content->SwapPairs(numbers, j - 1, j);
// In-place heap sort of (content, number) pairs keyed on the uint32
// values in |numbers|: bottom-up max-heap construction followed by
// repeated extraction of the maximum to the back of the array.
13365 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
13366 // In-place heap sort.
13367 DCHECK(content->length() == numbers->length());
13369 // Bottom-up max-heap construction.
13370 for (int i = 1; i < len; ++i) {
13371 int child_index = i;
// Sift the new element up until the heap property holds.
13372 while (child_index > 0) {
13373 int parent_index = ((child_index + 1) >> 1) - 1;
13374 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13375 uint32_t child_value = NumberToUint32(numbers->get(child_index));
13376 if (parent_value < child_value) {
13377 content->SwapPairs(numbers, parent_index, child_index);
13381 child_index = parent_index;
13385 // Extract elements and create sorted array.
13386 for (int i = len - 1; i > 0; --i) {
13387 // Put max element at the back of the array.
13388 content->SwapPairs(numbers, 0, i);
13389 // Sift down the new top element.
13390 int parent_index = 0;
13392 int child_index = ((parent_index + 1) << 1) - 1;
13393 if (child_index >= i) break;
13394 uint32_t child1_value = NumberToUint32(numbers->get(child_index));
13395 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
13396 uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
// Choose the larger child (or the only child) to compare against.
13397 if (child_index + 1 >= i || child1_value > child2_value) {
13398 if (parent_value > child1_value) break;
13399 content->SwapPairs(numbers, parent_index, child_index);
13400 parent_index = child_index;
13402 if (parent_value > child2_value) break;
13403 content->SwapPairs(numbers, parent_index, child_index + 1);
13404 parent_index = child_index + 1;
13411 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
// Strategy: insertion sort for small inputs; a linear-time placement sort
// when the indices form a contiguous range; heap sort otherwise.
13412 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13413 DCHECK(this->length() == numbers->length());
13414 // For small arrays, simply use insertion sort.
13416 InsertionSortPairs(this, numbers, len);
13419 // Check the range of indices.
13420 uint32_t min_index = NumberToUint32(numbers->get(0));
13421 uint32_t max_index = min_index;
13423 for (i = 1; i < len; i++) {
13424 if (NumberToUint32(numbers->get(i)) < min_index) {
13425 min_index = NumberToUint32(numbers->get(i));
13426 } else if (NumberToUint32(numbers->get(i)) > max_index) {
13427 max_index = NumberToUint32(numbers->get(i));
13430 if (max_index - min_index + 1 == len) {
13431 // Indices form a contiguous range, unless there are duplicates.
13432 // Do an in-place linear time sort assuming distinct numbers, but
13433 // avoid hanging in case they are not.
13434 for (i = 0; i < len; i++) {
13437 // While the current element at i is not at its correct position p,
13438 // swap the elements at these two positions.
13439 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13441 SwapPairs(numbers, i, p);
13445 HeapSortPairs(this, numbers, len);
13451 // Fill in the names of own properties into the supplied storage. The main
13452 // purpose of this function is to provide reflection information for the object
// mirrors.  Writes names passing |filter| into |storage| starting at
// |index|; fast-properties objects read their descriptor array, others
// copy keys from the property dictionary.
13454 void JSObject::GetOwnPropertyNames(
13455 FixedArray* storage, int index, PropertyAttributes filter) {
13456 DCHECK(storage->length() >= (NumberOfOwnProperties(filter) - index));
13457 if (HasFastProperties()) {
13458 int real_size = map()->NumberOfOwnDescriptors();
13459 DescriptorArray* descs = map()->instance_descriptors();
13460 for (int i = 0; i < real_size; i++) {
13461 if ((descs->GetDetails(i).attributes() & filter) == 0 &&
13462 !FilterKey(descs->GetKey(i), filter)) {
13463 storage->set(index++, descs->GetKey(i));
13467 property_dictionary()->CopyKeysTo(storage,
13470 NameDictionary::UNSORTED);
// Counts own elements matching |filter| by running GetOwnElementKeys in
// count-only mode (NULL storage).
13475 int JSObject::NumberOfOwnElements(PropertyAttributes filter) {
13476 return GetOwnElementKeys(NULL, filter);
// Counts enumerable own elements; short-circuits for empty fast-object
// element stores before falling back to the general counting path.
13480 int JSObject::NumberOfEnumElements() {
13481 // Fast case for objects with no elements.
13482 if (!IsJSValue() && HasFastObjectElements()) {
13483 uint32_t length = IsJSArray() ?
13484 static_cast<uint32_t>(
13485 Smi::cast(JSArray::cast(this)->length())->value()) :
13486 static_cast<uint32_t>(FixedArray::cast(elements())->length());
13487 if (length == 0) return 0;
13489 // Compute the number of enumerable elements.
13490 return NumberOfOwnElements(static_cast<PropertyAttributes>(DONT_ENUM));
// Writes the own element keys matching |filter| into |storage| (when
// non-NULL) and returns the number of such keys.  Handles every elements
// kind; JSValue-wrapped strings additionally contribute one key per
// character at the end.  Passing NULL storage makes this a pure count.
13494 int JSObject::GetOwnElementKeys(FixedArray* storage,
13495 PropertyAttributes filter) {
13497 switch (GetElementsKind()) {
13498 case FAST_SMI_ELEMENTS:
13499 case FAST_ELEMENTS:
13500 case FAST_HOLEY_SMI_ELEMENTS:
13501 case FAST_HOLEY_ELEMENTS: {
13502 int length = IsJSArray() ?
13503 Smi::cast(JSArray::cast(this)->length())->value() :
13504 FixedArray::cast(elements())->length();
// Record the index of every non-hole entry.
13505 for (int i = 0; i < length; i++) {
13506 if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
13507 if (storage != NULL) {
13508 storage->set(counter, Smi::FromInt(i));
13513 DCHECK(!storage || storage->length() >= counter);
13516 case FAST_DOUBLE_ELEMENTS:
13517 case FAST_HOLEY_DOUBLE_ELEMENTS: {
13518 int length = IsJSArray() ?
13519 Smi::cast(JSArray::cast(this)->length())->value() :
13520 FixedArrayBase::cast(elements())->length();
13521 for (int i = 0; i < length; i++) {
13522 if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
13523 if (storage != NULL) {
13524 storage->set(counter, Smi::FromInt(i));
13529 DCHECK(!storage || storage->length() >= counter);
13533 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13534 case EXTERNAL_##TYPE##_ELEMENTS: \
13535 case TYPE##_ELEMENTS: \
13537 TYPED_ARRAYS(TYPED_ARRAY_CASE)
13538 #undef TYPED_ARRAY_CASE
// Typed/external arrays: every index in [0, length) is a key.
13540 int length = FixedArrayBase::cast(elements())->length();
13541 while (counter < length) {
13542 if (storage != NULL) {
13543 storage->set(counter, Smi::FromInt(counter));
13547 DCHECK(!storage || storage->length() >= counter);
13551 case DICTIONARY_ELEMENTS: {
13552 if (storage != NULL) {
13553 element_dictionary()->CopyKeysTo(storage,
13555 SeededNumberDictionary::SORTED);
13557 counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
13560 case SLOPPY_ARGUMENTS_ELEMENTS: {
13561 FixedArray* parameter_map = FixedArray::cast(elements());
// parameter_map layout: [context, arguments backing, mapped slots...].
13562 int mapped_length = parameter_map->length() - 2;
13563 FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
13564 if (arguments->IsDictionary()) {
13565 // Copy the keys from arguments first, because Dictionary::CopyKeysTo
13566 // will insert in storage starting at index 0.
13567 SeededNumberDictionary* dictionary =
13568 SeededNumberDictionary::cast(arguments);
13569 if (storage != NULL) {
13570 dictionary->CopyKeysTo(
13571 storage, filter, SeededNumberDictionary::UNSORTED);
13573 counter += dictionary->NumberOfElementsFilterAttributes(filter);
13574 for (int i = 0; i < mapped_length; ++i) {
13575 if (!parameter_map->get(i + 2)->IsTheHole()) {
13576 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// Keys from the dictionary and mapped slots may interleave; sort them.
13580 if (storage != NULL) storage->SortPairs(storage, counter);
13583 int backing_length = arguments->length();
// Non-dictionary backing: a slot is present if it is mapped or the
// backing store holds a non-hole value at that index.
13585 for (; i < mapped_length; ++i) {
13586 if (!parameter_map->get(i + 2)->IsTheHole()) {
13587 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13589 } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
13590 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13594 for (; i < backing_length; ++i) {
13595 if (storage != NULL) storage->set(counter, Smi::FromInt(i));
// String-wrapping JSValues expose one element key per character.
13603 if (this->IsJSValue()) {
13604 Object* val = JSValue::cast(this)->value();
13605 if (val->IsString()) {
13606 String* str = String::cast(val);
13608 for (int i = 0; i < str->length(); i++) {
13609 storage->set(counter + i, Smi::FromInt(i));
13612 counter += str->length();
13615 DCHECK(!storage || storage->length() == counter);
// Collects the enumerable own element keys into |storage| by filtering
// out DONT_ENUM entries.
13620 int JSObject::GetEnumElementKeys(FixedArray* storage) {
13621 return GetOwnElementKeys(storage, static_cast<PropertyAttributes>(DONT_ENUM));
// Maps a heap-allocated private symbol back to its name in
// PRIVATE_SYMBOL_LIST by pointer-comparing against each known symbol.
13625 const char* Symbol::PrivateSymbolToName() const {
13626 Heap* heap = GetIsolate()->heap();
13627 #define SYMBOL_CHECK_AND_PRINT(name) \
13628 if (this == heap->name()) return #name;
13629 PRIVATE_SYMBOL_LIST(SYMBOL_CHECK_AND_PRINT)
13630 #undef SYMBOL_CHECK_AND_PRINT
// Prints a short debug representation: the symbol's hash, its description
// string when present, and the private-symbol name when applicable.
13635 void Symbol::SymbolShortPrint(std::ostream& os) {
13636 os << "<Symbol: " << Hash();
13637 if (!name()->IsUndefined()) {
13639 HeapStringAllocator allocator;
13640 StringStream accumulator(&allocator);
13641 String::cast(name())->StringShortPrint(&accumulator);
13642 os << accumulator.ToCString().get();
13644 os << " (" << PrivateSymbolToName() << ")";
13650 // StringSharedKeys are used as keys in the eval cache.
// The key identifies a compilation by (source, outer SharedFunctionInfo,
// strict mode, scope position).  Stored table entries are 4-element
// FixedArrays with exactly that layout (see AsHandle/IsMatch).
13651 class StringSharedKey : public HashTableKey {
13653 StringSharedKey(Handle<String> source,
13654 Handle<SharedFunctionInfo> shared,
13655 StrictMode strict_mode,
13656 int scope_position)
13659 strict_mode_(strict_mode),
13660 scope_position_(scope_position) { }
// Matches either a stored 4-element FixedArray entry field-by-field, or
// a bare number (a hash placeholder) by hash comparison.
13662 bool IsMatch(Object* other) OVERRIDE {
13663 DisallowHeapAllocation no_allocation;
13664 if (!other->IsFixedArray()) {
13665 if (!other->IsNumber()) return false;
13666 uint32_t other_hash = static_cast<uint32_t>(other->Number());
13667 return Hash() == other_hash;
13669 FixedArray* other_array = FixedArray::cast(other);
13670 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13671 if (shared != *shared_) return false;
13672 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13673 DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13674 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13675 if (strict_mode != strict_mode_) return false;
13676 int scope_position = Smi::cast(other_array->get(3))->value();
13677 if (scope_position != scope_position_) return false;
13678 String* source = String::cast(other_array->get(1));
13679 return source->Equals(*source_);
// Shared hash computation for both the lookup key and stored entries.
13682 static uint32_t StringSharedHashHelper(String* source,
13683 SharedFunctionInfo* shared,
13684 StrictMode strict_mode,
13685 int scope_position) {
13686 uint32_t hash = source->Hash();
13687 if (shared->HasSourceCode()) {
13688 // Instead of using the SharedFunctionInfo pointer in the hash
13689 // code computation, we use a combination of the hash of the
13690 // script source code and the start position of the calling scope.
13691 // We do this to ensure that the cache entries can survive garbage
13693 Script* script(Script::cast(shared->script()));
13694 hash ^= String::cast(script->source())->Hash();
13695 if (strict_mode == STRICT) hash ^= 0x8000;
13696 hash += scope_position;
13701 uint32_t Hash() OVERRIDE {
13702 return StringSharedHashHelper(*source_, *shared_, strict_mode_,
// Computes the hash of a stored entry (FixedArray) or number placeholder.
13706 uint32_t HashForObject(Object* obj) OVERRIDE {
13707 DisallowHeapAllocation no_allocation;
13708 if (obj->IsNumber()) {
13709 return static_cast<uint32_t>(obj->Number());
13711 FixedArray* other_array = FixedArray::cast(obj);
13712 SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13713 String* source = String::cast(other_array->get(1));
13714 int strict_unchecked = Smi::cast(other_array->get(2))->value();
13715 DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13716 StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13717 int scope_position = Smi::cast(other_array->get(3))->value();
13718 return StringSharedHashHelper(
13719 source, shared, strict_mode, scope_position);
// Materializes the key as the 4-element FixedArray stored in the table.
13723 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13724 Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
13725 array->set(0, *shared_);
13726 array->set(1, *source_);
13727 array->set(2, Smi::FromInt(strict_mode_));
13728 array->set(3, Smi::FromInt(scope_position_));
13733 Handle<String> source_;
13734 Handle<SharedFunctionInfo> shared_;
13735 StrictMode strict_mode_;
13736 int scope_position_;
13740 // RegExpKey carries the source and flags of a regular expression as key.
13741 class RegExpKey : public HashTableKey {
13743 RegExpKey(Handle<String> string, JSRegExp::Flags flags)
13745 flags_(Smi::FromInt(flags.value())) { }
13747 // Rather than storing the key in the hash table, a pointer to the
13748 // stored value is stored where the key should be. IsMatch then
13749 // compares the search key to the found object, rather than comparing
// (the two keys directly): the stored value is a FixedArray whose
// kSourceIndex/kFlagsIndex slots are compared against this key.
13751 bool IsMatch(Object* obj) OVERRIDE {
13752 FixedArray* val = FixedArray::cast(obj);
13753 return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
13754 && (flags_ == val->get(JSRegExp::kFlagsIndex));
13757 uint32_t Hash() OVERRIDE { return RegExpHash(*string_, flags_); }
13759 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13760 // Plain hash maps, which is where regexp keys are used, don't
13761 // use this function.
// Unreachable in practice; ToHandleChecked() on an empty MaybeHandle
// will crash deliberately if it is ever called.
13763 return MaybeHandle<Object>().ToHandleChecked();
13766 uint32_t HashForObject(Object* obj) OVERRIDE {
13767 FixedArray* val = FixedArray::cast(obj);
13768 return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
13769 Smi::cast(val->get(JSRegExp::kFlagsIndex)));
// Hash combines the source string's hash with the flags value.
13772 static uint32_t RegExpHash(String* string, Smi* flags) {
13773 return string->Hash() + flags->value();
13776 Handle<String> string_;
// Creates the internalized one-byte string for this key, computing the
// hash field first if it has not been computed yet.
13781 Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
13782 if (hash_field_ == 0) Hash();
13783 return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
// Two-byte analogue of OneByteStringKey::AsHandle: lazily compute the
// hash field, then allocate the internalized string.
13787 Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
13788 if (hash_field_ == 0) Hash();
13789 return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
// Creates an internalized string from the [from_, from_ + length_) slice
// of the underlying sequential one-byte string.
13793 Handle<Object> SeqOneByteSubStringKey::AsHandle(Isolate* isolate) {
13794 if (hash_field_ == 0) Hash();
13795 return isolate->factory()->NewOneByteInternalizedSubString(
13796 string_, from_, length_, hash_field_);
// Compares a candidate table entry against this key's character slice.
13800 bool SeqOneByteSubStringKey::IsMatch(Object* string) {
13801 Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
13802 return String::cast(string)->IsOneByteEqualTo(chars);
13806 // InternalizedStringKey carries a string/internalized-string object as key.
13807 class InternalizedStringKey : public HashTableKey {
13809 explicit InternalizedStringKey(Handle<String> string)
13810 : string_(string) { }
13812 virtual bool IsMatch(Object* string) OVERRIDE {
13813 return String::cast(string)->Equals(*string_);
13816 virtual uint32_t Hash() OVERRIDE { return string_->Hash(); }
13818 virtual uint32_t HashForObject(Object* other) OVERRIDE {
13819 return String::cast(other)->Hash();
// Produces the string to store in the table: internalize in place when
// an internalized map exists for it, otherwise allocate a fresh copy.
13822 virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
13823 // Internalize the string if possible.
13824 MaybeHandle<Map> maybe_map =
13825 isolate->factory()->InternalizedStringMapForString(string_);
13827 if (maybe_map.ToHandle(&map)) {
// In-place internalization: only the map changes, so no write barrier.
13828 string_->set_map_no_write_barrier(*map);
13829 DCHECK(string_->IsInternalizedString());
13832 // Otherwise allocate a new internalized string.
13833 return isolate->factory()->NewInternalizedStringImpl(
13834 string_, string_->length(), string_->hash_field());
13837 static uint32_t StringHash(Object* obj) {
13838 return String::cast(obj)->Hash();
13841 Handle<String> string_;
// Visits the pointer fields that precede the hash table's element area.
13845 template<typename Derived, typename Shape, typename Key>
13846 void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
13847 IteratePointers(v, 0, kElementsStartOffset);
// Visits the pointer fields of the hash table's element area, from the
// start of the elements up to the end of the backing FixedArray.
13851 template<typename Derived, typename Shape, typename Key>
13852 void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
13854 kElementsStartOffset,
13855 kHeaderSize + length() * kPointerSize);
// Allocates a new hash table with room for at least |at_least_space_for|
// elements (or exactly that capacity with USE_CUSTOM_MINIMUM_CAPACITY,
// which must then be a power of two).  Aborts on over-large requests.
13859 template<typename Derived, typename Shape, typename Key>
13860 Handle<Derived> HashTable<Derived, Shape, Key>::New(
13862 int at_least_space_for,
13863 MinimumCapacity capacity_option,
13864 PretenureFlag pretenure) {
13865 DCHECK(0 <= at_least_space_for);
13866 DCHECK(!capacity_option || base::bits::IsPowerOfTwo32(at_least_space_for));
13867 int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
13868 ? at_least_space_for
13869 : ComputeCapacity(at_least_space_for);
13870 if (capacity > HashTable::kMaxCapacity) {
13871 v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
// The table is a FixedArray with the hash-table map and zeroed counters.
13874 Factory* factory = isolate->factory();
13875 int length = EntryToIndex(capacity);
13876 Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
13877 array->set_map_no_write_barrier(*factory->hash_table_map());
13878 Handle<Derived> table = Handle<Derived>::cast(array);
13880 table->SetNumberOfElements(0);
13881 table->SetNumberOfDeletedElements(0);
13882 table->SetCapacity(capacity);
13887 // Find entry for key otherwise return kNotFound.
13888 int NameDictionary::FindEntry(Handle<Name> key) {
// Non-unique names take the generic slow path in the base class.
13889 if (!key->IsUniqueName()) {
13890 return DerivedHashTable::FindEntry(key);
13893 // Optimized for unique names. Knowledge of the key type allows:
13894 // 1. Move the check if the key is unique out of the loop.
13895 // 2. Avoid comparing hash codes in unique-to-unique comparison.
13896 // 3. Detect a case when a dictionary key is not unique but the key is.
13897 // In case of positive result the dictionary key may be replaced by the
13898 // internalized string with minimal performance penalty. It gives a chance
13899 // to perform further lookups in code stubs (and significant performance
13900 // boost a certain style of code).
13902 // EnsureCapacity will guarantee the hash table is never full.
13903 uint32_t capacity = Capacity();
13904 uint32_t entry = FirstProbe(key->Hash(), capacity);
13905 uint32_t count = 1;
// Open-addressed probe loop: undefined marks an empty (never-used) slot.
13908 int index = EntryToIndex(entry);
13909 Object* element = get(index);
13910 if (element->IsUndefined()) break; // Empty entry.
// Unique names can be compared by pointer identity.
13911 if (*key == element) return entry;
13912 if (!element->IsUniqueName() &&
13913 !element->IsTheHole() &&
13914 Name::cast(element)->Equals(*key)) {
13915 // Replace a key that is a non-internalized string by the equivalent
13916 // internalized string for faster further lookups.
13920 DCHECK(element->IsTheHole() || !Name::cast(element)->Equals(*key));
13921 entry = NextProbe(entry, count++, capacity);
// Rehashes all live entries of this table into |new_table| (which must
// have spare capacity), copying the prefix fields first and finally
// transferring the element counters.
13927 template<typename Derived, typename Shape, typename Key>
13928 void HashTable<Derived, Shape, Key>::Rehash(
13929 Handle<Derived> new_table,
13931 DCHECK(NumberOfElements() < new_table->Capacity());
13933 DisallowHeapAllocation no_gc;
13934 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
13936 // Copy prefix to new array.
13937 for (int i = kPrefixStartIndex;
13938 i < kPrefixStartIndex + Shape::kPrefixSize;
13940 new_table->set(i, get(i), mode);
13943 // Rehash the elements.
13944 int capacity = Capacity();
13945 for (int i = 0; i < capacity; i++) {
13946 uint32_t from_index = EntryToIndex(i);
13947 Object* k = get(from_index);
// Each entry occupies kEntrySize consecutive slots; copy them all to
// the freshly probed insertion position in the new table.
13949 uint32_t hash = HashTable::HashForObject(key, k);
13950 uint32_t insertion_index =
13951 EntryToIndex(new_table->FindInsertionEntry(hash));
13952 for (int j = 0; j < Shape::kEntrySize; j++) {
13953 new_table->set(insertion_index + j, get(from_index + j), mode);
13957 new_table->SetNumberOfElements(NumberOfElements());
// Rehashing compacts away deleted entries.
13958 new_table->SetNumberOfDeletedElements(0);
// Returns the entry at which element |k| would be placed within the
// first |probe| probes; if one of those probes is |expected|, returns
// |expected| (the element is already acceptably placed).
13962 template<typename Derived, typename Shape, typename Key>
13963 uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
13967 uint32_t expected) {
13968 uint32_t hash = HashTable::HashForObject(key, k);
13969 uint32_t capacity = Capacity();
13970 uint32_t entry = FirstProbe(hash, capacity);
13971 for (int i = 1; i < probe; i++) {
13972 if (entry == expected) return expected;
13973 entry = NextProbe(entry, i, capacity);
// Exchanges the contents of entries |entry1| and |entry2| word by word,
// using an on-stack temporary buffer of Shape::kEntrySize words so that a
// whole entry (key plus auxiliary words) moves as a unit.
13979 template<typename Derived, typename Shape, typename Key>
13980 void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
13982 WriteBarrierMode mode) {
13983 int index1 = EntryToIndex(entry1);
13984 int index2 = EntryToIndex(entry2);
13985 Object* temp[Shape::kEntrySize];
13986 for (int j = 0; j < Shape::kEntrySize; j++) {
13987 temp[j] = get(index1 + j);
13989 for (int j = 0; j < Shape::kEntrySize; j++) {
13990 set(index1 + j, get(index2 + j), mode);
13992 for (int j = 0; j < Shape::kEntrySize; j++) {
13993 set(index2 + j, temp[j], mode);
// Rehashes the table in place (no new backing store): pass |probe| = 1, 2,
// ... and, in each pass, move every element whose target slot for that probe
// number is free (or occupied by a misplaced element) into position via
// Swap. After pass P, all elements reachable within the first P probes are
// correctly placed. Runs under DisallowHeapAllocation.
13998 template<typename Derived, typename Shape, typename Key>
13999 void HashTable<Derived, Shape, Key>::Rehash(Key key) {
14000 DisallowHeapAllocation no_gc;
14001 WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
14002 uint32_t capacity = Capacity();
14004 for (int probe = 1; !done; probe++) {
14005 // All elements at entries given by one of the first _probe_ probes
14006 // are placed correctly. Other elements might need to be moved.
14008 for (uint32_t current = 0; current < capacity; current++) {
14009 Object* current_key = get(EntryToIndex(current));
14010 if (IsKey(current_key)) {
14011 uint32_t target = EntryForProbe(key, current_key, probe, current);
14012 if (current == target) continue;
14013 Object* target_key = get(EntryToIndex(target));
14014 if (!IsKey(target_key) ||
14015 EntryForProbe(key, target_key, probe, target) != target) {
14016 // Put the current element into the correct position.
14017 Swap(current, target, mode);
14018 // The other element will be processed on the next iteration.
14021 // The place for the current element is occupied. Leave the element
14022 // for the next probe.
// Returns |table| unchanged when, after adding |n| elements, at least half
// the capacity would still be free and deleted entries make up at most half
// of that free space. Otherwise allocates a bigger table — pretenured when
// requested or when a large table already lives outside new space — and
// rehashes all entries into it.
14031 template<typename Derived, typename Shape, typename Key>
14032 Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
14033 Handle<Derived> table,
14036 PretenureFlag pretenure) {
14037 Isolate* isolate = table->GetIsolate();
14038 int capacity = table->Capacity();
14039 int nof = table->NumberOfElements() + n;
14040 int nod = table->NumberOfDeletedElements();
14042 // 50% is still free after adding n elements and
14043 // at most 50% of the free elements are deleted elements.
14044 if (nod <= (capacity - nof) >> 1) {
14045 int needed_free = nof >> 1;
14046 if (nof + needed_free <= capacity) return table;
14049 const int kMinCapacityForPretenure = 256;
14050 bool should_pretenure = pretenure == TENURED ||
14051 ((capacity > kMinCapacityForPretenure) &&
14052 !isolate->heap()->InNewSpace(*table));
14053 Handle<Derived> new_table = HashTable::New(
14056 USE_DEFAULT_MINIMUM_CAPACITY,
14057 should_pretenure ? TENURED : NOT_TENURED);
14059 table->Rehash(new_table, key);
14064 template<typename Derived, typename Shape, typename Key>
14065 Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
14067 int capacity = table->Capacity();
14068 int nof = table->NumberOfElements();
14070 // Shrink to fit the number of elements if only a quarter of the
14071 // capacity is filled with elements.
14072 if (nof > (capacity >> 2)) return table;
14073 // Allocate a new dictionary with room for at least the current
14074 // number of elements. The allocation method will make sure that
14075 // there is extra room in the dictionary for additions. Don't go
14076 // lower than room for 16 elements.
14077 int at_least_room_for = nof;
14078 if (at_least_room_for < 16) return table;
14080 Isolate* isolate = table->GetIsolate();
14081 const int kMinCapacityForPretenure = 256;
14083 (at_least_room_for > kMinCapacityForPretenure) &&
14084 !isolate->heap()->InNewSpace(*table);
14085 Handle<Derived> new_table = HashTable::New(
14088 USE_DEFAULT_MINIMUM_CAPACITY,
14089 pretenure ? TENURED : NOT_TENURED);
14091 table->Rehash(new_table, key);
// Probes for a free slot (undefined or the-hole) for a key with the given
// |hash| and returns its entry index. Termination is guaranteed because
// EnsureCapacity keeps the table from ever becoming full.
14096 template<typename Derived, typename Shape, typename Key>
14097 uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
14098 uint32_t capacity = Capacity();
14099 uint32_t entry = FirstProbe(hash, capacity);
14100 uint32_t count = 1;
14101 // EnsureCapacity will guarantee the hash table is never full.
14103 Object* element = KeyAt(entry);
14104 if (element->IsUndefined() || element->IsTheHole()) break;
14105 entry = NextProbe(entry, count++, capacity);
14111 // Force instantiation of template instances class.
14112 // Please note this list is compiler dependent.
// These explicit instantiations let the template member definitions above
// live in this .cc file while other translation units only see declarations.
14114 template class HashTable<StringTable, StringTableShape, HashTableKey*>;
14116 template class HashTable<CompilationCacheTable,
14117 CompilationCacheShape,
14120 template class HashTable<MapCache, MapCacheShape, HashTableKey*>;
14122 template class HashTable<ObjectHashTable,
14123 ObjectHashTableShape,
14126 template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;
14128 template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;
14130 template class Dictionary<SeededNumberDictionary,
14131 SeededNumberDictionaryShape,
14134 template class Dictionary<UnseededNumberDictionary,
14135 UnseededNumberDictionaryShape,
14138 template Handle<SeededNumberDictionary>
14139 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14140 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14142 template Handle<UnseededNumberDictionary>
14143 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14144 New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
14146 template Handle<NameDictionary>
14147 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14148 New(Isolate*, int n, PretenureFlag pretenure);
14150 template Handle<SeededNumberDictionary>
14151 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14152 AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);
14154 template Handle<UnseededNumberDictionary>
14155 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14156 AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);
14159 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14160 SlowReverseLookup(Object* value);
14163 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14164 SlowReverseLookup(Object* value);
14167 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14170 PropertyAttributes,
14171 Dictionary<SeededNumberDictionary,
14172 SeededNumberDictionaryShape,
14173 uint32_t>::SortMode);
14175 template Handle<Object>
14176 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
14177 Handle<NameDictionary>, int, JSObject::DeleteMode);
14179 template Handle<Object>
14180 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14181 DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);
14183 template Handle<NameDictionary>
14184 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14185 New(Isolate*, int, MinimumCapacity, PretenureFlag);
14187 template Handle<NameDictionary>
14188 HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
14189 Shrink(Handle<NameDictionary>, Handle<Name>);
14191 template Handle<SeededNumberDictionary>
14192 HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14193 Shrink(Handle<SeededNumberDictionary>, uint32_t);
14195 template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14199 PropertyAttributes,
14201 NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);
14204 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14205 NumberOfElementsFilterAttributes(PropertyAttributes);
14207 template Handle<NameDictionary>
14208 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
14209 Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);
14211 template Handle<FixedArray> Dictionary<
14212 NameDictionary, NameDictionaryShape,
14213 Handle<Name> >::BuildIterationIndicesArray(Handle<NameDictionary>);
14215 template Handle<FixedArray> Dictionary<
14216 NameDictionary, NameDictionaryShape,
14217 Handle<Name> >::GenerateNewEnumerationIndices(Handle<NameDictionary>);
14220 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14221 NumberOfElementsFilterAttributes(PropertyAttributes);
14223 template Handle<SeededNumberDictionary>
14224 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14225 Add(Handle<SeededNumberDictionary>,
14230 template Handle<UnseededNumberDictionary>
14231 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14232 Add(Handle<UnseededNumberDictionary>,
14237 template Handle<SeededNumberDictionary>
14238 Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14239 EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);
14241 template Handle<UnseededNumberDictionary>
14242 Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
14243 EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);
14245 template Handle<NameDictionary>
14246 Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14247 EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);
14250 int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
14251 NumberOfEnumElements();
14254 int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
14255 NumberOfEnumElements();
14257 template bool Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape,
14258 uint32_t>::HasComplexElements();
14260 template int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape,
14261 uint32_t>::FindEntry(uint32_t);
// Compacts a dictionary-elements object for sorting: copies all defined,
// non-undefined values into a fresh dictionary at consecutive keys starting
// from 0, counts undefineds and re-adds them immediately after, and finally
// installs the new dictionary. Returns the number of defined non-undefined
// values; the elided branches presumably return |bailout| (-1) when sorting
// must be completed in JS (callbacks/read-only entries, keys or positions
// beyond Smi range) — confirm against the full source.
14264 Handle<Object> JSObject::PrepareSlowElementsForSort(
14265 Handle<JSObject> object, uint32_t limit) {
14266 DCHECK(object->HasDictionaryElements());
14267 Isolate* isolate = object->GetIsolate();
14268 // Must stay in dictionary mode, either because of requires_slow_elements,
14269 // or because we are not going to sort (and therefore compact) all of the
14271 Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
14272 Handle<SeededNumberDictionary> new_dict =
14273 SeededNumberDictionary::New(isolate, dict->NumberOfElements());
14276 uint32_t undefs = 0;
14277 int capacity = dict->Capacity();
14278 Handle<Smi> bailout(Smi::FromInt(-1), isolate);
14279 // Entry to the new dictionary does not cause it to grow, as we have
14280 // allocated one that is large enough for all entries.
14281 DisallowHeapAllocation no_gc;
14282 for (int i = 0; i < capacity; i++) {
14283 Object* k = dict->KeyAt(i);
14284 if (!dict->IsKey(k)) continue;
14286 DCHECK(k->IsNumber());
14287 DCHECK(!k->IsSmi() || Smi::cast(k)->value() >= 0);
14288 DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
14289 DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
14291 HandleScope scope(isolate);
14292 Handle<Object> value(dict->ValueAt(i), isolate);
14293 PropertyDetails details = dict->DetailsAt(i);
14294 if (details.type() == CALLBACKS || details.IsReadOnly()) {
14295 // Bail out and do the sorting of undefineds and array holes in JS.
14296 // Also bail out if the element is not supposed to be moved.
14300 uint32_t key = NumberToUint32(k);
14302 if (value->IsUndefined()) {
14304 } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14305 // Adding an entry with the key beyond smi-range requires
14306 // allocation. Bailout.
14309 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14310 new_dict, pos, value, details);
14311 DCHECK(result.is_identical_to(new_dict));
14315 } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
14316 // Adding an entry with the key beyond smi-range requires
14317 // allocation. Bailout.
14320 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14321 new_dict, key, value, details);
14322 DCHECK(result.is_identical_to(new_dict));
// |result| holds the count of defined, non-undefined values; undefineds are
// appended after them under no_details.
14327 uint32_t result = pos;
14328 PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
14329 while (undefs > 0) {
14330 if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14331 // Adding an entry with the key beyond smi-range requires
14332 // allocation. Bailout.
14335 HandleScope scope(isolate);
14336 Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
14337 new_dict, pos, isolate->factory()->undefined_value(), no_details);
14338 DCHECK(result.is_identical_to(new_dict));
14344 object->set_elements(*new_dict);
14346 AllowHeapAllocation allocate_return_value;
14347 return isolate->factory()->NewNumberFromUint(result);
14351 // Collects all defined (non-hole) and non-undefined (array) elements at
14352 // the start of the elements array.
14353 // If the object is in dictionary mode, it is converted to fast elements
// Returns the number of defined, non-undefined values (as a number handle),
// or -1 to signal that the caller must fall back to sorting in JS (sloppy
// arguments / observed objects, or slow-element dictionaries).
14355 Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
14357 Isolate* isolate = object->GetIsolate();
14358 if (object->HasSloppyArgumentsElements() ||
14359 object->map()->is_observed()) {
14360 return handle(Smi::FromInt(-1), isolate);
14363 if (object->HasDictionaryElements()) {
14364 // Convert to fast elements containing only the existing properties.
14365 // Ordering is irrelevant, since we are going to sort anyway.
14366 Handle<SeededNumberDictionary> dict(object->element_dictionary());
14367 if (object->IsJSArray() || dict->requires_slow_elements() ||
14368 dict->max_number_key() >= limit) {
14369 return JSObject::PrepareSlowElementsForSort(object, limit);
14371 // Convert to fast elements.
14373 Handle<Map> new_map =
14374 JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);
// Pretenure when the object itself is already old; a new-space object gets
// new-space elements.
14376 PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
14377 NOT_TENURED: TENURED;
14378 Handle<FixedArray> fast_elements =
14379 isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
14380 dict->CopyValuesTo(*fast_elements);
14381 JSObject::ValidateElements(object);
14383 JSObject::SetMapAndElements(object, new_map, fast_elements);
14384 } else if (object->HasExternalArrayElements() ||
14385 object->HasFixedTypedArrayElements()) {
14386 // Typed arrays cannot have holes or undefined elements.
14387 return handle(Smi::FromInt(
14388 FixedArrayBase::cast(object->elements())->length()), isolate);
14389 } else if (!object->HasFastDoubleElements()) {
14390 EnsureWritableFastElements(object);
14392 DCHECK(object->HasFastSmiOrObjectElements() ||
14393 object->HasFastDoubleElements());
14395 // Collect holes at the end, undefined before that and the rest at the
14396 // start, and return the number of non-hole, non-undefined values.
14398 Handle<FixedArrayBase> elements_base(object->elements());
14399 uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
14400 if (limit > elements_length) {
14401 limit = elements_length ;
14404 return handle(Smi::FromInt(0), isolate);
14407 uint32_t result = 0;
14408 if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
14409 FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
14410 // Split elements into defined and the_hole, in that order.
14411 unsigned int holes = limit;
14412 // Assume most arrays contain no holes and undefined values, so minimize the
14413 // number of stores of non-undefined, non-the-hole values.
14414 for (unsigned int i = 0; i < holes; i++) {
14415 if (elements->is_the_hole(i)) {
14420 // Position i needs to be filled.
14421 while (holes > i) {
14422 if (elements->is_the_hole(holes)) {
14425 elements->set(i, elements->get_scalar(holes));
14431 while (holes < limit) {
14432 elements->set_the_hole(holes);
14436 FixedArray* elements = FixedArray::cast(*elements_base);
14437 DisallowHeapAllocation no_gc;
14439 // Split elements into defined, undefined and the_hole, in that order. Only
14440 // count locations for undefined and the hole, and fill them afterwards.
14441 WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
14442 unsigned int undefs = limit;
14443 unsigned int holes = limit;
14444 // Assume most arrays contain no holes and undefined values, so minimize the
14445 // number of stores of non-undefined, non-the-hole values.
14446 for (unsigned int i = 0; i < undefs; i++) {
14447 Object* current = elements->get(i);
14448 if (current->IsTheHole()) {
14451 } else if (current->IsUndefined()) {
14456 // Position i needs to be filled.
14457 while (undefs > i) {
14458 current = elements->get(undefs);
14459 if (current->IsTheHole()) {
14462 } else if (current->IsUndefined()) {
14465 elements->set(i, current, write_barrier);
14471 while (undefs < holes) {
14472 elements->set_undefined(undefs);
14475 while (holes < limit) {
14476 elements->set_the_hole(holes);
14481 return isolate->factory()->NewNumberFromUint(result);
// Maps the elements' instance type (external- or fixed-typed-array flavor)
// to the corresponding ExternalArrayType tag; returns -1 for anything else.
14485 ExternalArrayType JSTypedArray::type() {
14486 switch (elements()->map()->instance_type()) {
14487 #define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size) \
14488 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14489 case FIXED_##TYPE##_ARRAY_TYPE: \
14490 return kExternal##Type##Array;
14492 TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
14493 #undef INSTANCE_TYPE_TO_ARRAY_TYPE
14497 return static_cast<ExternalArrayType>(-1);
// Returns the per-element byte size for this typed array, derived from the
// elements' instance type via the TYPED_ARRAYS macro's |size| argument.
14502 size_t JSTypedArray::element_size() {
14503 switch (elements()->map()->instance_type()) {
14504 #define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size) \
14505 case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14508 TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
14509 #undef INSTANCE_TYPE_TO_ELEMENT_SIZE
// Stores |value| into a Uint8Clamped array with ECMAScript clamping
// semantics: negatives and NaN clamp to 0, values above 255 clamp to 255,
// other doubles round to the nearest integer (lrint). Out-of-bounds indices
// are ignored. Returns the clamped value that was (or would be) stored.
14518 Handle<Object> ExternalUint8ClampedArray::SetValue(
14519 Handle<ExternalUint8ClampedArray> array,
14521 Handle<Object> value) {
14522 uint8_t clamped_value = 0;
14523 if (index < static_cast<uint32_t>(array->length())) {
14524 if (value->IsSmi()) {
14525 int int_value = Handle<Smi>::cast(value)->value();
14526 if (int_value < 0) {
14528 } else if (int_value > 255) {
14529 clamped_value = 255;
14531 clamped_value = static_cast<uint8_t>(int_value);
14533 } else if (value->IsHeapNumber()) {
14534 double double_value = Handle<HeapNumber>::cast(value)->value();
// !(x > 0) is true for NaN as well as for x <= 0.
14535 if (!(double_value > 0)) {
14536 // NaN and less than zero clamp to zero.
14538 } else if (double_value > 255) {
14539 // Greater than 255 clamp to 255.
14540 clamped_value = 255;
14542 // Other doubles are rounded to the nearest integer.
14543 clamped_value = static_cast<uint8_t>(lrint(double_value));
14546 // Clamp undefined to zero (default). All other types have been
14547 // converted to a number type further up in the call chain.
14548 DCHECK(value->IsUndefined());
14550 array->set(index, clamped_value);
14552 return handle(Smi::FromInt(clamped_value), array->GetIsolate());
// Shared implementation for the integer external-array setters below:
// converts Smis directly and heap numbers via DoubleToInt32, then truncates
// to ValueType. Undefined stores the default (zero); out-of-bounds indices
// are ignored. Returns the stored value as a number handle.
14556 template<typename ExternalArrayClass, typename ValueType>
14557 static Handle<Object> ExternalArrayIntSetter(
14559 Handle<ExternalArrayClass> receiver,
14561 Handle<Object> value) {
14562 ValueType cast_value = 0;
14563 if (index < static_cast<uint32_t>(receiver->length())) {
14564 if (value->IsSmi()) {
14565 int int_value = Handle<Smi>::cast(value)->value();
14566 cast_value = static_cast<ValueType>(int_value);
14567 } else if (value->IsHeapNumber()) {
14568 double double_value = Handle<HeapNumber>::cast(value)->value();
14569 cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
14571 // Clamp undefined to zero (default). All other types have been
14572 // converted to a number type further up in the call chain.
14573 DCHECK(value->IsUndefined());
14575 receiver->set(index, cast_value);
14577 return isolate->factory()->NewNumberFromInt(cast_value);
// Thin wrappers instantiating ExternalArrayIntSetter for each signed and
// unsigned 8/16/32-bit external array type.
14581 Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
14583 Handle<Object> value) {
14584 return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
14585 array->GetIsolate(), array, index, value);
14589 Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
14591 Handle<Object> value) {
14592 return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
14593 array->GetIsolate(), array, index, value);
14597 Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
14599 Handle<Object> value) {
14600 return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
14601 array->GetIsolate(), array, index, value);
14605 Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
14607 Handle<Object> value) {
14608 return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
14609 array->GetIsolate(), array, index, value);
14613 Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
14615 Handle<Object> value) {
14616 return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
14617 array->GetIsolate(), array, index, value);
// Uint32 variant is written out separately because the conversion goes
// through DoubleToUint32 rather than DoubleToInt32. Undefined stores zero;
// out-of-bounds indices are ignored. Returns the stored value.
14621 Handle<Object> ExternalUint32Array::SetValue(
14622 Handle<ExternalUint32Array> array,
14624 Handle<Object> value) {
14625 uint32_t cast_value = 0;
14626 if (index < static_cast<uint32_t>(array->length())) {
14627 if (value->IsSmi()) {
14628 int int_value = Handle<Smi>::cast(value)->value();
14629 cast_value = static_cast<uint32_t>(int_value);
14630 } else if (value->IsHeapNumber()) {
14631 double double_value = Handle<HeapNumber>::cast(value)->value();
14632 cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
14634 // Clamp undefined to zero (default). All other types have been
14635 // converted to a number type further up in the call chain.
14636 DCHECK(value->IsUndefined());
14638 array->set(index, cast_value);
14640 return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
// Float32 setter: Smis and heap numbers are narrowed to float; undefined
// stores NaN (the default initial value of cast_value). Out-of-bounds
// indices are ignored. Returns the stored value as a number.
14644 Handle<Object> ExternalFloat32Array::SetValue(
14645 Handle<ExternalFloat32Array> array,
14647 Handle<Object> value) {
14648 float cast_value = static_cast<float>(base::OS::nan_value());
14649 if (index < static_cast<uint32_t>(array->length())) {
14650 if (value->IsSmi()) {
14651 int int_value = Handle<Smi>::cast(value)->value();
14652 cast_value = static_cast<float>(int_value);
14653 } else if (value->IsHeapNumber()) {
14654 double double_value = Handle<HeapNumber>::cast(value)->value();
14655 cast_value = static_cast<float>(double_value);
14657 // Clamp undefined to NaN (default). All other types have been
14658 // converted to a number type further up in the call chain.
14659 DCHECK(value->IsUndefined());
14661 array->set(index, cast_value);
14663 return array->GetIsolate()->factory()->NewNumber(cast_value);
// Float64 setter: any number is stored as-is; undefined stores NaN (the
// default). Out-of-bounds indices are ignored. Returns the stored value.
14667 Handle<Object> ExternalFloat64Array::SetValue(
14668 Handle<ExternalFloat64Array> array,
14670 Handle<Object> value) {
14671 double double_value = base::OS::nan_value();
14672 if (index < static_cast<uint32_t>(array->length())) {
14673 if (value->IsNumber()) {
14674 double_value = value->Number();
14676 // Clamp undefined to NaN (default). All other types have been
14677 // converted to a number type further up in the call chain.
14678 DCHECK(value->IsUndefined());
14680 array->set(index, double_value);
14682 return array->GetIsolate()->factory()->NewNumber(double_value);
// Returns the PropertyCell for |name| on a (slow-properties) global object,
// lazily creating one when absent: the new cell holds the-hole and its
// details are marked deleted, so it represents a not-yet-defined property.
// The elided tail of the creation branch presumably returns the new cell —
// confirm against the full source.
14686 Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
14687 Handle<JSGlobalObject> global,
14688 Handle<Name> name) {
14689 DCHECK(!global->HasFastProperties());
14690 int entry = global->property_dictionary()->FindEntry(name);
14691 if (entry == NameDictionary::kNotFound) {
14692 Isolate* isolate = global->GetIsolate();
14693 Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
14694 isolate->factory()->the_hole_value());
14695 PropertyDetails details(NONE, NORMAL, 0);
14696 details = details.AsDeleted();
14697 Handle<NameDictionary> dictionary = NameDictionary::Add(
14698 handle(global->property_dictionary()), name, cell, details);
14699 global->set_properties(*dictionary);
14702 Object* value = global->property_dictionary()->ValueAt(entry);
14703 DCHECK(value->IsPropertyCell());
14704 return handle(PropertyCell::cast(value));
14709 // This class is used for looking up two character strings in the string table.
14710 // If we don't have a hit we don't want to waste much time so we unroll the
14711 // string hash calculation loop here for speed. Doesn't work if the two
14712 // characters form a decimal integer, since such strings have a different hash
// The constructor reproduces StringHasher's algorithm for exactly two
// characters (the DCHECK below verifies this against
// StringHasher::HashSequentialString), so table lookups can reuse the hash
// without materializing a string.
14714 class TwoCharHashTableKey : public HashTableKey {
14716 TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
14717 : c1_(c1), c2_(c2) {
14719 uint32_t hash = seed;
14721 hash += hash << 10;
14725 hash += hash << 10;
14729 hash ^= hash >> 11;
14730 hash += hash << 15;
// Hash values of zero are not representable in the hash field; use the
// designated replacement value.
14731 if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
14734 // If this assert fails then we failed to reproduce the two-character
14735 // version of the string hashing algorithm above. One reason could be
14736 // that we were passed two digits as characters, since the hash
14737 // algorithm is different in that case.
14738 uint16_t chars[2] = {c1, c2};
14739 uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
14740 hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
14741 DCHECK_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
// Matches only two-character strings whose code units equal (c1_, c2_).
14745 bool IsMatch(Object* o) OVERRIDE {
14746 if (!o->IsString()) return false;
14747 String* other = String::cast(o);
14748 if (other->length() != 2) return false;
14749 if (other->Get(0) != c1_) return false;
14750 return other->Get(1) == c2_;
14753 uint32_t Hash() OVERRIDE { return hash_; }
14754 uint32_t HashForObject(Object* key) OVERRIDE {
14755 if (!key->IsString()) return 0;
14756 return String::cast(key)->Hash();
// This key type is lookup-only; it is never inserted, so AsHandle
// deliberately fails.
14759 Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
14760 // The TwoCharHashTableKey is only used for looking in the string
14761 // table, not for adding to it.
14763 return MaybeHandle<Object>().ToHandleChecked();
// Returns the internalized version of |string| if one exists in the string
// table (already-internalized strings short-circuit on the elided branch);
// otherwise an empty MaybeHandle.
14773 MaybeHandle<String> StringTable::InternalizeStringIfExists(
14775 Handle<String> string) {
14776 if (string->IsInternalizedString()) {
14779 return LookupStringIfExists(isolate, string);
// Looks up |string| in the isolate's string table without inserting it.
// Returns the (internalized) table entry on a hit, empty MaybeHandle on a
// miss.
14783 MaybeHandle<String> StringTable::LookupStringIfExists(
14785 Handle<String> string) {
14786 Handle<StringTable> string_table = isolate->factory()->string_table();
14787 InternalizedStringKey key(string);
14788 int entry = string_table->FindEntry(&key);
14789 if (entry == kNotFound) {
14790 return MaybeHandle<String>();
14792 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
14793 DCHECK(StringShape(*result).IsInternalized());
// Fast-path lookup for a two-character string via TwoCharHashTableKey,
// without ever allocating the string. Hit returns the internalized entry;
// miss returns an empty MaybeHandle.
14799 MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
14803 Handle<StringTable> string_table = isolate->factory()->string_table();
14804 TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
14805 int entry = string_table->FindEntry(&key);
14806 if (entry == kNotFound) {
14807 return MaybeHandle<String>();
14809 Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
14810 DCHECK(StringShape(*result).IsInternalized());
// Pre-grows the string table so the deserializer can insert |expected|
// strings without triggering growth mid-deserialization. The dummy key only
// serves EnsureCapacity's virtual-hash interface; it is never inserted.
14816 void StringTable::EnsureCapacityForDeserialization(Isolate* isolate,
14818 Handle<StringTable> table = isolate->factory()->string_table();
14819 // We need a key instance for the virtual hash function.
14820 InternalizedStringKey dummy_key(Handle<String>::null());
14821 table = StringTable::EnsureCapacity(table, expected, &dummy_key);
14822 isolate->factory()->set_string_table(table);
// Interns |string|: returns the existing internalized equivalent or inserts
// one via LookupKey.
14826 Handle<String> StringTable::LookupString(Isolate* isolate,
14827 Handle<String> string) {
14828 InternalizedStringKey key(string);
14829 return LookupKey(isolate, &key);
// Core intern operation: returns the existing table entry matching |key|,
// or materializes the key's string (key->AsHandle), inserts it, and stores
// the (possibly grown) table back on the factory. AsHandle must not fail
// here — internalization never runs on strings that could exceed the
// maximum length, hence the CHECK.
14833 Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
14834 Handle<StringTable> table = isolate->factory()->string_table();
14835 int entry = table->FindEntry(key);
14837 // String already in table.
14838 if (entry != kNotFound) {
14839 return handle(String::cast(table->KeyAt(entry)), isolate);
14842 // Adding new string. Grow table if needed.
14843 table = StringTable::EnsureCapacity(table, 1, key);
14845 // Create string object.
14846 Handle<Object> string = key->AsHandle(isolate);
14847 // There must be no attempts to internalize strings that could throw
14848 // InvalidStringLength error.
14849 CHECK(!string.is_null());
14851 // Add the new string and return it along with the string table.
14852 entry = table->FindInsertionEntry(key->Hash());
14853 table->set(EntryToIndex(entry), *string);
14854 table->ElementAdded();
14856 isolate->factory()->set_string_table(table);
14857 return Handle<String>::cast(string);
// Looks up a script cache entry keyed by (source, current context's
// closure's shared info, strict mode). The key slot of a live entry is a
// FixedArray (StringSharedKey::AsHandle); number keys are age counters (see
// Age), so a non-FixedArray key slot means "no cached value". The cached
// value lives in the slot after the key.
14861 Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
14862 Handle<Context> context) {
14863 Isolate* isolate = GetIsolate();
14864 Handle<SharedFunctionInfo> shared(context->closure()->shared());
14865 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
14866 RelocInfo::kNoPosition);
14867 int entry = FindEntry(&key);
14868 if (entry == kNotFound) return isolate->factory()->undefined_value();
14869 int index = EntryToIndex(entry);
14870 if (!get(index)->IsFixedArray()) return isolate->factory()->undefined_value();
14871 return Handle<Object>(get(index + 1), isolate);
// Eval-cache lookup keyed by (source, outer shared function info, strict
// mode, scope position) — the tuple that unambiguously identifies the
// context chain the cached eval code assumes. Same key-slot convention as
// Lookup above: only FixedArray keys carry a cached value.
14875 Handle<Object> CompilationCacheTable::LookupEval(
14876 Handle<String> src, Handle<SharedFunctionInfo> outer_info,
14877 StrictMode strict_mode, int scope_position) {
14878 Isolate* isolate = GetIsolate();
14879 // Cache key is the tuple (source, outer shared function info, scope position)
14880 // to unambiguously identify the context chain the cached eval code assumes.
14881 StringSharedKey key(src, outer_info, strict_mode, scope_position);
14882 int entry = FindEntry(&key);
14883 if (entry == kNotFound) return isolate->factory()->undefined_value();
14884 int index = EntryToIndex(entry);
14885 if (!get(index)->IsFixedArray()) return isolate->factory()->undefined_value();
14886 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// RegExp-cache lookup keyed by (pattern source, flags). Returns the cached
// value (slot after the key) or undefined on a miss. No allocation happens
// during the probe, hence DisallowHeapAllocation.
14890 Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
14891 JSRegExp::Flags flags) {
14892 Isolate* isolate = GetIsolate();
14893 DisallowHeapAllocation no_allocation;
14894 RegExpKey key(src, flags);
14895 int entry = FindEntry(&key);
14896 if (entry == kNotFound) return isolate->factory()->undefined_value();
14897 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
// Inserts/updates a script cache entry. An existing entry gets the real
// key (FixedArray) and |value|. A brand-new entry is first "pre-seeded":
// its key slot holds the numeric hash and its value slot an age counter of
// kHashGenerations — presumably so the entry must survive several Age()
// generations before the real value is cached; confirm against the elided
// lines and callers.
14901 Handle<CompilationCacheTable> CompilationCacheTable::Put(
14902 Handle<CompilationCacheTable> cache, Handle<String> src,
14903 Handle<Context> context, Handle<Object> value) {
14904 Isolate* isolate = cache->GetIsolate();
14905 Handle<SharedFunctionInfo> shared(context->closure()->shared());
14906 StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
14907 RelocInfo::kNoPosition);
14908 int entry = cache->FindEntry(&key);
14909 if (entry != kNotFound) {
14910 Handle<Object> k = key.AsHandle(isolate);
14911 cache->set(EntryToIndex(entry), *k);
14912 cache->set(EntryToIndex(entry) + 1, *value);
14916 cache = EnsureCapacity(cache, 1, &key);
14917 entry = cache->FindInsertionEntry(key.Hash());
14919 isolate->factory()->NewNumber(static_cast<double>(key.Hash()));
14920 cache->set(EntryToIndex(entry), *k);
14921 cache->set(EntryToIndex(entry) + 1, Smi::FromInt(kHashGenerations));
14922 cache->ElementAdded();
// Eval-cache counterpart of Put above, keyed by (source, outer shared info,
// the value's strict mode, scope position). Same two-phase behavior: update
// an existing entry with the real key and value, or pre-seed a new entry
// with the numeric hash and a kHashGenerations age counter.
14927 Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
14928 Handle<CompilationCacheTable> cache, Handle<String> src,
14929 Handle<SharedFunctionInfo> outer_info, Handle<SharedFunctionInfo> value,
14930 int scope_position) {
14931 Isolate* isolate = cache->GetIsolate();
14932 StringSharedKey key(src, outer_info, value->strict_mode(), scope_position);
14933 int entry = cache->FindEntry(&key);
14934 if (entry != kNotFound) {
14935 Handle<Object> k = key.AsHandle(isolate);
14936 cache->set(EntryToIndex(entry), *k);
14937 cache->set(EntryToIndex(entry) + 1, *value);
14941 cache = EnsureCapacity(cache, 1, &key);
14942 entry = cache->FindInsertionEntry(key.Hash());
14944 isolate->factory()->NewNumber(static_cast<double>(key.Hash()));
14945 cache->set(EntryToIndex(entry), *k);
14946 cache->set(EntryToIndex(entry) + 1, Smi::FromInt(kHashGenerations));
14947 cache->ElementAdded();
// Caches the compiled regexp data |value| under the (source, flags) key.
// Unlike Put()/PutEval(), the entry is written unconditionally at the
// insertion point for the key's hash.
Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    JSRegExp::Flags flags, Handle<FixedArray> value) {
  RegExpKey key(src, flags);
  cache = EnsureCapacity(cache, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());
  // We store the value in the key slot, and compare the search key
  // to the stored value with a custom IsMatch function during lookups.
  cache->set(EntryToIndex(entry), *value);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
// Ages all cache entries one step, clearing out stale ones by writing
// the-hole into both slots (no write barrier needed since the hole is
// immortal). Runs under DisallowHeapAllocation, so only in-place Smi
// and hole writes are performed.
void CompilationCacheTable::Age() {
  DisallowHeapAllocation no_allocation;
  Object* the_hole_value = GetHeap()->the_hole_value();
  for (int entry = 0, size = Capacity(); entry < size; entry++) {
    int entry_index = EntryToIndex(entry);
    int value_index = entry_index + 1;
    if (get(entry_index)->IsNumber()) {
      // Number key: a freshly seeded entry (see Put()) whose value is a
      // generation countdown. Decrement, and evict when it hits zero.
      Smi* count = Smi::cast(get(value_index));
      count = Smi::FromInt(count->value() - 1);
      if (count->value() == 0) {
        NoWriteBarrierSet(this, entry_index, the_hole_value);
        NoWriteBarrierSet(this, value_index, the_hole_value);
      NoWriteBarrierSet(this, value_index, count);
    } else if (get(entry_index)->IsFixedArray()) {
      // FixedArray key (StringSharedKey form): value is a
      // SharedFunctionInfo. Evict it once its code has aged out or is no
      // longer unoptimized function code.
      SharedFunctionInfo* info = SharedFunctionInfo::cast(get(value_index));
      if (info->code()->kind() != Code::FUNCTION || info->code()->IsOld()) {
        NoWriteBarrierSet(this, entry_index, the_hole_value);
        NoWriteBarrierSet(this, value_index, the_hole_value);
// Removes every entry whose value slot holds exactly |value| by holing
// out both the key and value slots. Linear scan over the whole table.
void CompilationCacheTable::Remove(Object* value) {
  DisallowHeapAllocation no_allocation;
  Object* the_hole_value = GetHeap()->the_hole_value();
  for (int entry = 0, size = Capacity(); entry < size; entry++) {
    int entry_index = EntryToIndex(entry);
    int value_index = entry_index + 1;
    if (get(value_index) == value) {
      NoWriteBarrierSet(this, entry_index, the_hole_value);
      NoWriteBarrierSet(this, value_index, the_hole_value);
// StringsKey used for HashTable where key is array of internalized strings.
// Equality is element-wise pointer identity (valid because the strings are
// internalized); the hash is the XOR of the element hashes.
class StringsKey : public HashTableKey {
  explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }
  // Matches iff |strings| is a FixedArray of the same length whose
  // elements are pointer-identical to ours.
  bool IsMatch(Object* strings) OVERRIDE {
    FixedArray* o = FixedArray::cast(strings);
    int len = strings_->length();
    if (o->length() != len) return false;
    for (int i = 0; i < len; i++) {
      if (o->get(i) != strings_->get(i)) return false;
  uint32_t Hash() OVERRIDE { return HashForObject(*strings_); }
  uint32_t HashForObject(Object* obj) OVERRIDE {
    FixedArray* strings = FixedArray::cast(obj);
    int len = strings->length();
    for (int i = 0; i < len; i++) {
      hash ^= String::cast(strings->get(i))->Hash();
  // The key object stored in the table is the FixedArray itself.
  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE { return strings_; }
  Handle<FixedArray> strings_;
// Looks up the map cached for the given array of property names.
// Returns undefined (not the hole) when absent.
Object* MapCache::Lookup(FixedArray* array) {
  DisallowHeapAllocation no_alloc;
  StringsKey key(handle(array));
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
// Adds a (property-name-array -> map) entry. Always inserts at the key's
// insertion point; callers are expected to have checked for an existing
// entry via Lookup(). Returns the (possibly reallocated) cache.
Handle<MapCache> MapCache::Put(
    Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
  StringsKey key(array);
  Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
  int entry = new_cache->FindInsertionEntry(key.Hash());
  new_cache->set(EntryToIndex(entry), *array);
  new_cache->set(EntryToIndex(entry) + 1, *value);
  new_cache->ElementAdded();
// Allocates a new dictionary with room for at least |at_least_space_for|
// elements, and seeds the enumeration-index counter.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::New(
    int at_least_space_for,
    PretenureFlag pretenure) {
  DCHECK(0 <= at_least_space_for);
  Handle<Derived> dict = DerivedHashTable::New(isolate,
                                               at_least_space_for,
                                               USE_DEFAULT_MINIMUM_CAPACITY,
  // Initialize the next enumeration index.
  dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
// Builds a FixedArray of dictionary slot indices sorted by the entries'
// enumeration order (their dictionary_index). Used to visit properties
// in insertion/enumeration order despite the hash-based slot layout.
template <typename Derived, typename Shape, typename Key>
Handle<FixedArray> Dictionary<Derived, Shape, Key>::BuildIterationIndicesArray(
    Handle<Derived> dictionary) {
  Factory* factory = dictionary->GetIsolate()->factory();
  int length = dictionary->NumberOfElements();
  Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
  Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);
  // Fill both the iteration order array and the enumeration order array
  // with property details.
  int capacity = dictionary->Capacity();
  for (int i = 0; i < capacity; i++) {
    if (dictionary->IsKey(dictionary->KeyAt(i))) {
      // Pair up each live slot index with its enumeration index so the
      // pairwise sort below can order slots by enumeration index.
      int index = dictionary->DetailsAt(i).dictionary_index();
      iteration_order->set(pos, Smi::FromInt(i));
      enumeration_order->set(pos, Smi::FromInt(index));
  DCHECK(pos == length);
  // Sort the arrays wrt. enumeration order.
  iteration_order->SortPairs(*enumeration_order, enumeration_order->length());
  return iteration_order;
// Renumbers all enumeration indices densely from kInitialIndex while
// preserving the existing relative enumeration order. Called when the
// next-enumeration-index counter is about to overflow its field.
template <typename Derived, typename Shape, typename Key>
Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
    Handle<Derived> dictionary) {
  int length = dictionary->NumberOfElements();
  Handle<FixedArray> iteration_order = BuildIterationIndicesArray(dictionary);
  DCHECK(iteration_order->length() == length);
  // Iterate over the dictionary using the enumeration order and update
  // the dictionary with new enumeration indices.
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(iteration_order->get(i))->value();
    DCHECK(dictionary->IsKey(dictionary->KeyAt(index)));
    // The i-th property in enumeration order gets index kInitialIndex + i.
    int enum_index = PropertyDetails::kInitialIndex + i;
    PropertyDetails details = dictionary->DetailsAt(index);
    PropertyDetails new_details =
        PropertyDetails(details.attributes(), details.type(), enum_index);
    dictionary->DetailsAtPut(index, new_details);
  // Set the next enumeration index.
  dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
  return iteration_order;
// Ensures room for |n| more elements. Additionally renumbers enumeration
// indices first if adding |n| entries could overflow the index field.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
    Handle<Derived> dictionary, int n, Key key) {
  // Check whether there are enough enumeration indices to add n elements.
  if (Shape::kIsEnumerable &&
      !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
    // If not, we generate new indices for the properties.
    GenerateNewEnumerationIndices(dictionary);
  return DerivedHashTable::EnsureCapacity(dictionary, n, key);
// Deletes the property at |entry|. Returns false_value when the property
// is non-configurable and deletion is not forced; otherwise holes out
// the entry and returns true_value.
template<typename Derived, typename Shape, typename Key>
Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
    Handle<Derived> dictionary,
    JSObject::DeleteMode mode) {
  Factory* factory = dictionary->GetIsolate()->factory();
  PropertyDetails details = dictionary->DetailsAt(entry);
  // Ignore attributes if forcing a deletion.
  if (!details.IsConfigurable() && mode != JSReceiver::FORCE_DELETION) {
    return factory->false_value();
  // Replace both key and value with the hole to mark the slot deleted.
  dictionary->SetEntry(
      entry, factory->the_hole_value(), factory->the_hole_value());
  dictionary->ElementRemoved();
  return factory->true_value();
// Sets |key| to |value|: overwrites the value in place when the key is
// already present, otherwise adds a fresh entry with default (NONE,
// NORMAL, no enum index) details.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
    Handle<Derived> dictionary, Key key, Handle<Object> value) {
  int entry = dictionary->FindEntry(key);
  // If the entry is present set the value;
  if (entry != Dictionary::kNotFound) {
    dictionary->ValueAtPut(entry, *value);
  // Check whether the dictionary should be extended.
  dictionary = EnsureCapacity(dictionary, 1, key);
  // USE() silences unused-result warnings; AsHandle may be needed for its
  // side effects for some Shape implementations.
  USE(Shape::AsHandle(dictionary->GetIsolate(), key));
  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
  AddEntry(dictionary, key, value, details, dictionary->Hash(key));
// Adds a new entry with explicit PropertyDetails. The key must not
// already be present (checked in slow DCHECK builds only).
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
    Handle<Derived> dictionary,
    Handle<Object> value,
    PropertyDetails details) {
  // Validate key is absent.
  SLOW_DCHECK((dictionary->FindEntry(key) == Dictionary::kNotFound));
  // Check whether the dictionary should be extended.
  dictionary = EnsureCapacity(dictionary, 1, key);
  AddEntry(dictionary, key, value, details, dictionary->Hash(key));
// Add a key, value pair to the dictionary. Assigns a fresh enumeration
// index to enumerable entries that do not carry one yet, then writes the
// entry at the insertion slot for |hash|.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::AddEntry(
    Handle<Derived> dictionary,
    Handle<Object> value,
    PropertyDetails details,
  // Compute the key object.
  Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);
  uint32_t entry = dictionary->FindInsertionEntry(hash);
  // Insert element at empty or deleted entry
  if (!details.IsDeleted() &&
      details.dictionary_index() == 0 &&
      Shape::kIsEnumerable) {
    // Assign an enumeration index to the property and update
    // SetNextEnumerationIndex.
    int index = dictionary->NextEnumerationIndex();
    details = PropertyDetails(details.attributes(), details.type(), index);
    dictionary->SetNextEnumerationIndex(index + 1);
  dictionary->SetEntry(entry, k, value, details);
  // Dictionary keys are either numbers (element dictionaries) or Names
  // (property dictionaries).
  DCHECK((dictionary->KeyAt(entry)->IsNumber() ||
          dictionary->KeyAt(entry)->IsName()));
  dictionary->ElementAdded();
// Tracks the largest numeric key seen, and flips the dictionary into
// "requires slow elements" mode once a key above the limit is added.
void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
  DisallowHeapAllocation no_allocation;
  // If the dictionary requires slow elements an element has already
  // been added at a high index.
  if (requires_slow_elements()) return;
  // Check if this index is high enough that we should require slow
  if (key > kRequiresSlowElementsLimit) {
    set_requires_slow_elements();
  // Update max key value.
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi() || max_number_key() < key) {
    // The key is stored shifted left to leave room for the
    // requires-slow-elements tag bit(s) in the low bits.
    FixedArray::set(kMaxNumberKeyIndex,
                    Smi::FromInt(key << kRequiresSlowElementsTagSize));
// Adds a numeric-keyed entry (key must be absent), keeping the
// max-number-key bookkeeping up to date first.
Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
    Handle<SeededNumberDictionary> dictionary,
    Handle<Object> value,
    PropertyDetails details) {
  dictionary->UpdateMaxNumberKey(key);
  SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
  return Add(dictionary, key, value, details);
// Adds a numeric-keyed entry (key must be absent) with default details;
// the unseeded variant keeps no max-key bookkeeping.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
    Handle<UnseededNumberDictionary> dictionary,
    Handle<Object> value) {
  SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
  return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
// Insert-or-overwrite for a numeric key, updating max-key bookkeeping.
Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
    Handle<SeededNumberDictionary> dictionary,
    Handle<Object> value) {
  dictionary->UpdateMaxNumberKey(key);
  return AtPut(dictionary, key, value);
// Insert-or-overwrite for a numeric key (no bookkeeping needed here).
Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
    Handle<UnseededNumberDictionary> dictionary,
    Handle<Object> value) {
  return AtPut(dictionary, key, value);
// Sets |key| with explicit details. New keys go through AddNumberEntry;
// existing entries are overwritten while keeping their enumeration index.
Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
    Handle<SeededNumberDictionary> dictionary,
    Handle<Object> value,
    PropertyDetails details) {
  int entry = dictionary->FindEntry(key);
  if (entry == kNotFound) {
    return AddNumberEntry(dictionary, key, value, details);
  // Preserve enumeration index.
  details = PropertyDetails(details.attributes(),
                            dictionary->DetailsAt(entry).dictionary_index());
  Handle<Object> object_key =
      SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
  dictionary->SetEntry(entry, object_key, value, details);
// Sets |key| to |value|; delegates to AddNumberEntry for new keys and
// rewrites the entry in place otherwise.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
    Handle<UnseededNumberDictionary> dictionary,
    Handle<Object> value) {
  int entry = dictionary->FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
  Handle<Object> object_key =
      UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
  dictionary->SetEntry(entry, object_key, value);
// Counts live (non-deleted) entries whose attributes pass |filter|
// (an entry is counted when none of the filtered attribute bits is set).
template<typename Derived, typename Shape, typename Key>
int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
    PropertyAttributes filter) {
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) result++;
// Counts enumerable entries: everything except DONT_ENUM properties and
// symbol-named ones (SYMBOLIC is filtered alongside DONT_ENUM).
template<typename Derived, typename Shape, typename Key>
int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
  return NumberOfElementsFilterAttributes(
      static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
// Returns true when any live entry is an accessor (CALLBACKS) or carries
// a non-default attribute (READ_ONLY, DONT_DELETE, DONT_ENUM) — i.e. the
// elements cannot be treated as plain writable/enumerable data.
template <typename Derived, typename Shape, typename Key>
bool Dictionary<Derived, Shape, Key>::HasComplexElements() {
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, NONE)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      if (details.type() == CALLBACKS) return true;
      PropertyAttributes attr = details.attributes();
      if (attr & (READ_ONLY | DONT_DELETE | DONT_ENUM)) return true;
// Copies all keys passing |filter| into |storage| (starting at slot 0),
// optionally sorting the result. |storage| must be large enough.
template <typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyKeysTo(
    FixedArray* storage, PropertyAttributes filter,
    typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
  DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
  if (sort_mode == Dictionary::SORTED) {
    storage->SortPairs(storage, index);
  DCHECK(storage->length() >= index);
// Comparator ordering dictionary slot indices (as Smis) by the
// enumeration index recorded in each slot's PropertyDetails. Used by
// NameDictionary::CopyEnumKeysTo to std::sort the slot array.
struct EnumIndexComparator {
  explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
  bool operator() (Smi* a, Smi* b) {
    PropertyDetails da(dict->DetailsAt(a->value()));
    PropertyDetails db(dict->DetailsAt(b->value()));
    return da.dictionary_index() < db.dictionary_index();
  // Not owned; must outlive the sort.
  NameDictionary* dict;
// Fills |storage| with this dictionary's enumerable, non-symbol keys in
// enumeration order. |storage| must have exactly NumberOfEnumElements()
// slots (checked below). Two passes: collect slot indices, sort them by
// enumeration index, then replace each index with the actual key.
void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
  int length = storage->length();
  int capacity = Capacity();
  int properties = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k) && !k->IsSymbol()) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted() || details.IsDontEnum()) continue;
      // Store the slot index for now; rewritten to the key after sorting.
      storage->set(properties, Smi::FromInt(i));
    if (properties == length) break;
  CHECK_EQ(length, properties);
  EnumIndexComparator cmp(this);
  // Sort the raw Smi slot indices in place by enumeration index.
  Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
  std::sort(start, start + length, cmp);
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(storage->get(i))->value();
    storage->set(i, KeyAt(index));
// Variant of CopyKeysTo that appends at a caller-supplied start offset
// (the |index| parameter elided between these lines) rather than at 0.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    PropertyAttributes filter,
    typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
  DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
  if (sort_mode == Dictionary::SORTED) {
    storage->SortPairs(storage, index);
  DCHECK(storage->length() >= index);
// Backwards lookup (slow). Linear scan for the first key whose value is
// |value|, looking through PropertyCell indirection for global objects.
// Returns undefined when no entry matches.
template<typename Derived, typename Shape, typename Key>
Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (Dictionary::IsKey(k)) {
      Object* e = ValueAt(i);
      if (e->IsPropertyCell()) {
        // Global dictionaries store values boxed in PropertyCells.
        e = PropertyCell::cast(e)->value();
      if (e == value) return k;
  Heap* heap = Dictionary::GetHeap();
  return heap->undefined_value();
// Looks up |key| by identity hash. Returns the hole (the table's "absent"
// sentinel) when the key has no identity hash yet or is not present.
Object* ObjectHashTable::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  DCHECK(IsKey(*key));
  // If the object does not have an identity hash, it was never used as a key.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    return GetHeap()->the_hole_value();
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToIndex(entry) + 1);
// Maps |key| to |value|, creating the key's identity hash on demand.
// |value| must not be the hole, since the hole marks absent/deleted
// entries. Returns the (possibly reallocated) table.
Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
                                             Handle<Object> key,
                                             Handle<Object> value) {
  DCHECK(table->IsKey(*key));
  DCHECK(!value->IsTheHole());
  Isolate* isolate = table->GetIsolate();
  // Make sure the key object has an identity hash code.
  Handle<Smi> hash = Object::GetOrCreateHash(isolate, key);
  int entry = table->FindEntry(key);
  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    table->set(EntryToIndex(entry) + 1, *value);
  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key);
  table->AddEntry(table->FindInsertionEntry(hash->value()),
// Removes |key| if present, reporting presence through |was_present|.
// A key without an identity hash can never have been inserted, so that
// case short-circuits. Shrinks the table after a successful removal.
Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table,
                                                Handle<Object> key,
                                                bool* was_present) {
  DCHECK(table->IsKey(*key));
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    *was_present = false;
  int entry = table->FindEntry(key);
  if (entry == kNotFound) {
    *was_present = false;
  *was_present = true;
  table->RemoveEntry(entry);
  return Shrink(table, key);
// Writes the key/value pair into the (empty or deleted) slot |entry|.
void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
  set(EntryToIndex(entry), key);
  set(EntryToIndex(entry) + 1, value);
// Marks slot |entry| deleted by holing out both its key and value.
void ObjectHashTable::RemoveEntry(int entry) {
  set_the_hole(EntryToIndex(entry));
  set_the_hole(EntryToIndex(entry) + 1);
// Looks up |key|; returns the hole when absent.
Object* WeakHashTable::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  DCHECK(IsKey(*key));
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToValueIndex(entry));
// Maps |key| to |value|, overwriting in place when already present.
// Writes deliberately skip the write barrier (see TODO below), and new
// tables are tenured since weak tables are long-lived.
Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
                                         Handle<Object> key,
                                         Handle<Object> value) {
  DCHECK(table->IsKey(*key));
  int entry = table->FindEntry(key);
  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    // TODO(ulan): Skipping write barrier is a temporary solution to avoid
    // memory leaks. Remove this once we have special visitor for weak fixed
    table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key, TENURED);
  table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
// Writes key and value into slot |entry|, skipping the write barrier so
// the GC does not treat these slots as strong references.
void WeakHashTable::AddEntry(int entry,
                             Handle<Object> key,
                             Handle<Object> value) {
  DisallowHeapAllocation no_allocation;
  // TODO(ulan): Skipping write barrier is a temporary solution to avoid
  // memory leaks. Remove this once we have special visitor for weak fixed
  set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
  set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
// Allocates a fresh ordered hash table with at least |capacity| entry
// slots. Layout (a FixedArray): header, |num_buckets| bucket heads
// (each a chain start or kNotFound), then capacity * kEntrySize data
// slots. Fatally aborts rather than overflowing kMaxCapacity.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure) {
  // Capacity must be a power of two, since we depend on being able
  // to divide and multiply by 2 (kLoadFactor) to derive capacity
  // from number of buckets. If we decide to change kLoadFactor
  // to something other than 2, capacity should be stored as another
  // field of this object.
  capacity = base::bits::RoundUpToPowerOfTwo32(Max(kMinCapacity, capacity));
  if (capacity > kMaxCapacity) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
  int num_buckets = capacity / kLoadFactor;
  Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
      kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
  backing_store->set_map_no_write_barrier(
      isolate->heap()->ordered_hash_table_map());
  Handle<Derived> table = Handle<Derived>::cast(backing_store);
  // Every bucket starts out as an empty chain.
  for (int i = 0; i < num_buckets; ++i) {
    table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
  table->SetNumberOfBuckets(num_buckets);
  table->SetNumberOfElements(0);
  table->SetNumberOfDeletedElements(0);
// Ensures at least one free entry slot, rehashing when live + deleted
// entries fill the capacity. Doubles capacity unless at least half the
// slots are deleted tombstones, in which case rehashing at the same
// capacity suffices to reclaim them.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
    Handle<Derived> table) {
  DCHECK(!table->IsObsolete());
  int nof = table->NumberOfElements();
  int nod = table->NumberOfDeletedElements();
  int capacity = table->Capacity();
  if ((nof + nod) < capacity) return table;
  // Don't need to grow if we can simply clear out deleted entries instead.
  // Note that we can't compact in place, though, so we always allocate
  return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
// Halves the capacity when occupancy drops below a quarter.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
    Handle<Derived> table) {
  DCHECK(!table->IsObsolete());
  int nof = table->NumberOfElements();
  int capacity = table->Capacity();
  if (nof >= (capacity >> 2)) return table;
  return Rehash(table, capacity / 2);
// Clears the table by allocating a fresh empty one and marking the old
// table obsolete, chained to the new one so that live iterators can
// transition. Deleted-count -1 on the old table signals "cleared" (as
// opposed to rehashed) to OrderedHashTableIterator::Transition().
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
    Handle<Derived> table) {
  DCHECK(!table->IsObsolete());
  // Keep the replacement in the same space as the original.
  Handle<Derived> new_table =
      Allocate(table->GetIsolate(),
               table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
  table->SetNextTable(*new_table);
  table->SetNumberOfDeletedElements(-1);
// Removes |key| if present (reported via |was_present|), then shrinks
// the table if occupancy allows.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove(
    Handle<Derived> table, Handle<Object> key, bool* was_present) {
  int entry = table->FindEntry(key);
  if (entry == kNotFound) {
    *was_present = false;
  *was_present = true;
  table->RemoveEntry(entry);
  return Shrink(table);
// Copies all live entries into a freshly allocated table of
// |new_capacity|, rebuilding the bucket chains, and marks this table
// obsolete (chained to the new one). Removed-entry indices are recorded
// on the old table so iterators can adjust their position in
// Transition().
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
    Handle<Derived> table, int new_capacity) {
  DCHECK(!table->IsObsolete());
  Handle<Derived> new_table =
      Allocate(table->GetIsolate(),
               table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
  int nof = table->NumberOfElements();
  int nod = table->NumberOfDeletedElements();
  int new_buckets = new_table->NumberOfBuckets();
  int removed_holes_index = 0;
  // Entries are stored densely in insertion order, so iterating the first
  // nof + nod entries visits every live or deleted entry exactly once.
  for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
    Object* key = table->KeyAt(old_entry);
    if (key->IsTheHole()) {
      // Record the position of each deleted entry for iterator fixup.
      table->SetRemovedIndexAt(removed_holes_index++, old_entry);
    // Link the copied entry at the head of its (new) bucket chain.
    Object* hash = key->GetHash();
    int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
    Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
    new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
    int new_index = new_table->EntryToIndex(new_entry);
    int old_index = table->EntryToIndex(old_entry);
    for (int i = 0; i < entrysize; ++i) {
      Object* value = table->get(old_index + i);
      new_table->set(new_index + i, value);
    new_table->set(new_index + kChainOffset, chain_entry);
  DCHECK_EQ(nod, removed_holes_index);
  new_table->SetNumberOfElements(nof);
  table->SetNextTable(*new_table);
// Finds the entry for |key| given its precomputed |hash| by walking the
// bucket's chain, comparing with SameValueZero semantics (as used by
// JS Map/Set).
template <class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
    Handle<Object> key, int hash) {
  DCHECK(!IsObsolete());
  DisallowHeapAllocation no_gc;
  DCHECK(!key->IsTheHole());
  for (int entry = HashToEntry(hash); entry != kNotFound;
       entry = ChainAt(entry)) {
    Object* candidate = KeyAt(entry);
    if (candidate->SameValueZero(*key))
// Convenience overload: a key with no (Smi) identity hash was never
// inserted, so it cannot be found.
template <class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
    Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  Object* hash = key->GetHash();
  if (!hash->IsSmi()) return kNotFound;
  return FindEntry(key, Smi::cast(hash)->value());
// Claims the next dense entry slot for an element with |hash|, links it
// at the head of its bucket chain, and bumps the element count. The
// caller writes the key (and value) into the returned data slots.
template <class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) {
  DCHECK(!IsObsolete());
  int entry = UsedCapacity();
  int bucket = HashToBucket(hash);
  int index = EntryToIndex(entry);
  Object* chain_entry = get(kHashTableStartIndex + bucket);
  set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
  set(index + kChainOffset, chain_entry);
  SetNumberOfElements(NumberOfElements() + 1);
// Holes out the entry's data slots (leaving its chain link intact as a
// tombstone) and updates the element/deleted counters.
template<class Derived, class Iterator, int entrysize>
void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
  DCHECK(!IsObsolete());
  int index = EntryToIndex(entry);
  for (int i = 0; i < entrysize; ++i) {
    set_the_hole(index + i);
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
// Explicit template instantiations of OrderedHashTable for the two
// concrete table types (sets with entrysize 1, maps with entrysize 2),
// keeping the template definitions out of the header.
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure);
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
    Handle<OrderedHashSet> table);
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
    Handle<OrderedHashSet> table);
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
    Handle<OrderedHashSet> table);
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Remove(
    Handle<OrderedHashSet> table, Handle<Object> key, bool* was_present);
template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
    Handle<Object> key, int hash);
template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
    Handle<Object> key);
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure);
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
    Handle<OrderedHashMap> table);
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
    Handle<OrderedHashMap> table);
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
    Handle<OrderedHashMap> table);
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Remove(
    Handle<OrderedHashMap> table, Handle<Object> key, bool* was_present);
template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
    Handle<Object> key, int hash);
template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
    Handle<Object> key);
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);
// Membership test: true iff |key| is present in the set.
bool OrderedHashSet::Contains(Handle<Object> key) {
  return FindEntry(key) != kNotFound;
// Adds |key| to the set (creating its identity hash on demand); a
// duplicate add returns the table unchanged. Returns the (possibly
// reallocated) table.
Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
                                           Handle<Object> key) {
  int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
  if (table->FindEntry(key, hash) != kNotFound) return table;
  table = EnsureGrowable(table);
  int index = table->AddEntry(hash);
  table->set(index, *key);
// Returns the value mapped to |key|, or the hole when absent.
Object* OrderedHashMap::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return ValueAt(entry);
// Maps |key| to |value|, overwriting in place for an existing key and
// appending a new entry otherwise. Returns the (possibly reallocated)
// table. The hole is reserved as the deleted-entry marker, so it cannot
// be used as a key.
Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table,
                                           Handle<Object> key,
                                           Handle<Object> value) {
  DCHECK(!key->IsTheHole());
  int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
  int entry = table->FindEntry(key, hash);
  if (entry != kNotFound) {
    table->set(table->EntryToIndex(entry) + kValueOffset, *value);
  table = EnsureGrowable(table);
  int index = table->AddEntry(hash);
  table->set(index, *key);
  table->set(index + kValueOffset, *value);
// Migrates this iterator off an obsolete table: follows the NextTable
// chain, adjusting the iterator's index downward for every entry that
// was removed before it during each rehash (a cleared table — deleted
// count -1 — resets the position instead).
template<class Derived, class TableType>
void OrderedHashTableIterator<Derived, TableType>::Transition() {
  DisallowHeapAllocation no_allocation;
  TableType* table = TableType::cast(this->table());
  if (!table->IsObsolete()) return;
  int index = Smi::cast(this->index())->value();
  while (table->IsObsolete()) {
    TableType* next_table = table->NextTable();
    int nod = table->NumberOfDeletedElements();
    // When we clear the table we set the number of deleted elements to -1.
    int old_index = index;
    for (int i = 0; i < nod; ++i) {
      // Removed indices are sorted; stop at the first one at/after us.
      int removed_index = table->RemovedIndexAt(i);
      if (removed_index >= old_index) break;
    table = next_table;
  set_index(Smi::FromInt(index));
// Advances past deleted (holed) entries and reports whether another
// element remains. When exhausted, the table reference is dropped
// (set to undefined) so the iterator no longer keeps it alive.
template<class Derived, class TableType>
bool OrderedHashTableIterator<Derived, TableType>::HasMore() {
  DisallowHeapAllocation no_allocation;
  if (this->table()->IsUndefined()) return false;
  TableType* table = TableType::cast(this->table());
  int index = Smi::cast(this->index())->value();
  int used_capacity = table->UsedCapacity();
  // Skip tombstones left by removed entries.
  while (index < used_capacity && table->KeyAt(index)->IsTheHole()) {
  set_index(Smi::FromInt(index));
  if (index < used_capacity) return true;
  set_table(GetHeap()->undefined_value());
15959 template<class Derived, class TableType>
15960 Smi* OrderedHashTableIterator<Derived, TableType>::Next(JSArray* value_array) {
15961 DisallowHeapAllocation no_allocation;
15963 FixedArray* array = FixedArray::cast(value_array->elements());
15964 static_cast<Derived*>(this)->PopulateValueArray(array);
15966 return Smi::cast(kind());
15968 return Smi::FromInt(0);
15973 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next(
15974 JSArray* value_array);
15977 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::HasMore();
15980 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::MoveNext();
15983 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::CurrentKey();
15986 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Transition();
15990 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
15991 JSArray* value_array);
15994 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::HasMore();
15997 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::MoveNext();
16000 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::CurrentKey();
16003 OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Transition();
16006 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
16007 DeclaredAccessorDescriptor* descriptor)
16008 : array_(descriptor->serialized_data()->GetDataStartAddress()),
16009 length_(descriptor->serialized_data()->length()),
16014 const DeclaredAccessorDescriptorData*
16015 DeclaredAccessorDescriptorIterator::Next() {
16016 DCHECK(offset_ < length_);
16017 uint8_t* ptr = &array_[offset_];
16018 DCHECK(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
16019 const DeclaredAccessorDescriptorData* data =
16020 reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
16021 offset_ += sizeof(*data);
16022 DCHECK(offset_ <= length_);
16027 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
16029 const DeclaredAccessorDescriptorData& descriptor,
16030 Handle<DeclaredAccessorDescriptor> previous) {
16031 int previous_length =
16032 previous.is_null() ? 0 : previous->serialized_data()->length();
16033 int length = sizeof(descriptor) + previous_length;
16034 Handle<ByteArray> serialized_descriptor =
16035 isolate->factory()->NewByteArray(length);
16036 Handle<DeclaredAccessorDescriptor> value =
16037 isolate->factory()->NewDeclaredAccessorDescriptor();
16038 value->set_serialized_data(*serialized_descriptor);
16039 // Copy in the data.
16041 DisallowHeapAllocation no_allocation;
16042 uint8_t* array = serialized_descriptor->GetDataStartAddress();
16043 if (previous_length != 0) {
16044 uint8_t* previous_array =
16045 previous->serialized_data()->GetDataStartAddress();
16046 MemCopy(array, previous_array, previous_length);
16047 array += previous_length;
16049 DCHECK(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
16050 DeclaredAccessorDescriptorData* data =
16051 reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
16052 *data = descriptor;
16058 // Check if there is a break point at this code position.
16059 bool DebugInfo::HasBreakPoint(int code_position) {
16060 // Get the break point info object for this code position.
16061 Object* break_point_info = GetBreakPointInfo(code_position);
16063 // If there is no break point info object or no break points in the break
16064 // point info object there is no break point at this code position.
16065 if (break_point_info->IsUndefined()) return false;
16066 return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
16070 // Get the break point info object for this code position.
16071 Object* DebugInfo::GetBreakPointInfo(int code_position) {
16072 // Find the index of the break point info object for this code position.
16073 int index = GetBreakPointInfoIndex(code_position);
16075 // Return the break point info object if any.
16076 if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
16077 return BreakPointInfo::cast(break_points()->get(index));
16081 // Clear a break point at the specified code position.
16082 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
16084 Handle<Object> break_point_object) {
16085 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16086 debug_info->GetIsolate());
16087 if (break_point_info->IsUndefined()) return;
16088 BreakPointInfo::ClearBreakPoint(
16089 Handle<BreakPointInfo>::cast(break_point_info),
16090 break_point_object);
16094 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
16096 int source_position,
16097 int statement_position,
16098 Handle<Object> break_point_object) {
16099 Isolate* isolate = debug_info->GetIsolate();
16100 Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
16102 if (!break_point_info->IsUndefined()) {
16103 BreakPointInfo::SetBreakPoint(
16104 Handle<BreakPointInfo>::cast(break_point_info),
16105 break_point_object);
16109 // Adding a new break point for a code position which did not have any
16110 // break points before. Try to find a free slot.
16111 int index = kNoBreakPointInfo;
16112 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16113 if (debug_info->break_points()->get(i)->IsUndefined()) {
16118 if (index == kNoBreakPointInfo) {
16119 // No free slot - extend break point info array.
16120 Handle<FixedArray> old_break_points =
16121 Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
16122 Handle<FixedArray> new_break_points =
16123 isolate->factory()->NewFixedArray(
16124 old_break_points->length() +
16125 DebugInfo::kEstimatedNofBreakPointsInFunction);
16127 debug_info->set_break_points(*new_break_points);
16128 for (int i = 0; i < old_break_points->length(); i++) {
16129 new_break_points->set(i, old_break_points->get(i));
16131 index = old_break_points->length();
16133 DCHECK(index != kNoBreakPointInfo);
16135 // Allocate new BreakPointInfo object and set the break point.
16136 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
16137 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
16138 new_break_point_info->set_code_position(Smi::FromInt(code_position));
16139 new_break_point_info->set_source_position(Smi::FromInt(source_position));
16140 new_break_point_info->
16141 set_statement_position(Smi::FromInt(statement_position));
16142 new_break_point_info->set_break_point_objects(
16143 isolate->heap()->undefined_value());
16144 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
16145 debug_info->break_points()->set(index, *new_break_point_info);
16149 // Get the break point objects for a code position.
16150 Object* DebugInfo::GetBreakPointObjects(int code_position) {
16151 Object* break_point_info = GetBreakPointInfo(code_position);
16152 if (break_point_info->IsUndefined()) {
16153 return GetHeap()->undefined_value();
16155 return BreakPointInfo::cast(break_point_info)->break_point_objects();
16159 // Get the total number of break points.
16160 int DebugInfo::GetBreakPointCount() {
16161 if (break_points()->IsUndefined()) return 0;
16163 for (int i = 0; i < break_points()->length(); i++) {
16164 if (!break_points()->get(i)->IsUndefined()) {
16165 BreakPointInfo* break_point_info =
16166 BreakPointInfo::cast(break_points()->get(i));
16167 count += break_point_info->GetBreakPointCount();
16174 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
16175 Handle<Object> break_point_object) {
16176 Heap* heap = debug_info->GetHeap();
16177 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
16178 for (int i = 0; i < debug_info->break_points()->length(); i++) {
16179 if (!debug_info->break_points()->get(i)->IsUndefined()) {
16180 Handle<BreakPointInfo> break_point_info =
16181 Handle<BreakPointInfo>(BreakPointInfo::cast(
16182 debug_info->break_points()->get(i)));
16183 if (BreakPointInfo::HasBreakPointObject(break_point_info,
16184 break_point_object)) {
16185 return *break_point_info;
16189 return heap->undefined_value();
16193 // Find the index of the break point info object for the specified code
16195 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
16196 if (break_points()->IsUndefined()) return kNoBreakPointInfo;
16197 for (int i = 0; i < break_points()->length(); i++) {
16198 if (!break_points()->get(i)->IsUndefined()) {
16199 BreakPointInfo* break_point_info =
16200 BreakPointInfo::cast(break_points()->get(i));
16201 if (break_point_info->code_position()->value() == code_position) {
16206 return kNoBreakPointInfo;
16210 // Remove the specified break point object.
16211 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
16212 Handle<Object> break_point_object) {
16213 Isolate* isolate = break_point_info->GetIsolate();
16214 // If there are no break points just ignore.
16215 if (break_point_info->break_point_objects()->IsUndefined()) return;
16216 // If there is a single break point clear it if it is the same.
16217 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16218 if (break_point_info->break_point_objects() == *break_point_object) {
16219 break_point_info->set_break_point_objects(
16220 isolate->heap()->undefined_value());
16224 // If there are multiple break points shrink the array
16225 DCHECK(break_point_info->break_point_objects()->IsFixedArray());
16226 Handle<FixedArray> old_array =
16227 Handle<FixedArray>(
16228 FixedArray::cast(break_point_info->break_point_objects()));
16229 Handle<FixedArray> new_array =
16230 isolate->factory()->NewFixedArray(old_array->length() - 1);
16231 int found_count = 0;
16232 for (int i = 0; i < old_array->length(); i++) {
16233 if (old_array->get(i) == *break_point_object) {
16234 DCHECK(found_count == 0);
16237 new_array->set(i - found_count, old_array->get(i));
16240 // If the break point was found in the list change it.
16241 if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
16245 // Add the specified break point object.
16246 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
16247 Handle<Object> break_point_object) {
16248 Isolate* isolate = break_point_info->GetIsolate();
16250 // If there was no break point objects before just set it.
16251 if (break_point_info->break_point_objects()->IsUndefined()) {
16252 break_point_info->set_break_point_objects(*break_point_object);
16255 // If the break point object is the same as before just ignore.
16256 if (break_point_info->break_point_objects() == *break_point_object) return;
16257 // If there was one break point object before replace with array.
16258 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16259 Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
16260 array->set(0, break_point_info->break_point_objects());
16261 array->set(1, *break_point_object);
16262 break_point_info->set_break_point_objects(*array);
16265 // If there was more than one break point before extend array.
16266 Handle<FixedArray> old_array =
16267 Handle<FixedArray>(
16268 FixedArray::cast(break_point_info->break_point_objects()));
16269 Handle<FixedArray> new_array =
16270 isolate->factory()->NewFixedArray(old_array->length() + 1);
16271 for (int i = 0; i < old_array->length(); i++) {
16272 // If the break point was there before just ignore.
16273 if (old_array->get(i) == *break_point_object) return;
16274 new_array->set(i, old_array->get(i));
16276 // Add the new break point.
16277 new_array->set(old_array->length(), *break_point_object);
16278 break_point_info->set_break_point_objects(*new_array);
16282 bool BreakPointInfo::HasBreakPointObject(
16283 Handle<BreakPointInfo> break_point_info,
16284 Handle<Object> break_point_object) {
16286 if (break_point_info->break_point_objects()->IsUndefined()) return false;
16287 // Single break point.
16288 if (!break_point_info->break_point_objects()->IsFixedArray()) {
16289 return break_point_info->break_point_objects() == *break_point_object;
16291 // Multiple break points.
16292 FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
16293 for (int i = 0; i < array->length(); i++) {
16294 if (array->get(i) == *break_point_object) {
16302 // Get the number of break points.
16303 int BreakPointInfo::GetBreakPointCount() {
16305 if (break_point_objects()->IsUndefined()) return 0;
16306 // Single break point.
16307 if (!break_point_objects()->IsFixedArray()) return 1;
16308 // Multiple break points.
16309 return FixedArray::cast(break_point_objects())->length();
16313 Object* JSDate::GetField(Object* object, Smi* index) {
16314 return JSDate::cast(object)->DoGetField(
16315 static_cast<FieldIndex>(index->value()));
16319 Object* JSDate::DoGetField(FieldIndex index) {
16320 DCHECK(index != kDateValue);
16322 DateCache* date_cache = GetIsolate()->date_cache();
16324 if (index < kFirstUncachedField) {
16325 Object* stamp = cache_stamp();
16326 if (stamp != date_cache->stamp() && stamp->IsSmi()) {
16327 // Since the stamp is not NaN, the value is also not NaN.
16328 int64_t local_time_ms =
16329 date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
16330 SetCachedFields(local_time_ms, date_cache);
16333 case kYear: return year();
16334 case kMonth: return month();
16335 case kDay: return day();
16336 case kWeekday: return weekday();
16337 case kHour: return hour();
16338 case kMinute: return min();
16339 case kSecond: return sec();
16340 default: UNREACHABLE();
16344 if (index >= kFirstUTCField) {
16345 return GetUTCField(index, value()->Number(), date_cache);
16348 double time = value()->Number();
16349 if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
16351 int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
16352 int days = DateCache::DaysFromTime(local_time_ms);
16354 if (index == kDays) return Smi::FromInt(days);
16356 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16357 if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
16358 DCHECK(index == kTimeInDay);
16359 return Smi::FromInt(time_in_day_ms);
16363 Object* JSDate::GetUTCField(FieldIndex index,
16365 DateCache* date_cache) {
16366 DCHECK(index >= kFirstUTCField);
16368 if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
16370 int64_t time_ms = static_cast<int64_t>(value);
16372 if (index == kTimezoneOffset) {
16373 return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
16376 int days = DateCache::DaysFromTime(time_ms);
16378 if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
16380 if (index <= kDayUTC) {
16381 int year, month, day;
16382 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16383 if (index == kYearUTC) return Smi::FromInt(year);
16384 if (index == kMonthUTC) return Smi::FromInt(month);
16385 DCHECK(index == kDayUTC);
16386 return Smi::FromInt(day);
16389 int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
16391 case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
16392 case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
16393 case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
16394 case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
16395 case kDaysUTC: return Smi::FromInt(days);
16396 case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
16397 default: UNREACHABLE();
16405 void JSDate::SetValue(Object* value, bool is_value_nan) {
16407 if (is_value_nan) {
16408 HeapNumber* nan = GetIsolate()->heap()->nan_value();
16409 set_cache_stamp(nan, SKIP_WRITE_BARRIER);
16410 set_year(nan, SKIP_WRITE_BARRIER);
16411 set_month(nan, SKIP_WRITE_BARRIER);
16412 set_day(nan, SKIP_WRITE_BARRIER);
16413 set_hour(nan, SKIP_WRITE_BARRIER);
16414 set_min(nan, SKIP_WRITE_BARRIER);
16415 set_sec(nan, SKIP_WRITE_BARRIER);
16416 set_weekday(nan, SKIP_WRITE_BARRIER);
16418 set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
16423 void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) {
16424 int days = DateCache::DaysFromTime(local_time_ms);
16425 int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16426 int year, month, day;
16427 date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16428 int weekday = date_cache->Weekday(days);
16429 int hour = time_in_day_ms / (60 * 60 * 1000);
16430 int min = (time_in_day_ms / (60 * 1000)) % 60;
16431 int sec = (time_in_day_ms / 1000) % 60;
16432 set_cache_stamp(date_cache->stamp());
16433 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
16434 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
16435 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
16436 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
16437 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
16438 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
16439 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
16443 void JSArrayBuffer::Neuter() {
16444 CHECK(is_neuterable());
16445 CHECK(is_external());
16446 set_backing_store(NULL);
16447 set_byte_length(Smi::FromInt(0));
16451 void JSArrayBufferView::NeuterView() {
16452 CHECK(JSArrayBuffer::cast(buffer())->is_neuterable());
16453 set_byte_offset(Smi::FromInt(0));
16454 set_byte_length(Smi::FromInt(0));
16458 void JSDataView::Neuter() {
16463 void JSTypedArray::Neuter() {
16465 set_length(Smi::FromInt(0));
16466 set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
16470 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
16471 switch (elements_kind) {
16472 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
16473 case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
16475 TYPED_ARRAYS(TYPED_ARRAY_CASE)
16476 #undef TYPED_ARRAY_CASE
16480 return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
16485 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
16486 Handle<JSTypedArray> typed_array) {
16488 Handle<Map> map(typed_array->map());
16489 Isolate* isolate = typed_array->GetIsolate();
16491 DCHECK(IsFixedTypedArrayElementsKind(map->elements_kind()));
16493 Handle<Map> new_map = Map::TransitionElementsTo(
16495 FixedToExternalElementsKind(map->elements_kind()));
16497 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
16498 Handle<FixedTypedArrayBase> fixed_typed_array(
16499 FixedTypedArrayBase::cast(typed_array->elements()));
16500 Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
16501 fixed_typed_array->DataSize(), false);
16502 memcpy(buffer->backing_store(),
16503 fixed_typed_array->DataPtr(),
16504 fixed_typed_array->DataSize());
16505 Handle<ExternalArray> new_elements =
16506 isolate->factory()->NewExternalArray(
16507 fixed_typed_array->length(), typed_array->type(),
16508 static_cast<uint8_t*>(buffer->backing_store()));
16510 buffer->set_weak_first_view(*typed_array);
16511 DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
16512 typed_array->set_buffer(*buffer);
16513 JSObject::SetMapAndElements(typed_array, new_map, new_elements);
16519 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
16520 Handle<Object> result(buffer(), GetIsolate());
16521 if (*result != Smi::FromInt(0)) {
16522 DCHECK(IsExternalArrayElementsKind(map()->elements_kind()));
16523 return Handle<JSArrayBuffer>::cast(result);
16525 Handle<JSTypedArray> self(this);
16526 return MaterializeArrayBuffer(self);
16530 HeapType* PropertyCell::type() {
16531 return static_cast<HeapType*>(type_raw());
16535 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
16536 DCHECK(IsPropertyCell());
16537 set_type_raw(type, ignored);
16541 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
16542 Handle<Object> value) {
16543 Isolate* isolate = cell->GetIsolate();
16544 Handle<HeapType> old_type(cell->type(), isolate);
16545 Handle<HeapType> new_type = HeapType::Constant(value, isolate);
16547 if (new_type->Is(old_type)) return old_type;
16549 cell->dependent_code()->DeoptimizeDependentCodeGroup(
16550 isolate, DependentCode::kPropertyCellChangedGroup);
16552 if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
16556 return HeapType::Any(isolate);
16560 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
16561 Handle<Object> value) {
16562 cell->set_value(*value);
16563 if (!HeapType::Any()->Is(cell->type())) {
16564 Handle<HeapType> new_type = UpdatedType(cell, value);
16565 cell->set_type(*new_type);
16571 void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
16572 CompilationInfo* info) {
16573 Handle<DependentCode> codes =
16574 DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
16575 DependentCode::kPropertyCellChangedGroup,
16576 info->object_wrapper());
16577 if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
16578 info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
16579 cell, info->zone());
16582 } } // namespace v8::internal