// Cache the called function in a global property cell. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
+ // r0 : number of arguments to the construct function
// r1 : the function to call
// r2 : cache cell for call target
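+ // r0 is live across the call-out that initializes the cell, so it is
+ // smi-tagged and saved on the stack around that call (see below).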
Label initialize, done, miss, megamorphic, not_array_function;
// If we didn't have a matching function, and we didn't find the
// megamorphic sentinel, then we have in the cell either some other
// function or an AllocationSite. Do a map check on the object in r3.
- Handle<Map> allocation_site_map(
- masm->isolate()->heap()->allocation_site_map(),
- masm->isolate());
__ ldr(r5, FieldMemOperand(r3, 0));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &miss);
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ // Arguments register must be smi-tagged to call out.
__ SmiTag(r0);
__ push(r0);
__ push(r1);
b(gt, &no_memento_available);
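+ // scratch_reg points just past where a memento would end; the word at
+ // -AllocationMemento::kSize is the candidate memento's map.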
ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
cmp(scratch_reg,
- Operand(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+ Operand(isolate()->factory()->allocation_memento_map()));
bind(&no_memento_available);
}
JS_OBJECT_TYPE);
// Store the map
- Handle<Map> allocation_site_map(isolate()->heap()->allocation_site_map(),
- isolate());
+ Handle<Map> allocation_site_map =
+     isolate()->factory()->allocation_site_map();
AddStoreMapConstant(object, allocation_site_map);
// Store the payload (smi elements kind)
Add<HConstant>(factory->empty_fixed_array());
HValue* shared_info = GetParameter(0);
+ AddIncrementCounter(counters->fast_new_closure_total());
+
// Create a new closure from the given function info in new space
HValue* size = Add<HConstant>(JSFunction::kSize);
HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
NOT_TENURED, JS_FUNCTION_TYPE);
- AddIncrementCounter(counters->fast_new_closure_total());
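+ // The closure's initial map depends on the language mode and on
+ // whether it is a generator.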
int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
casted_stub()->is_generator());
&isolate()->heap()->roots_[Heap::k##camel_name##RootIndex])); \
}
ROOT_LIST(ROOT_ACCESSOR)
-#undef ROOT_ACCESSOR_ACCESSOR
+#undef ROOT_ACCESSOR
+
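+ // Accessors for the struct maps in the root list, e.g.
+ // allocation_site_map() and allocation_memento_map(). The handles
+ // point directly at the root-list slots, so they remain valid across GC.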
+#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
+ inline Handle<Map> name##_map() { \
+ return Handle<Map>(BitCast<Map**>( \
+ &isolate()->heap()->roots_[Heap::k##Name##MapRootIndex])); \
+ }
+ STRUCT_LIST(STRUCT_MAP_ACCESSOR)
+#undef STRUCT_MAP_ACCESSOR
#define STRING_ACCESSOR(name, str) \
inline Handle<String> name() { \
MaybeObject* Heap::AllocateAllocationSite() {
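+ // Sites are long-lived (they are threaded onto a global weak list
+ // below), hence the allocation in old pointer space.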
- Object* result;
+ AllocationSite* site;
MaybeObject* maybe_result = Allocate(allocation_site_map(),
OLD_POINTER_SPACE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- AllocationSite* site = AllocationSite::cast(result);
+ if (!maybe_result->To(&site)) return maybe_result;
site->Initialize();
// Link the new site into the heap's weak list of allocation sites.
site->set_weak_next(allocation_sites_list());
set_allocation_sites_list(site);
- return result;
+ return site;
}
}
-MaybeObject* Heap::CopyJSObject(JSObject* source) {
+MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
// Never used to copy functions. If functions need to be copied we
// have to be careful to clear the literals array.
SLOW_ASSERT(!source->IsJSFunction());
int object_size = map->instance_size();
Object* clone;
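+ // Mementos can only be placed behind object types we know how to
+ // track; at present that is just JSArray.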
+ ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) &&
+ map->instance_type() == JS_ARRAY_TYPE));
+
WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
// If we're forced to always allocate, we use the general allocation
// functions which may leave us with an object in old space.
} else {
wb_mode = SKIP_WRITE_BARRIER;
- { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
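+ // When tracking a site, reserve extra space directly behind the clone
+ // for an AllocationMemento.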
+ { int adjusted_object_size = site != NULL
+ ? object_size + AllocationMemento::kSize
+ : object_size;
+ MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
SLOW_ASSERT(InNewSpace(clone));
CopyBlock(HeapObject::cast(clone)->address(),
source->address(),
object_size);
- }
- SLOW_ASSERT(
- JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
- FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
- FixedArray* properties = FixedArray::cast(source->properties());
- // Update elements if necessary.
- if (elements->length() > 0) {
- Object* elem;
- { MaybeObject* maybe_elem;
- if (elements->map() == fixed_cow_array_map()) {
- maybe_elem = FixedArray::cast(elements);
- } else if (source->HasFastDoubleElements()) {
- maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
- } else {
- maybe_elem = CopyFixedArray(FixedArray::cast(elements));
- }
- if (!maybe_elem->ToObject(&elem)) return maybe_elem;
- }
- JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
- }
- // Update properties if necessary.
- if (properties->length() > 0) {
- Object* prop;
- { MaybeObject* maybe_prop = CopyFixedArray(properties);
- if (!maybe_prop->ToObject(&prop)) return maybe_prop;
+ if (site != NULL) {
+ AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
+ reinterpret_cast<Address>(clone) + object_size);
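+ // Format the memento in the reserved slack behind the clone:
+ // [ clone (object_size bytes) | memento map | pointer to site ].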
+ alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+ ASSERT(site->map() == allocation_site_map());
+ alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
}
- JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
- }
- // Return the new clone.
- return clone;
-}
-
-
-MaybeObject* Heap::CopyJSObjectWithAllocationSite(
- JSObject* source,
- AllocationSite* site) {
- // Never used to copy functions. If functions need to be copied we
- // have to be careful to clear the literals array.
- SLOW_ASSERT(!source->IsJSFunction());
-
- // Make the clone.
- Map* map = source->map();
- int object_size = map->instance_size();
- Object* clone;
-
- ASSERT(AllocationSite::CanTrack(map->instance_type()));
- ASSERT(map->instance_type() == JS_ARRAY_TYPE);
- WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
-
- // If we're forced to always allocate, we use the general allocation
- // functions which may leave us with an object in old space.
- int adjusted_object_size = object_size;
- if (always_allocate()) {
- // We'll only track origin if we are certain to allocate in new space
- const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
- if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) {
- adjusted_object_size += AllocationMemento::kSize;
- }
-
- { MaybeObject* maybe_clone =
- AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
- if (!maybe_clone->ToObject(&clone)) return maybe_clone;
- }
- Address clone_address = HeapObject::cast(clone)->address();
- CopyBlock(clone_address,
- source->address(),
- object_size);
- // Update write barrier for all fields that lie beyond the header.
- int write_barrier_offset = adjusted_object_size > object_size
- ? JSArray::kSize + AllocationMemento::kSize
- : JSObject::kHeaderSize;
- if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
- RecordWrites(clone_address,
- write_barrier_offset,
- (object_size - write_barrier_offset) / kPointerSize);
- }
-
- // Track allocation site information, if we failed to allocate it inline.
- if (InNewSpace(clone) &&
- adjusted_object_size == object_size) {
- MaybeObject* maybe_alloc_memento =
- AllocateStruct(ALLOCATION_MEMENTO_TYPE);
- AllocationMemento* alloc_memento;
- if (maybe_alloc_memento->To(&alloc_memento)) {
- alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- ASSERT(site->map() == allocation_site_map());
- alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
- }
- }
- } else {
- wb_mode = SKIP_WRITE_BARRIER;
- adjusted_object_size += AllocationMemento::kSize;
-
- { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
- if (!maybe_clone->ToObject(&clone)) return maybe_clone;
- }
- SLOW_ASSERT(InNewSpace(clone));
- // Since we know the clone is allocated in new space, we can copy
- // the contents without worrying about updating the write barrier.
- CopyBlock(HeapObject::cast(clone)->address(),
- source->address(),
- object_size);
- }
-
- if (adjusted_object_size > object_size) {
- AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
- reinterpret_cast<Address>(clone) + object_size);
- alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- ASSERT(site->map() == allocation_site_map());
- alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
}
SLOW_ASSERT(
// Returns a deep copy of the JavaScript object.
// Properties and elements are copied too.
// Returns failure if allocation failed.
- MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
-
- MUST_USE_RESULT MaybeObject* CopyJSObjectWithAllocationSite(
- JSObject* source, AllocationSite* site);
+ // Optionally takes an AllocationSite to be recorded in an
+ // AllocationMemento appended behind the copied object.
+ MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
+ AllocationSite* site = NULL);
// Allocates the function prototype.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
ASSERT(alloc_site != NULL);
HInnerAllocatedObject* alloc_memento = Add<HInnerAllocatedObject>(
previous_object, previous_object_size);
- Handle<Map> alloc_memento_map(
- isolate()->heap()->allocation_memento_map());
+ Handle<Map> alloc_memento_map =
+ isolate()->factory()->allocation_memento_map();
AddStoreMapConstant(alloc_memento, alloc_memento_map);
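+ // Besides its map, the memento's only field is the pointer back to
+ // the AllocationSite.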
HObjectAccess access = HObjectAccess::ForAllocationMementoSite();
Add<HStoreNamedField>(alloc_memento, access, alloc_site);
ElementsKind boilerplate_elements_kind =
Handle<JSObject>::cast(boilerplate_object)->GetElementsKind();
- // TODO(mvstanton): This heuristic is only a temporary solution. In the
- // end, we want to quit creating allocation site info after a certain number
- // of GCs for a call site.
- AllocationSiteMode mode = AllocationSite::GetMode(
- boilerplate_elements_kind);
+ ASSERT(AllocationSite::CanTrack(boilerplate_object->map()->instance_type()));
// Check whether to use fast or slow deep-copying for boilerplate.
int max_properties = kMaxFastLiteralProperties;
if (IsFastLiteral(boilerplate_object,
kMaxFastLiteralDepth,
&max_properties)) {
+ // TODO(mvstanton): This heuristic is only a temporary solution. In the
+ // end, we want to quit creating allocation site info after a certain number
+ // of GCs for a call site.
+ AllocationSiteMode mode = AllocationSite::GetMode(
+ boilerplate_elements_kind);
+
+ // It doesn't make sense to create allocation mementos if we are going
+ // to allocate in old space.
+ if (mode == TRACK_ALLOCATION_SITE &&
+ isolate()->heap()->GetPretenureMode() == TENURED) {
+ mode = DONT_TRACK_ALLOCATION_SITE;
+ }
+
literal = BuildFastLiteral(boilerplate_object,
site,
mode);
int object_offset = object_size;
InstanceType instance_type = boilerplate_object->map()->instance_type();
- bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
- AllocationSite::CanTrack(instance_type);
+ bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE;
- // If using allocation sites, then the payload on the site should already
- // be filled in as a valid (boilerplate) array.
+ // If using allocation sites, then
+ // 1) the payload on the site should already be filled in as a valid
+ // (boilerplate) array, and
+ // 2) we shouldn't be pretenuring the allocations.
ASSERT(!create_allocation_site_info ||
- AllocationSite::cast(*allocation_site_object)->IsLiteralSite());
+ (AllocationSite::cast(*allocation_site_object)->IsLiteralSite() &&
+ isolate()->heap()->GetPretenureMode() == NOT_TENURED));
if (create_allocation_site_info) {
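+ // The memento is carved out of the same HAllocate as the object
+ // itself, immediately behind it.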
object_size += AllocationMemento::kSize;
HInstruction* object = Add<HAllocate>(object_size_constant, type,
isolate()->heap()->GetPretenureMode(), instance_type);
-
BuildEmitObjectHeader(boilerplate_object, object);
if (create_allocation_site_info) {
// Cache the called function in a global property cell. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
+ // eax : number of arguments to the construct function
// ebx : cache cell for call target
// edi : the function to call
Isolate* isolate = masm->isolate();
// If we didn't have a matching function, and we didn't find the
// megamorphic sentinel, then we have in the cell either some other
// function or an AllocationSite. Do a map check on the object in ecx.
- Handle<Map> allocation_site_map(
- masm->isolate()->heap()->allocation_site_map(),
- masm->isolate());
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ j(not_equal, &miss);
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ // Arguments register must be smi-tagged to call out.
__ SmiTag(eax);
__ push(eax);
__ push(edi);
__ inc(edx);
__ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
if (FLAG_debug_code) {
- Handle<Map> allocation_site_map(
- masm->isolate()->heap()->allocation_site_map(),
- masm->isolate());
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ Assert(equal, kExpectedAllocationSiteInCell);
}
__ cmp(ebx, Immediate(undefined_sentinel));
__ j(equal, &no_info);
__ mov(edx, FieldOperand(ebx, Cell::kValueOffset));
- __ cmp(FieldOperand(edx, 0), Immediate(Handle<Map>(
- masm->isolate()->heap()->allocation_site_map())));
+ __ cmp(FieldOperand(edx, 0), Immediate(
+ masm->isolate()->factory()->allocation_site_map()));
__ j(not_equal, &no_info);
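+ // For array construction sites, transition_info holds the elements
+ // kind as a smi.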
__ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
j(greater, &no_memento_available);
cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
- Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+ Immediate(isolate()->factory()->allocation_memento_map()));
bind(&no_memento_available);
}
// Cache the called function in a global property cell. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
+ // a0 : number of arguments to the construct function
// a1 : the function to call
// a2 : cache cell for call target
Label initialize, done, miss, megamorphic, not_array_function;
// If we didn't have a matching function, and we didn't find the
// megamorphic sentinel, then we have in the cell either some other
// function or an AllocationSite. Do a map check on the object in a3.
- Handle<Map> allocation_site_map(
- masm->isolate()->heap()->allocation_site_map(),
- masm->isolate());
__ lw(t1, FieldMemOperand(a3, 0));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, t1, Operand(at));
1 << 5 | // a1
1 << 6; // a2
+ // Arguments register must be smi-tagged to call out.
__ SmiTag(a0);
__ MultiPush(kSavedRegs);
Branch(&no_memento_available, gt, scratch_reg, Operand(at));
lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
Branch(allocation_memento_present, cond, scratch_reg,
- Operand(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+ Operand(isolate()->factory()->allocation_memento_map()));
bind(&no_memento_available);
}
ASSERT(*elements != isolate->heap()->empty_fixed_array());
Handle<Object> boilerplate =
Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
- if (boilerplate.is_null()) return site;
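+ // Boilerplate creation failed (and no site was created); hand the
+ // null handle back to the caller.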
+ if (boilerplate.is_null()) {
+ ASSERT(site.is_null());
+ return site;
+ }
site = isolate->factory()->NewAllocationSite();
site->set_transition_info(*boilerplate);
literals->set(literals_index, *site);
AllocationSiteMode mode = AllocationSite::GetMode(
boilerplate->GetElementsKind());
if (mode == TRACK_ALLOCATION_SITE) {
- return isolate->heap()->CopyJSObjectWithAllocationSite(
- boilerplate, *site);
+ return isolate->heap()->CopyJSObject(boilerplate, *site);
}
return isolate->heap()->CopyJSObject(boilerplate);
// Cache the called function in a global property cell. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
+ // rax : number of arguments to the construct function
// rbx : cache cell for call target
// rdi : the function to call
Isolate* isolate = masm->isolate();
// If we didn't have a matching function, and we didn't find the
// megamorphic sentinel, then we have in the cell either some other
// function or an AllocationSite. Do a map check on the object in rcx.
- Handle<Map> allocation_site_map(
- masm->isolate()->heap()->allocation_site_map(),
- masm->isolate());
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
__ Cmp(FieldOperand(rcx, 0), allocation_site_map);
__ j(not_equal, &miss);
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ // Arguments register must be smi-tagged to call out.
__ Integer32ToSmi(rax, rax);
__ push(rax);
__ push(rdi);
__ incl(rdx);
__ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
if (FLAG_debug_code) {
- Handle<Map> allocation_site_map(
- masm->isolate()->heap()->allocation_site_map(),
- masm->isolate());
+ Handle<Map> allocation_site_map =
+ masm->isolate()->factory()->allocation_site_map();
__ Cmp(FieldOperand(rcx, 0), allocation_site_map);
__ Assert(equal, kExpectedAllocationSiteInCell);
}
__ j(equal, &no_info);
__ movq(rdx, FieldOperand(rbx, Cell::kValueOffset));
__ Cmp(FieldOperand(rdx, 0),
- Handle<Map>(masm->isolate()->heap()->allocation_site_map()));
+ masm->isolate()->factory()->allocation_site_map());
__ j(not_equal, &no_info);
__ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));