void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm, AllocationSiteMode mode,
- Label* allocation_site_info_found) {
+ Label* allocation_memento_found) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : key
// -- r4 : scratch (elements)
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
- ASSERT(allocation_site_info_found != NULL);
- __ TestJSArrayForAllocationSiteInfo(r2, r4);
- __ b(eq, allocation_site_info_found);
+ ASSERT(allocation_memento_found != NULL);
+ __ TestJSArrayForAllocationMemento(r2, r4);
+ __ b(eq, allocation_memento_found);
}
// Set transitioned map.
Label loop, entry, convert_hole, gc_required, only_change_map, done;
if (mode == TRACK_ALLOCATION_SITE) {
- __ TestJSArrayForAllocationSiteInfo(r2, r4);
+ __ TestJSArrayForAllocationMemento(r2, r4);
__ b(eq, fail);
}
Label entry, loop, convert_hole, gc_required, only_change_map;
if (mode == TRACK_ALLOCATION_SITE) {
- __ TestJSArrayForAllocationSiteInfo(r2, r4);
+ __ TestJSArrayForAllocationMemento(r2, r4);
__ b(eq, fail);
}
void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
Register object = ToRegister(instr->object());
Register temp = ToRegister(instr->temp());
- __ TestJSArrayForAllocationSiteInfo(object, temp);
+ __ TestJSArrayForAllocationMemento(object, temp);
DeoptimizeIf(eq, instr->environment());
}
}
-void MacroAssembler::TestJSArrayForAllocationSiteInfo(
+void MacroAssembler::TestJSArrayForAllocationMemento(
Register receiver_reg,
Register scratch_reg) {
- Label no_info_available;
+ Label no_memento_available;
ExternalReference new_space_start =
ExternalReference::new_space_start(isolate());
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
add(scratch_reg, receiver_reg,
- Operand(JSArray::kSize + AllocationSiteInfo::kSize - kHeapObjectTag));
+ Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
cmp(scratch_reg, Operand(new_space_start));
- b(lt, &no_info_available);
+ b(lt, &no_memento_available);
mov(ip, Operand(new_space_allocation_top));
ldr(ip, MemOperand(ip));
cmp(scratch_reg, ip);
- b(gt, &no_info_available);
- ldr(scratch_reg, MemOperand(scratch_reg, -AllocationSiteInfo::kSize));
+ b(gt, &no_memento_available);
+ ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
cmp(scratch_reg,
- Operand(Handle<Map>(isolate()->heap()->allocation_site_info_map())));
- bind(&no_info_available);
+ Operand(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+ bind(&no_memento_available);
}
// in r0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);
- // AllocationSiteInfo support. Arrays may have an associated
- // AllocationSiteInfo object that can be checked for in order to pretransition
+ // AllocationMemento support. Arrays may have an associated
+ // AllocationMemento object that can be checked for in order to pretransition
// to another type.
// On entry, receiver_reg should point to the array object.
// scratch_reg gets clobbered.
- // If allocation info is present, condition flags are set to eq
+ // If an allocation memento is present, condition flags are set to eq
- void TestJSArrayForAllocationSiteInfo(Register receiver_reg,
- Register scratch_reg);
+ void TestJSArrayForAllocationMemento(Register receiver_reg,
+ Register scratch_reg);
private:
void CallCFunctionHelper(Register function,
MaybeObject* maybe_array = array->Initialize(0);
if (maybe_array->IsFailure()) return maybe_array;
- AllocationSiteInfo* info = AllocationSiteInfo::FindForJSObject(array);
- if (info != NULL && info->IsValid()) {
- AllocationSite* site = info->GetAllocationSite();
+ AllocationMemento* memento = AllocationMemento::FindForJSObject(array);
+ if (memento != NULL && memento->IsValid()) {
+ AllocationSite* site = memento->GetAllocationSite();
ElementsKind to_kind = site->GetElementsKind();
if (IsMoreGeneralElementsKindTransition(array->GetElementsKind(),
to_kind)) {
// We have advice that we should change the elements kind
if (FLAG_trace_track_allocation_sites) {
- PrintF("AllocationSiteInfo: pre-transitioning array %p(%s->%s)\n",
+ PrintF("AllocationSite: pre-transitioning array %p(%s->%s)\n",
reinterpret_cast<void*>(array),
ElementsKindToString(array->GetElementsKind()),
ElementsKindToString(to_kind));
checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site, undefined);
checker.Then();
- HObjectAccess access = HObjectAccess::ForAllocationSiteInfoSite();
+ HObjectAccess access = HObjectAccess::ForAllocationSiteTransitionInfo();
HInstruction* boilerplate = AddLoad(allocation_site, access);
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
HValue* elements = AddLoadElements(boilerplate);
class ElementsTransitionGenerator : public AllStatic {
public:
// If |mode| is set to DONT_TRACK_ALLOCATION_SITE,
- // |allocation_site_info_found| may be NULL.
+ // |allocation_memento_found| may be NULL.
static void GenerateMapChangeElementsTransition(MacroAssembler* masm,
AllocationSiteMode mode,
- Label* allocation_site_info_found);
+ Label* allocation_memento_found);
static void GenerateSmiToDouble(MacroAssembler* masm,
AllocationSiteMode mode,
Label* fail);
// space when new space is full and the object is not a large object.
AllocationSpace retry_space =
(space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
- int size = map->instance_size() + AllocationSiteInfo::kSize;
+ int size = map->instance_size() + AllocationMemento::kSize;
Object* result;
MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
if (!maybe_result->ToObject(&result)) return maybe_result;
// No need for write barrier since object is white and map is in old space.
HeapObject::cast(result)->set_map_no_write_barrier(map);
- AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
+ AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
reinterpret_cast<Address>(result) + map->instance_size());
- alloc_info->set_map_no_write_barrier(allocation_site_info_map());
- alloc_info->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER);
+ alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+ alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER);
return result;
}
if (always_allocate()) {
// We'll only track origin if we are certain to allocate in new space
const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
- if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) {
- adjusted_object_size += AllocationSiteInfo::kSize;
+ if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) {
+ adjusted_object_size += AllocationMemento::kSize;
}
{ MaybeObject* maybe_clone =
object_size);
// Update write barrier for all fields that lie beyond the header.
int write_barrier_offset = adjusted_object_size > object_size
- ? JSArray::kSize + AllocationSiteInfo::kSize
+ ? JSArray::kSize + AllocationMemento::kSize
: JSObject::kHeaderSize;
if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
RecordWrites(clone_address,
// Track allocation site information, if we failed to allocate it inline.
if (InNewSpace(clone) &&
adjusted_object_size == object_size) {
- MaybeObject* maybe_alloc_info =
- AllocateStruct(ALLOCATION_SITE_INFO_TYPE);
- AllocationSiteInfo* alloc_info;
- if (maybe_alloc_info->To(&alloc_info)) {
- alloc_info->set_map_no_write_barrier(allocation_site_info_map());
- alloc_info->set_allocation_site(site, SKIP_WRITE_BARRIER);
+ MaybeObject* maybe_alloc_memento =
+ AllocateStruct(ALLOCATION_MEMENTO_TYPE);
+ AllocationMemento* alloc_memento;
+ if (maybe_alloc_memento->To(&alloc_memento)) {
+ alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+ alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
}
}
} else {
wb_mode = SKIP_WRITE_BARRIER;
- adjusted_object_size += AllocationSiteInfo::kSize;
+ adjusted_object_size += AllocationMemento::kSize;
{ MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
if (adjusted_object_size > object_size) {
- AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
+ AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
reinterpret_cast<Address>(clone) + object_size);
- alloc_info->set_map_no_write_barrier(allocation_site_info_map());
- alloc_info->set_allocation_site(site, SKIP_WRITE_BARRIER);
+ alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+ alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
}
SLOW_ASSERT(
return HObjectAccess(kInobject, Cell::kValueOffset);
}
- static HObjectAccess ForAllocationSiteInfoSite() {
- return HObjectAccess(kInobject, AllocationSiteInfo::kAllocationSiteOffset);
+ static HObjectAccess ForAllocationMementoSite() {
+ return HObjectAccess(kInobject, AllocationMemento::kAllocationSiteOffset);
}
// Create an access to an offset in a fixed array header.
AddStore(array, HObjectAccess::ForArrayLength(), length_field);
if (mode == TRACK_ALLOCATION_SITE) {
- BuildCreateAllocationSiteInfo(array,
- JSArray::kSize,
- allocation_site_payload);
+ BuildCreateAllocationMemento(array,
+ JSArray::kSize,
+ allocation_site_payload);
}
int elements_location = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
- elements_location += AllocationSiteInfo::kSize;
+ elements_location += AllocationMemento::kSize;
}
HInnerAllocatedObject* elements =
// All sizes here are multiples of kPointerSize.
int size = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
- size += AllocationSiteInfo::kSize;
+ size += AllocationMemento::kSize;
}
int elems_offset = size;
if (length > 0) {
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
- BuildCreateAllocationSiteInfo(object, JSArray::kSize, allocation_site);
+ BuildCreateAllocationMemento(object, JSArray::kSize, allocation_site);
}
if (length > 0) {
}
-HValue* HGraphBuilder::BuildCreateAllocationSiteInfo(HValue* previous_object,
- int previous_object_size,
- HValue* alloc_site) {
+HValue* HGraphBuilder::BuildCreateAllocationMemento(HValue* previous_object,
+ int previous_object_size,
+ HValue* alloc_site) {
ASSERT(alloc_site != NULL);
- HInnerAllocatedObject* alloc_site_info = Add<HInnerAllocatedObject>(
+ HInnerAllocatedObject* alloc_memento = Add<HInnerAllocatedObject>(
previous_object, previous_object_size);
- Handle<Map> alloc_site_info_map(
- isolate()->heap()->allocation_site_info_map());
- AddStoreMapConstant(alloc_site_info, alloc_site_info_map);
- HObjectAccess access = HObjectAccess::ForAllocationSiteInfoSite();
- AddStore(alloc_site_info, access, alloc_site);
- return alloc_site_info;
+ Handle<Map> alloc_memento_map(
+ isolate()->heap()->allocation_memento_map());
+ AddStoreMapConstant(alloc_memento, alloc_memento_map);
+ HObjectAccess access = HObjectAccess::ForAllocationMementoSite();
+ AddStore(alloc_memento, access, alloc_site);
+ return alloc_memento;
}
int base_size = JSArray::kSize;
if (mode_ == TRACK_ALLOCATION_SITE) {
- base_size += AllocationSiteInfo::kSize;
+ base_size += AllocationMemento::kSize;
}
if (IsFastDoubleElementsKind(kind_)) {
HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
int base_size = JSArray::kSize;
if (mode_ == TRACK_ALLOCATION_SITE) {
- base_size += AllocationSiteInfo::kSize;
+ base_size += AllocationMemento::kSize;
}
base_size += IsFastDoubleElementsKind(kind_)
&data_size,
&pointer_size)) {
if (mode == TRACK_ALLOCATION_SITE) {
- pointer_size += AllocationSiteInfo::kSize;
+ pointer_size += AllocationMemento::kSize;
}
Handle<JSObject> boilerplate_object = DeepCopy(original_boilerplate_object);
// Create allocation site info.
if (mode == TRACK_ALLOCATION_SITE &&
boilerplate_object->map()->CanTrackAllocationSite()) {
- elements_offset += AllocationSiteInfo::kSize;
- *offset += AllocationSiteInfo::kSize;
- BuildCreateAllocationSiteInfo(target, JSArray::kSize, allocation_site);
+ elements_offset += AllocationMemento::kSize;
+ *offset += AllocationMemento::kSize;
+ BuildCreateAllocationMemento(target, JSArray::kSize, allocation_site);
}
}
HValue* capacity);
// array must have been allocated with enough room for
- // 1) the JSArray, 2) a AllocationSiteInfo if mode requires it,
+ // 1) the JSArray, 2) an AllocationMemento if mode requires it,
// 3) a FixedArray or FixedDoubleArray.
// A pointer to the Fixed(Double)Array is returned.
HInnerAllocatedObject* BuildJSArrayHeader(HValue* array,
int position,
HIfContinuation* continuation);
- HValue* BuildCreateAllocationSiteInfo(HValue* previous_object,
- int previous_object_size,
- HValue* payload);
+ HValue* BuildCreateAllocationMemento(HValue* previous_object,
+ int previous_object_size,
+ HValue* payload);
HInstruction* BuildGetNativeContext(HValue* context);
HInstruction* BuildGetArrayFunction(HValue* context);
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm, AllocationSiteMode mode,
- Label* allocation_site_info_found) {
+ Label* allocation_memento_found) {
// ----------- S t a t e -------------
// -- eax : value
// -- ebx : target map
// -- esp[0] : return address
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
- ASSERT(allocation_site_info_found != NULL);
- __ TestJSArrayForAllocationSiteInfo(edx, edi);
- __ j(equal, allocation_site_info_found);
+ ASSERT(allocation_memento_found != NULL);
+ __ TestJSArrayForAllocationMemento(edx, edi);
+ __ j(equal, allocation_memento_found);
}
// Set transitioned map.
Label loop, entry, convert_hole, gc_required, only_change_map;
if (mode == TRACK_ALLOCATION_SITE) {
- __ TestJSArrayForAllocationSiteInfo(edx, edi);
+ __ TestJSArrayForAllocationMemento(edx, edi);
__ j(equal, fail);
}
Label loop, entry, convert_hole, gc_required, only_change_map, success;
if (mode == TRACK_ALLOCATION_SITE) {
- __ TestJSArrayForAllocationSiteInfo(edx, edi);
+ __ TestJSArrayForAllocationMemento(edx, edi);
__ j(equal, fail);
}
void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
Register object = ToRegister(instr->object());
Register temp = ToRegister(instr->temp());
- __ TestJSArrayForAllocationSiteInfo(object, temp);
+ __ TestJSArrayForAllocationMemento(object, temp);
DeoptimizeIf(equal, instr->environment());
}
}
-void MacroAssembler::TestJSArrayForAllocationSiteInfo(
+void MacroAssembler::TestJSArrayForAllocationMemento(
Register receiver_reg,
Register scratch_reg) {
- Label no_info_available;
+ Label no_memento_available;
ExternalReference new_space_start =
ExternalReference::new_space_start(isolate());
ExternalReference::new_space_allocation_top_address(isolate());
lea(scratch_reg, Operand(receiver_reg,
- JSArray::kSize + AllocationSiteInfo::kSize - kHeapObjectTag));
+ JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
cmp(scratch_reg, Immediate(new_space_start));
- j(less, &no_info_available);
+ j(less, &no_memento_available);
cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
- j(greater, &no_info_available);
- cmp(MemOperand(scratch_reg, -AllocationSiteInfo::kSize),
- Immediate(Handle<Map>(isolate()->heap()->allocation_site_info_map())));
- bind(&no_info_available);
+ j(greater, &no_memento_available);
+ cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
+ Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+ bind(&no_memento_available);
}
// in eax. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Label* call_runtime);
- // AllocationSiteInfo support. Arrays may have an associated
- // AllocationSiteInfo object that can be checked for in order to pretransition
+ // AllocationMemento support. Arrays may have an associated
+ // AllocationMemento object that can be checked for in order to pretransition
// to another type.
// On entry, receiver_reg should point to the array object.
// scratch_reg gets clobbered.
- // If allocation info is present, conditional code is set to equal
+ // If an allocation memento is present, conditional code is set to equal
- void TestJSArrayForAllocationSiteInfo(Register receiver_reg,
- Register scratch_reg);
+ void TestJSArrayForAllocationMemento(Register receiver_reg,
+ Register scratch_reg);
private:
bool generating_stub_;
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm, AllocationSiteMode mode,
- Label* allocation_site_info_found) {
+ Label* allocation_memento_found) {
// ----------- S t a t e -------------
// -- a0 : value
// -- a1 : key
// -- t0 : scratch (elements)
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
- ASSERT(allocation_site_info_found != NULL);
- masm->TestJSArrayForAllocationSiteInfo(a2, t0, eq,
- allocation_site_info_found);
+ ASSERT(allocation_memento_found != NULL);
+ masm->TestJSArrayForAllocationMemento(a2, t0, eq,
+ allocation_memento_found);
}
// Set transitioned map.
Register scratch = t6;
if (mode == TRACK_ALLOCATION_SITE) {
- masm->TestJSArrayForAllocationSiteInfo(a2, t0, eq, fail);
+ masm->TestJSArrayForAllocationMemento(a2, t0, eq, fail);
}
// Check for empty arrays, which only require a map transition and no changes
Label entry, loop, convert_hole, gc_required, only_change_map;
if (mode == TRACK_ALLOCATION_SITE) {
- masm->TestJSArrayForAllocationSiteInfo(a2, t0, eq, fail);
+ masm->TestJSArrayForAllocationMemento(a2, t0, eq, fail);
}
// Check for empty arrays, which only require a map transition and no changes
Register object = ToRegister(instr->object());
Register temp = ToRegister(instr->temp());
Label fail;
- __ TestJSArrayForAllocationSiteInfo(object, temp, ne, &fail);
+ __ TestJSArrayForAllocationMemento(object, temp, ne, &fail);
DeoptimizeIf(al, instr->environment());
__ bind(&fail);
}
}
-void MacroAssembler::TestJSArrayForAllocationSiteInfo(
+void MacroAssembler::TestJSArrayForAllocationMemento(
Register receiver_reg,
Register scratch_reg,
Condition cond,
- Label* allocation_info_present) {
- Label no_info_available;
+ Label* allocation_memento_present) {
+ Label no_memento_available;
ExternalReference new_space_start =
ExternalReference::new_space_start(isolate());
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
Addu(scratch_reg, receiver_reg,
- Operand(JSArray::kSize + AllocationSiteInfo::kSize - kHeapObjectTag));
- Branch(&no_info_available, lt, scratch_reg, Operand(new_space_start));
+ Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
+ Branch(&no_memento_available, lt, scratch_reg, Operand(new_space_start));
li(at, Operand(new_space_allocation_top));
lw(at, MemOperand(at));
- Branch(&no_info_available, gt, scratch_reg, Operand(at));
- lw(scratch_reg, MemOperand(scratch_reg, -AllocationSiteInfo::kSize));
- Branch(allocation_info_present, cond, scratch_reg,
- Operand(Handle<Map>(isolate()->heap()->allocation_site_info_map())));
- bind(&no_info_available);
+ Branch(&no_memento_available, gt, scratch_reg, Operand(at));
+ lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
+ Branch(allocation_memento_present, cond, scratch_reg,
+ Operand(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+ bind(&no_memento_available);
}
// in a0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);
- // AllocationSiteInfo support. Arrays may have an associated
- // AllocationSiteInfo object that can be checked for in order to pretransition
+ // AllocationMemento support. Arrays may have an associated
+ // AllocationMemento object that can be checked for in order to pretransition
// to another type.
// On entry, receiver_reg should point to the array object.
// scratch_reg gets clobbered.
- // If allocation info is present, jump to allocation_info_present
+ // If allocation memento is present, jump to allocation_memento_present
- void TestJSArrayForAllocationSiteInfo(Register receiver_reg,
- Register scratch_reg,
- Condition cond,
- Label* allocation_info_present);
+ void TestJSArrayForAllocationMemento(Register receiver_reg,
+ Register scratch_reg,
+ Condition cond,
+ Label* allocation_memento_present);
private:
void CallCFunctionHelper(Register function,
}
-void AllocationSiteInfo::AllocationSiteInfoVerify() {
- CHECK(IsAllocationSiteInfo());
+void AllocationMemento::AllocationMementoVerify() {
+ CHECK(IsAllocationMemento());
VerifyHeapPointer(allocation_site());
CHECK(!IsValid() || GetAllocationSite()->IsAllocationSite());
}
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
-ACCESSORS(AllocationSiteInfo, allocation_site, Object, kAllocationSiteOffset)
+ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
}
-void AllocationSiteInfo::AllocationSiteInfoPrint(FILE* out) {
- HeapObject::PrintHeader(out, "AllocationSiteInfo");
+void AllocationMemento::AllocationMementoPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "AllocationMemento");
PrintF(out, " - allocation site: ");
if (IsValid()) {
GetAllocationSite()->Print();
}
-AllocationSiteInfo* AllocationSiteInfo::FindForJSObject(JSObject* object) {
- // Currently, AllocationSiteInfo objects are only allocated immediately
+AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object) {
+ // Currently, AllocationMemento objects are only allocated immediately
// after JSArrays in NewSpace, and detecting whether a JSArray has one
// involves carefully checking the object immediately after the JSArray
- // (if there is one) to see if it's an AllocationSiteInfo.
+ // (if there is one) to see if it's an AllocationMemento.
if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) {
Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
object->Size();
- if ((ptr_end + AllocationSiteInfo::kSize) <=
+ if ((ptr_end + AllocationMemento::kSize) <=
object->GetHeap()->NewSpaceTop()) {
// There is room in newspace for allocation info. Do we have some?
- Map** possible_allocation_site_info_map =
+ Map** possible_allocation_memento_map =
reinterpret_cast<Map**>(ptr_end);
- if (*possible_allocation_site_info_map ==
- object->GetHeap()->allocation_site_info_map()) {
- AllocationSiteInfo* info = AllocationSiteInfo::cast(
+ if (*possible_allocation_memento_map ==
+ object->GetHeap()->allocation_memento_map()) {
+ AllocationMemento* memento = AllocationMemento::cast(
reinterpret_cast<Object*>(ptr_end + 1));
- return info;
+ return memento;
}
}
}
return this;
}
- AllocationSiteInfo* info = AllocationSiteInfo::FindForJSObject(this);
- if (info == NULL || !info->IsValid()) {
+ AllocationMemento* memento = AllocationMemento::FindForJSObject(this);
+ if (memento == NULL || !memento->IsValid()) {
return this;
}
// Walk through to the Allocation Site
- AllocationSite* site = info->GetAllocationSite();
+ AllocationSite* site = memento->GetAllocationSite();
if (site->IsLiteralSite()) {
JSArray* transition_info = JSArray::cast(site->transition_info());
ElementsKind kind = transition_info->GetElementsKind();
V(OBJECT_TEMPLATE_INFO_TYPE) \
V(SIGNATURE_INFO_TYPE) \
V(TYPE_SWITCH_INFO_TYPE) \
- V(ALLOCATION_SITE_INFO_TYPE) \
+ V(ALLOCATION_MEMENTO_TYPE) \
V(ALLOCATION_SITE_TYPE) \
V(SCRIPT_TYPE) \
V(CODE_CACHE_TYPE) \
V(TYPE_SWITCH_INFO, TypeSwitchInfo, type_switch_info) \
V(SCRIPT, Script, script) \
V(ALLOCATION_SITE, AllocationSite, allocation_site) \
- V(ALLOCATION_SITE_INFO, AllocationSiteInfo, allocation_site_info) \
+ V(ALLOCATION_MEMENTO, AllocationMemento, allocation_memento) \
V(CODE_CACHE, CodeCache, code_cache) \
V(POLYMORPHIC_CODE_CACHE, PolymorphicCodeCache, polymorphic_code_cache) \
V(TYPE_FEEDBACK_INFO, TypeFeedbackInfo, type_feedback_info) \
SIGNATURE_INFO_TYPE,
TYPE_SWITCH_INFO_TYPE,
ALLOCATION_SITE_TYPE,
- ALLOCATION_SITE_INFO_TYPE,
+ ALLOCATION_MEMENTO_TYPE,
SCRIPT_TYPE,
CODE_CACHE_TYPE,
POLYMORPHIC_CODE_CACHE_TYPE,
};
-class AllocationSiteInfo: public Struct {
+class AllocationMemento: public Struct {
public:
static const int kAllocationSiteOffset = HeapObject::kHeaderSize;
static const int kSize = kAllocationSiteOffset + kPointerSize;
return AllocationSite::cast(allocation_site());
}
- DECLARE_PRINTER(AllocationSiteInfo)
- DECLARE_VERIFIER(AllocationSiteInfo)
+ DECLARE_PRINTER(AllocationMemento)
+ DECLARE_VERIFIER(AllocationMemento)
- // Returns NULL if no AllocationSiteInfo is available for object.
- static AllocationSiteInfo* FindForJSObject(JSObject* object);
- static inline AllocationSiteInfo* cast(Object* obj);
+ // Returns NULL if no AllocationMemento is available for object.
+ static AllocationMemento* FindForJSObject(JSObject* object);
+ static inline AllocationMemento* cast(Object* obj);
private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSiteInfo);
+ DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationMemento);
};
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm, AllocationSiteMode mode,
- Label* allocation_site_info_found) {
+ Label* allocation_memento_found) {
// ----------- S t a t e -------------
// -- rax : value
// -- rbx : target map
// -- rsp[0] : return address
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
- ASSERT(allocation_site_info_found != NULL);
- __ TestJSArrayForAllocationSiteInfo(rdx, rdi);
- __ j(equal, allocation_site_info_found);
+ ASSERT(allocation_memento_found != NULL);
+ __ TestJSArrayForAllocationMemento(rdx, rdi);
+ __ j(equal, allocation_memento_found);
}
// Set transitioned map.
Label allocated, new_backing_store, only_change_map, done;
if (mode == TRACK_ALLOCATION_SITE) {
- __ TestJSArrayForAllocationSiteInfo(rdx, rdi);
+ __ TestJSArrayForAllocationMemento(rdx, rdi);
__ j(equal, fail);
}
Label loop, entry, convert_hole, gc_required, only_change_map;
if (mode == TRACK_ALLOCATION_SITE) {
- __ TestJSArrayForAllocationSiteInfo(rdx, rdi);
+ __ TestJSArrayForAllocationMemento(rdx, rdi);
__ j(equal, fail);
}
void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
Register object = ToRegister(instr->object());
Register temp = ToRegister(instr->temp());
- __ TestJSArrayForAllocationSiteInfo(object, temp);
+ __ TestJSArrayForAllocationMemento(object, temp);
DeoptimizeIf(equal, instr->environment());
}
j(not_equal, &next);
}
-void MacroAssembler::TestJSArrayForAllocationSiteInfo(
+void MacroAssembler::TestJSArrayForAllocationMemento(
Register receiver_reg,
Register scratch_reg) {
- Label no_info_available;
+ Label no_memento_available;
ExternalReference new_space_start =
ExternalReference::new_space_start(isolate());
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
lea(scratch_reg, Operand(receiver_reg,
- JSArray::kSize + AllocationSiteInfo::kSize - kHeapObjectTag));
+ JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
movq(kScratchRegister, new_space_start);
cmpq(scratch_reg, kScratchRegister);
- j(less, &no_info_available);
+ j(less, &no_memento_available);
cmpq(scratch_reg, ExternalOperand(new_space_allocation_top));
- j(greater, &no_info_available);
- CompareRoot(MemOperand(scratch_reg, -AllocationSiteInfo::kSize),
- Heap::kAllocationSiteInfoMapRootIndex);
- bind(&no_info_available);
+ j(greater, &no_memento_available);
+ CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
+ Heap::kAllocationMementoMapRootIndex);
+ bind(&no_memento_available);
}
void CheckEnumCache(Register null_value,
Label* call_runtime);
- // AllocationSiteInfo support. Arrays may have an associated
- // AllocationSiteInfo object that can be checked for in order to pretransition
+ // AllocationMemento support. Arrays may have an associated
+ // AllocationMemento object that can be checked for in order to pretransition
// to another type.
// On entry, receiver_reg should point to the array object.
// scratch_reg gets clobbered.
- // If allocation info is present, condition flags are set to equal
+ // If an allocation memento is present, condition flags are set to equal
- void TestJSArrayForAllocationSiteInfo(Register receiver_reg,
- Register scratch_reg);
+ void TestJSArrayForAllocationMemento(Register receiver_reg,
+ Register scratch_reg);
private:
// Order general registers are pushed by Pushad.
array_data->set(1, Smi::FromInt(2));
AllocateAllButNBytes(HEAP->new_space(),
- JSArray::kSize + AllocationSiteInfo::kSize +
+ JSArray::kSize + AllocationMemento::kSize +
kPointerSize);
Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
CHECK_EQ(Smi::FromInt(2), array->length());
CHECK(array->HasFastSmiOrObjectElements());
- // We need filler the size of AllocationSiteInfo object, plus an extra
+ // We need a filler the size of an AllocationMemento object, plus an extra
// fill pointer value.
MaybeObject* maybe_object = HEAP->AllocateRaw(
- AllocationSiteInfo::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
+ AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
Object* obj = NULL;
CHECK(maybe_object->ToObject(&obj));
Address addr_obj = reinterpret_cast<Address>(
reinterpret_cast<byte*>(obj - kHeapObjectTag));
HEAP->CreateFillerObjectAt(addr_obj,
- AllocationSiteInfo::kSize + kPointerSize);
+ AllocationMemento::kSize + kPointerSize);
// Give the array a name, making sure not to allocate strings.
v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);