// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <sstream>
#include <string>

#include "src/v8.h"

#include "src/xdk-allocation.h"

#include "frames-inl.h"
#include "src/xdk-utils.h"

namespace v8 {
namespace internal {

XDKAllocationTracker::XDKAllocationTracker(HeapProfiler* heap_profiler,
                                           HeapObjectsMap* ids,
                                           StringsStorage* names,
                                           int stackDepth,
                                           bool collectRetention,
                                           bool strict_collection)
    : heap_profiler_(heap_profiler),
      ids_(ids),
      names_(names),
      stackDepth_(stackDepth),
      collectRetention_(collectRetention),
      strict_collection_(strict_collection) {
  references_ = new References();
  aggregated_chunks_ = new AggregatedChunks();
  runtime_info_ = new RuntimeInfo(aggregated_chunks_);
  symbols_ = new SymbolsStorage(ids_->heap(), names_);
  collectedStacks_ = new ShadowStack();
  classNames_ = new ClassNames(names_);

  List<unsigned> stack_ooc;
  stack_ooc.Add(symbols_->registerSymInfo(1, "OutOfContext", "NoSource",
                                          0, 0));
  outOfContextFrame_ = collectedStacks_->registerStack(stack_ooc);

  List<unsigned> stack_abc;
  stack_abc.Add(symbols_->registerSymInfo(2, "AllocatedBeforeCollection",
                                          "NoSource", 0, 0));
  AllocatedBeforeCollectionFrame_ = collectedStacks_->registerStack(stack_abc);

  runtime_info_->InitABCFrame(AllocatedBeforeCollectionFrame_);

  baseTime_ = v8::base::Time::Now();
  latest_delta_ = 0;
}
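

// The two synthetic stacks registered above act as sentinels:
// outOfContextFrame_ is attached to allocations for which no JavaScript stack
// could be captured (see OnAlloc below), and AllocatedBeforeCollectionFrame_
// is reported for addresses the tracker never observed (see GetTraceNodeId).
// A minimal usage sketch, where tracker and unknown_address are hypothetical:
//
//   unsigned sid = tracker->GetTraceNodeId(unknown_address);
//   // sid == AllocatedBeforeCollectionFrame_ because the address was never
//   // reported through OnAlloc.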


XDKAllocationTracker::~XDKAllocationTracker() {
  delete collectedStacks_;
  delete classNames_;
  delete aggregated_chunks_;
  delete runtime_info_;
  delete symbols_;
  delete references_;
}


// The heap profiler regularly records a timestamp for when an object was
// allocated or deallocated, when its retention snapshot was taken, etc.
unsigned int XDKAllocationTracker::GetTimeDelta() {
  v8::base::TimeDelta td = v8::base::Time::Now() - baseTime_;
  return static_cast<unsigned int>(td.InMilliseconds());
}


void XDKAllocationTracker::OnAlloc(Address addr, int size) {
  DisallowHeapAllocation no_alloc;
  Heap* heap = ids_->heap();

  // The call below prevents a crash during StackTraceFrameIterator creation:
  // mark the new block as FreeSpace to make sure the heap stays iterable
  // while we are capturing the stack trace.
  FreeListNode::FromAddress(addr)->set_size(heap, size);

  Isolate* isolate = heap->isolate();
  StackTraceFrameIterator it(isolate);
  List<unsigned> stack;

  // TODO(amalyshe): checking isolate->handle_scope_data()->level is quite
  // artificial; we need to understand when this situation can occur.
  // If level == 0 we would crash while retrieving the source info.
  while (isolate->handle_scope_data()->level && !it.done() &&
      stack.length() < stackDepth_) {
    JavaScriptFrame* frame = it.frame();
    if (!frame->function())
      break;
    SharedFunctionInfo* shared = frame->function()->shared();
    if (!shared)
      break;

    stack.Add(symbols_->FindOrRegisterFrame(frame));
    it.Advance();
  }

  unsigned sid;
  if (!stack.is_empty()) {
    sid = collectedStacks_->registerStack(stack);
  } else {
    sid = outOfContextFrame_;
  }

  latest_delta_ = GetTimeDelta();

  PostCollectedInfo* info = runtime_info_->AddPostCollectedInfo(addr,
                                                                latest_delta_);
  info->size_ = size;
  info->timeStamp_ = latest_delta_;
  info->stackId_ = sid;
  info->className_ = (unsigned int)-1;
  info->dirty_ = false;
}
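

// Illustrative sketch of how this hook is driven (the call site below is an
// assumption; only OnAlloc/OnMove themselves come from this file): the heap
// reports every new allocation, e.g.
//
//   tracker->OnAlloc(allocation_address, allocation_size_in_bytes);
//
// For an allocation made inside f(), itself called from g(), the frame loop
// above records frames innermost-first, so the registered stack is roughly
// [f, g]; allocations with no JavaScript frames on the stack fall back to
// outOfContextFrame_.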


void XDKAllocationTracker::OnMove(Address from, Address to, int size) {
  DisallowHeapAllocation no_alloc;
  // Look up the info recorded for the previous address.
  PostCollectedInfo* info_from = runtime_info_->FindPostCollectedInfo(from);
  if (info_from == NULL) {
    return;
  }

  runtime_info_->AddPostCollectedInfo(to, latest_delta_, info_from);
  runtime_info_->RemoveInfo(from);
}


HeapEventXDK* XDKAllocationTracker::stopTracking() {
  std::string symbols, types, frames, chunks, retentions;
  SerializeChunk(&symbols, &types, &frames, &chunks, &retentions);
  CollectFreedObjects(true);
  std::string symbolsA, typesA, framesA, chunksA, retentionsA;
  SerializeChunk(&symbolsA, &typesA, &framesA, &chunksA, &retentionsA, true);

  // TODO(amalyshe): check who releases this object (the new HeapEventXDK).
  return (new HeapEventXDK(GetTimeDelta(), symbols+symbolsA, types+typesA,
      frames+framesA, chunks+chunksA, ""));
}


void XDKAllocationTracker::CollectFreedObjects(bool bAll, bool initPreCollect) {
  clearIndividualReteiners();
  if (collectRetention_) {
    XDKSnapshotFiller filler(ids_, names_, this);
    HeapSnapshotGenerator generator(heap_profiler_, NULL, NULL, NULL,
                                    ids_->heap(), &filler);
    generator.GenerateSnapshot();
  }

  Heap* heap = ids_->heap();
  if (!heap) {
    return;
  }

  unsigned int ts = GetTimeDelta();
  if (bAll) {
    ts += RETAINED_DELTA;
  }

  // The CDT heap profiler calls CollectAllGarbage twice because after the
  // first pass some weakly retained objects are still not collected. Due to
  // performance concerns, and because we run garbage collection regularly,
  // we keep only one call here; only for strict collection (e.g. in tests,
  // where we must be sure an object is definitely collected) do we collect
  // twice.
  heap->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "XDKAllocationTracker::CollectFreedObjects");
  if (strict_collection_) {
    heap->CollectAllGarbage(
        Heap::kMakeHeapIterableMask,
        "XDKAllocationTracker::CollectFreedObjects");
  }

  // TODO(amalyshe): check what DisallowHeapAllocation no_alloc implies,
  // because in standalone V8 this part crashes if DisallowHeapAllocation
  // is defined.
  // DisallowHeapAllocation no_alloc;
  if (!bAll) {
    HeapIterator iterator(heap);
    for (HeapObject* obj = iterator.next();
         obj != NULL;
         obj = iterator.next()) {
      Address addr = obj->address();

      PostCollectedInfo* info = runtime_info_->FindPostCollectedInfo(addr);
      if (!info) {
        // If no info is found, consider the object allocated before
        // collection started; it needs to be added to the full picture
        // for retentions.
        if (initPreCollect) {
          info = runtime_info_->AddPreCollectionInfo(addr, obj->Size());
        }
      }

      if (info) {
        info->dirty_ = true;
      }
      // Check the class name and initialize it if it is not set yet.
      if ((info && info->className_ == (unsigned)-1) || !info) {
        InitClassName(addr, ts, obj->Size());
      }
    }
  }

  if (collectRetention_) {
    std::map<Address, RefSet>::const_iterator citir =
        individualReteiners_.begin();
    while (citir != individualReteiners_.end()) {
      PostCollectedInfo* infoChild =
          runtime_info_->FindPostCollectedInfo(citir->first);
      if (infoChild) {
        RefId rfId;
        rfId.stackId_ = infoChild->stackId_;
        rfId.classId_ = infoChild->className_;

        references_->addReference(rfId, citir->second, infoChild->timeStamp_);
      }
      citir++;
    }
  }

  runtime_info_->CollectGarbaged(ts);
}
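

// A sketch of the intended collection flow, inferred from the code above:
// with bAll == false every object still found on the heap gets its
// PostCollectedInfo marked dirty_ = true, so CollectGarbaged(ts) can
// presumably retire the remaining, unmarked entries as freed at timestamp ts.
// With bAll == true the heap is not iterated at all, so every remaining entry
// is retired at ts + RETAINED_DELTA; this is the path taken from
// stopTracking(). With a hypothetical tracker pointer:
//
//   tracker->CollectFreedObjects(false);  // periodic pass while tracking
//   tracker->CollectFreedObjects(true);   // final pass, as in stopTracking()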


void XDKAllocationTracker::SerializeChunk(std::string* symbols,
                                          std::string* types,
                                          std::string* frames,
                                          std::string* chunks,
                                          std::string* retentions,
                                          bool final) {
  if (final) {
    *symbols = symbols_->SerializeChunk();
    *types = classNames_->SerializeChunk();
  }
  *frames = collectedStacks_->SerializeChunk();
  *chunks = aggregated_chunks_->SerializeChunk();

  *retentions = references_->serialize();
  std::stringstream retentionsT;
  retentionsT << GetTimeDelta() << std::endl << retentions->c_str();
  *retentions = retentionsT.str();
  references_->clear();
}
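

// Example of the resulting retentions chunk (illustrative; the payload after
// the first line comes from References::serialize() and its exact format is
// defined in xdk-utils): the current time delta goes on the first line,
// followed by the serialized references, e.g.
//
//   15734
//   <serialized reference records>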


OutputStream::WriteResult XDKAllocationTracker::SendChunk(
    OutputStream* stream) {
  // Go over all aggregated data and send it to the stream.
  std::string symbols, types, frames, chunks, retentions;
  SerializeChunk(&symbols, &types, &frames, &chunks, &retentions);

  OutputStream::WriteResult ret = stream->WriteHeapXDKChunk(
      symbols.c_str(), symbols.length(),
      frames.c_str(), frames.length(),
      types.c_str(), types.length(),
      chunks.c_str(), chunks.length(),
      retentions.c_str(), retentions.length());
  return ret;
}
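

// A minimal usage sketch (the stream variable and the kAbort handling are
// assumptions; only SendChunk and WriteHeapXDKChunk come from this file):
// callers are expected to invoke SendChunk periodically and stop streaming
// once the consumer aborts, e.g.
//
//   if (tracker->SendChunk(stream) == OutputStream::kAbort) {
//     // The consumer is no longer interested; stop sending further chunks.
//   }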


unsigned XDKAllocationTracker::GetTraceNodeId(Address address) {
  PostCollectedInfo* info = runtime_info_->FindPostCollectedInfo(address);
  if (info) {
    return info->stackId_;
  } else {
    return AllocatedBeforeCollectionFrame_;
  }
}


void XDKAllocationTracker::clearIndividualReteiners() {
  individualReteiners_.clear();
}


std::map<Address, RefSet>* XDKAllocationTracker::GetIndividualReteiners() {
  return &individualReteiners_;
}


unsigned XDKAllocationTracker::FindClassName(Address address) {
  PostCollectedInfo* info = runtime_info_->FindPostCollectedInfo(address);
  if (info) {
    return info->className_;
  } else {
    return (unsigned)-1;
  }
}


unsigned XDKAllocationTracker::InitClassName(Address address, unsigned ts,
                                             unsigned size) {
  unsigned id = -2;
  PostCollectedInfo* info = runtime_info_->FindPostCollectedInfo(address);
  if (!info) {
    info = runtime_info_->AddPostCollectedInfo(address, ts);
    info->className_ = -1;
    info->stackId_ = outOfContextFrame_;
    info->timeStamp_ = ts;
    info->size_ = size;
  }
  if (info->className_ == (unsigned)-1) {
    String* str = classNames_->GetConstructorName(address);
    if (str) {
      // Getting the const char* is safe because the pointer is retained in
      // names_ until it is destroyed.
      id = classNames_->registerName(names_->GetName(str));
    }
  }
  info->className_ = id;
  return id;
}


unsigned XDKAllocationTracker::FindOrInitClassName(Address address,
                                                   unsigned ts) {
  unsigned id = FindClassName(address);
  if (id == (unsigned)-1) {
    // TODO(amalyshe): check whether a size of 0 is appropriate here.
    id = InitClassName(address, ts, 0);
  }
  return id;
}
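

// A short note on the class-name ids used above, inferred from this file:
// (unsigned)-1 marks an entry whose class name has not been initialized yet,
// and InitClassName() returns (unsigned)-2 when no constructor name could be
// resolved for the address. A hypothetical lookup therefore reads:
//
//   unsigned class_id = tracker->FindOrInitClassName(addr, ts);
//   // class_id is a valid ClassNames id, or (unsigned)-2 if unresolved.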


// -----------------------------------------------------------------------------
// This is almost a duplicate of V8HeapExplorer::AddEntry. Refactoring it out
// is impossible because heap-snapshot-generator relies on its structure,
// which is not fully suitable for us.
HeapEntry* XDKSnapshotFiller::AddEntry(HeapThing ptr,
                                       HeapEntriesAllocator* allocator) {
  HeapObject* object = reinterpret_cast<HeapObject*>(ptr);
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(ptr, object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(ptr, object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    return AddEntry(ptr, object, HeapEntry::kObject, "");
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(ptr, object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(ptr, object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(ptr, object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(ptr, object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(ptr, object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(ptr, object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(ptr, object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(ptr, object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(ptr, object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(ptr, object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(ptr, object, HeapEntry::kHeapNumber, "number");
  }

  return AddEntry(ptr, object, HeapEntry::kHidden,
                  "system / NOT SUPPORTED YET");
}


HeapEntry* XDKSnapshotFiller::AddEntry(HeapThing thing,
                                       HeapObject* object,
                                       HeapEntry::Type type,
                                       const char* name) {
  Address address = object->address();
  unsigned trace_node_id = allocation_tracker_->GetTraceNodeId(address);

  // We cannot store a pointer in the map because the List reallocates its
  // contents regularly and the only way to find an entry again is by index;
  // so the index is cached in the map.
  // TODO(amalyshe): need to reuse the type; it seems to be important.
  HeapEntry entry(NULL, &heap_entries_list_, type, name, 0, 0,
                  trace_node_id);
  heap_entries_list_.Add(entry);
  HeapEntry* pEntry = &heap_entries_list_.last();

  HashMap::Entry* cache_entry = heap_entries_.Lookup(thing, Hash(thing), true);
  DCHECK(cache_entry->value == NULL);
  int index = pEntry->index();
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(index));

  // TODO(amalyshe): it seems this storage might be optimized.
  HashMap::Entry* address_entry = index_to_address_.Lookup(
      reinterpret_cast<void*>(index + 1), HashInt(index + 1), true);
  address_entry->value = reinterpret_cast<void*>(address);

  return pEntry;
}
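

// Note on the index_to_address_ keying above (an inferred explanation): a
// HeapEntry index of 0 would become a NULL key, which the HashMap cannot
// store, so every index is shifted by one when used as a key. Lookups in the
// reference callbacks below mirror the same shift:
//
//   HashMap::Entry* e = index_to_address_.Lookup(
//       reinterpret_cast<void*>(index + 1), HashInt(index + 1), false);
//   Address addr = reinterpret_cast<Address>(e->value);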


HeapEntry* XDKSnapshotFiller::FindEntry(HeapThing thing) {
  HashMap::Entry* cache_entry = heap_entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return NULL;
  return &heap_entries_list_[static_cast<int>(
      reinterpret_cast<intptr_t>(cache_entry->value))];
}


HeapEntry* XDKSnapshotFiller::FindOrAddEntry(HeapThing ptr,
                                             HeapEntriesAllocator* allocator) {
  HeapEntry* entry = FindEntry(ptr);
  return entry != NULL ? entry : AddEntry(ptr, allocator);
}


void XDKSnapshotFiller::SetIndexedReference(HeapGraphEdge::Type type,
    int parent,
    int index,
    HeapEntry* child_entry) {
  if (child_entry->trace_node_id() < 3) {
    return;
  }
  HashMap::Entry* address_entry_child = index_to_address_.Lookup(
      reinterpret_cast<void*>(child_entry->index() + 1),
      HashInt(child_entry->index() + 1), false);
  DCHECK(address_entry_child != NULL);
  if (!address_entry_child) {
    return;
  }

  Address child_addr = reinterpret_cast<Address>(address_entry_child->value);

  std::map<Address, RefSet>* individualReteiners =
      allocation_tracker_->GetIndividualReteiners();
  // Get the parent's address and constructor name, and form the RefId.
  HashMap::Entry* address_entry = index_to_address_.Lookup(
      reinterpret_cast<void*>(parent + 1), HashInt(parent + 1), false);
  DCHECK(address_entry != NULL);
  if (!address_entry) {
    return;
  }
  HeapEntry* parent_entry = &(heap_entries_list_[parent]);
  Address parent_addr = reinterpret_cast<Address>(address_entry->value);
  RefId parent_ref_id;
  parent_ref_id.stackId_ = parent_entry->trace_node_id();
  parent_ref_id.classId_ =
      allocation_tracker_->FindOrInitClassName(parent_addr, 0);

  std::stringstream str;
  str << index << " element in Array";
  parent_ref_id.field_ = str.str();

  (*individualReteiners)[child_addr].references_.insert(parent_ref_id);
}


void XDKSnapshotFiller::SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
    int parent,
    HeapEntry* child_entry) {
}


void XDKSnapshotFiller::SetNamedReference(HeapGraphEdge::Type type,
    int parent,
    const char* reference_name,
    HeapEntry* child_entry) {
  if (child_entry->trace_node_id() < 3) {
    return;
  }

  std::map<Address, RefSet>* individualReteiners =
      allocation_tracker_->GetIndividualReteiners();
  // Get the parent's address and constructor name, and form the RefId.
  HashMap::Entry* address_entry = index_to_address_.Lookup(
      reinterpret_cast<void*>(parent + 1), HashInt(parent + 1), false);
  DCHECK(address_entry != NULL);
  if (!address_entry) {
    return;
  }
  HeapEntry* parent_entry = &(heap_entries_list_[parent]);
  Address parent_addr = reinterpret_cast<Address>(address_entry->value);
  RefId parent_ref_id;
  parent_ref_id.stackId_ = parent_entry->trace_node_id();
  // TODO(amalyshe): need to get access to classNames_.
  parent_ref_id.classId_ =
      allocation_tracker_->FindOrInitClassName(parent_addr, 0);
  parent_ref_id.field_ = reference_name;

  HashMap::Entry* address_entry_child = index_to_address_.Lookup(
      reinterpret_cast<void*>(child_entry->index() + 1),
      HashInt(child_entry->index() + 1), false);
  DCHECK(address_entry_child != NULL);
  if (!address_entry_child) {
    return;
  }
  Address child_addr = reinterpret_cast<Address>(address_entry_child->value);

  (*individualReteiners)[child_addr].references_.insert(parent_ref_id);
}


void XDKSnapshotFiller::SetNamedAutoIndexReference(HeapGraphEdge::Type type,
    int parent,
    HeapEntry* child_entry) {
}


}  // namespace internal
}  // namespace v8